id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
47,204
def encryptAES(s):
    """Encrypt *s* under a freshly generated random AES key.

    Returns a ``(ciphertext, key)`` tuple so the caller can retain the key.
    """
    aes_key = helpers.randomKey()
    aes_cipher = AES.new(aes_key)
    ciphertext = EncodeAES(aes_cipher, s)
    return (ciphertext, aes_key)
[ "def", "encryptAES", "(", "s", ")", ":", "key", "=", "helpers", ".", "randomKey", "(", ")", "cipher", "=", "AES", ".", "new", "(", "key", ")", "encrypted", "=", "EncodeAES", "(", "cipher", ",", "s", ")", "return", "(", "encrypted", ",", "key", ")" ]
generates a random aes key .
train
false
47,206
def mimify(infile, outfile):
    """Convert 8-bit parts of a MIME mail message to quoted-printable.

    ``infile`` and ``outfile`` may be file names or open file objects.
    When both are names of the same file, the original is first renamed
    to a ','-prefixed name so it is not clobbered.
    """
    if type(infile) is str:
        ifile = open(infile)
        if type(outfile) is str and infile == outfile:
            import os
            # In-place conversion: keep the original under ',<name>'.
            directory, basename = os.path.split(infile)
            os.rename(infile, os.path.join(directory, ',' + basename))
    else:
        ifile = infile
    ofile = open(outfile, 'w') if type(outfile) is str else outfile
    mimify_part(File(ifile, None), ofile, 0)
    ofile.flush()
[ "def", "mimify", "(", "infile", ",", "outfile", ")", ":", "if", "(", "type", "(", "infile", ")", "==", "type", "(", "''", ")", ")", ":", "ifile", "=", "open", "(", "infile", ")", "if", "(", "(", "type", "(", "outfile", ")", "==", "type", "(", "''", ")", ")", "and", "(", "infile", "==", "outfile", ")", ")", ":", "import", "os", "(", "d", ",", "f", ")", "=", "os", ".", "path", ".", "split", "(", "infile", ")", "os", ".", "rename", "(", "infile", ",", "os", ".", "path", ".", "join", "(", "d", ",", "(", "','", "+", "f", ")", ")", ")", "else", ":", "ifile", "=", "infile", "if", "(", "type", "(", "outfile", ")", "==", "type", "(", "''", ")", ")", ":", "ofile", "=", "open", "(", "outfile", ",", "'w'", ")", "else", ":", "ofile", "=", "outfile", "nifile", "=", "File", "(", "ifile", ",", "None", ")", "mimify_part", "(", "nifile", ",", "ofile", ",", "0", ")", "ofile", ".", "flush", "(", ")" ]
convert 8bit parts of a mime mail message to quoted-printable .
train
false
47,207
def get_subpages(stub):
    """Return a dict of conf-file links found on the index page, mapped to
    a matching image link when one exists (else ``None``).

    NOTE(review): ``stub`` is unused and the request always targets
    ``URL_BASE`` -- confirm whether ``stub`` was meant to build the URL.
    """
    ignore_chars = ['/', '?']
    image_exts = ['.jpg', '.png']
    confs = {}
    images = []
    subs = []
    r = requests.get(URL_BASE)
    t = r.text
    # Crude anchor extraction: text between '<a href="' and the next '"'.
    subs_raw = [x[:x.index('"')] for x in t.split('<a href="') if '"' in x]
    for sub in subs_raw:
        for ig in ignore_chars:
            if ig in sub:
                break
        else:
            subs.append(sub)
    for sub in subs:
        for ext in image_exts:
            if ext in sub:
                images.append(sub)
                break
        else:
            confs[sub] = None
    for image in images:
        # BUG FIX: the second .replace() was missing its (required)
        # replacement argument -- str.replace('.jpg') raises TypeError.
        cnf = image.replace('.png', '').replace('.jpg', '')
        if cnf in confs:
            confs[cnf] = image
    return confs
[ "def", "get_subpages", "(", "stub", ")", ":", "ignore_chars", "=", "[", "'/'", ",", "'?'", "]", "image_chars", "=", "[", "'.jpg'", ",", "'.png'", "]", "confs", "=", "{", "}", "images", "=", "[", "]", "subs", "=", "[", "]", "r", "=", "requests", ".", "get", "(", "URL_BASE", ")", "t", "=", "r", ".", "text", "subs_raw", "=", "[", "x", "[", ":", "x", ".", "index", "(", "'\"'", ")", "]", "for", "x", "in", "t", ".", "split", "(", "'<a href=\"'", ")", "if", "(", "'\"'", "in", "x", ")", "]", "for", "sub", "in", "subs_raw", ":", "for", "ig", "in", "ignore_chars", ":", "if", "(", "ig", "in", "sub", ")", ":", "break", "else", ":", "subs", ".", "append", "(", "sub", ")", "for", "sub", "in", "subs", ":", "for", "img", "in", "image_chars", ":", "if", "(", "img", "in", "sub", ")", ":", "images", ".", "append", "(", "sub", ")", "break", "else", ":", "confs", "[", "sub", "]", "=", "None", "for", "image", "in", "images", ":", "cnf", "=", "image", ".", "replace", "(", "'.png'", ",", "''", ")", ".", "replace", "(", "'.jpg'", ")", "if", "(", "cnf", "in", "confs", ")", ":", "confs", "[", "cnf", "]", "=", "image", "return", "confs" ]
returns a dictionary of conf files found in the sub-menu along with images if there are any .
train
false
47,208
def plugin():
    """Make the SwapQuotes plugin class available to the plugin loader."""
    return SwapQuotes
[ "def", "plugin", "(", ")", ":", "return", "SwapQuotes" ]
make plugin available .
train
false
47,210
def cms_sign_data(data_to_sign, signing_cert_file_name, signing_key_file_name,
                  outform=PKI_ASN1_FORM,
                  message_digest=DEFAULT_TOKEN_DIGEST_ALGORITHM):
    """Sign a document by shelling out to ``openssl cms -sign``.

    Text input is UTF-8 encoded before being piped to openssl.  For
    ``PKI_ASN1_FORM`` the PEM output is decoded to text; otherwise the
    raw bytes are returned.

    Raises ``subprocess.CalledProcessError`` when openssl reports failure.
    """
    _ensure_subprocess()
    if isinstance(data_to_sign, six.string_types):
        payload = bytearray(data_to_sign, encoding='utf-8')
    else:
        payload = data_to_sign
    cmd = ['openssl', 'cms', '-sign',
           '-signer', signing_cert_file_name,
           '-inkey', signing_key_file_name,
           '-outform', 'PEM', '-nosmimecap', '-nodetach',
           '-nocerts', '-noattr', '-md', message_digest]
    process = subprocess.Popen(cmd,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               close_fds=True)
    output, err, retcode = _process_communicate_handle_oserror(
        process, payload, (signing_cert_file_name, signing_key_file_name))
    if retcode != OpensslCmsExitStatus.SUCCESS or 'Error' in err:
        if retcode == OpensslCmsExitStatus.CREATE_CMS_READ_MIME_ERROR:
            LOG.error(_LE('Signing error: Unable to load certificate - ensure you have configured PKI with "keystone-manage pki_setup"'))
        else:
            LOG.error(_LE('Signing error: %s'), err)
        raise subprocess.CalledProcessError(retcode, 'openssl')
    if outform == PKI_ASN1_FORM:
        return output.decode('utf-8')
    return output
[ "def", "cms_sign_data", "(", "data_to_sign", ",", "signing_cert_file_name", ",", "signing_key_file_name", ",", "outform", "=", "PKI_ASN1_FORM", ",", "message_digest", "=", "DEFAULT_TOKEN_DIGEST_ALGORITHM", ")", ":", "_ensure_subprocess", "(", ")", "if", "isinstance", "(", "data_to_sign", ",", "six", ".", "string_types", ")", ":", "data", "=", "bytearray", "(", "data_to_sign", ",", "encoding", "=", "'utf-8'", ")", "else", ":", "data", "=", "data_to_sign", "process", "=", "subprocess", ".", "Popen", "(", "[", "'openssl'", ",", "'cms'", ",", "'-sign'", ",", "'-signer'", ",", "signing_cert_file_name", ",", "'-inkey'", ",", "signing_key_file_name", ",", "'-outform'", ",", "'PEM'", ",", "'-nosmimecap'", ",", "'-nodetach'", ",", "'-nocerts'", ",", "'-noattr'", ",", "'-md'", ",", "message_digest", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "close_fds", "=", "True", ")", "(", "output", ",", "err", ",", "retcode", ")", "=", "_process_communicate_handle_oserror", "(", "process", ",", "data", ",", "(", "signing_cert_file_name", ",", "signing_key_file_name", ")", ")", "if", "(", "(", "retcode", "!=", "OpensslCmsExitStatus", ".", "SUCCESS", ")", "or", "(", "'Error'", "in", "err", ")", ")", ":", "if", "(", "retcode", "==", "OpensslCmsExitStatus", ".", "CREATE_CMS_READ_MIME_ERROR", ")", ":", "LOG", ".", "error", "(", "_LE", "(", "'Signing error: Unable to load certificate - ensure you have configured PKI with \"keystone-manage pki_setup\"'", ")", ")", "else", ":", "LOG", ".", "error", "(", "_LE", "(", "'Signing error: %s'", ")", ",", "err", ")", "raise", "subprocess", ".", "CalledProcessError", "(", "retcode", ",", "'openssl'", ")", "if", "(", "outform", "==", "PKI_ASN1_FORM", ")", ":", "return", "output", ".", "decode", "(", "'utf-8'", ")", "else", ":", "return", "output" ]
use openssl to sign a document .
train
false
47,211
def _get_scope_name(scope):
    """Translate an rtnetlink scope value into its symbolic name.

    Falls back to the raw value when it is not a known scope.
    """
    scope_names = rtnl.rt_scope
    return scope_names.get(scope, scope)
[ "def", "_get_scope_name", "(", "scope", ")", ":", "return", "rtnl", ".", "rt_scope", ".", "get", "(", "scope", ",", "scope", ")" ]
return the name of the scope .
train
false
47,212
def volunteer():
    """RESTful controller for community volunteers."""
    return s3db.vol_volunteer_controller()
[ "def", "volunteer", "(", ")", ":", "return", "s3db", ".", "vol_volunteer_controller", "(", ")" ]
restful controller for community volunteers .
train
false
47,213
def get_block_edges(source_file):
    """Return starting line numbers of code and text blocks in *source_file*.

    A "text block" is a module-level string token (column 0).  Returns a
    tuple ``(block_edges, idx_first_text_block)``: the first line of each
    alternating block, plus the index of the first text block in that list.
    """
    block_edges = []
    with open(source_file) as f:
        for tok in tokenize.generate_tokens(f.readline):
            tok_id, tok_str, (srow, scol), (erow, ecol), src_line = tok
            if token.tok_name[tok_id] == 'STRING' and scol == 0:
                # A top-level string opens a text block; code resumes
                # on the line after it ends.
                block_edges.extend((srow, erow + 1))
    idx_first_text_block = 0
    # NOTE(review): ``erow`` here is the end row of the *last* token seen;
    # if the file has no top-level strings, block_edges[0] raises
    # IndexError -- presumably callers guarantee one exists; verify.
    if block_edges[0] != 1:
        block_edges.insert(0, 1)
        idx_first_text_block = 1
    if block_edges[-1] != erow:
        block_edges.append(erow)
    return (block_edges, idx_first_text_block)
[ "def", "get_block_edges", "(", "source_file", ")", ":", "block_edges", "=", "[", "]", "with", "open", "(", "source_file", ")", "as", "f", ":", "token_iter", "=", "tokenize", ".", "generate_tokens", "(", "f", ".", "readline", ")", "for", "token_tuple", "in", "token_iter", ":", "(", "t_id", ",", "t_str", ",", "(", "srow", ",", "scol", ")", ",", "(", "erow", ",", "ecol", ")", ",", "src_line", ")", "=", "token_tuple", "if", "(", "(", "token", ".", "tok_name", "[", "t_id", "]", "==", "'STRING'", ")", "and", "(", "scol", "==", "0", ")", ")", ":", "block_edges", ".", "extend", "(", "(", "srow", ",", "(", "erow", "+", "1", ")", ")", ")", "idx_first_text_block", "=", "0", "if", "(", "not", "(", "block_edges", "[", "0", "]", "==", "1", ")", ")", ":", "block_edges", ".", "insert", "(", "0", ",", "1", ")", "idx_first_text_block", "=", "1", "if", "(", "not", "(", "block_edges", "[", "(", "-", "1", ")", "]", "==", "erow", ")", ")", ":", "block_edges", ".", "append", "(", "erow", ")", "return", "(", "block_edges", ",", "idx_first_text_block", ")" ]
return starting line numbers of code and text blocks returns block_edges : list of int line number for the start of each block .
train
false
47,214
def get_clipboard(selection=False):
    """Return text from the clipboard (or the primary selection).

    Raises ``SelectionUnsupportedError`` when selection is requested but
    unsupported, and ``ClipboardEmptyError`` when there is no content.
    """
    global fake_clipboard
    if selection and not supports_selection():
        raise SelectionUnsupportedError
    if fake_clipboard is not None:
        # Test hook: consume the faked contents exactly once.
        data = fake_clipboard
        fake_clipboard = None
    else:
        mode = QClipboard.Selection if selection else QClipboard.Clipboard
        data = QApplication.clipboard().text(mode=mode)
    target = 'Primary selection' if selection else 'Clipboard'
    if not data.strip():
        raise ClipboardEmptyError('{} is empty.'.format(target))
    log.misc.debug('{} contained: {!r}'.format(target, data))
    return data
[ "def", "get_clipboard", "(", "selection", "=", "False", ")", ":", "global", "fake_clipboard", "if", "(", "selection", "and", "(", "not", "supports_selection", "(", ")", ")", ")", ":", "raise", "SelectionUnsupportedError", "if", "(", "fake_clipboard", "is", "not", "None", ")", ":", "data", "=", "fake_clipboard", "fake_clipboard", "=", "None", "else", ":", "mode", "=", "(", "QClipboard", ".", "Selection", "if", "selection", "else", "QClipboard", ".", "Clipboard", ")", "data", "=", "QApplication", ".", "clipboard", "(", ")", ".", "text", "(", "mode", "=", "mode", ")", "target", "=", "(", "'Primary selection'", "if", "selection", "else", "'Clipboard'", ")", "if", "(", "not", "data", ".", "strip", "(", ")", ")", ":", "raise", "ClipboardEmptyError", "(", "'{} is empty.'", ".", "format", "(", "target", ")", ")", "log", ".", "misc", ".", "debug", "(", "'{} contained: {!r}'", ".", "format", "(", "target", ",", "data", ")", ")", "return", "data" ]
get data from the clipboard .
train
false
47,215
def blink(clip, d_on, d_off):
    """Make the clip blink: visible for ``d_on``, hidden for ``d_off``.

    Works on a copy; the input clip is not modified.
    """
    newclip = copy(clip)
    if newclip.mask is None:
        newclip = newclip.with_mask()
    period = d_on + d_off
    # Zero the mask whenever time-within-period passes d_on.
    newclip.mask = newclip.mask.fl(
        lambda gf, t: gf(t) * ((t % period) < d_on))
    return newclip
[ "def", "blink", "(", "clip", ",", "d_on", ",", "d_off", ")", ":", "newclip", "=", "copy", "(", "clip", ")", "if", "(", "newclip", ".", "mask", "is", "None", ")", ":", "newclip", "=", "newclip", ".", "with_mask", "(", ")", "D", "=", "(", "d_on", "+", "d_off", ")", "newclip", ".", "mask", "=", "newclip", ".", "mask", ".", "fl", "(", "(", "lambda", "gf", ",", "t", ":", "(", "gf", "(", "t", ")", "*", "(", "(", "t", "%", "D", ")", "<", "d_on", ")", ")", ")", ")", "return", "newclip" ]
makes the clip blink .
train
false
47,216
def addNewSite(self, site, stype, cache, path, enabled=True, ssl=False,
               fs='ext4', db='mysql', db_name=None, db_user=None,
               db_password=None, db_host='localhost', hhvm=0, pagespeed=0,
               php_version='5.5'):
    """Insert a new site record into the EE database.

    Failures are logged (debug: exception detail; error: summary) rather
    than propagated.
    """
    try:
        record = SiteDB(site, stype, cache, path, enabled, ssl, fs, db,
                        db_name, db_user, db_password, db_host, hhvm,
                        pagespeed, php_version)
        db_session.add(record)
        db_session.commit()
    except Exception as e:
        Log.debug(self, '{0}'.format(e))
        Log.error(self, 'Unable to add site to database')
[ "def", "addNewSite", "(", "self", ",", "site", ",", "stype", ",", "cache", ",", "path", ",", "enabled", "=", "True", ",", "ssl", "=", "False", ",", "fs", "=", "'ext4'", ",", "db", "=", "'mysql'", ",", "db_name", "=", "None", ",", "db_user", "=", "None", ",", "db_password", "=", "None", ",", "db_host", "=", "'localhost'", ",", "hhvm", "=", "0", ",", "pagespeed", "=", "0", ",", "php_version", "=", "'5.5'", ")", ":", "try", ":", "newRec", "=", "SiteDB", "(", "site", ",", "stype", ",", "cache", ",", "path", ",", "enabled", ",", "ssl", ",", "fs", ",", "db", ",", "db_name", ",", "db_user", ",", "db_password", ",", "db_host", ",", "hhvm", ",", "pagespeed", ",", "php_version", ")", "db_session", ".", "add", "(", "newRec", ")", "db_session", ".", "commit", "(", ")", "except", "Exception", "as", "e", ":", "Log", ".", "debug", "(", "self", ",", "'{0}'", ".", "format", "(", "e", ")", ")", "Log", ".", "error", "(", "self", ",", "'Unable to add site to database'", ")" ]
add new site record information into ee database .
train
false
47,217
def is_valid_imdb_person_id(value):
    """Return True if *value* starts with a valid IMDb person id
    (``nm`` followed by seven digits).

    Raises TypeError for non-string input.  Python 2 code: relies on
    ``basestring``.
    """
    if not isinstance(value, basestring):
        raise TypeError(u'is_valid_imdb_person_id expects a string but got {0}'.format(type(value)))
    return re.match(u'nm[\\d]{7}', value) is not None
[ "def", "is_valid_imdb_person_id", "(", "value", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "basestring", ")", ")", ":", "raise", "TypeError", "(", "u'is_valid_imdb_person_id expects a string but got {0}'", ".", "format", "(", "type", "(", "value", ")", ")", ")", "return", "(", "re", ".", "match", "(", "u'nm[\\\\d]{7}'", ",", "value", ")", "is", "not", "None", ")" ]
return true if value is a valid imdb id for a person .
train
false
47,218
def pagingResponse():
    """Build a GSM Paging Response message (section 9).

    Layers are stacked left-to-right with the ``/`` operator.
    """
    packet = TpPd(pd=6)
    packet = packet / MessageType(mesType=39)
    packet = packet / CiphKeySeqNrAndSpareHalfOctets()
    packet = packet / MobileStationClassmark2()
    packet = packet / MobileId()
    return packet
[ "def", "pagingResponse", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "6", ")", "b", "=", "MessageType", "(", "mesType", "=", "39", ")", "c", "=", "CiphKeySeqNrAndSpareHalfOctets", "(", ")", "d", "=", "MobileStationClassmark2", "(", ")", "e", "=", "MobileId", "(", ")", "packet", "=", "(", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "/", "e", ")", "return", "packet" ]
paging response section 9 .
train
true
47,219
@requires_sklearn
def test_gat_plot_times():
    """Test GAT times plotting and its error handling."""
    gat = _get_data()
    # Plot a single training time, then all of them.
    gat.plot_times(gat.train_times_['times'][0])
    gat.plot_times(gat.train_times_['times'])
    n_times = len(gat.train_times_['times'])
    colors = np.tile(['r', 'g', 'b'], int(np.ceil(n_times / 3)))[:n_times]
    gat.plot_times(gat.train_times_['times'], color=colors)
    # Invalid time arguments must raise.
    assert_raises(ValueError, gat.plot_times, -1.0)
    assert_raises(ValueError, gat.plot_times, 1)
    assert_raises(ValueError, gat.plot_times, 'diagonal')
    # Plotting without scores must raise.
    del gat.scores_
    assert_raises(RuntimeError, gat.plot)
[ "@", "requires_sklearn", "def", "test_gat_plot_times", "(", ")", ":", "gat", "=", "_get_data", "(", ")", "gat", ".", "plot_times", "(", "gat", ".", "train_times_", "[", "'times'", "]", "[", "0", "]", ")", "gat", ".", "plot_times", "(", "gat", ".", "train_times_", "[", "'times'", "]", ")", "n_times", "=", "len", "(", "gat", ".", "train_times_", "[", "'times'", "]", ")", "colors", "=", "np", ".", "tile", "(", "[", "'r'", ",", "'g'", ",", "'b'", "]", ",", "int", "(", "np", ".", "ceil", "(", "(", "n_times", "/", "3", ")", ")", ")", ")", "[", ":", "n_times", "]", "gat", ".", "plot_times", "(", "gat", ".", "train_times_", "[", "'times'", "]", ",", "color", "=", "colors", ")", "assert_raises", "(", "ValueError", ",", "gat", ".", "plot_times", ",", "(", "-", "1.0", ")", ")", "assert_raises", "(", "ValueError", ",", "gat", ".", "plot_times", ",", "1", ")", "assert_raises", "(", "ValueError", ",", "gat", ".", "plot_times", ",", "'diagonal'", ")", "del", "gat", ".", "scores_", "assert_raises", "(", "RuntimeError", ",", "gat", ".", "plot", ")" ]
test gat times plot .
train
false
47,220
def index_skills(request):
    """List all public skills on Mozillians (vouched members only)."""
    vouched_skills = Skill.objects.filter(members__is_vouched=True)
    return _list_groups(request, 'groups/index_skills.html', vouched_skills)
[ "def", "index_skills", "(", "request", ")", ":", "query", "=", "Skill", ".", "objects", ".", "filter", "(", "members__is_vouched", "=", "True", ")", "template", "=", "'groups/index_skills.html'", "return", "_list_groups", "(", "request", ",", "template", ",", "query", ")" ]
lists all public skills on mozillians .
train
false
47,221
def teardown_test_environment():
    """Undo setup_test_environment: restore the patched template renderer,
    email backend and allowed hosts, and drop the test mail outbox."""
    # Restore template rendering.
    Template._render = Template._original_render
    del Template._original_render
    # Restore the real email backend.
    settings.EMAIL_BACKEND = mail._original_email_backend
    del mail._original_email_backend
    # Restore ALLOWED_HOSTS.
    settings.ALLOWED_HOSTS = request._original_allowed_hosts
    del request._original_allowed_hosts
    # Discard mail captured during the tests.
    del mail.outbox
[ "def", "teardown_test_environment", "(", ")", ":", "Template", ".", "_render", "=", "Template", ".", "_original_render", "del", "Template", ".", "_original_render", "settings", ".", "EMAIL_BACKEND", "=", "mail", ".", "_original_email_backend", "del", "mail", ".", "_original_email_backend", "settings", ".", "ALLOWED_HOSTS", "=", "request", ".", "_original_allowed_hosts", "del", "request", ".", "_original_allowed_hosts", "del", "mail", ".", "outbox" ]
this method is called by nose_runner when the tests all finish .
train
false
47,222
def reverification_info(statuses):
    """Return reverification entries grouped by status, each list sorted
    by date.

    Note: merely indexing the defaultdict creates an (empty) entry, so
    every status passed in ends up as a key of the result.
    """
    grouped = defaultdict(list)
    for status in statuses:
        entries = grouped[status]  # creates the key on first access
        if entries:
            entries.sort(key=lambda item: item.date)
    return grouped
[ "def", "reverification_info", "(", "statuses", ")", ":", "reverifications", "=", "defaultdict", "(", "list", ")", "for", "status", "in", "statuses", ":", "if", "reverifications", "[", "status", "]", ":", "reverifications", "[", "status", "]", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", ".", "date", ")", ")", "return", "reverifications" ]
returns reverification-related information for *all* of users enrollments whose reverification status is in statuses .
train
false
47,223
def authorized_cube(cube_name, locale):
    """Return cube *cube_name* if the current identity is authorized.

    Raises ``NotAuthorizedError`` (with the identity named) otherwise.
    """
    try:
        return workspace.cube(cube_name, g.auth_identity, locale=locale)
    except NotAuthorized:
        if g.auth_identity:
            ident = "'%s'" % g.auth_identity
        else:
            ident = 'unspecified identity'
        raise NotAuthorizedError("Authorization of cube '%s' failed for %s"
                                 % (cube_name, ident))
[ "def", "authorized_cube", "(", "cube_name", ",", "locale", ")", ":", "try", ":", "cube", "=", "workspace", ".", "cube", "(", "cube_name", ",", "g", ".", "auth_identity", ",", "locale", "=", "locale", ")", "except", "NotAuthorized", ":", "ident", "=", "(", "(", "\"'%s'\"", "%", "g", ".", "auth_identity", ")", "if", "g", ".", "auth_identity", "else", "'unspecified identity'", ")", "raise", "NotAuthorizedError", "(", "(", "\"Authorization of cube '%s' failed for %s\"", "%", "(", "cube_name", ",", "ident", ")", ")", ")", "return", "cube" ]
returns a cube cube_name .
train
false
47,224
def dir_list(load):
    """Return a list of directories for the environment described by *load*."""
    fs = salt.utils.gitfs.GitFS(__opts__)
    fs.init_remotes(__opts__['gitfs_remotes'],
                    PER_REMOTE_OVERRIDES, PER_REMOTE_ONLY)
    return fs.dir_list(load)
[ "def", "dir_list", "(", "load", ")", ":", "gitfs", "=", "salt", ".", "utils", ".", "gitfs", ".", "GitFS", "(", "__opts__", ")", "gitfs", ".", "init_remotes", "(", "__opts__", "[", "'gitfs_remotes'", "]", ",", "PER_REMOTE_OVERRIDES", ",", "PER_REMOTE_ONLY", ")", "return", "gitfs", ".", "dir_list", "(", "load", ")" ]
return a list of directories in the given environment saltenv : base the salt fileserver environment to be listed backend narrow fileserver backends to a subset of the enabled ones .
train
false
47,225
def make_homog(points):
    """Convert a set of points (dim x n array) to homogeneous coordinates
    by appending a row of ones."""
    ones_row = ones((1, points.shape[1]))
    return vstack((points, ones_row))
[ "def", "make_homog", "(", "points", ")", ":", "return", "vstack", "(", "(", "points", ",", "ones", "(", "(", "1", ",", "points", ".", "shape", "[", "1", "]", ")", ")", ")", ")" ]
convert a set of points to homogeneous coordinates .
train
false
47,227
def kill(coro):
    """Kill the given greenlet by delivering a KillEvent."""
    return KillEvent(coro)
[ "def", "kill", "(", "coro", ")", ":", "return", "KillEvent", "(", "coro", ")" ]
kill greenlet with exception .
train
false
47,228
def inherits_from(obj, parent):
    """Return True if *obj* inherits (at any distance) from *parent*.

    ``obj`` may be a class or an instance; ``parent`` may be a class, an
    instance, or a dotted "module.Class" path string.  Python 2 code:
    relies on ``basestring``.
    """
    def _dotted(klass):
        # Fully qualified "module.Class" path for comparison.
        return '%s.%s' % (klass.__module__, klass.__name__)

    if callable(obj):
        obj_paths = [_dotted(mod) for mod in obj.mro()]
    else:
        obj_paths = [_dotted(mod) for mod in obj.__class__.mro()]
    if isinstance(parent, basestring):
        parent_path = parent
    elif callable(parent):
        parent_path = _dotted(parent)
    else:
        parent_path = _dotted(parent.__class__)
    return any(1 for obj_path in obj_paths if obj_path == parent_path)
[ "def", "inherits_from", "(", "obj", ",", "parent", ")", ":", "if", "callable", "(", "obj", ")", ":", "obj_paths", "=", "[", "(", "'%s.%s'", "%", "(", "mod", ".", "__module__", ",", "mod", ".", "__name__", ")", ")", "for", "mod", "in", "obj", ".", "mro", "(", ")", "]", "else", ":", "obj_paths", "=", "[", "(", "'%s.%s'", "%", "(", "mod", ".", "__module__", ",", "mod", ".", "__name__", ")", ")", "for", "mod", "in", "obj", ".", "__class__", ".", "mro", "(", ")", "]", "if", "isinstance", "(", "parent", ",", "basestring", ")", ":", "parent_path", "=", "parent", "elif", "callable", "(", "parent", ")", ":", "parent_path", "=", "(", "'%s.%s'", "%", "(", "parent", ".", "__module__", ",", "parent", ".", "__name__", ")", ")", "else", ":", "parent_path", "=", "(", "'%s.%s'", "%", "(", "parent", ".", "__class__", ".", "__module__", ",", "parent", ".", "__class__", ".", "__name__", ")", ")", "return", "any", "(", "(", "1", "for", "obj_path", "in", "obj_paths", "if", "(", "obj_path", "==", "parent_path", ")", ")", ")" ]
takes an object and tries to determine if it inherits at *any* distance from parent .
train
false
47,229
def gc_helper(node_list):
    """Return ``(computed, last_user)`` for an ordered list of nodes.

    ``computed`` is the set of variables produced by any node; ``last_user``
    maps each input variable to the last node that consumes it.
    """
    last_user = {}
    computed = set()
    for node in node_list:
        for inp in node.inputs:
            last_user[inp] = node  # later nodes overwrite earlier ones
        computed.update(node.outputs)
    return (computed, last_user)
[ "def", "gc_helper", "(", "node_list", ")", ":", "last_user", "=", "{", "}", "computed", "=", "set", "(", ")", "for", "node", "in", "node_list", ":", "for", "input", "in", "node", ".", "inputs", ":", "last_user", "[", "input", "]", "=", "node", "for", "output", "in", "node", ".", "outputs", ":", "computed", ".", "add", "(", "output", ")", "return", "(", "computed", ",", "last_user", ")" ]
return the set of variable instances which are computed by node_list .
train
false
47,230
def test_extra():
    """Run the private galatea unit tests; silently skip when the
    galatea package is not installed."""
    try:
        import galatea  # noqa: F401
    except ImportError:
        return
    from galatea.dbm.pylearn2_bridge import run_unit_tests
    run_unit_tests()
[ "def", "test_extra", "(", ")", ":", "try", ":", "import", "galatea", "except", "ImportError", ":", "return", "from", "galatea", ".", "dbm", ".", "pylearn2_bridge", "import", "run_unit_tests", "run_unit_tests", "(", ")" ]
test functionality that remains private .
train
false
47,231
def unblockable(function):
    """Decorator exempting *function* from nickname and hostname blocking."""
    function.unblockable = True
    return function
[ "def", "unblockable", "(", "function", ")", ":", "function", ".", "unblockable", "=", "True", "return", "function" ]
decorator which exempts the function from nickname and hostname blocking .
train
false
47,232
def GetSharedSecretsManager(can_prompt=None):
    """Return the lazily created shared-secrets manager singleton.

    ``can_prompt`` defaults to whether stderr is a TTY; it is only
    consulted on the first (creating) call.
    """
    global _shared_secrets_manager
    if _shared_secrets_manager is None:
        _shared_secrets_manager = SecretsManager(
            'shared', options.options.domain, options.options.secrets_dir)
        prompt = can_prompt if can_prompt is not None else sys.stderr.isatty()
        _shared_secrets_manager.Init(can_prompt=prompt)
    return _shared_secrets_manager
[ "def", "GetSharedSecretsManager", "(", "can_prompt", "=", "None", ")", ":", "global", "_shared_secrets_manager", "if", "(", "_shared_secrets_manager", "is", "None", ")", ":", "_shared_secrets_manager", "=", "SecretsManager", "(", "'shared'", ",", "options", ".", "options", ".", "domain", ",", "options", ".", "options", ".", "secrets_dir", ")", "prompt", "=", "(", "can_prompt", "if", "(", "can_prompt", "is", "not", "None", ")", "else", "sys", ".", "stderr", ".", "isatty", "(", ")", ")", "_shared_secrets_manager", ".", "Init", "(", "can_prompt", "=", "prompt", ")", "return", "_shared_secrets_manager" ]
returns the shared secrets manager .
train
false
47,236
def save_instance(form, instance, fields=None, fail_message=u'saved',
                  commit=True, exclude=None, construct=True):
    """Save bound form's cleaned_data into model *instance*.

    With ``commit=False`` the many-to-many save is deferred by attaching
    it to ``form.save_m2m``.  Raises ValueError when the form has errors.
    """
    if construct:
        instance = construct_instance(form, instance, fields, exclude)
    opts = instance._meta
    if form.errors:
        raise ValueError(u"The %s could not be %s because the data didn't validate." % (opts.object_name, fail_message))

    def save_m2m():
        # M2M data can only be saved once the instance has a primary key.
        cleaned_data = form.cleaned_data
        for f in opts.many_to_many:
            if fields and f.name not in fields:
                continue
            if f.name in cleaned_data:
                f.save_form_data(instance, cleaned_data[f.name])

    if commit:
        instance.save()
        save_m2m()
    else:
        form.save_m2m = save_m2m
    return instance
[ "def", "save_instance", "(", "form", ",", "instance", ",", "fields", "=", "None", ",", "fail_message", "=", "u'saved'", ",", "commit", "=", "True", ",", "exclude", "=", "None", ",", "construct", "=", "True", ")", ":", "if", "construct", ":", "instance", "=", "construct_instance", "(", "form", ",", "instance", ",", "fields", ",", "exclude", ")", "opts", "=", "instance", ".", "_meta", "if", "form", ".", "errors", ":", "raise", "ValueError", "(", "(", "u\"The %s could not be %s because the data didn't validate.\"", "%", "(", "opts", ".", "object_name", ",", "fail_message", ")", ")", ")", "def", "save_m2m", "(", ")", ":", "cleaned_data", "=", "form", ".", "cleaned_data", "for", "f", "in", "opts", ".", "many_to_many", ":", "if", "(", "fields", "and", "(", "f", ".", "name", "not", "in", "fields", ")", ")", ":", "continue", "if", "(", "f", ".", "name", "in", "cleaned_data", ")", ":", "f", ".", "save_form_data", "(", "instance", ",", "cleaned_data", "[", "f", ".", "name", "]", ")", "if", "commit", ":", "instance", ".", "save", "(", ")", "save_m2m", "(", ")", "else", ":", "form", ".", "save_m2m", "=", "save_m2m", "return", "instance" ]
saves bound form forms cleaned_data into model instance instance .
train
false
47,237
@should_profile_memory
def start_memory_profiling():
    """Start periodic object dumps (gated by the profiling decorator)."""
    dump_data_every_thread(dump_objects, DELAY_MINUTES, SAVE_THREAD_PTR)
[ "@", "should_profile_memory", "def", "start_memory_profiling", "(", ")", ":", "dump_data_every_thread", "(", "dump_objects", ",", "DELAY_MINUTES", ",", "SAVE_THREAD_PTR", ")" ]
if the environment variable w3af_profiling is set to 1 .
train
false
47,238
def _iter_requests(wsgi_input, boundary):
    """Yield a file-like object for each part of a multipart MIME input.

    Raises ``FormInvalid`` when the stream does not open with the boundary.
    """
    boundary = '--' + boundary
    if wsgi_input.readline().strip() != boundary:
        raise FormInvalid('invalid starting boundary')
    boundary = '\r\n' + boundary
    input_buffer = ''
    done = False
    while not done:
        part = _IterRequestsFileLikeObject(wsgi_input, boundary, input_buffer)
        yield part
        # The consumed part tells us whether more parts follow and what
        # buffered input to carry over.
        done = part.no_more_files
        input_buffer = part.input_buffer
[ "def", "_iter_requests", "(", "wsgi_input", ",", "boundary", ")", ":", "boundary", "=", "(", "'--'", "+", "boundary", ")", "if", "(", "wsgi_input", ".", "readline", "(", ")", ".", "strip", "(", ")", "!=", "boundary", ")", ":", "raise", "FormInvalid", "(", "'invalid starting boundary'", ")", "boundary", "=", "(", "'\\r\\n'", "+", "boundary", ")", "input_buffer", "=", "''", "done", "=", "False", "while", "(", "not", "done", ")", ":", "it", "=", "_IterRequestsFileLikeObject", "(", "wsgi_input", ",", "boundary", ",", "input_buffer", ")", "(", "yield", "it", ")", "done", "=", "it", ".", "no_more_files", "input_buffer", "=", "it", ".", "input_buffer" ]
given a multi-part mime encoded input file object and boundary .
train
false
47,239
def rotate_r(L, k):
    """Rotate *L* right by *k* positions.

    Returns a list for list input, otherwise a one-row Matrix.  An empty
    input always yields ``[]``.
    """
    items = list(L)
    if not items:
        return []
    for _ in range(k):
        items.insert(0, items.pop())
    return items if type(L) is list else Matrix([items])
[ "def", "rotate_r", "(", "L", ",", "k", ")", ":", "ll", "=", "list", "(", "L", ")", "if", "(", "ll", "==", "[", "]", ")", ":", "return", "[", "]", "for", "i", "in", "range", "(", "k", ")", ":", "el", "=", "ll", ".", "pop", "(", "(", "len", "(", "ll", ")", "-", "1", ")", ")", "ll", ".", "insert", "(", "0", ",", "el", ")", "return", "(", "ll", "if", "(", "type", "(", "L", ")", "is", "list", ")", "else", "Matrix", "(", "[", "ll", "]", ")", ")" ]
rotates right by k .
train
false
47,240
def _django_setup():
    """Import and configure Django for template rendering.

    NOTE(review): the bare ``try: raise ImportError`` looks like mangled
    code -- presumably an import originally lived there; verify upstream
    before relying on this block.
    """
    if os.environ.get('APPENGINE_RUNTIME') != 'python27':
        __django_version_setup()
    import django
    import django.conf
    try:
        raise ImportError
    except (ImportError, EnvironmentError) as e:
        # Only surface the problem when Django settings are configured
        # via the environment variable.
        if os.getenv(django.conf.ENVIRONMENT_VARIABLE):
            logging.warning(e)
    try:
        django.conf.settings.configure(
            DEBUG=False,
            TEMPLATE_DEBUG=False,
            TEMPLATE_LOADERS=('django.template.loaders.filesystem.load_template_source',))
    except (EnvironmentError, RuntimeError):
        # Settings already configured elsewhere; leave them alone.
        pass
[ "def", "_django_setup", "(", ")", ":", "if", "(", "os", ".", "environ", ".", "get", "(", "'APPENGINE_RUNTIME'", ")", "!=", "'python27'", ")", ":", "__django_version_setup", "(", ")", "import", "django", "import", "django", ".", "conf", "try", ":", "raise", "ImportError", "except", "(", "ImportError", ",", "EnvironmentError", ")", "as", "e", ":", "if", "os", ".", "getenv", "(", "django", ".", "conf", ".", "ENVIRONMENT_VARIABLE", ")", ":", "logging", ".", "warning", "(", "e", ")", "try", ":", "django", ".", "conf", ".", "settings", ".", "configure", "(", "DEBUG", "=", "False", ",", "TEMPLATE_DEBUG", "=", "False", ",", "TEMPLATE_LOADERS", "=", "(", "'django.template.loaders.filesystem.load_template_source'", ",", ")", ")", "except", "(", "EnvironmentError", ",", "RuntimeError", ")", ":", "pass" ]
imports and configures django .
train
false
47,241
def find_strings(filename):
    """Return a dict of possible docstring line positions (line -> 1).

    A string token immediately preceded by an INDENT token is treated as
    a docstring; every line it spans is recorded.  ``prev_ttype`` starts
    as INDENT so a string on the first line also counts.
    """
    docstring_lines = {}
    prev_ttype = token.INDENT
    f = open(filename)
    for ttype, tstr, start, end, line in tokenize.generate_tokens(f.readline):
        if ttype == token.STRING and prev_ttype == token.INDENT:
            sline, scol = start
            eline, ecol = end
            for lineno in range(sline, eline + 1):
                docstring_lines[lineno] = 1
        prev_ttype = ttype
    f.close()
    return docstring_lines
[ "def", "find_strings", "(", "filename", ")", ":", "d", "=", "{", "}", "prev_ttype", "=", "token", ".", "INDENT", "f", "=", "open", "(", "filename", ")", "for", "(", "ttype", ",", "tstr", ",", "start", ",", "end", ",", "line", ")", "in", "tokenize", ".", "generate_tokens", "(", "f", ".", "readline", ")", ":", "if", "(", "ttype", "==", "token", ".", "STRING", ")", ":", "if", "(", "prev_ttype", "==", "token", ".", "INDENT", ")", ":", "(", "sline", ",", "scol", ")", "=", "start", "(", "eline", ",", "ecol", ")", "=", "end", "for", "i", "in", "range", "(", "sline", ",", "(", "eline", "+", "1", ")", ")", ":", "d", "[", "i", "]", "=", "1", "prev_ttype", "=", "ttype", "f", ".", "close", "(", ")", "return", "d" ]
return a dict of possible docstring positions .
train
false
47,242
@require_context
def instance_type_get(context, id, session=None):
    """Return the instance type with the given id.

    Raises ``InstanceTypeNotFound`` when no row matches.
    """
    result = (_instance_type_get_query(context, session=session)
              .filter_by(id=id)
              .first())
    if not result:
        raise exception.InstanceTypeNotFound(instance_type_id=id)
    return _dict_with_extra_specs(result)
[ "@", "require_context", "def", "instance_type_get", "(", "context", ",", "id", ",", "session", "=", "None", ")", ":", "result", "=", "_instance_type_get_query", "(", "context", ",", "session", "=", "session", ")", ".", "filter_by", "(", "id", "=", "id", ")", ".", "first", "(", ")", "if", "(", "not", "result", ")", ":", "raise", "exception", ".", "InstanceTypeNotFound", "(", "instance_type_id", "=", "id", ")", "return", "_dict_with_extra_specs", "(", "result", ")" ]
get instance type by id .
train
false
47,243
def print_accounts(accounts_response): print('------ Account Collection -------') print_pagination_info(accounts_response) print() for account in accounts_response.get('items', []): print(('Account ID = %s' % account.get('id'))) print(('Kind = %s' % account.get('kind'))) print(('Self Link = %s' % account.get('selfLink'))) print(('Account Name = %s' % account.get('name'))) print(('Created = %s' % account.get('created'))) print(('Updated = %s' % account.get('updated'))) child_link = account.get('childLink') print(('Child link href = %s' % child_link.get('href'))) print(('Child link type = %s' % child_link.get('type'))) print() if (not accounts_response.get('items')): print('No accounts found.\n')
[ "def", "print_accounts", "(", "accounts_response", ")", ":", "print", "(", "'------ Account Collection -------'", ")", "print_pagination_info", "(", "accounts_response", ")", "print", "(", ")", "for", "account", "in", "accounts_response", ".", "get", "(", "'items'", ",", "[", "]", ")", ":", "print", "(", "(", "'Account ID = %s'", "%", "account", ".", "get", "(", "'id'", ")", ")", ")", "print", "(", "(", "'Kind = %s'", "%", "account", ".", "get", "(", "'kind'", ")", ")", ")", "print", "(", "(", "'Self Link = %s'", "%", "account", ".", "get", "(", "'selfLink'", ")", ")", ")", "print", "(", "(", "'Account Name = %s'", "%", "account", ".", "get", "(", "'name'", ")", ")", ")", "print", "(", "(", "'Created = %s'", "%", "account", ".", "get", "(", "'created'", ")", ")", ")", "print", "(", "(", "'Updated = %s'", "%", "account", ".", "get", "(", "'updated'", ")", ")", ")", "child_link", "=", "account", ".", "get", "(", "'childLink'", ")", "print", "(", "(", "'Child link href = %s'", "%", "child_link", ".", "get", "(", "'href'", ")", ")", ")", "print", "(", "(", "'Child link type = %s'", "%", "child_link", ".", "get", "(", "'type'", ")", ")", ")", "print", "(", ")", "if", "(", "not", "accounts_response", ".", "get", "(", "'items'", ")", ")", ":", "print", "(", "'No accounts found.\\n'", ")" ]
prints all the account info in the accounts collection .
train
false
47,245
def statsd_increment(counter, val=1): def wrapper(func): @wraps(func) def wrapped_func(*args, **kwargs): ret = func(*args, **kwargs) statsd.incr(counter, val) return ret return wrapped_func return wrapper
[ "def", "statsd_increment", "(", "counter", ",", "val", "=", "1", ")", ":", "def", "wrapper", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapped_func", "(", "*", "args", ",", "**", "kwargs", ")", ":", "ret", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "statsd", ".", "incr", "(", "counter", ",", "val", ")", "return", "ret", "return", "wrapped_func", "return", "wrapper" ]
increments a statsd counter on completion of the decorated function .
train
false
47,246
@treeio_login_required @handle_response_format def lead_add(request, lead_id=None, response_format='html'): all_leads = Object.filter_by_request(request, Lead.objects) if request.POST: if ('cancel' not in request.POST): lead = Lead() form = LeadForm(request.user.profile, request.POST, instance=lead) if form.is_valid(): lead = form.save() lead.set_user_from_request(request) return HttpResponseRedirect(reverse('sales_lead_view', args=[lead.id])) else: return HttpResponseRedirect(reverse('sales_lead_index')) else: form = LeadForm(request.user.profile) return render_to_response('sales/lead_add', {'form': form, 'leads': all_leads}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "lead_add", "(", "request", ",", "lead_id", "=", "None", ",", "response_format", "=", "'html'", ")", ":", "all_leads", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Lead", ".", "objects", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "lead", "=", "Lead", "(", ")", "form", "=", "LeadForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "lead", ")", "if", "form", ".", "is_valid", "(", ")", ":", "lead", "=", "form", ".", "save", "(", ")", "lead", ".", "set_user_from_request", "(", "request", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_lead_view'", ",", "args", "=", "[", "lead", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_lead_index'", ")", ")", "else", ":", "form", "=", "LeadForm", "(", "request", ".", "user", ".", "profile", ")", "return", "render_to_response", "(", "'sales/lead_add'", ",", "{", "'form'", ":", "form", ",", "'leads'", ":", "all_leads", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
lead add .
train
false
47,247
def _pid_file(name): return wf().cachefile(u'{0}.pid'.format(name))
[ "def", "_pid_file", "(", "name", ")", ":", "return", "wf", "(", ")", ".", "cachefile", "(", "u'{0}.pid'", ".", "format", "(", "name", ")", ")" ]
return path to pid file for name .
train
false
47,248
def add64(a, b): return np.add(a, b, dtype=np.uint64)
[ "def", "add64", "(", "a", ",", "b", ")", ":", "return", "np", ".", "add", "(", "a", ",", "b", ",", "dtype", "=", "np", ".", "uint64", ")" ]
return a 64-bit integer sum of a and b .
train
false
47,249
def aggregate_flags(flags): on = any([(flag is ON) for flag in flags]) off = any([(flag is OFF) for flag in flags]) if on: if off: raise ValueError('ON and OFF flags cannot be mixed.') else: return ON else: return (OFF if off else AUTO)
[ "def", "aggregate_flags", "(", "flags", ")", ":", "on", "=", "any", "(", "[", "(", "flag", "is", "ON", ")", "for", "flag", "in", "flags", "]", ")", "off", "=", "any", "(", "[", "(", "flag", "is", "OFF", ")", "for", "flag", "in", "flags", "]", ")", "if", "on", ":", "if", "off", ":", "raise", "ValueError", "(", "'ON and OFF flags cannot be mixed.'", ")", "else", ":", "return", "ON", "else", ":", "return", "(", "OFF", "if", "off", "else", "AUTO", ")" ]
returns an aggregated flag given a sequence of flags .
train
false
47,250
def compile_models(folder): path = pjoin(folder, 'models') for fname in listdir(path, '.+\\.py$'): data = read_file(pjoin(path, fname)) modelfile = ('models.' + fname.replace(os.path.sep, '.')) filename = pjoin(folder, 'compiled', modelfile) mktree(filename) write_file(filename, data) save_pyc(filename) os.unlink(filename)
[ "def", "compile_models", "(", "folder", ")", ":", "path", "=", "pjoin", "(", "folder", ",", "'models'", ")", "for", "fname", "in", "listdir", "(", "path", ",", "'.+\\\\.py$'", ")", ":", "data", "=", "read_file", "(", "pjoin", "(", "path", ",", "fname", ")", ")", "modelfile", "=", "(", "'models.'", "+", "fname", ".", "replace", "(", "os", ".", "path", ".", "sep", ",", "'.'", ")", ")", "filename", "=", "pjoin", "(", "folder", ",", "'compiled'", ",", "modelfile", ")", "mktree", "(", "filename", ")", "write_file", "(", "filename", ",", "data", ")", "save_pyc", "(", "filename", ")", "os", ".", "unlink", "(", "filename", ")" ]
compiles all the models in the application specified by folder .
train
false
47,251
def _replace_wspace(text): for char in (' DCTB ', '\n', '\r'): if (char in text): text = text.replace(char, ' ') return text
[ "def", "_replace_wspace", "(", "text", ")", ":", "for", "char", "in", "(", "' DCTB '", ",", "'\\n'", ",", "'\\r'", ")", ":", "if", "(", "char", "in", "text", ")", ":", "text", "=", "text", ".", "replace", "(", "char", ",", "' '", ")", "return", "text" ]
replace tab .
train
false
47,252
def machine(): mach = platform.machine() if mach.startswith('arm'): return 'arm' else: return None
[ "def", "machine", "(", ")", ":", "mach", "=", "platform", ".", "machine", "(", ")", "if", "mach", ".", "startswith", "(", "'arm'", ")", ":", "return", "'arm'", "else", ":", "return", "None" ]
return machine suffix to use in directory name when looking for bootloader .
train
false
47,253
def server_by_name(name, profile=None): conn = _auth(profile) return conn.server_by_name(name)
[ "def", "server_by_name", "(", "name", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "server_by_name", "(", "name", ")" ]
return information about a server name server name cli example: .
train
true
47,254
def paginate(context, window=DEFAULT_WINDOW, hashtag=''): try: paginator = context['paginator'] page_obj = context['page_obj'] page_suffix = context.get('page_suffix', '') page_range = paginator.page_range records = {'first': (1 + ((page_obj.number - 1) * paginator.per_page))} records['last'] = ((records['first'] + paginator.per_page) - 1) if ((records['last'] + paginator.orphans) >= paginator.count): records['last'] = paginator.count first = set(page_range[:window]) last = set(page_range[(- window):]) current_start = ((page_obj.number - 1) - window) if (current_start < 0): current_start = 0 current_end = ((page_obj.number - 1) + window) if (current_end < 0): current_end = 0 current = set(page_range[current_start:current_end]) pages = [] if (len(first.intersection(current)) == 0): first_list = list(first) first_list.sort() second_list = list(current) second_list.sort() pages.extend(first_list) diff = (second_list[0] - first_list[(-1)]) if (diff == 2): pages.append((second_list[0] - 1)) elif (diff == 1): pass else: pages.append(None) pages.extend(second_list) else: unioned = list(first.union(current)) unioned.sort() pages.extend(unioned) if (len(current.intersection(last)) == 0): second_list = list(last) second_list.sort() diff = (second_list[0] - pages[(-1)]) if (diff == 2): pages.append((second_list[0] - 1)) elif (diff == 1): pass else: pages.append(None) pages.extend(second_list) else: differenced = list(last.difference(current)) differenced.sort() pages.extend(differenced) to_return = {'MEDIA_URL': settings.MEDIA_URL, 'request': context['request'], 'pages': pages, 'records': records, 'page_obj': page_obj, 'paginator': paginator, 'hashtag': hashtag, 'is_paginated': (paginator.count > paginator.per_page), 'page_suffix': page_suffix} if ('request' in context): getvars = context['request'].GET.copy() if (('page%s' % page_suffix) in getvars): del getvars[('page%s' % page_suffix)] if (len(getvars.keys()) > 0): to_return['getvars'] = ('&%s' % getvars.urlencode()) else: 
to_return['getvars'] = '' return to_return except KeyError as AttributeError: return {}
[ "def", "paginate", "(", "context", ",", "window", "=", "DEFAULT_WINDOW", ",", "hashtag", "=", "''", ")", ":", "try", ":", "paginator", "=", "context", "[", "'paginator'", "]", "page_obj", "=", "context", "[", "'page_obj'", "]", "page_suffix", "=", "context", ".", "get", "(", "'page_suffix'", ",", "''", ")", "page_range", "=", "paginator", ".", "page_range", "records", "=", "{", "'first'", ":", "(", "1", "+", "(", "(", "page_obj", ".", "number", "-", "1", ")", "*", "paginator", ".", "per_page", ")", ")", "}", "records", "[", "'last'", "]", "=", "(", "(", "records", "[", "'first'", "]", "+", "paginator", ".", "per_page", ")", "-", "1", ")", "if", "(", "(", "records", "[", "'last'", "]", "+", "paginator", ".", "orphans", ")", ">=", "paginator", ".", "count", ")", ":", "records", "[", "'last'", "]", "=", "paginator", ".", "count", "first", "=", "set", "(", "page_range", "[", ":", "window", "]", ")", "last", "=", "set", "(", "page_range", "[", "(", "-", "window", ")", ":", "]", ")", "current_start", "=", "(", "(", "page_obj", ".", "number", "-", "1", ")", "-", "window", ")", "if", "(", "current_start", "<", "0", ")", ":", "current_start", "=", "0", "current_end", "=", "(", "(", "page_obj", ".", "number", "-", "1", ")", "+", "window", ")", "if", "(", "current_end", "<", "0", ")", ":", "current_end", "=", "0", "current", "=", "set", "(", "page_range", "[", "current_start", ":", "current_end", "]", ")", "pages", "=", "[", "]", "if", "(", "len", "(", "first", ".", "intersection", "(", "current", ")", ")", "==", "0", ")", ":", "first_list", "=", "list", "(", "first", ")", "first_list", ".", "sort", "(", ")", "second_list", "=", "list", "(", "current", ")", "second_list", ".", "sort", "(", ")", "pages", ".", "extend", "(", "first_list", ")", "diff", "=", "(", "second_list", "[", "0", "]", "-", "first_list", "[", "(", "-", "1", ")", "]", ")", "if", "(", "diff", "==", "2", ")", ":", "pages", ".", "append", "(", "(", "second_list", "[", "0", "]", "-", "1", ")", ")", "elif", 
"(", "diff", "==", "1", ")", ":", "pass", "else", ":", "pages", ".", "append", "(", "None", ")", "pages", ".", "extend", "(", "second_list", ")", "else", ":", "unioned", "=", "list", "(", "first", ".", "union", "(", "current", ")", ")", "unioned", ".", "sort", "(", ")", "pages", ".", "extend", "(", "unioned", ")", "if", "(", "len", "(", "current", ".", "intersection", "(", "last", ")", ")", "==", "0", ")", ":", "second_list", "=", "list", "(", "last", ")", "second_list", ".", "sort", "(", ")", "diff", "=", "(", "second_list", "[", "0", "]", "-", "pages", "[", "(", "-", "1", ")", "]", ")", "if", "(", "diff", "==", "2", ")", ":", "pages", ".", "append", "(", "(", "second_list", "[", "0", "]", "-", "1", ")", ")", "elif", "(", "diff", "==", "1", ")", ":", "pass", "else", ":", "pages", ".", "append", "(", "None", ")", "pages", ".", "extend", "(", "second_list", ")", "else", ":", "differenced", "=", "list", "(", "last", ".", "difference", "(", "current", ")", ")", "differenced", ".", "sort", "(", ")", "pages", ".", "extend", "(", "differenced", ")", "to_return", "=", "{", "'MEDIA_URL'", ":", "settings", ".", "MEDIA_URL", ",", "'request'", ":", "context", "[", "'request'", "]", ",", "'pages'", ":", "pages", ",", "'records'", ":", "records", ",", "'page_obj'", ":", "page_obj", ",", "'paginator'", ":", "paginator", ",", "'hashtag'", ":", "hashtag", ",", "'is_paginated'", ":", "(", "paginator", ".", "count", ">", "paginator", ".", "per_page", ")", ",", "'page_suffix'", ":", "page_suffix", "}", "if", "(", "'request'", "in", "context", ")", ":", "getvars", "=", "context", "[", "'request'", "]", ".", "GET", ".", "copy", "(", ")", "if", "(", "(", "'page%s'", "%", "page_suffix", ")", "in", "getvars", ")", ":", "del", "getvars", "[", "(", "'page%s'", "%", "page_suffix", ")", "]", "if", "(", "len", "(", "getvars", ".", "keys", "(", ")", ")", ">", "0", ")", ":", "to_return", "[", "'getvars'", "]", "=", "(", "'&%s'", "%", "getvars", ".", "urlencode", "(", ")", ")", "else", ":", 
"to_return", "[", "'getvars'", "]", "=", "''", "return", "to_return", "except", "KeyError", "as", "AttributeError", ":", "return", "{", "}" ]
return a paginated page for the given objects .
train
true
47,255
def list_stopped(): return sorted((set(list_all()) - set(list_running())))
[ "def", "list_stopped", "(", ")", ":", "return", "sorted", "(", "(", "set", "(", "list_all", "(", ")", ")", "-", "set", "(", "list_running", "(", ")", ")", ")", ")" ]
lists stopped nspawn containers cli example: .
train
false
47,256
def everygrams(sequence, min_len=1, max_len=(-1), **kwargs): if (max_len == (-1)): max_len = len(sequence) for n in range(min_len, (max_len + 1)): for ng in ngrams(sequence, n, **kwargs): (yield ng)
[ "def", "everygrams", "(", "sequence", ",", "min_len", "=", "1", ",", "max_len", "=", "(", "-", "1", ")", ",", "**", "kwargs", ")", ":", "if", "(", "max_len", "==", "(", "-", "1", ")", ")", ":", "max_len", "=", "len", "(", "sequence", ")", "for", "n", "in", "range", "(", "min_len", ",", "(", "max_len", "+", "1", ")", ")", ":", "for", "ng", "in", "ngrams", "(", "sequence", ",", "n", ",", "**", "kwargs", ")", ":", "(", "yield", "ng", ")" ]
returns all possible ngrams generated from a sequence of items .
train
false
47,258
def PLUS_DI(barDs, count, timeperiod=(- (2 ** 31))): return call_talib_with_hlc(barDs, count, talib.PLUS_DI, timeperiod)
[ "def", "PLUS_DI", "(", "barDs", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_hlc", "(", "barDs", ",", "count", ",", "talib", ".", "PLUS_DI", ",", "timeperiod", ")" ]
plus directional indicator .
train
false
47,260
def describe_snapshots(kwargs=None, call=None): if (call != 'function'): log.error('The describe_snapshot function must be called with -f or --function.') return False params = {'Action': 'DescribeSnapshots'} if ('snapshot_ids' in kwargs): kwargs['snapshot_id'] = kwargs['snapshot_ids'] if ('snapshot_id' in kwargs): snapshot_ids = kwargs['snapshot_id'].split(',') for (snapshot_index, snapshot_id) in enumerate(snapshot_ids): params['SnapshotId.{0}'.format(snapshot_index)] = snapshot_id if ('owner' in kwargs): owners = kwargs['owner'].split(',') for (owner_index, owner) in enumerate(owners): params['Owner.{0}'.format(owner_index)] = owner if ('restorable_by' in kwargs): restorable_bys = kwargs['restorable_by'].split(',') for (restorable_by_index, restorable_by) in enumerate(restorable_bys): params['RestorableBy.{0}'.format(restorable_by_index)] = restorable_by log.debug(params) data = aws.query(params, return_url=True, location=get_location(), provider=get_provider(), opts=__opts__, sigver='4') return data
[ "def", "describe_snapshots", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "log", ".", "error", "(", "'The describe_snapshot function must be called with -f or --function.'", ")", "return", "False", "params", "=", "{", "'Action'", ":", "'DescribeSnapshots'", "}", "if", "(", "'snapshot_ids'", "in", "kwargs", ")", ":", "kwargs", "[", "'snapshot_id'", "]", "=", "kwargs", "[", "'snapshot_ids'", "]", "if", "(", "'snapshot_id'", "in", "kwargs", ")", ":", "snapshot_ids", "=", "kwargs", "[", "'snapshot_id'", "]", ".", "split", "(", "','", ")", "for", "(", "snapshot_index", ",", "snapshot_id", ")", "in", "enumerate", "(", "snapshot_ids", ")", ":", "params", "[", "'SnapshotId.{0}'", ".", "format", "(", "snapshot_index", ")", "]", "=", "snapshot_id", "if", "(", "'owner'", "in", "kwargs", ")", ":", "owners", "=", "kwargs", "[", "'owner'", "]", ".", "split", "(", "','", ")", "for", "(", "owner_index", ",", "owner", ")", "in", "enumerate", "(", "owners", ")", ":", "params", "[", "'Owner.{0}'", ".", "format", "(", "owner_index", ")", "]", "=", "owner", "if", "(", "'restorable_by'", "in", "kwargs", ")", ":", "restorable_bys", "=", "kwargs", "[", "'restorable_by'", "]", ".", "split", "(", "','", ")", "for", "(", "restorable_by_index", ",", "restorable_by", ")", "in", "enumerate", "(", "restorable_bys", ")", ":", "params", "[", "'RestorableBy.{0}'", ".", "format", "(", "restorable_by_index", ")", "]", "=", "restorable_by", "log", ".", "debug", "(", "params", ")", "data", "=", "aws", ".", "query", "(", "params", ",", "return_url", "=", "True", ",", "location", "=", "get_location", "(", ")", ",", "provider", "=", "get_provider", "(", ")", ",", "opts", "=", "__opts__", ",", "sigver", "=", "'4'", ")", "return", "data" ]
describe a snapshot snapshot_id one or more snapshot ids .
train
true
47,261
def check_dependencies(): if (not HAS_VIRTUALENV): raise Exception(('Virtualenv not found. ' + 'Try installing python-virtualenv')) print 'done.'
[ "def", "check_dependencies", "(", ")", ":", "if", "(", "not", "HAS_VIRTUALENV", ")", ":", "raise", "Exception", "(", "(", "'Virtualenv not found. '", "+", "'Try installing python-virtualenv'", ")", ")", "print", "'done.'" ]
make sure virtualenv is in the path .
train
false
47,262
def create_legacy_graph_tasks(symbol_table_cls): symbol_table_constraint = symbol_table_cls.constraint() return [(HydratedTargets, [SelectDependencies(HydratedTarget, Addresses, field_types=(Address,), transitive=True)], HydratedTargets), (HydratedTarget, [Select(symbol_table_constraint), SelectDependencies(HydratedField, symbol_table_constraint, u'field_adaptors', field_types=(SourcesField, BundlesField))], hydrate_target), (HydratedField, [Select(SourcesField), SelectProjection(FilesDigest, PathGlobs, (u'path_globs',), SourcesField), SelectProjection(Files, PathGlobs, (u'excluded_path_globs',), SourcesField)], hydrate_sources), (HydratedField, [Select(BundlesField), SelectDependencies(FilesDigest, BundlesField, u'path_globs_list', field_types=(PathGlobs,)), SelectDependencies(Files, BundlesField, u'excluded_path_globs_list', field_types=(PathGlobs,))], hydrate_bundles)]
[ "def", "create_legacy_graph_tasks", "(", "symbol_table_cls", ")", ":", "symbol_table_constraint", "=", "symbol_table_cls", ".", "constraint", "(", ")", "return", "[", "(", "HydratedTargets", ",", "[", "SelectDependencies", "(", "HydratedTarget", ",", "Addresses", ",", "field_types", "=", "(", "Address", ",", ")", ",", "transitive", "=", "True", ")", "]", ",", "HydratedTargets", ")", ",", "(", "HydratedTarget", ",", "[", "Select", "(", "symbol_table_constraint", ")", ",", "SelectDependencies", "(", "HydratedField", ",", "symbol_table_constraint", ",", "u'field_adaptors'", ",", "field_types", "=", "(", "SourcesField", ",", "BundlesField", ")", ")", "]", ",", "hydrate_target", ")", ",", "(", "HydratedField", ",", "[", "Select", "(", "SourcesField", ")", ",", "SelectProjection", "(", "FilesDigest", ",", "PathGlobs", ",", "(", "u'path_globs'", ",", ")", ",", "SourcesField", ")", ",", "SelectProjection", "(", "Files", ",", "PathGlobs", ",", "(", "u'excluded_path_globs'", ",", ")", ",", "SourcesField", ")", "]", ",", "hydrate_sources", ")", ",", "(", "HydratedField", ",", "[", "Select", "(", "BundlesField", ")", ",", "SelectDependencies", "(", "FilesDigest", ",", "BundlesField", ",", "u'path_globs_list'", ",", "field_types", "=", "(", "PathGlobs", ",", ")", ")", ",", "SelectDependencies", "(", "Files", ",", "BundlesField", ",", "u'excluded_path_globs_list'", ",", "field_types", "=", "(", "PathGlobs", ",", ")", ")", "]", ",", "hydrate_bundles", ")", "]" ]
create tasks to recursively parse the legacy graph .
train
false
47,263
def idd_snorm(m, n, matvect, matvec, its=20): (snorm, v) = _id.idd_snorm(m, n, matvect, matvec, its) return snorm
[ "def", "idd_snorm", "(", "m", ",", "n", ",", "matvect", ",", "matvec", ",", "its", "=", "20", ")", ":", "(", "snorm", ",", "v", ")", "=", "_id", ".", "idd_snorm", "(", "m", ",", "n", ",", "matvect", ",", "matvec", ",", "its", ")", "return", "snorm" ]
estimate spectral norm of a real matrix by the randomized power method .
train
false
47,264
def get_entry_info(dist, group, name): return get_distribution(dist).get_entry_info(group, name)
[ "def", "get_entry_info", "(", "dist", ",", "group", ",", "name", ")", ":", "return", "get_distribution", "(", "dist", ")", ".", "get_entry_info", "(", "group", ",", "name", ")" ]
return the entrypoint object for group+name .
train
false
47,266
def dmp_diff_eval_in(f, m, a, j, u, K): if (j > u): raise IndexError(('-%s <= j < %s expected, got %s' % (u, u, j))) if (not j): return dmp_eval(dmp_diff(f, m, u, K), a, u, K) return _rec_diff_eval(f, m, a, u, 0, j, K)
[ "def", "dmp_diff_eval_in", "(", "f", ",", "m", ",", "a", ",", "j", ",", "u", ",", "K", ")", ":", "if", "(", "j", ">", "u", ")", ":", "raise", "IndexError", "(", "(", "'-%s <= j < %s expected, got %s'", "%", "(", "u", ",", "u", ",", "j", ")", ")", ")", "if", "(", "not", "j", ")", ":", "return", "dmp_eval", "(", "dmp_diff", "(", "f", ",", "m", ",", "u", ",", "K", ")", ",", "a", ",", "u", ",", "K", ")", "return", "_rec_diff_eval", "(", "f", ",", "m", ",", "a", ",", "u", ",", "0", ",", "j", ",", "K", ")" ]
differentiate and evaluate a polynomial in x_j at a in k[x] .
train
false
47,267
def get_decades(year): if year: try: decade = (year[2:3] + '0') decade2 = (year[:3] + '0') except: decade = '' decade2 = '' else: decade = '' decade2 = '' return (decade, decade2)
[ "def", "get_decades", "(", "year", ")", ":", "if", "year", ":", "try", ":", "decade", "=", "(", "year", "[", "2", ":", "3", "]", "+", "'0'", ")", "decade2", "=", "(", "year", "[", ":", "3", "]", "+", "'0'", ")", "except", ":", "decade", "=", "''", "decade2", "=", "''", "else", ":", "decade", "=", "''", "decade2", "=", "''", "return", "(", "decade", ",", "decade2", ")" ]
return 4 digit and 2 digit decades given year .
train
false
47,268
def remove_move(name): try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError(('no such move, %r' % (name,)))
[ "def", "remove_move", "(", "name", ")", ":", "try", ":", "delattr", "(", "_MovedItems", ",", "name", ")", "except", "AttributeError", ":", "try", ":", "del", "moves", ".", "__dict__", "[", "name", "]", "except", "KeyError", ":", "raise", "AttributeError", "(", "(", "'no such move, %r'", "%", "(", "name", ",", ")", ")", ")" ]
remove item from webassets .
train
true
47,269
def isqrt(n): if (n < 17984395633462800708566937239552L): return int(_sqrt(n)) return integer_nthroot(int(n), 2)[0]
[ "def", "isqrt", "(", "n", ")", ":", "if", "(", "n", "<", "17984395633462800708566937239552", "L", ")", ":", "return", "int", "(", "_sqrt", "(", "n", ")", ")", "return", "integer_nthroot", "(", "int", "(", "n", ")", ",", "2", ")", "[", "0", "]" ]
return the largest integer less than or equal to sqrt(n) .
train
false
47,271
def _get_ironic_client(): kwargs = {'os_username': CONF.ironic.admin_username, 'os_password': CONF.ironic.admin_password, 'os_auth_url': CONF.ironic.admin_url, 'os_tenant_name': CONF.ironic.admin_tenant_name, 'os_service_type': 'baremetal', 'os_endpoint_type': 'public', 'insecure': 'true', 'ironic_url': CONF.ironic.api_endpoint} icli = ironic_client.get_client(1, **kwargs) return icli
[ "def", "_get_ironic_client", "(", ")", ":", "kwargs", "=", "{", "'os_username'", ":", "CONF", ".", "ironic", ".", "admin_username", ",", "'os_password'", ":", "CONF", ".", "ironic", ".", "admin_password", ",", "'os_auth_url'", ":", "CONF", ".", "ironic", ".", "admin_url", ",", "'os_tenant_name'", ":", "CONF", ".", "ironic", ".", "admin_tenant_name", ",", "'os_service_type'", ":", "'baremetal'", ",", "'os_endpoint_type'", ":", "'public'", ",", "'insecure'", ":", "'true'", ",", "'ironic_url'", ":", "CONF", ".", "ironic", ".", "api_endpoint", "}", "icli", "=", "ironic_client", ".", "get_client", "(", "1", ",", "**", "kwargs", ")", "return", "icli" ]
return an ironic client .
train
false
47,272
def set_enabled_auth_backend(backend_id): siteconfig = SiteConfiguration.objects.get_current() siteconfig.set(u'auth_backend', backend_id)
[ "def", "set_enabled_auth_backend", "(", "backend_id", ")", ":", "siteconfig", "=", "SiteConfiguration", ".", "objects", ".", "get_current", "(", ")", "siteconfig", ".", "set", "(", "u'auth_backend'", ",", "backend_id", ")" ]
set the authentication backend to be used .
train
false
47,273
def deleteAllGroups(server): try: (client, key) = _get_session(server) except Exception as exc: err_msg = 'Exception raised when connecting to spacewalk server ({0}): {1}'.format(server, exc) log.error(err_msg) return {'Error': err_msg} groups = client.systemgroup.listAllGroups(key) deleted_groups = [] failed_groups = [] for group in groups: if (client.systemgroup.delete(key, group['name']) == 1): deleted_groups.append(group['name']) else: failed_groups.append(group['name']) ret = {'deleted': deleted_groups} if failed_groups: ret['failed'] = failed_groups return ret
[ "def", "deleteAllGroups", "(", "server", ")", ":", "try", ":", "(", "client", ",", "key", ")", "=", "_get_session", "(", "server", ")", "except", "Exception", "as", "exc", ":", "err_msg", "=", "'Exception raised when connecting to spacewalk server ({0}): {1}'", ".", "format", "(", "server", ",", "exc", ")", "log", ".", "error", "(", "err_msg", ")", "return", "{", "'Error'", ":", "err_msg", "}", "groups", "=", "client", ".", "systemgroup", ".", "listAllGroups", "(", "key", ")", "deleted_groups", "=", "[", "]", "failed_groups", "=", "[", "]", "for", "group", "in", "groups", ":", "if", "(", "client", ".", "systemgroup", ".", "delete", "(", "key", ",", "group", "[", "'name'", "]", ")", "==", "1", ")", ":", "deleted_groups", ".", "append", "(", "group", "[", "'name'", "]", ")", "else", ":", "failed_groups", ".", "append", "(", "group", "[", "'name'", "]", ")", "ret", "=", "{", "'deleted'", ":", "deleted_groups", "}", "if", "failed_groups", ":", "ret", "[", "'failed'", "]", "=", "failed_groups", "return", "ret" ]
delete all server groups from spacewalk .
train
true
47,274
def string_suggestions(string, vocabulary, cutoff=0.6, maxnum=3): return [tup[1] for tup in sorted([(string_similarity(string, sugg), sugg) for sugg in vocabulary], key=(lambda tup: tup[0]), reverse=True) if (tup[0] >= cutoff)][:maxnum]
[ "def", "string_suggestions", "(", "string", ",", "vocabulary", ",", "cutoff", "=", "0.6", ",", "maxnum", "=", "3", ")", ":", "return", "[", "tup", "[", "1", "]", "for", "tup", "in", "sorted", "(", "[", "(", "string_similarity", "(", "string", ",", "sugg", ")", ",", "sugg", ")", "for", "sugg", "in", "vocabulary", "]", ",", "key", "=", "(", "lambda", "tup", ":", "tup", "[", "0", "]", ")", ",", "reverse", "=", "True", ")", "if", "(", "tup", "[", "0", "]", ">=", "cutoff", ")", "]", "[", ":", "maxnum", "]" ]
given a string and a vocabulary .
train
false
47,276
def _echo(stdout, stderr): if stdout: sys.stdout.write(stdout) if stderr: sys.stderr.write(stderr)
[ "def", "_echo", "(", "stdout", ",", "stderr", ")", ":", "if", "stdout", ":", "sys", ".", "stdout", ".", "write", "(", "stdout", ")", "if", "stderr", ":", "sys", ".", "stderr", ".", "write", "(", "stderr", ")" ]
echo messages to stdout and stderr .
train
false
47,277
def test_pyc(): f = tempfile.NamedTemporaryFile(suffix='.hy', delete=False) f.write('(defn pyctest [s] s)') f.close() write_hy_as_pyc(f.name) os.unlink(f.name) cfile = ('%s.pyc' % f.name[:(- len('.hy'))]) mod = imp.load_compiled('pyc', cfile) os.unlink(cfile) assert (mod.pyctest('Foo') == 'Foo')
[ "def", "test_pyc", "(", ")", ":", "f", "=", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.hy'", ",", "delete", "=", "False", ")", "f", ".", "write", "(", "'(defn pyctest [s] s)'", ")", "f", ".", "close", "(", ")", "write_hy_as_pyc", "(", "f", ".", "name", ")", "os", ".", "unlink", "(", "f", ".", "name", ")", "cfile", "=", "(", "'%s.pyc'", "%", "f", ".", "name", "[", ":", "(", "-", "len", "(", "'.hy'", ")", ")", "]", ")", "mod", "=", "imp", ".", "load_compiled", "(", "'pyc'", ",", "cfile", ")", "os", ".", "unlink", "(", "cfile", ")", "assert", "(", "mod", ".", "pyctest", "(", "'Foo'", ")", "==", "'Foo'", ")" ]
test pyc compilation .
train
false
47,278
def _flip_g(g): def tr(l): return [(1 - a) for a in l] return meijerg(tr(g.bm), tr(g.bother), tr(g.an), tr(g.aother), (1 / g.argument))
[ "def", "_flip_g", "(", "g", ")", ":", "def", "tr", "(", "l", ")", ":", "return", "[", "(", "1", "-", "a", ")", "for", "a", "in", "l", "]", "return", "meijerg", "(", "tr", "(", "g", ".", "bm", ")", ",", "tr", "(", "g", ".", "bother", ")", ",", "tr", "(", "g", ".", "an", ")", ",", "tr", "(", "g", ".", "aother", ")", ",", "(", "1", "/", "g", ".", "argument", ")", ")" ]
turn the g function into one of inverse argument (i .
train
false
47,284
def createShader(vert_fname, frag_fname): with open(op.join(this_dir, vert_fname), 'rb') as fid: vert = fid.read().decode('ASCII') with open(op.join(this_dir, frag_fname), 'rb') as fid: frag = fid.read().decode('ASCII') vertexShader = gl.glCreateShader(gl.GL_VERTEX_SHADER) gl.glShaderSource(vertexShader, vert) gl.glCompileShader(vertexShader) fragmentShader = gl.glCreateShader(gl.GL_FRAGMENT_SHADER) gl.glShaderSource(fragmentShader, frag) gl.glCompileShader(fragmentShader) programObj = gl.glCreateProgram() gl.glAttachShader(programObj, vertexShader) gl.glAttachShader(programObj, fragmentShader) gl.glLinkProgram(programObj) checkGLError() return programObj
[ "def", "createShader", "(", "vert_fname", ",", "frag_fname", ")", ":", "with", "open", "(", "op", ".", "join", "(", "this_dir", ",", "vert_fname", ")", ",", "'rb'", ")", "as", "fid", ":", "vert", "=", "fid", ".", "read", "(", ")", ".", "decode", "(", "'ASCII'", ")", "with", "open", "(", "op", ".", "join", "(", "this_dir", ",", "frag_fname", ")", ",", "'rb'", ")", "as", "fid", ":", "frag", "=", "fid", ".", "read", "(", ")", ".", "decode", "(", "'ASCII'", ")", "vertexShader", "=", "gl", ".", "glCreateShader", "(", "gl", ".", "GL_VERTEX_SHADER", ")", "gl", ".", "glShaderSource", "(", "vertexShader", ",", "vert", ")", "gl", ".", "glCompileShader", "(", "vertexShader", ")", "fragmentShader", "=", "gl", ".", "glCreateShader", "(", "gl", ".", "GL_FRAGMENT_SHADER", ")", "gl", ".", "glShaderSource", "(", "fragmentShader", ",", "frag", ")", "gl", ".", "glCompileShader", "(", "fragmentShader", ")", "programObj", "=", "gl", ".", "glCreateProgram", "(", ")", "gl", ".", "glAttachShader", "(", "programObj", ",", "vertexShader", ")", "gl", ".", "glAttachShader", "(", "programObj", ",", "fragmentShader", ")", "gl", ".", "glLinkProgram", "(", "programObj", ")", "checkGLError", "(", ")", "return", "programObj" ]
createshader - create .
train
false
47,285
def DatetimeToWmiTime(dt): td = dt.utcoffset() if td: offset = ((td.seconds + (((td.days * 60) * 60) * 24)) / 60) if (offset >= 0): str_offset = ('+%03d' % offset) else: str_offset = ('%03d' % offset) else: str_offset = '+000' return (u'%04d%02d%02d%02d%02d%02d.%06d%s' % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond, str_offset))
[ "def", "DatetimeToWmiTime", "(", "dt", ")", ":", "td", "=", "dt", ".", "utcoffset", "(", ")", "if", "td", ":", "offset", "=", "(", "(", "td", ".", "seconds", "+", "(", "(", "(", "td", ".", "days", "*", "60", ")", "*", "60", ")", "*", "24", ")", ")", "/", "60", ")", "if", "(", "offset", ">=", "0", ")", ":", "str_offset", "=", "(", "'+%03d'", "%", "offset", ")", "else", ":", "str_offset", "=", "(", "'%03d'", "%", "offset", ")", "else", ":", "str_offset", "=", "'+000'", "return", "(", "u'%04d%02d%02d%02d%02d%02d.%06d%s'", "%", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ",", "dt", ".", "hour", ",", "dt", ".", "minute", ",", "dt", ".", "second", ",", "dt", ".", "microsecond", ",", "str_offset", ")", ")" ]
take a datetime tuple and return it as yyyymmddhhmmss .
train
false
47,286
def handle_update(request, basket, **kwargs): methods = cached_load(u'SHUUP_BASKET_UPDATE_METHODS_SPEC')(request=request, basket=basket) prefix_method_dict = methods.get_prefix_to_method_map() basket_changed = False for (key, value) in six.iteritems(kwargs): for (prefix, method) in six.iteritems(prefix_method_dict): if key.startswith(prefix): line_id = key[len(prefix):] line = basket.find_line_by_line_id(line_id) field_changed = method(key=key, value=value, line=line) basket_changed = (basket_changed or field_changed) break if basket_changed: basket.clean_empty_lines() basket.dirty = True
[ "def", "handle_update", "(", "request", ",", "basket", ",", "**", "kwargs", ")", ":", "methods", "=", "cached_load", "(", "u'SHUUP_BASKET_UPDATE_METHODS_SPEC'", ")", "(", "request", "=", "request", ",", "basket", "=", "basket", ")", "prefix_method_dict", "=", "methods", ".", "get_prefix_to_method_map", "(", ")", "basket_changed", "=", "False", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "kwargs", ")", ":", "for", "(", "prefix", ",", "method", ")", "in", "six", ".", "iteritems", "(", "prefix_method_dict", ")", ":", "if", "key", ".", "startswith", "(", "prefix", ")", ":", "line_id", "=", "key", "[", "len", "(", "prefix", ")", ":", "]", "line", "=", "basket", ".", "find_line_by_line_id", "(", "line_id", ")", "field_changed", "=", "method", "(", "key", "=", "key", ",", "value", "=", "value", ",", "line", "=", "line", ")", "basket_changed", "=", "(", "basket_changed", "or", "field_changed", ")", "break", "if", "basket_changed", ":", "basket", ".", "clean_empty_lines", "(", ")", "basket", ".", "dirty", "=", "True" ]
handle updating a basket .
train
false
47,288
def get_routing_table(ip_version, namespace=None): return list(privileged.get_routing_table(ip_version, namespace))
[ "def", "get_routing_table", "(", "ip_version", ",", "namespace", "=", "None", ")", ":", "return", "list", "(", "privileged", ".", "get_routing_table", "(", "ip_version", ",", "namespace", ")", ")" ]
return a list of dictionaries .
train
false
47,289
def CheckPosixThreading(filename, clean_lines, linenum, error): line = clean_lines.elided[linenum] for (single_thread_function, multithread_safe_function) in threading_list: ix = line.find(single_thread_function) if ((ix >= 0) and ((ix == 0) or ((not line[(ix - 1)].isalnum()) and (line[(ix - 1)] not in ('_', '.', '>'))))): error(filename, linenum, 'runtime/threadsafe_fn', 2, (((('Consider using ' + multithread_safe_function) + '...) instead of ') + single_thread_function) + '...) for improved thread safety.'))
[ "def", "CheckPosixThreading", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "for", "(", "single_thread_function", ",", "multithread_safe_function", ")", "in", "threading_list", ":", "ix", "=", "line", ".", "find", "(", "single_thread_function", ")", "if", "(", "(", "ix", ">=", "0", ")", "and", "(", "(", "ix", "==", "0", ")", "or", "(", "(", "not", "line", "[", "(", "ix", "-", "1", ")", "]", ".", "isalnum", "(", ")", ")", "and", "(", "line", "[", "(", "ix", "-", "1", ")", "]", "not", "in", "(", "'_'", ",", "'.'", ",", "'>'", ")", ")", ")", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'runtime/threadsafe_fn'", ",", "2", ",", "(", "(", "(", "(", "'Consider using '", "+", "multithread_safe_function", ")", "+", "'...) instead of '", ")", "+", "single_thread_function", ")", "+", "'...) for improved thread safety.'", ")", ")" ]
checks for calls to thread-unsafe functions .
train
false
47,293
def add_field(field, func, dict_sequence): for item in dict_sequence: if (field not in item): item[field] = func(item) (yield item)
[ "def", "add_field", "(", "field", ",", "func", ",", "dict_sequence", ")", ":", "for", "item", "in", "dict_sequence", ":", "if", "(", "field", "not", "in", "item", ")", ":", "item", "[", "field", "]", "=", "func", "(", "item", ")", "(", "yield", "item", ")" ]
apply given function to the record and store result in given field of current record .
train
true
47,294
def drop_database(name): _run_as_pg(('dropdb %(name)s' % locals()))
[ "def", "drop_database", "(", "name", ")", ":", "_run_as_pg", "(", "(", "'dropdb %(name)s'", "%", "locals", "(", ")", ")", ")" ]
drops the database that the specified manager controls .
train
false
47,295
def _make_operations_stub(client): if (client.emulator_host is None): return make_secure_stub(client.credentials, client.user_agent, operations_grpc.OperationsStub, OPERATIONS_API_HOST) else: return make_insecure_stub(operations_grpc.OperationsStub, client.emulator_host)
[ "def", "_make_operations_stub", "(", "client", ")", ":", "if", "(", "client", ".", "emulator_host", "is", "None", ")", ":", "return", "make_secure_stub", "(", "client", ".", "credentials", ",", "client", ".", "user_agent", ",", "operations_grpc", ".", "OperationsStub", ",", "OPERATIONS_API_HOST", ")", "else", ":", "return", "make_insecure_stub", "(", "operations_grpc", ".", "OperationsStub", ",", "client", ".", "emulator_host", ")" ]
creates grpc stub to make requests to the operations api .
train
false
47,296
def EncodeAppIdNamespace(app_id, namespace): if (not namespace): return app_id else: return ((app_id + _NAMESPACE_SEPARATOR) + namespace)
[ "def", "EncodeAppIdNamespace", "(", "app_id", ",", "namespace", ")", ":", "if", "(", "not", "namespace", ")", ":", "return", "app_id", "else", ":", "return", "(", "(", "app_id", "+", "_NAMESPACE_SEPARATOR", ")", "+", "namespace", ")" ]
concatenates app id and namespace into a single string .
train
false
47,297
def _onPygletKey(symbol, modifiers, emulated=False): global useText keyTime = psychopy.core.getTime() if emulated: thisKey = unicode(symbol) keySource = 'EmulatedKey' else: thisKey = pyglet.window.key.symbol_string(symbol).lower() if ('user_key' in thisKey): useText = True return useText = False thisKey = thisKey.lstrip('_').lstrip('NUM_') keySource = 'Keypress' _keyBuffer.append((thisKey, modifiers, keyTime)) logging.data(('%s: %s' % (keySource, thisKey)))
[ "def", "_onPygletKey", "(", "symbol", ",", "modifiers", ",", "emulated", "=", "False", ")", ":", "global", "useText", "keyTime", "=", "psychopy", ".", "core", ".", "getTime", "(", ")", "if", "emulated", ":", "thisKey", "=", "unicode", "(", "symbol", ")", "keySource", "=", "'EmulatedKey'", "else", ":", "thisKey", "=", "pyglet", ".", "window", ".", "key", ".", "symbol_string", "(", "symbol", ")", ".", "lower", "(", ")", "if", "(", "'user_key'", "in", "thisKey", ")", ":", "useText", "=", "True", "return", "useText", "=", "False", "thisKey", "=", "thisKey", ".", "lstrip", "(", "'_'", ")", ".", "lstrip", "(", "'NUM_'", ")", "keySource", "=", "'Keypress'", "_keyBuffer", ".", "append", "(", "(", "thisKey", ",", "modifiers", ",", "keyTime", ")", ")", "logging", ".", "data", "(", "(", "'%s: %s'", "%", "(", "keySource", ",", "thisKey", ")", ")", ")" ]
handler for on_key_press pyglet events; call directly to emulate a key press appends a tuple with into the global _keybuffer .
train
false
47,298
def format_freshdesk_property_change_message(ticket, event_info): content = ('%s <%s> updated [ticket #%s](%s):\n\n' % (ticket.requester_name, ticket.requester_email, ticket.id, ticket.url)) content += ('%s: **%s** => **%s**' % (event_info[0].capitalize(), event_info[1], event_info[2])) return content
[ "def", "format_freshdesk_property_change_message", "(", "ticket", ",", "event_info", ")", ":", "content", "=", "(", "'%s <%s> updated [ticket #%s](%s):\\n\\n'", "%", "(", "ticket", ".", "requester_name", ",", "ticket", ".", "requester_email", ",", "ticket", ".", "id", ",", "ticket", ".", "url", ")", ")", "content", "+=", "(", "'%s: **%s** => **%s**'", "%", "(", "event_info", "[", "0", "]", ".", "capitalize", "(", ")", ",", "event_info", "[", "1", "]", ",", "event_info", "[", "2", "]", ")", ")", "return", "content" ]
freshdesk will only tell us the first event to match our webhook configuration .
train
false
47,299
def okAssemblies(num): for i in xrange(num): createAssembly(i, i, i)
[ "def", "okAssemblies", "(", "num", ")", ":", "for", "i", "in", "xrange", "(", "num", ")", ":", "createAssembly", "(", "i", ",", "i", ",", "i", ")" ]
creates a number .
train
false
47,301
def reindent(source, indent_size): reindenter = Reindenter(source) return reindenter.run(indent_size)
[ "def", "reindent", "(", "source", ",", "indent_size", ")", ":", "reindenter", "=", "Reindenter", "(", "source", ")", "return", "reindenter", ".", "run", "(", "indent_size", ")" ]
reindent all lines .
train
false
47,302
def _restore_app_status(app, save=True): log.info(('Changing app from incomplete to previous status: %d' % app.pk)) app.status = (app.highest_status if (app.highest_status != mkt.STATUS_NULL) else mkt.STATUS_PENDING) if save: app.save()
[ "def", "_restore_app_status", "(", "app", ",", "save", "=", "True", ")", ":", "log", ".", "info", "(", "(", "'Changing app from incomplete to previous status: %d'", "%", "app", ".", "pk", ")", ")", "app", ".", "status", "=", "(", "app", ".", "highest_status", "if", "(", "app", ".", "highest_status", "!=", "mkt", ".", "STATUS_NULL", ")", "else", "mkt", ".", "STATUS_PENDING", ")", "if", "save", ":", "app", ".", "save", "(", ")" ]
restore an incomplete app to its former status .
train
false
47,303
def test_readonly_data(): with pytest.raises(ValueError) as excinfo: configdata.DATA['general'].setv('temp', 'ignore-case', 'true', 'true') assert (str(excinfo.value) == 'Trying to modify a read-only config!')
[ "def", "test_readonly_data", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", "as", "excinfo", ":", "configdata", ".", "DATA", "[", "'general'", "]", ".", "setv", "(", "'temp'", ",", "'ignore-case'", ",", "'true'", ",", "'true'", ")", "assert", "(", "str", "(", "excinfo", ".", "value", ")", "==", "'Trying to modify a read-only config!'", ")" ]
make sure data is readonly .
train
false
47,304
def local_uri_to_path(uri, media_dir): if ((not uri.startswith(u'local:directory:')) and (not uri.startswith(u'local:track:'))): raise ValueError(u'Invalid URI.') file_path = path.uri_to_path(uri).split(':', 1)[1] return os.path.join(media_dir, file_path)
[ "def", "local_uri_to_path", "(", "uri", ",", "media_dir", ")", ":", "if", "(", "(", "not", "uri", ".", "startswith", "(", "u'local:directory:'", ")", ")", "and", "(", "not", "uri", ".", "startswith", "(", "u'local:track:'", ")", ")", ")", ":", "raise", "ValueError", "(", "u'Invalid URI.'", ")", "file_path", "=", "path", ".", "uri_to_path", "(", "uri", ")", ".", "split", "(", "':'", ",", "1", ")", "[", "1", "]", "return", "os", ".", "path", ".", "join", "(", "media_dir", ",", "file_path", ")" ]
convert local track or directory uri to absolute path .
train
false
47,307
def get_icons(zfp, name_or_list_of_names): from PyQt5.Qt import QIcon, QPixmap names = name_or_list_of_names ans = get_resources(zfp, names) if isinstance(names, basestring): names = [names] if (ans is None): ans = {} if isinstance(ans, basestring): ans = dict([(names[0], ans)]) ians = {} for name in names: p = QPixmap() raw = ans.get(name, None) if raw: p.loadFromData(raw) ians[name] = QIcon(p) if (len(names) == 1): ians = ians.pop(names[0]) return ians
[ "def", "get_icons", "(", "zfp", ",", "name_or_list_of_names", ")", ":", "from", "PyQt5", ".", "Qt", "import", "QIcon", ",", "QPixmap", "names", "=", "name_or_list_of_names", "ans", "=", "get_resources", "(", "zfp", ",", "names", ")", "if", "isinstance", "(", "names", ",", "basestring", ")", ":", "names", "=", "[", "names", "]", "if", "(", "ans", "is", "None", ")", ":", "ans", "=", "{", "}", "if", "isinstance", "(", "ans", ",", "basestring", ")", ":", "ans", "=", "dict", "(", "[", "(", "names", "[", "0", "]", ",", "ans", ")", "]", ")", "ians", "=", "{", "}", "for", "name", "in", "names", ":", "p", "=", "QPixmap", "(", ")", "raw", "=", "ans", ".", "get", "(", "name", ",", "None", ")", "if", "raw", ":", "p", ".", "loadFromData", "(", "raw", ")", "ians", "[", "name", "]", "=", "QIcon", "(", "p", ")", "if", "(", "len", "(", "names", ")", "==", "1", ")", ":", "ians", "=", "ians", ".", "pop", "(", "names", "[", "0", "]", ")", "return", "ians" ]
load icons from the plugin zip file .
train
false
47,308
@login_required def associate_failure(request, message, template_failure='authopenid/associate.html', openid_form=AssociateOpenID, redirect_name=None, extra_context=None, **kwargs): return render(template_failure, {'form': openid_form(request.user), 'msg': message}, context_instance=_build_context(request, extra_context=extra_context))
[ "@", "login_required", "def", "associate_failure", "(", "request", ",", "message", ",", "template_failure", "=", "'authopenid/associate.html'", ",", "openid_form", "=", "AssociateOpenID", ",", "redirect_name", "=", "None", ",", "extra_context", "=", "None", ",", "**", "kwargs", ")", ":", "return", "render", "(", "template_failure", ",", "{", "'form'", ":", "openid_form", "(", "request", ".", "user", ")", ",", "'msg'", ":", "message", "}", ",", "context_instance", "=", "_build_context", "(", "request", ",", "extra_context", "=", "extra_context", ")", ")" ]
function used when new openid association fail .
train
false
47,309
def _noconds_(default): def make_wrapper(func): from sympy.core.decorators import wraps @wraps(func) def wrapper(*args, **kwargs): noconds = kwargs.pop('noconds', default) res = func(*args, **kwargs) if noconds: return res[0] return res return wrapper return make_wrapper
[ "def", "_noconds_", "(", "default", ")", ":", "def", "make_wrapper", "(", "func", ")", ":", "from", "sympy", ".", "core", ".", "decorators", "import", "wraps", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "noconds", "=", "kwargs", ".", "pop", "(", "'noconds'", ",", "default", ")", "res", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "if", "noconds", ":", "return", "res", "[", "0", "]", "return", "res", "return", "wrapper", "return", "make_wrapper" ]
this is a decorator generator for dropping convergence conditions .
train
false
47,311
def install_missing(name, version=None, source=None): choc_path = _find_chocolatey(__context__, __salt__) if (_LooseVersion(chocolatey_version()) >= _LooseVersion('0.9.8.24')): log.warning('installmissing is deprecated, using install') return install(name, version=version) cmd = [choc_path, 'installmissing', name] if version: cmd.extend(['--version', version]) if source: cmd.extend(['--source', source]) cmd.extend(_yes(__context__)) result = __salt__['cmd.run_all'](cmd, python_shell=False) if (result['retcode'] != 0): err = 'Running chocolatey failed: {0}'.format(result['stdout']) raise CommandExecutionError(err) return result['stdout']
[ "def", "install_missing", "(", "name", ",", "version", "=", "None", ",", "source", "=", "None", ")", ":", "choc_path", "=", "_find_chocolatey", "(", "__context__", ",", "__salt__", ")", "if", "(", "_LooseVersion", "(", "chocolatey_version", "(", ")", ")", ">=", "_LooseVersion", "(", "'0.9.8.24'", ")", ")", ":", "log", ".", "warning", "(", "'installmissing is deprecated, using install'", ")", "return", "install", "(", "name", ",", "version", "=", "version", ")", "cmd", "=", "[", "choc_path", ",", "'installmissing'", ",", "name", "]", "if", "version", ":", "cmd", ".", "extend", "(", "[", "'--version'", ",", "version", "]", ")", "if", "source", ":", "cmd", ".", "extend", "(", "[", "'--source'", ",", "source", "]", ")", "cmd", ".", "extend", "(", "_yes", "(", "__context__", ")", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "if", "(", "result", "[", "'retcode'", "]", "!=", "0", ")", ":", "err", "=", "'Running chocolatey failed: {0}'", ".", "format", "(", "result", "[", "'stdout'", "]", ")", "raise", "CommandExecutionError", "(", "err", ")", "return", "result", "[", "'stdout'", "]" ]
instructs chocolatey to install a package if it doesnt already exist .
train
true
47,316
def get_xml_as_object(elt, cls): return xml_object.from_element(None, cls, elt)
[ "def", "get_xml_as_object", "(", "elt", ",", "cls", ")", ":", "return", "xml_object", ".", "from_element", "(", "None", ",", "cls", ",", "elt", ")" ]
returns a native :class:spyne .
train
false
47,317
def asciihexdecode(data): decode = (lambda hx: chr(int(hx, 16))) out = map(decode, hex_re.findall(data)) m = trail_re.search(data) if m: out.append(decode(('%c0' % m.group(1)))) return ''.join(out)
[ "def", "asciihexdecode", "(", "data", ")", ":", "decode", "=", "(", "lambda", "hx", ":", "chr", "(", "int", "(", "hx", ",", "16", ")", ")", ")", "out", "=", "map", "(", "decode", ",", "hex_re", ".", "findall", "(", "data", ")", ")", "m", "=", "trail_re", ".", "search", "(", "data", ")", "if", "m", ":", "out", ".", "append", "(", "decode", "(", "(", "'%c0'", "%", "m", ".", "group", "(", "1", ")", ")", ")", ")", "return", "''", ".", "join", "(", "out", ")" ]
asciihexdecode filter: pdfreference v1 .
train
true
47,318
def get_or_create_sjson(item, transcripts): user_filename = transcripts[item.transcript_language] user_subs_id = os.path.splitext(user_filename)[0] (source_subs_id, result_subs_dict) = (user_subs_id, {1.0: user_subs_id}) try: sjson_transcript = Transcript.asset(item.location, source_subs_id, item.transcript_language).data except NotFoundError: generate_sjson_for_all_speeds(item, user_filename, result_subs_dict, item.transcript_language) sjson_transcript = Transcript.asset(item.location, source_subs_id, item.transcript_language).data return sjson_transcript
[ "def", "get_or_create_sjson", "(", "item", ",", "transcripts", ")", ":", "user_filename", "=", "transcripts", "[", "item", ".", "transcript_language", "]", "user_subs_id", "=", "os", ".", "path", ".", "splitext", "(", "user_filename", ")", "[", "0", "]", "(", "source_subs_id", ",", "result_subs_dict", ")", "=", "(", "user_subs_id", ",", "{", "1.0", ":", "user_subs_id", "}", ")", "try", ":", "sjson_transcript", "=", "Transcript", ".", "asset", "(", "item", ".", "location", ",", "source_subs_id", ",", "item", ".", "transcript_language", ")", ".", "data", "except", "NotFoundError", ":", "generate_sjson_for_all_speeds", "(", "item", ",", "user_filename", ",", "result_subs_dict", ",", "item", ".", "transcript_language", ")", "sjson_transcript", "=", "Transcript", ".", "asset", "(", "item", ".", "location", ",", "source_subs_id", ",", "item", ".", "transcript_language", ")", ".", "data", "return", "sjson_transcript" ]
get sjson if already exists .
train
false
47,322
@contextmanager def mock_get_submissions_score(earned=0, possible=1, attempted=True): with patch('lms.djangoapps.grades.scores._get_score_from_submissions') as mock_score: mock_score.return_value = (earned, possible, earned, possible, attempted) (yield mock_score)
[ "@", "contextmanager", "def", "mock_get_submissions_score", "(", "earned", "=", "0", ",", "possible", "=", "1", ",", "attempted", "=", "True", ")", ":", "with", "patch", "(", "'lms.djangoapps.grades.scores._get_score_from_submissions'", ")", "as", "mock_score", ":", "mock_score", ".", "return_value", "=", "(", "earned", ",", "possible", ",", "earned", ",", "possible", ",", "attempted", ")", "(", "yield", "mock_score", ")" ]
mocks the _get_submissions_score function to return the specified values .
train
false
47,323
def get_privacy_spent(log_moments, target_eps=None, target_delta=None): assert ((target_eps is None) ^ (target_delta is None)) assert (not ((target_eps is None) and (target_delta is None))) if (target_eps is not None): return (target_eps, _compute_delta(log_moments, target_eps)) else: return (_compute_eps(log_moments, target_delta), target_delta)
[ "def", "get_privacy_spent", "(", "log_moments", ",", "target_eps", "=", "None", ",", "target_delta", "=", "None", ")", ":", "assert", "(", "(", "target_eps", "is", "None", ")", "^", "(", "target_delta", "is", "None", ")", ")", "assert", "(", "not", "(", "(", "target_eps", "is", "None", ")", "and", "(", "target_delta", "is", "None", ")", ")", ")", "if", "(", "target_eps", "is", "not", "None", ")", ":", "return", "(", "target_eps", ",", "_compute_delta", "(", "log_moments", ",", "target_eps", ")", ")", "else", ":", "return", "(", "_compute_eps", "(", "log_moments", ",", "target_delta", ")", ",", "target_delta", ")" ]
compute delta for given eps from log moments .
train
false
47,324
def _unify_keywords(): old_path = BASE_PATH.format('keywords') if os.path.exists(old_path): if os.path.isdir(old_path): for triplet in os.walk(old_path): for file_name in triplet[2]: file_path = '{0}/{1}'.format(triplet[0], file_name) with salt.utils.fopen(file_path) as fh_: for line in fh_: line = line.strip() if (line and (not line.startswith('#'))): append_to_package_conf('accept_keywords', string=line) shutil.rmtree(old_path) else: with salt.utils.fopen(old_path) as fh_: for line in fh_: line = line.strip() if (line and (not line.startswith('#'))): append_to_package_conf('accept_keywords', string=line) os.remove(old_path)
[ "def", "_unify_keywords", "(", ")", ":", "old_path", "=", "BASE_PATH", ".", "format", "(", "'keywords'", ")", "if", "os", ".", "path", ".", "exists", "(", "old_path", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "old_path", ")", ":", "for", "triplet", "in", "os", ".", "walk", "(", "old_path", ")", ":", "for", "file_name", "in", "triplet", "[", "2", "]", ":", "file_path", "=", "'{0}/{1}'", ".", "format", "(", "triplet", "[", "0", "]", ",", "file_name", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "file_path", ")", "as", "fh_", ":", "for", "line", "in", "fh_", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "(", "line", "and", "(", "not", "line", ".", "startswith", "(", "'#'", ")", ")", ")", ":", "append_to_package_conf", "(", "'accept_keywords'", ",", "string", "=", "line", ")", "shutil", ".", "rmtree", "(", "old_path", ")", "else", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "old_path", ")", "as", "fh_", ":", "for", "line", "in", "fh_", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "(", "line", "and", "(", "not", "line", ".", "startswith", "(", "'#'", ")", ")", ")", ":", "append_to_package_conf", "(", "'accept_keywords'", ",", "string", "=", "line", ")", "os", ".", "remove", "(", "old_path", ")" ]
merge /etc/portage/package .
train
true
47,325
def item_candidates(item, artist, title): out = [] if (artist and title): try: out.extend(mb.match_track(artist, title)) except mb.MusicBrainzAPIError as exc: exc.log(log) out.extend(plugins.item_candidates(item, artist, title)) return out
[ "def", "item_candidates", "(", "item", ",", "artist", ",", "title", ")", ":", "out", "=", "[", "]", "if", "(", "artist", "and", "title", ")", ":", "try", ":", "out", ".", "extend", "(", "mb", ".", "match_track", "(", "artist", ",", "title", ")", ")", "except", "mb", ".", "MusicBrainzAPIError", "as", "exc", ":", "exc", ".", "log", "(", "log", ")", "out", ".", "extend", "(", "plugins", ".", "item_candidates", "(", "item", ",", "artist", ",", "title", ")", ")", "return", "out" ]
search for item matches .
train
false
47,327
def get_literals(literal_type): return tuple(literals[literal_type])
[ "def", "get_literals", "(", "literal_type", ")", ":", "return", "tuple", "(", "literals", "[", "literal_type", "]", ")" ]
where literal_type is one of keywords .
train
false
47,332
def gen_task_name(app, name, module_name): module_name = (module_name or u'__main__') try: module = sys.modules[module_name] except KeyError: module = None if (module is not None): module_name = module.__name__ if (MP_MAIN_FILE and (module.__file__ == MP_MAIN_FILE)): module_name = u'__main__' if ((module_name == u'__main__') and app.main): return u'.'.join([app.main, name]) return u'.'.join((p for p in (module_name, name) if p))
[ "def", "gen_task_name", "(", "app", ",", "name", ",", "module_name", ")", ":", "module_name", "=", "(", "module_name", "or", "u'__main__'", ")", "try", ":", "module", "=", "sys", ".", "modules", "[", "module_name", "]", "except", "KeyError", ":", "module", "=", "None", "if", "(", "module", "is", "not", "None", ")", ":", "module_name", "=", "module", ".", "__name__", "if", "(", "MP_MAIN_FILE", "and", "(", "module", ".", "__file__", "==", "MP_MAIN_FILE", ")", ")", ":", "module_name", "=", "u'__main__'", "if", "(", "(", "module_name", "==", "u'__main__'", ")", "and", "app", ".", "main", ")", ":", "return", "u'.'", ".", "join", "(", "[", "app", ".", "main", ",", "name", "]", ")", "return", "u'.'", ".", "join", "(", "(", "p", "for", "p", "in", "(", "module_name", ",", "name", ")", "if", "p", ")", ")" ]
generate task name from name/module pair .
train
false
47,333
def box(cls, margin, spacing, *items): stretch = STRETCH skipped = SKIPPED layout = cls() layout.setSpacing(spacing) set_margin(layout, margin) for i in items: if isinstance(i, QtWidgets.QWidget): layout.addWidget(i) elif isinstance(i, (QtWidgets.QHBoxLayout, QtWidgets.QVBoxLayout, QtWidgets.QFormLayout, QtWidgets.QLayout)): layout.addLayout(i) elif (i is stretch): layout.addStretch() elif (i is skipped): continue elif isinstance(i, int_types): layout.addSpacing(i) return layout
[ "def", "box", "(", "cls", ",", "margin", ",", "spacing", ",", "*", "items", ")", ":", "stretch", "=", "STRETCH", "skipped", "=", "SKIPPED", "layout", "=", "cls", "(", ")", "layout", ".", "setSpacing", "(", "spacing", ")", "set_margin", "(", "layout", ",", "margin", ")", "for", "i", "in", "items", ":", "if", "isinstance", "(", "i", ",", "QtWidgets", ".", "QWidget", ")", ":", "layout", ".", "addWidget", "(", "i", ")", "elif", "isinstance", "(", "i", ",", "(", "QtWidgets", ".", "QHBoxLayout", ",", "QtWidgets", ".", "QVBoxLayout", ",", "QtWidgets", ".", "QFormLayout", ",", "QtWidgets", ".", "QLayout", ")", ")", ":", "layout", ".", "addLayout", "(", "i", ")", "elif", "(", "i", "is", "stretch", ")", ":", "layout", ".", "addStretch", "(", ")", "elif", "(", "i", "is", "skipped", ")", ":", "continue", "elif", "isinstance", "(", "i", ",", "int_types", ")", ":", "layout", ".", "addSpacing", "(", "i", ")", "return", "layout" ]
turn the axes box on or off according to *on* .
train
false
47,334
def background_thread(): count = 0 while True: socketio.sleep(10) count += 1 socketio.emit('my_response', {'data': 'Server generated event', 'count': count}, namespace='/test')
[ "def", "background_thread", "(", ")", ":", "count", "=", "0", "while", "True", ":", "socketio", ".", "sleep", "(", "10", ")", "count", "+=", "1", "socketio", ".", "emit", "(", "'my_response'", ",", "{", "'data'", ":", "'Server generated event'", ",", "'count'", ":", "count", "}", ",", "namespace", "=", "'/test'", ")" ]
handles the timeout logic .
train
true
47,335
def _list(x): if (not isinstance(x, list)): x = list(x) return x
[ "def", "_list", "(", "x", ")", ":", "if", "(", "not", "isinstance", "(", "x", ",", "list", ")", ")", ":", "x", "=", "list", "(", "x", ")", "return", "x" ]
force x to a list .
train
false
47,336
def calc_timeleft(bytesleft, bps): try: if (bytesleft <= 0): return '0:00:00' totalseconds = int((bytesleft / bps)) (minutes, seconds) = divmod(totalseconds, 60) (hours, minutes) = divmod(minutes, 60) (days, hours) = divmod(hours, 24) if (minutes < 10): minutes = ('0%s' % minutes) if (seconds < 10): seconds = ('0%s' % seconds) if (days > 0): if (hours < 10): hours = ('0%s' % hours) return ('%s:%s:%s:%s' % (days, hours, minutes, seconds)) else: return ('%s:%s:%s' % (hours, minutes, seconds)) except: return '0:00:00'
[ "def", "calc_timeleft", "(", "bytesleft", ",", "bps", ")", ":", "try", ":", "if", "(", "bytesleft", "<=", "0", ")", ":", "return", "'0:00:00'", "totalseconds", "=", "int", "(", "(", "bytesleft", "/", "bps", ")", ")", "(", "minutes", ",", "seconds", ")", "=", "divmod", "(", "totalseconds", ",", "60", ")", "(", "hours", ",", "minutes", ")", "=", "divmod", "(", "minutes", ",", "60", ")", "(", "days", ",", "hours", ")", "=", "divmod", "(", "hours", ",", "24", ")", "if", "(", "minutes", "<", "10", ")", ":", "minutes", "=", "(", "'0%s'", "%", "minutes", ")", "if", "(", "seconds", "<", "10", ")", ":", "seconds", "=", "(", "'0%s'", "%", "seconds", ")", "if", "(", "days", ">", "0", ")", ":", "if", "(", "hours", "<", "10", ")", ":", "hours", "=", "(", "'0%s'", "%", "hours", ")", "return", "(", "'%s:%s:%s:%s'", "%", "(", "days", ",", "hours", ",", "minutes", ",", "seconds", ")", ")", "else", ":", "return", "(", "'%s:%s:%s'", "%", "(", "hours", ",", "minutes", ",", "seconds", ")", ")", "except", ":", "return", "'0:00:00'" ]
calculate the time left in the format hh:mm:ss .
train
false
47,337
def get_exception(): return sys.exc_info()[1]
[ "def", "get_exception", "(", ")", ":", "return", "sys", ".", "exc_info", "(", ")", "[", "1", "]" ]
get the current exception .
train
false
47,338
def p_expression_uminus(p): p[0] = (- p[2])
[ "def", "p_expression_uminus", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "-", "p", "[", "2", "]", ")" ]
expression : minus expression .
train
false
47,339
def is_forking(argv): if ((len(argv) >= 2) and (argv[1] == '--multiprocessing-fork')): return True else: return False
[ "def", "is_forking", "(", "argv", ")", ":", "if", "(", "(", "len", "(", "argv", ")", ">=", "2", ")", "and", "(", "argv", "[", "1", "]", "==", "'--multiprocessing-fork'", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
return whether commandline indicates we are forking .
train
false
47,342
def human_readable(size, isbits=False, unit=None): try: return basic.bytes_to_human(size, isbits, unit) except: raise errors.AnsibleFilterError(("human_readable() can't interpret following string: %s" % size))
[ "def", "human_readable", "(", "size", ",", "isbits", "=", "False", ",", "unit", "=", "None", ")", ":", "try", ":", "return", "basic", ".", "bytes_to_human", "(", "size", ",", "isbits", ",", "unit", ")", "except", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "(", "\"human_readable() can't interpret following string: %s\"", "%", "size", ")", ")" ]
convert a size in bytes into megabytes .
train
false