id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
30,706
def valid_domain(domain): domain.encode('ascii') if (domain and (domain[0] in '."')): return False if Definitions.DOMAIN_RE.match(domain): return True return False
[ "def", "valid_domain", "(", "domain", ")", ":", "domain", ".", "encode", "(", "'ascii'", ")", "if", "(", "domain", "and", "(", "domain", "[", "0", "]", "in", "'.\"'", ")", ")", ":", "return", "False", "if", "Definitions", ".", "DOMAIN_RE", ".", "matc...
validate a cookie domain ascii string .
train
true
30,707
def test_rmtree_retries_for_3sec(tmpdir, monkeypatch): monkeypatch.setattr(shutil, 'rmtree', Failer(duration=5).call) with pytest.raises(OSError): rmtree('foo')
[ "def", "test_rmtree_retries_for_3sec", "(", "tmpdir", ",", "monkeypatch", ")", ":", "monkeypatch", ".", "setattr", "(", "shutil", ",", "'rmtree'", ",", "Failer", "(", "duration", "=", "5", ")", ".", "call", ")", "with", "pytest", ".", "raises", "(", "OSErr...
test pip .
train
false
30,708
def en_percent(num, ndigits=u'not-given'): with translation.override(u'en-US'): if (ndigits == u'not-given'): return percent(Decimal(num)) else: return percent(Decimal(num), ndigits)
[ "def", "en_percent", "(", "num", ",", "ndigits", "=", "u'not-given'", ")", ":", "with", "translation", ".", "override", "(", "u'en-US'", ")", ":", "if", "(", "ndigits", "==", "u'not-given'", ")", ":", "return", "percent", "(", "Decimal", "(", "num", ")",...
format given number as percent in en-us locale .
train
false
30,709
def rollout(env, agent, max_episode_steps): ob = env.reset() data = collections.defaultdict(list) for _ in xrange(max_episode_steps): data['observation'].append(ob) action = agent.act(ob) data['action'].append(action) (ob, rew, done, _) = env.step(action) data['reward'].append(rew) if done: break return data
[ "def", "rollout", "(", "env", ",", "agent", ",", "max_episode_steps", ")", ":", "ob", "=", "env", ".", "reset", "(", ")", "data", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "_", "in", "xrange", "(", "max_episode_steps", ")", ":", ...
simulate the env and agent for max_episode_steps .
train
false
30,710
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
30,711
def http_request_post(url, payload, body_content_workflow=False, allow_redirects=allow_redirects): try: result = requests.post(url, data=payload, headers=headers, stream=body_content_workflow, timeout=timeout, proxies=proxies, allow_redirects=allow_redirects, verify=allow_ssl_verify) return result except Exception as e: return __requests__.models.Response()
[ "def", "http_request_post", "(", "url", ",", "payload", ",", "body_content_workflow", "=", "False", ",", "allow_redirects", "=", "allow_redirects", ")", ":", "try", ":", "result", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "payload", ",", ...
payload = {key1: value1 .
train
false
30,712
def md_escape(txt): return MD_ESCAPE_RE.sub((lambda match: ('\\' + match.group(0))), txt)
[ "def", "md_escape", "(", "txt", ")", ":", "return", "MD_ESCAPE_RE", ".", "sub", "(", "(", "lambda", "match", ":", "(", "'\\\\'", "+", "match", ".", "group", "(", "0", ")", ")", ")", ",", "txt", ")" ]
call this if you want to be sure your text wont be interpreted as markdown .
train
false
30,713
def is_bin_file(path): if (not os.path.isfile(path)): return None try: with fopen(path, 'r') as fp_: return is_bin_str(fp_.read(2048)) except os.error: return None
[ "def", "is_bin_file", "(", "path", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "path", ")", ")", ":", "return", "None", "try", ":", "with", "fopen", "(", "path", ",", "'r'", ")", "as", "fp_", ":", "return", "is_bin_str", "(...
detects if the file is a binary .
train
false
30,715
def _scalar_property(fieldname): def _getter(self): 'Scalar property getter.' return self._properties.get(fieldname) def _setter(self, value): 'Scalar property setter.' self._patch_property(fieldname, value) return property(_getter, _setter)
[ "def", "_scalar_property", "(", "fieldname", ")", ":", "def", "_getter", "(", "self", ")", ":", "return", "self", ".", "_properties", ".", "get", "(", "fieldname", ")", "def", "_setter", "(", "self", ",", "value", ")", ":", "self", ".", "_patch_property"...
create a property descriptor around the :class:_propertymixin helpers .
train
true
30,716
def alloc_boolean_result(builder, name='ret'): ret = cgutils.alloca_once(builder, Type.int(1), name=name) return ret
[ "def", "alloc_boolean_result", "(", "builder", ",", "name", "=", "'ret'", ")", ":", "ret", "=", "cgutils", ".", "alloca_once", "(", "builder", ",", "Type", ".", "int", "(", "1", ")", ",", "name", "=", "name", ")", "return", "ret" ]
allocate an uninitialized boolean result slot .
train
false
30,717
def contains(path, text): path = os.path.expanduser(path) if (not os.path.exists(path)): return False stripped_text = str(text).strip() try: with salt.utils.filebuffer.BufferedReader(path) as breader: for chunk in breader: if (stripped_text in chunk): return True return False except (IOError, OSError): return False
[ "def", "contains", "(", "path", ",", "text", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "return", "False", "stripped_text", "=", "...
checks if the specified bank contains the specified key .
train
true
30,718
def _fastq_convert_tab(in_handle, out_handle, alphabet=None): from Bio.SeqIO.QualityIO import FastqGeneralIterator count = 0 for (title, seq, qual) in FastqGeneralIterator(in_handle): count += 1 out_handle.write(('%s DCTB %s\n' % (title.split(None, 1)[0], seq))) return count
[ "def", "_fastq_convert_tab", "(", "in_handle", ",", "out_handle", ",", "alphabet", "=", "None", ")", ":", "from", "Bio", ".", "SeqIO", ".", "QualityIO", "import", "FastqGeneralIterator", "count", "=", "0", "for", "(", "title", ",", "seq", ",", "qual", ")",...
fast fastq to simple tabbed conversion .
train
false
30,719
def dummy_sgs(dummies, sym, n): if (len(dummies) > n): raise ValueError('List too large') res = [] if (sym is not None): for j in dummies[::2]: a = list(range((n + 2))) if (sym == 1): a[n] = (n + 1) a[(n + 1)] = n (a[j], a[(j + 1)]) = (a[(j + 1)], a[j]) res.append(a) for j in dummies[:(-3):2]: a = list(range((n + 2))) a[j:(j + 4)] = (a[(j + 2)], a[(j + 3)], a[j], a[(j + 1)]) res.append(a) return res
[ "def", "dummy_sgs", "(", "dummies", ",", "sym", ",", "n", ")", ":", "if", "(", "len", "(", "dummies", ")", ">", "n", ")", ":", "raise", "ValueError", "(", "'List too large'", ")", "res", "=", "[", "]", "if", "(", "sym", "is", "not", "None", ")", ...
return the strong generators for dummy indices parameters dummies : list of dummy indices dummies[2k] .
train
false
30,720
def weeks(w): return (w * DAYS_PER_WEEK)
[ "def", "weeks", "(", "w", ")", ":", "return", "(", "w", "*", "DAYS_PER_WEEK", ")" ]
return weeks as days .
train
false
30,721
def iter_over_pairs(pairs): if isinstance(pairs, dict): return iteritems(pairs) else: return pairs
[ "def", "iter_over_pairs", "(", "pairs", ")", ":", "if", "isinstance", "(", "pairs", ",", "dict", ")", ":", "return", "iteritems", "(", "pairs", ")", "else", ":", "return", "pairs" ]
return an iterator over pairs present in the pairs input .
train
false
30,722
def send_signal(signum): os.kill(os.getpid(), signum)
[ "def", "send_signal", "(", "signum", ")", ":", "os", ".", "kill", "(", "os", ".", "getpid", "(", ")", ",", "signum", ")" ]
send the given signal .
train
false
30,723
def make_gettext_patterns(): kwstr = 'msgid msgstr' kw = (('\\b' + any('keyword', kwstr.split())) + '\\b') fuzzy = any('builtin', ['#,[^\\n]*']) links = any('normal', ['#:[^\\n]*']) comment = any('comment', ['#[^\\n]*']) number = any('number', ['\\b[+-]?[0-9]+[lL]?\\b', '\\b[+-]?0[xX][0-9A-Fa-f]+[lL]?\\b', '\\b[+-]?[0-9]+(?:\\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\\b']) sqstring = "(\\b[rRuU])?'[^'\\\\\\n]*(\\\\.[^'\\\\\\n]*)*'?" dqstring = '(\\b[rRuU])?"[^"\\\\\\n]*(\\\\.[^"\\\\\\n]*)*"?' string = any('string', [sqstring, dqstring]) return '|'.join([kw, string, number, fuzzy, links, comment, any('SYNC', ['\\n'])])
[ "def", "make_gettext_patterns", "(", ")", ":", "kwstr", "=", "'msgid msgstr'", "kw", "=", "(", "(", "'\\\\b'", "+", "any", "(", "'keyword'", ",", "kwstr", ".", "split", "(", ")", ")", ")", "+", "'\\\\b'", ")", "fuzzy", "=", "any", "(", "'builtin'", "...
strongly inspired from idlelib .
train
false
30,725
def is_rewritable_or_comparable(sign, num, B): for h in B: if (sign[1] < Sign(h)[1]): if monomial_divides(Polyn(h).LM, sign[0]): return True if (sign[1] == Sign(h)[1]): if (num < Num(h)): if monomial_divides(Sign(h)[0], sign[0]): return True return False
[ "def", "is_rewritable_or_comparable", "(", "sign", ",", "num", ",", "B", ")", ":", "for", "h", "in", "B", ":", "if", "(", "sign", "[", "1", "]", "<", "Sign", "(", "h", ")", "[", "1", "]", ")", ":", "if", "monomial_divides", "(", "Polyn", "(", "...
check if a labeled polynomial is redundant by checking if its signature and number imply rewritability or comparability .
train
false
30,726
def rm_all_but(base_dir, dirs_to_keep, warn=False): try: all_dirs = os.listdir(base_dir) except OSError: u'Dir has been deleted' return all_dirs = [d for d in all_dirs if (not d.startswith(u'log.'))] dirs_to_rm = list(dirs_to_keep.symmetric_difference(all_dirs)) for dir_name in dirs_to_rm: dir_name = os.path.join(base_dir, dir_name) if os.path.exists(dir_name): if warn: print((u'removing directory: %s' % dir_name)) shutil.rmtree(dir_name)
[ "def", "rm_all_but", "(", "base_dir", ",", "dirs_to_keep", ",", "warn", "=", "False", ")", ":", "try", ":", "all_dirs", "=", "os", ".", "listdir", "(", "base_dir", ")", "except", "OSError", ":", "return", "all_dirs", "=", "[", "d", "for", "d", "in", ...
remove all the sub-directories of base_dir .
train
false
30,728
def translated(k, gui=False): return strings[k]
[ "def", "translated", "(", "k", ",", "gui", "=", "False", ")", ":", "return", "strings", "[", "k", "]" ]
returns a translated string .
train
false
30,729
def hashing(plaintext, salt='', sha='512'): app = webapp2.get_app() if (sha == '1'): phrase = hashlib.sha1() elif (sha == '256'): phrase = hashlib.sha256() else: phrase = hashlib.sha512() phrase.update(('%s@%s' % (plaintext, salt))) phrase_digest = phrase.hexdigest() try: from Crypto.Cipher import AES mode = AES.MODE_CBC iv = phrase_digest[:16] encryptor = AES.new(app.config.get('aes_key'), mode, iv) ciphertext = [encryptor.encrypt(chunk) for chunk in chunks(phrase_digest, 16)] return ''.join(ciphertext) except Exception as e: logging.error('CRYPTO is not running: {}'.format(e)) raise
[ "def", "hashing", "(", "plaintext", ",", "salt", "=", "''", ",", "sha", "=", "'512'", ")", ":", "app", "=", "webapp2", ".", "get_app", "(", ")", "if", "(", "sha", "==", "'1'", ")", ":", "phrase", "=", "hashlib", ".", "sha1", "(", ")", "elif", "...
returns the hashed and encrypted hexdigest of a plaintext and salt .
train
false
30,732
def arity_parse_demo(): print() print(u'A grammar with no arity constraints. Each DependencyProduction') print(u'specifies a relationship between one head word and only one') print(u'modifier word.') grammar = DependencyGrammar.fromstring(u"\n 'fell' -> 'price' | 'stock'\n 'price' -> 'of' | 'the'\n 'of' -> 'stock'\n 'stock' -> 'the'\n ") print(grammar) print() print(u"For the sentence 'The price of the stock fell', this grammar") print(u'will produce the following three parses:') pdp = ProjectiveDependencyParser(grammar) trees = pdp.parse([u'the', u'price', u'of', u'the', u'stock', u'fell']) for tree in trees: print(tree) print() print(u'By contrast, the following grammar contains a ') print(u'DependencyProduction that specifies a relationship') print(u"between a single head word, 'price', and two modifier") print(u"words, 'of' and 'the'.") grammar = DependencyGrammar.fromstring(u"\n 'fell' -> 'price' | 'stock'\n 'price' -> 'of' 'the'\n 'of' -> 'stock'\n 'stock' -> 'the'\n ") print(grammar) print() print(u'This constrains the number of possible parses to just one:') pdp = ProjectiveDependencyParser(grammar) trees = pdp.parse([u'the', u'price', u'of', u'the', u'stock', u'fell']) for tree in trees: print(tree)
[ "def", "arity_parse_demo", "(", ")", ":", "print", "(", ")", "print", "(", "u'A grammar with no arity constraints. Each DependencyProduction'", ")", "print", "(", "u'specifies a relationship between one head word and only one'", ")", "print", "(", "u'modifier word.'", ")", "g...
a demonstration showing the creation of a dependencygrammar in which a specific number of modifiers is listed for a given head .
train
false
30,735
def graph(expr): nodes = range(len(expr)) edges = list() labels = dict() stack = [] for (i, node) in enumerate(expr): if stack: edges.append((stack[(-1)][0], i)) stack[(-1)][1] -= 1 labels[i] = (node.name if isinstance(node, Primitive) else node.value) stack.append([i, node.arity]) while (stack and (stack[(-1)][1] == 0)): stack.pop() return (nodes, edges, labels)
[ "def", "graph", "(", "expr", ")", ":", "nodes", "=", "range", "(", "len", "(", "expr", ")", ")", "edges", "=", "list", "(", ")", "labels", "=", "dict", "(", ")", "stack", "=", "[", "]", "for", "(", "i", ",", "node", ")", "in", "enumerate", "(...
construct the graph of a tree expression .
train
true
30,736
def traverse_using(iterator, obj, visitors): for target in iterator: meth = visitors.get(target.__visit_name__, None) if meth: meth(target) return obj
[ "def", "traverse_using", "(", "iterator", ",", "obj", ",", "visitors", ")", ":", "for", "target", "in", "iterator", ":", "meth", "=", "visitors", ".", "get", "(", "target", ".", "__visit_name__", ",", "None", ")", "if", "meth", ":", "meth", "(", "targe...
visit the given expression structure using the given iterator of objects .
train
false
30,738
def get_exploration_ids_matching_query(query_string, cursor=None): returned_exploration_ids = [] search_cursor = cursor for _ in range(MAX_ITERATIONS): remaining_to_fetch = (feconf.SEARCH_RESULTS_PAGE_SIZE - len(returned_exploration_ids)) (exp_ids, search_cursor) = search_explorations(query_string, remaining_to_fetch, cursor=search_cursor) invalid_exp_ids = [] for (ind, model) in enumerate(exp_models.ExpSummaryModel.get_multi(exp_ids)): if (model is not None): returned_exploration_ids.append(exp_ids[ind]) else: invalid_exp_ids.append(exp_ids[ind]) if ((len(returned_exploration_ids) == feconf.SEARCH_RESULTS_PAGE_SIZE) or (search_cursor is None)): break else: logging.error(('Search index contains stale exploration ids: %s' % ', '.join(invalid_exp_ids))) if ((len(returned_exploration_ids) < feconf.SEARCH_RESULTS_PAGE_SIZE) and (search_cursor is not None)): logging.error(('Could not fulfill search request for query string %s; at least %s retries were needed.' % (query_string, MAX_ITERATIONS))) return (returned_exploration_ids, search_cursor)
[ "def", "get_exploration_ids_matching_query", "(", "query_string", ",", "cursor", "=", "None", ")", ":", "returned_exploration_ids", "=", "[", "]", "search_cursor", "=", "cursor", "for", "_", "in", "range", "(", "MAX_ITERATIONS", ")", ":", "remaining_to_fetch", "="...
returns a list with all exploration ids matching the given search query string .
train
false
30,739
def cgconfig_restart(): return service_cgconfig_control('restart')
[ "def", "cgconfig_restart", "(", ")", ":", "return", "service_cgconfig_control", "(", "'restart'", ")" ]
restart cgconfig service .
train
false
30,742
def generateCertificateObjects(organization, organizationalUnit): pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 512) req = crypto.X509Req() subject = req.get_subject() subject.O = organization subject.OU = organizationalUnit req.set_pubkey(pkey) req.sign(pkey, 'md5') cert = crypto.X509() cert.set_serial_number(1) cert.gmtime_adj_notBefore(0) cert.gmtime_adj_notAfter(60) cert.set_issuer(req.get_subject()) cert.set_subject(req.get_subject()) cert.set_pubkey(req.get_pubkey()) cert.sign(pkey, 'md5') return (pkey, req, cert)
[ "def", "generateCertificateObjects", "(", "organization", ",", "organizationalUnit", ")", ":", "pkey", "=", "crypto", ".", "PKey", "(", ")", "pkey", ".", "generate_key", "(", "crypto", ".", "TYPE_RSA", ",", "512", ")", "req", "=", "crypto", ".", "X509Req", ...
create a certificate for given c{organization} and c{organizationalunit} .
train
false
30,744
def get_slice_length(builder, slicestruct): start = slicestruct.start stop = slicestruct.stop step = slicestruct.step one = ir.Constant(start.type, 1) zero = ir.Constant(start.type, 0) is_step_negative = cgutils.is_neg_int(builder, step) delta = builder.sub(stop, start) pos_dividend = builder.sub(delta, one) neg_dividend = builder.add(delta, one) dividend = builder.select(is_step_negative, neg_dividend, pos_dividend) nominal_length = builder.add(one, builder.sdiv(dividend, step)) is_zero_length = builder.select(is_step_negative, builder.icmp_signed('>=', delta, zero), builder.icmp_signed('<=', delta, zero)) return builder.select(is_zero_length, zero, nominal_length)
[ "def", "get_slice_length", "(", "builder", ",", "slicestruct", ")", ":", "start", "=", "slicestruct", ".", "start", "stop", "=", "slicestruct", ".", "stop", "step", "=", "slicestruct", ".", "step", "one", "=", "ir", ".", "Constant", "(", "start", ".", "t...
given a slice .
train
false
30,745
def formatTrace(trace): def formatWithName(obj): if hasattr(obj, 'name'): return u'{0} ({1})'.format(obj, obj.name) else: return u'{0}'.format(obj) result = [] lineage = [] for (parent, child) in trace: if ((not lineage) or (lineage[(-1)] is not parent)): if (parent in lineage): while (lineage[(-1)] is not parent): lineage.pop() else: if (not lineage): result.append(u'{0}\n'.format(formatWithName(parent))) lineage.append(parent) result.append((u' ' * len(lineage))) result.append(u'-> {0}\n'.format(formatWithName(child))) return u''.join(result)
[ "def", "formatTrace", "(", "trace", ")", ":", "def", "formatWithName", "(", "obj", ")", ":", "if", "hasattr", "(", "obj", ",", "'name'", ")", ":", "return", "u'{0} ({1})'", ".", "format", "(", "obj", ",", "obj", ".", "name", ")", "else", ":", "return...
format a trace as a visual indication of the messages propagation through various observers .
train
false
30,746
def get_localzone_name(): handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) TZLOCALKEYNAME = 'SYSTEM\\CurrentControlSet\\Control\\TimeZoneInformation' localtz = winreg.OpenKey(handle, TZLOCALKEYNAME) keyvalues = valuestodict(localtz) localtz.Close() if ('TimeZoneKeyName' in keyvalues): tzkeyname = keyvalues['TimeZoneKeyName'].split('\x00', 1)[0] else: tzwin = keyvalues['StandardName'] TZKEYNAME = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Time Zones' tzkey = winreg.OpenKey(handle, TZKEYNAME) tzkeyname = None for i in range(winreg.QueryInfoKey(tzkey)[0]): subkey = winreg.EnumKey(tzkey, i) sub = winreg.OpenKey(tzkey, subkey) data = valuestodict(sub) sub.Close() if (data['Std'] == tzwin): tzkeyname = subkey break tzkey.Close() handle.Close() if (tzkeyname is None): raise LookupError('Can not find Windows timezone configuration') timezone = win_tz.get(tzkeyname) if (timezone is None): timezone = win_tz.get((tzkeyname + ' Standard Time')) return timezone
[ "def", "get_localzone_name", "(", ")", ":", "handle", "=", "winreg", ".", "ConnectRegistry", "(", "None", ",", "winreg", ".", "HKEY_LOCAL_MACHINE", ")", "TZLOCALKEYNAME", "=", "'SYSTEM\\\\CurrentControlSet\\\\Control\\\\TimeZoneInformation'", "localtz", "=", "winreg", "...
get local time zone name .
train
true
30,747
def check_include_exclude(path_str, include_pat=None, exclude_pat=None): ret = True if include_pat: if re.match('E@', include_pat): retchk_include = (True if re.search(include_pat[2:], path_str) else False) else: retchk_include = (True if fnmatch.fnmatch(path_str, include_pat) else False) if exclude_pat: if re.match('E@', exclude_pat): retchk_exclude = (False if re.search(exclude_pat[2:], path_str) else True) else: retchk_exclude = (False if fnmatch.fnmatch(path_str, exclude_pat) else True) if (include_pat and (not exclude_pat)): ret = retchk_include elif (exclude_pat and (not include_pat)): ret = retchk_exclude elif (include_pat and exclude_pat): ret = (retchk_include and retchk_exclude) else: ret = True return ret
[ "def", "check_include_exclude", "(", "path_str", ",", "include_pat", "=", "None", ",", "exclude_pat", "=", "None", ")", ":", "ret", "=", "True", "if", "include_pat", ":", "if", "re", ".", "match", "(", "'E@'", ",", "include_pat", ")", ":", "retchk_include"...
check for glob or regexp patterns for include_pat and exclude_pat in the path_str string and return true/false conditions as follows .
train
false
30,748
def b16encode(s): return binascii.hexlify(s).upper()
[ "def", "b16encode", "(", "s", ")", ":", "return", "binascii", ".", "hexlify", "(", "s", ")", ".", "upper", "(", ")" ]
encode a string using base16 .
train
false
30,749
def monomial_gcd(A, B): return tuple([min(a, b) for (a, b) in zip(A, B)])
[ "def", "monomial_gcd", "(", "A", ",", "B", ")", ":", "return", "tuple", "(", "[", "min", "(", "a", ",", "b", ")", "for", "(", "a", ",", "b", ")", "in", "zip", "(", "A", ",", "B", ")", "]", ")" ]
greatest common divisor of tuples representing monomials .
train
false
30,751
def remember(request, userid=_marker, **kw): if (userid is _marker): principal = kw.pop('principal', _marker) if (principal is _marker): raise TypeError("remember() missing 1 required positional argument: 'userid'") else: deprecated('principal', 'The "principal" argument was deprecated in Pyramid 1.6. It will be removed in Pyramid 1.9. Use the "userid" argument instead.') userid = principal policy = _get_authentication_policy(request) if (policy is None): return [] return policy.remember(request, userid, **kw)
[ "def", "remember", "(", "request", ",", "userid", "=", "_marker", ",", "**", "kw", ")", ":", "if", "(", "userid", "is", "_marker", ")", ":", "principal", "=", "kw", ".", "pop", "(", "'principal'", ",", "_marker", ")", "if", "(", "principal", "is", ...
returns a sequence of header tuples (e .
train
false
30,752
def parameter(): return s3_rest_controller()
[ "def", "parameter", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
rest controller .
train
false
30,753
def essential(): table = s3db.hrm_human_resource s3.filter = (table.essential == True) return s3_rest_controller('hrm', 'human_resource')
[ "def", "essential", "(", ")", ":", "table", "=", "s3db", ".", "hrm_human_resource", "s3", ".", "filter", "=", "(", "table", ".", "essential", "==", "True", ")", "return", "s3_rest_controller", "(", "'hrm'", ",", "'human_resource'", ")" ]
filtered staff controller .
train
false
30,754
def human_to_bytes(size, default_unit=None, isbits=False): try: return basic.human_to_bytes(size, default_unit, isbits) except: raise errors.AnsibleFilterError(("human_to_bytes() can't interpret following string: %s" % size))
[ "def", "human_to_bytes", "(", "size", ",", "default_unit", "=", "None", ",", "isbits", "=", "False", ")", ":", "try", ":", "return", "basic", ".", "human_to_bytes", "(", "size", ",", "default_unit", ",", "isbits", ")", "except", ":", "raise", "errors", "...
return bytes count from a human readable string .
train
false
30,756
def get_mem_used(): try: import resource except ImportError: raise NotSupportedException a = os.popen(('cat /proc/%s/statm 2>/dev/null' % os.getpid())).read().split() if (not a): raise NotSupportedException return ((int(a[1]) * resource.getpagesize()), (int(a[0]) * resource.getpagesize()))
[ "def", "get_mem_used", "(", ")", ":", "try", ":", "import", "resource", "except", "ImportError", ":", "raise", "NotSupportedException", "a", "=", "os", ".", "popen", "(", "(", "'cat /proc/%s/statm 2>/dev/null'", "%", "os", ".", "getpid", "(", ")", ")", ")", ...
this only works on linux .
train
false
30,757
def get_tool_panel_config_tool_path_install_dir(app, repository): tool_shed = common_util.remove_port_from_tool_shed_url(str(repository.tool_shed)) relative_install_dir = ('%s/repos/%s/%s/%s' % (tool_shed, str(repository.owner), str(repository.name), str(repository.installed_changeset_revision))) shed_config_dict = repository.get_shed_config_dict(app) if (not shed_config_dict): for shed_config_dict in app.toolbox.dynamic_confs(include_migrated_tool_conf=True): if ((repository.dist_to_shed and (shed_config_dict['config_filename'] == app.config.migrated_tools_config)) or ((not repository.dist_to_shed) and (shed_config_dict['config_filename'] != app.config.migrated_tools_config))): break shed_tool_conf = shed_config_dict['config_filename'] tool_path = shed_config_dict['tool_path'] return (shed_tool_conf, tool_path, relative_install_dir)
[ "def", "get_tool_panel_config_tool_path_install_dir", "(", "app", ",", "repository", ")", ":", "tool_shed", "=", "common_util", ".", "remove_port_from_tool_shed_url", "(", "str", "(", "repository", ".", "tool_shed", ")", ")", "relative_install_dir", "=", "(", "'%s/rep...
return shed-related tool panel config .
train
false
30,758
def test_string(): schema = vol.Schema(cv.string) with pytest.raises(vol.MultipleInvalid): schema(None) for value in (True, 1, 'hello'): schema(value)
[ "def", "test_string", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "string", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "MultipleInvalid", ")", ":", "schema", "(", "None", ")", "for", "value", "in", "(", "True", "...
return true if the object is a string .
train
false
30,759
def remove_projection_from_vector(v, w): return vector_subtract(v, project(v, w))
[ "def", "remove_projection_from_vector", "(", "v", ",", "w", ")", ":", "return", "vector_subtract", "(", "v", ",", "project", "(", "v", ",", "w", ")", ")" ]
projects v onto w and subtracts the result from v .
train
false
30,760
@register.simple_tag(takes_context=True) def escape_naive(context): return 'Hello {0}!'.format(context['name'])
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "escape_naive", "(", "context", ")", ":", "return", "'Hello {0}!'", ".", "format", "(", "context", "[", "'name'", "]", ")" ]
a tag that doesnt even think about escaping issues .
train
false
30,761
def timings(reps, func, *args, **kw): return timings_out(reps, func, *args, **kw)[0:2]
[ "def", "timings", "(", "reps", ",", "func", ",", "*", "args", ",", "**", "kw", ")", ":", "return", "timings_out", "(", "reps", ",", "func", ",", "*", "args", ",", "**", "kw", ")", "[", "0", ":", "2", "]" ]
timings -> execute a function reps times .
train
true
30,762
def ssl_login_shortcut(func): def wrapped(*args, **kwargs): '\n This manages the function wrapping, by determining whether to inject\n the _external signup or just continuing to the internal function\n call.\n ' if (not settings.FEATURES['AUTH_USE_CERTIFICATES']): return func(*args, **kwargs) request = args[0] if (request.user and request.user.is_authenticated()): return func(*args, **kwargs) cert = ssl_get_cert_from_request(request) if (not cert): return func(*args, **kwargs) def retfun(): 'Wrap function again for call by _external_login_or_signup' return func(*args, **kwargs) (_user, email, fullname) = _ssl_dn_extract_info(cert) return _external_login_or_signup(request, external_id=email, external_domain='ssl:MIT', credentials=cert, email=email, fullname=fullname, retfun=retfun) return wrapped
[ "def", "ssl_login_shortcut", "(", "func", ")", ":", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "settings", ".", "FEATURES", "[", "'AUTH_USE_CERTIFICATES'", "]", ")", ":", "return", "func", "(", "*", "args", ","...
python function decorator for login procedures .
train
false
30,763
def extract_extension_options(field_list, options_spec): option_list = extract_options(field_list) option_dict = assemble_option_dict(option_list, options_spec) return option_dict
[ "def", "extract_extension_options", "(", "field_list", ",", "options_spec", ")", ":", "option_list", "=", "extract_options", "(", "field_list", ")", "option_dict", "=", "assemble_option_dict", "(", "option_list", ",", "options_spec", ")", "return", "option_dict" ]
return a dictionary mapping extension option names to converted values .
train
false
30,765
def _is_shorter_than_possible_normal_number(metadata, number): possible_number_pattern = re.compile((metadata.general_desc.possible_number_pattern or U_EMPTY_STRING)) return (_test_number_length_against_pattern(possible_number_pattern, number) == ValidationResult.TOO_SHORT)
[ "def", "_is_shorter_than_possible_normal_number", "(", "metadata", ",", "number", ")", ":", "possible_number_pattern", "=", "re", ".", "compile", "(", "(", "metadata", ".", "general_desc", ".", "possible_number_pattern", "or", "U_EMPTY_STRING", ")", ")", "return", "...
helper method to check whether a number is too short to be a regular length phone number in a region .
train
false
30,767
def make_user_interest_vector(user_interests): return [(1 if (interest in user_interests) else 0) for interest in unique_interests]
[ "def", "make_user_interest_vector", "(", "user_interests", ")", ":", "return", "[", "(", "1", "if", "(", "interest", "in", "user_interests", ")", "else", "0", ")", "for", "interest", "in", "unique_interests", "]" ]
given a list of interests .
train
false
30,768
@pytest.mark.network def test_simple_uninstall(script): result = script.pip('install', 'INITools==0.2') assert (join(script.site_packages, 'initools') in result.files_created), sorted(result.files_created.keys()) script.run('python', '-c', 'import initools') result2 = script.pip('uninstall', 'INITools', '-y') assert_all_changes(result, result2, [(script.venv / 'build'), 'cache'])
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_simple_uninstall", "(", "script", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'INITools==0.2'", ")", "assert", "(", "join", "(", "script", ".", "site_packages", ",", "'init...
test simple install and uninstall .
train
false
30,769
def bin_output(func): func.argtypes = [GEOM_PTR, POINTER(c_size_t)] func.errcheck = check_sized_string func.restype = c_uchar_p return func
[ "def", "bin_output", "(", "func", ")", ":", "func", ".", "argtypes", "=", "[", "GEOM_PTR", ",", "POINTER", "(", "c_size_t", ")", "]", "func", ".", "errcheck", "=", "check_sized_string", "func", ".", "restype", "=", "c_uchar_p", "return", "func" ]
generates a prototype for the routines that return a a sized string .
train
false
30,770
def get_version_output(binary, version_cmd): cmd = [] for item in (binary, version_cmd): if isinstance(item, list): cmd.extend(item) else: cmd.append(item) if os.path.isfile(cmd[0]): (_, output, _) = module.run_command(cmd) return output
[ "def", "get_version_output", "(", "binary", ",", "version_cmd", ")", ":", "cmd", "=", "[", "]", "for", "item", "in", "(", "binary", ",", "version_cmd", ")", ":", "if", "isinstance", "(", "item", ",", "list", ")", ":", "cmd", ".", "extend", "(", "item...
runs and returns the version output for a command .
train
false
30,771
def sum_dicts(dicts): sum_dict = {} for val_dict in dicts: for (id_, value) in val_dict.items(): if (id_ in sum_dict): sum_dict[id_] = (sum_dict[id_] + value) else: sum_dict[id_] = value return sum_dict
[ "def", "sum_dicts", "(", "dicts", ")", ":", "sum_dict", "=", "{", "}", "for", "val_dict", "in", "dicts", ":", "for", "(", "id_", ",", "value", ")", "in", "val_dict", ".", "items", "(", ")", ":", "if", "(", "id_", "in", "sum_dict", ")", ":", "sum_...
sums the dictionaries entrywise .
train
false
30,772
def resolve_duplicates(session, task): if (task.choice_flag in (action.ASIS, action.APPLY, action.RETAG)): found_duplicates = task.find_duplicates(session.lib) if found_duplicates: log.debug(u'found duplicates: {}'.format([o.id for o in found_duplicates])) duplicate_action = config['import']['duplicate_action'].as_choice({u'skip': u's', u'keep': u'k', u'remove': u'r', u'ask': u'a'}) log.debug(u'default action for duplicates: {0}', duplicate_action) if (duplicate_action == u's'): task.set_choice(action.SKIP) elif (duplicate_action == u'k'): pass elif (duplicate_action == u'r'): task.should_remove_duplicates = True else: session.resolve_duplicate(task, found_duplicates) session.log_choice(task, True)
[ "def", "resolve_duplicates", "(", "session", ",", "task", ")", ":", "if", "(", "task", ".", "choice_flag", "in", "(", "action", ".", "ASIS", ",", "action", ".", "APPLY", ",", "action", ".", "RETAG", ")", ")", ":", "found_duplicates", "=", "task", ".", ...
check if a task conflicts with items or albums already imported and ask the session to resolve this .
train
false
30,774
def do_urlencode(value): itemiter = None if isinstance(value, dict): itemiter = iteritems(value) elif (not isinstance(value, string_types)): try: itemiter = iter(value) except TypeError: pass if (itemiter is None): return unicode_urlencode(value) return u'&'.join((((unicode_urlencode(k) + '=') + unicode_urlencode(v, for_qs=True)) for (k, v) in itemiter))
[ "def", "do_urlencode", "(", "value", ")", ":", "itemiter", "=", "None", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "itemiter", "=", "iteritems", "(", "value", ")", "elif", "(", "not", "isinstance", "(", "value", ",", "string_types", ")", ...
escape strings for use in urls .
train
true
30,775
def check_valid_sports_naming(pattern=None):
    """Check whether *pattern* parses back to its original form for a
    sports-format episode.

    Falls back to the configured ``sickbeard.NAMING_PATTERN`` when no
    pattern is supplied.
    """
    if pattern is None:
        pattern = sickbeard.NAMING_PATTERN
    logger.log((u'Checking whether the pattern ' + pattern) + u' is valid for an sports episode', logger.DEBUG)
    return validate_name(pattern, sports=True)
[ "def", "check_valid_sports_naming", "(", "pattern", "=", "None", ")", ":", "if", "(", "pattern", "is", "None", ")", ":", "pattern", "=", "sickbeard", ".", "NAMING_PATTERN", "logger", ".", "log", "(", "(", "(", "u'Checking whether the pattern '", "+", "pattern"...
checks if the name is can be parsed back to its original form for an sports format .
train
false
30,776
def render_template(template_name, **context):
    """Render *template_name* from the Jinja environment with *context*.

    ``url_for`` is injected into the context so templates can build URLs;
    the rendered text is wrapped in a text/html Response.
    """
    context['url_for'] = url_for
    template = jinja_env.get_template(template_name)
    return Response(template.render(context), mimetype='text/html')
[ "def", "render_template", "(", "template_name", ",", "**", "context", ")", ":", "tmpl", "=", "jinja_env", ".", "get_template", "(", "template_name", ")", "context", "[", "'url_for'", "]", "=", "url_for", "return", "Response", "(", "tmpl", ".", "render", "(",...
renders a template from the template folder with the given context .
train
true
30,777
def _do_eval(match, exp): return eval(exp, globals(), {'m': match})
[ "def", "_do_eval", "(", "match", ",", "exp", ")", ":", "return", "eval", "(", "exp", ",", "globals", "(", ")", ",", "{", "'m'", ":", "match", "}", ")" ]
used internally to evaluate an expresseion .
train
false
30,779
def setup_join_cache(sender, **kwargs):
    """Install an empty join cache on the model's ``_meta`` (signal handler).

    The information needed to join between model fields is invariant over
    the life of the model, so a fresh dict is attached here for later
    memoization.
    """
    sender._meta._join_cache = dict()
[ "def", "setup_join_cache", "(", "sender", ",", "**", "kwargs", ")", ":", "sender", ".", "_meta", ".", "_join_cache", "=", "{", "}" ]
the information needed to join between model fields is something that is invariant over the life of the model .
train
false
30,780
def cors_handler(*args, **kwargs): req_head = cherrypy.request.headers resp_head = cherrypy.response.headers ac_method = req_head.get('Access-Control-Request-Method', None) allowed_methods = ['GET', 'POST'] allowed_headers = ['X-Auth-Token', 'Content-Type'] if (ac_method and (ac_method in allowed_methods)): resp_head['Access-Control-Allow-Methods'] = ', '.join(allowed_methods) resp_head['Access-Control-Allow-Headers'] = ', '.join(allowed_headers) resp_head['Connection'] = 'keep-alive' resp_head['Access-Control-Max-Age'] = '1400' return {}
[ "def", "cors_handler", "(", "*", "args", ",", "**", "kwargs", ")", ":", "req_head", "=", "cherrypy", ".", "request", ".", "headers", "resp_head", "=", "cherrypy", ".", "response", ".", "headers", "ac_method", "=", "req_head", ".", "get", "(", "'Access-Cont...
check a cors preflight request and return a valid response .
train
false
30,781
def get_config_directory(section): if (section is None): return os.getcwd() if ('project_dir' in section): return path(section.get('project_dir')) config = os.path.abspath(section.get('files', '').origin) return (config if os.path.isdir(config) else os.path.dirname(config))
[ "def", "get_config_directory", "(", "section", ")", ":", "if", "(", "section", "is", "None", ")", ":", "return", "os", ".", "getcwd", "(", ")", "if", "(", "'project_dir'", "in", "section", ")", ":", "return", "path", "(", "section", ".", "get", "(", ...
retrieves the configuration directory for the given section .
train
false
30,783
def rl_op(left, right): if (len(right) > 0): rl_gate = right[0] rl_gate_is_unitary = is_scalar_matrix((Dagger(rl_gate), rl_gate), _get_min_qubits(rl_gate), True) if ((len(right) > 0) and rl_gate_is_unitary): new_right = right[1:len(right)] new_left = ((Dagger(rl_gate),) + left) return (new_left, new_right) return None
[ "def", "rl_op", "(", "left", ",", "right", ")", ":", "if", "(", "len", "(", "right", ")", ">", "0", ")", ":", "rl_gate", "=", "right", "[", "0", "]", "rl_gate_is_unitary", "=", "is_scalar_matrix", "(", "(", "Dagger", "(", "rl_gate", ")", ",", "rl_g...
perform a rl operation .
train
false
30,784
def none_if_empty(fn):
    """Decorator: map empty inputs ('', None, or []) to None, else fn(value).

    Non-empty values are forwarded to *fn* and its result returned.
    """
    def wrapper(value):
        # Guard clause mirrors the original emptiness test exactly
        # (note 0 and {} are deliberately NOT treated as empty).
        is_empty = (value == '') or (value is None) or (value == [])
        return None if is_empty else fn(value)
    return wrapper
[ "def", "none_if_empty", "(", "fn", ")", ":", "def", "wrapper", "(", "value", ")", ":", "if", "(", "(", "value", "==", "''", ")", "or", "(", "value", "is", "None", ")", "or", "(", "value", "==", "[", "]", ")", ")", ":", "return", "None", "return...
a decorator which returns none if its input is empty else fn(x) .
train
false
30,785
def _BackupFilters():
    """Save the current filter list to backup storage.

    Thin wrapper over the module-level ``_cpplint_state`` singleton.
    """
    _cpplint_state.BackupFilters()
[ "def", "_BackupFilters", "(", ")", ":", "_cpplint_state", ".", "BackupFilters", "(", ")" ]
saves the current filter list to backup storage .
train
false
30,786
def LoadPlist(filename): data = None try: p = subprocess.Popen(['/usr/bin/plutil', '-convert', 'xml1', '-o', '-', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out_data, err_data) = p.communicate() except IOError as e: print e if (p.returncode == 0): data = plistlib.readPlistFromString(out_data) return data
[ "def", "LoadPlist", "(", "filename", ")", ":", "data", "=", "None", "try", ":", "p", "=", "subprocess", ".", "Popen", "(", "[", "'/usr/bin/plutil'", ",", "'-convert'", ",", "'xml1'", ",", "'-o'", ",", "'-'", ",", "filename", "]", ",", "stdout", "=", ...
plists can be read with plistlib .
train
false
30,788
def check_encoder(option): encs = g.encoders if (option >= len(encs)): message = '%s%s%s is too high, type %sencoders%s to see valid values' message = (message % (c.y, option, c.w, c.g, c.w)) return dict(valid=False, message=message) else: message = 'Encoder set to %s%s%s' message = (message % (c.y, encs[option]['name'], c.w)) return dict(valid=True, message=message)
[ "def", "check_encoder", "(", "option", ")", ":", "encs", "=", "g", ".", "encoders", "if", "(", "option", ">=", "len", "(", "encs", ")", ")", ":", "message", "=", "'%s%s%s is too high, type %sencoders%s to see valid values'", "message", "=", "(", "message", "%"...
check encoder value is acceptable .
train
false
30,789
@register.inclusion_tag(u'includes/form_errors.html', takes_context=True) def errors_for(context, form): return {u'form': form}
[ "@", "register", ".", "inclusion_tag", "(", "u'includes/form_errors.html'", ",", "takes_context", "=", "True", ")", "def", "errors_for", "(", "context", ",", "form", ")", ":", "return", "{", "u'form'", ":", "form", "}" ]
renders an alert if the form has any errors .
train
false
30,790
def run_conda_command(command, prefix, *arguments): (p, sub_parsers) = generate_parser() assert (command in parser_config), 'Wrong command for conda {0}'.format(command) parser_config[command](sub_parsers) prefix = escape_for_winpath(prefix) if arguments: arguments = list(map(escape_for_winpath, arguments)) if (command is Commands.INFO): command_line = '{0} {1}'.format(command, ' '.join(arguments)) elif (command is Commands.LIST): command_line = '{0} -n {1} {2}'.format(command, prefix, ' '.join(arguments)) else: command_line = '{0} -y -q -n {1} {2}'.format(command, prefix, ' '.join(arguments)) args = p.parse_args(split(command_line)) context._set_argparse_args(args) with captured() as c: args.func(args, p) return (c.stdout, c.stderr)
[ "def", "run_conda_command", "(", "command", ",", "prefix", ",", "*", "arguments", ")", ":", "(", "p", ",", "sub_parsers", ")", "=", "generate_parser", "(", ")", "assert", "(", "command", "in", "parser_config", ")", ",", "'Wrong command for conda {0}'", ".", ...
run conda command .
train
false
30,791
def check_permissions(path, permission):
    """Check whether *path* has exactly the given permission bits.

    Args:
        path: file or directory to stat.
        permission: expected permission bits as an int (e.g. ``0o644``).

    Returns:
        bool: True iff the lower nine mode bits equal *permission*.
    """
    # Compare the integers directly; the original compared oct() strings,
    # which is equivalent (oct is injective) but needlessly indirect.
    # 0o777 == 511 is the permission-bit mask.
    return (os.stat(path).st_mode & 0o777) == permission
[ "def", "check_permissions", "(", "path", ",", "permission", ")", ":", "return", "(", "oct", "(", "(", "os", ".", "stat", "(", "path", ")", ".", "st_mode", "&", "511", ")", ")", "==", "oct", "(", "permission", ")", ")" ]
check file or directory permissions .
train
false
30,793
def isPathEntirelyInsideLoops(loops, path):
    """Return True if *path* lies entirely inside any loop in *loops*."""
    # any() expresses the linear search more directly than the manual
    # loop-with-early-return and short-circuits identically.
    return any(isPathEntirelyInsideLoop(loop, path) for loop in loops)
[ "def", "isPathEntirelyInsideLoops", "(", "loops", ",", "path", ")", ":", "for", "loop", "in", "loops", ":", "if", "isPathEntirelyInsideLoop", "(", "loop", ",", "path", ")", ":", "return", "True", "return", "False" ]
determine if a path is entirely inside another loop in a list .
train
false
30,795
def test_byteorder(): mjd = np.array([53000.0, 54000.0]) big_endian = mjd.astype('>f8') little_endian = mjd.astype('<f8') time_mjd = Time(mjd, format='mjd') time_big = Time(big_endian, format='mjd') time_little = Time(little_endian, format='mjd') assert np.all((time_big == time_mjd)) assert np.all((time_little == time_mjd))
[ "def", "test_byteorder", "(", ")", ":", "mjd", "=", "np", ".", "array", "(", "[", "53000.0", ",", "54000.0", "]", ")", "big_endian", "=", "mjd", ".", "astype", "(", "'>f8'", ")", "little_endian", "=", "mjd", ".", "astype", "(", "'<f8'", ")", "time_mj...
ensure that bigendian and little-endian both work .
train
false
30,796
def paramstopot(thresh, shape, scale):
    """Transform (shape, scale) into peaks-over-threshold GPD parameters.

    For y = x - u | x > u ~ GPD when x ~ GPD, following the notation of
    de Zea Bermudez: the shape is unchanged and the scale is shifted by
    ``shape * thresh``.
    """
    adjusted_scale = scale - shape * thresh
    return shape, adjusted_scale
[ "def", "paramstopot", "(", "thresh", ",", "shape", ",", "scale", ")", ":", "return", "(", "shape", ",", "(", "scale", "-", "(", "shape", "*", "thresh", ")", ")", ")" ]
transform shape scale for peak over threshold y = x-u|x>u ~ gpd if x ~ gpd notation of de zea bermudez .
train
false
30,797
@core_helper def resource_view_icon(resource_view): view_plugin = datapreview.get_view_plugin(resource_view['view_type']) return view_plugin.info().get('icon', 'picture')
[ "@", "core_helper", "def", "resource_view_icon", "(", "resource_view", ")", ":", "view_plugin", "=", "datapreview", ".", "get_view_plugin", "(", "resource_view", "[", "'view_type'", "]", ")", "return", "view_plugin", ".", "info", "(", ")", ".", "get", "(", "'i...
returns the icon for a particular view type .
train
false
30,798
def cluster_get(context, id=None, is_up=None, get_services=False, services_summary=False, read_deleted='no', name_match_level=None, **filters): return IMPL.cluster_get(context, id, is_up, get_services, services_summary, read_deleted, name_match_level, **filters)
[ "def", "cluster_get", "(", "context", ",", "id", "=", "None", ",", "is_up", "=", "None", ",", "get_services", "=", "False", ",", "services_summary", "=", "False", ",", "read_deleted", "=", "'no'", ",", "name_match_level", "=", "None", ",", "**", "filters",...
get a cluster that matches the criteria .
train
false
30,799
def add_time_to_time(time1, time2, result_format='number', exclude_millis=False): time = (Time(time1) + Time(time2)) return time.convert(result_format, millis=is_falsy(exclude_millis))
[ "def", "add_time_to_time", "(", "time1", ",", "time2", ",", "result_format", "=", "'number'", ",", "exclude_millis", "=", "False", ")", ":", "time", "=", "(", "Time", "(", "time1", ")", "+", "Time", "(", "time2", ")", ")", "return", "time", ".", "conve...
adds time to another time and returns the resulting time .
train
false
30,800
def ex_lvalue(name):
    """Build an AST Name node in Store (assignment-target) context."""
    store_ctx = ast.Store()
    return ast.Name(name, store_ctx)
[ "def", "ex_lvalue", "(", "name", ")", ":", "return", "ast", ".", "Name", "(", "name", ",", "ast", ".", "Store", "(", ")", ")" ]
a variable load expression .
train
false
30,801
@library.filter def money(amount, digits=None, widen=0): return format_money(amount, digits, widen)
[ "@", "library", ".", "filter", "def", "money", "(", "amount", ",", "digits", "=", "None", ",", "widen", "=", "0", ")", ":", "return", "format_money", "(", "amount", ",", "digits", ",", "widen", ")" ]
format money amount according to current locale settings .
train
false
30,802
@pytest.fixture def fake_keyevent_factory(): def fake_keyevent(key, modifiers=0, text='', typ=QEvent.KeyPress): 'Generate a new fake QKeyPressEvent.' evtmock = unittest.mock.create_autospec(QKeyEvent, instance=True) evtmock.key.return_value = key evtmock.modifiers.return_value = modifiers evtmock.text.return_value = text evtmock.type.return_value = typ return evtmock return fake_keyevent
[ "@", "pytest", ".", "fixture", "def", "fake_keyevent_factory", "(", ")", ":", "def", "fake_keyevent", "(", "key", ",", "modifiers", "=", "0", ",", "text", "=", "''", ",", "typ", "=", "QEvent", ".", "KeyPress", ")", ":", "evtmock", "=", "unittest", ".",...
fixture that when called will return a mock instance of a qkeyevent .
train
false
30,804
def confirm_prompt(page, cancel=False, require_notification=None): page.wait_for_element_visibility('.prompt', 'Prompt is visible') confirmation_button_css = ('.prompt .action-' + ('secondary' if cancel else 'primary')) page.wait_for_element_visibility(confirmation_button_css, 'Confirmation button is visible') require_notification = ((not cancel) if (require_notification is None) else require_notification) click_css(page, confirmation_button_css, require_notification=require_notification)
[ "def", "confirm_prompt", "(", "page", ",", "cancel", "=", "False", ",", "require_notification", "=", "None", ")", ":", "page", ".", "wait_for_element_visibility", "(", "'.prompt'", ",", "'Prompt is visible'", ")", "confirmation_button_css", "=", "(", "'.prompt .acti...
ensures that a modal prompt and confirmation button are visible .
train
false
30,805
@set_database
def update_parents(parent_mapping=None, **kwargs):
    """Attach parent Topic nodes to items in the content database.

    Args:
        parent_mapping: dict mapping item path -> parent node id.
        kwargs: must carry ``db``, the peewee database to update inside a
            single transaction.
    """
    if not parent_mapping:
        return
    db = kwargs.get('db')
    if not db:
        return
    with db.atomic() as transaction:
        for key, value in parent_mapping.iteritems():
            if not value:
                continue
            try:
                parent = Item.get((Item.id == value), (Item.kind == 'Topic'))
                item = Item.get((Item.path == key))
            except DoesNotExist:
                # BUG FIX: the original fell through to ``if item and
                # parent`` with possibly-unbound (or stale, from a previous
                # iteration) locals after a failed lookup; skip instead.
                print (key, value, 'Parent or Item not found')
                continue
            item.parent = parent
            item.save()
[ "@", "set_database", "def", "update_parents", "(", "parent_mapping", "=", "None", ",", "**", "kwargs", ")", ":", "if", "parent_mapping", ":", "db", "=", "kwargs", ".", "get", "(", "'db'", ")", "if", "db", ":", "with", "db", ".", "atomic", "(", ")", "...
convenience function to add parent nodes to other nodes in the database .
train
false
30,806
@frappe.whitelist() def update_order(board_name, order): board = frappe.get_doc(u'Kanban Board', board_name) doctype = board.reference_doctype fieldname = board.field_name order_dict = json.loads(order) updated_cards = [] for (col_name, cards) in order_dict.iteritems(): order_list = [] for card in cards: column = frappe.get_value(doctype, {u'name': card}, fieldname) if (column != col_name): frappe.set_value(doctype, card, fieldname, col_name) updated_cards.append(dict(name=card, column=col_name)) for column in board.columns: if (column.column_name == col_name): column.order = json.dumps(cards) board.save() return (board, updated_cards)
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "update_order", "(", "board_name", ",", "order", ")", ":", "board", "=", "frappe", ".", "get_doc", "(", "u'Kanban Board'", ",", "board_name", ")", "doctype", "=", "board", ".", "reference_doctype", "fieldname...
save the order of cards in columns .
train
false
30,807
def peer_status(): root = _gluster_xml('peer status') if (not _gluster_ok(root)): return None result = {} for peer in _iter(root, 'peer'): uuid = peer.find('uuid').text result[uuid] = {'hostnames': []} for item in peer: if (item.tag == 'hostname'): result[uuid]['hostnames'].append(item.text) elif (item.tag == 'hostnames'): for hostname in item: if (hostname.text not in result[uuid]['hostnames']): result[uuid]['hostnames'].append(hostname.text) elif (item.tag != 'uuid'): result[uuid][item.tag] = item.text return result
[ "def", "peer_status", "(", ")", ":", "root", "=", "_gluster_xml", "(", "'peer status'", ")", "if", "(", "not", "_gluster_ok", "(", "root", ")", ")", ":", "return", "None", "result", "=", "{", "}", "for", "peer", "in", "_iter", "(", "root", ",", "'pee...
return peer status information the return value is a dictionary with peer uuids as keys and dicts of peer information as values .
train
true
30,808
def indefinite_article(word):
    """Return the English indefinite article ('a' or 'an') for *word*.

    The original stub always returned 'a'; this implements the stated
    contract using the standard vowel-initial heuristic. Empty or falsy
    words still get 'a' for backward compatibility.
    """
    if word and word[0].lower() in 'aeiou':
        return 'an'
    return 'a'
[ "def", "indefinite_article", "(", "word", ")", ":", "return", "'a'" ]
returns the indefinite article for a given word .
train
false
30,809
def gen_accept(id_, keysize=2048, force=False): id_ = clean.id(id_) ret = gen(id_, keysize) acc_path = os.path.join(__opts__['pki_dir'], 'minions', id_) if (os.path.isfile(acc_path) and (not force)): return {} with salt.utils.fopen(acc_path, 'w+') as fp_: fp_.write(ret['pub']) return ret
[ "def", "gen_accept", "(", "id_", ",", "keysize", "=", "2048", ",", "force", "=", "False", ")", ":", "id_", "=", "clean", ".", "id", "(", "id_", ")", "ret", "=", "gen", "(", "id_", ",", "keysize", ")", "acc_path", "=", "os", ".", "path", ".", "j...
generate a key pair then accept the public key .
train
true
30,812
def getsource(obj): try: try: src = encoding.to_unicode(inspect.getsource(obj)) except TypeError: if hasattr(obj, '__class__'): src = encoding.to_unicode(inspect.getsource(obj.__class__)) else: src = getdoc(obj) return src except (TypeError, IOError): return
[ "def", "getsource", "(", "obj", ")", ":", "try", ":", "try", ":", "src", "=", "encoding", ".", "to_unicode", "(", "inspect", ".", "getsource", "(", "obj", ")", ")", "except", "TypeError", ":", "if", "hasattr", "(", "obj", ",", "'__class__'", ")", ":"...
wrapper around inspect .
train
false
30,813
def is_port_free(port, address):
    """Return True if *port* is available for use at *address*.

    For 'localhost' the check is a bind attempt (success => free); for a
    remote address it is a connect attempt (success => in use).
    """
    s = socket.socket()
    try:
        if address == 'localhost':
            s.bind(('localhost', port))
            return True
        s.connect((address, port))
        return False
    except socket.error:
        # Local bind failure means the port is busy; remote connect
        # failure means nothing is listening there (treated as free).
        return address != 'localhost'
    finally:
        # BUG FIX: the original could leak the socket if an unexpected
        # exception skipped close(); finally guarantees cleanup.
        s.close()
[ "def", "is_port_free", "(", "port", ",", "address", ")", ":", "try", ":", "s", "=", "socket", ".", "socket", "(", ")", "if", "(", "address", "==", "'localhost'", ")", ":", "s", ".", "bind", "(", "(", "'localhost'", ",", "port", ")", ")", "free", ...
return true if the given port is available for use .
train
false
30,814
def print_triggered_alarms(entity=None): alarms = entity.triggeredAlarmState for alarm in alarms: print(('#' * 40)) print('alarm_moref: {0}'.format(alarm.key.split('.')[0])) print('alarm status: {0}'.format(alarm.overallStatus))
[ "def", "print_triggered_alarms", "(", "entity", "=", "None", ")", ":", "alarms", "=", "entity", ".", "triggeredAlarmState", "for", "alarm", "in", "alarms", ":", "print", "(", "(", "'#'", "*", "40", ")", ")", "print", "(", "'alarm_moref: {0}'", ".", "format...
this is a useful method if you need to print out the alarm morefs .
train
false
30,816
def subquery(alias, *args, **kwargs): return Select(*args, **kwargs).alias(alias)
[ "def", "subquery", "(", "alias", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "Select", "(", "*", "args", ",", "**", "kwargs", ")", ".", "alias", "(", "alias", ")" ]
return an :class: .
train
false
30,817
def _SimplifiedValue(validator, value):
    """Convert *value* to simplified collections and basic types.

    ValidatedBase instances become dicts, lists/tuples are converted
    element-wise (recursively), Validator instances translate the value
    via ToValue, and anything else is returned unchanged.
    """
    if isinstance(value, ValidatedBase):
        return value.ToDict()
    if isinstance(value, (list, tuple)):
        return [_SimplifiedValue(validator, element) for element in value]
    if isinstance(validator, Validator):
        return validator.ToValue(value)
    return value
[ "def", "_SimplifiedValue", "(", "validator", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "ValidatedBase", ")", ":", "return", "value", ".", "ToDict", "(", ")", "elif", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ...
convert any value to simplified collections and basic types .
train
false
30,820
def can_validate_certs():
    """Return True if the ssl package is available to validate certificates.

    Reads the module-level flag computed at import time.
    """
    return _CAN_VALIDATE_CERTS
[ "def", "can_validate_certs", "(", ")", ":", "return", "_CAN_VALIDATE_CERTS" ]
return true if we have the ssl package and can validate certificates .
train
false
30,821
def no_action(): return ControllableAction(result=succeed(None))
[ "def", "no_action", "(", ")", ":", "return", "ControllableAction", "(", "result", "=", "succeed", "(", "None", ")", ")" ]
return an istatechange that immediately does nothing .
train
false
30,822
def bound(logp, *conditions, **kwargs): broadcast_conditions = kwargs.get('broadcast_conditions', True) if broadcast_conditions: alltrue = alltrue_elemwise else: alltrue = alltrue_scalar return tt.switch(alltrue(conditions), logp, (- np.inf))
[ "def", "bound", "(", "logp", ",", "*", "conditions", ",", "**", "kwargs", ")", ":", "broadcast_conditions", "=", "kwargs", ".", "get", "(", "'broadcast_conditions'", ",", "True", ")", "if", "broadcast_conditions", ":", "alltrue", "=", "alltrue_elemwise", "else...
bounds a log probability density with several conditions .
train
false
30,824
def Manager(): from multiprocessing.managers import SyncManager m = SyncManager() m.start() return m
[ "def", "Manager", "(", ")", ":", "from", "multiprocessing", ".", "managers", "import", "SyncManager", "m", "=", "SyncManager", "(", ")", "m", ".", "start", "(", ")", "return", "m" ]
returns a manager associated with a running server process the managers methods such as lock() .
train
false
30,825
def following(request, user_id): instance = get_object_or_404(User, pk=user_id) return render_to_response('actstream/following.html', {'following': models.following(instance), 'user': instance}, context_instance=RequestContext(request))
[ "def", "following", "(", "request", ",", "user_id", ")", ":", "instance", "=", "get_object_or_404", "(", "User", ",", "pk", "=", "user_id", ")", "return", "render_to_response", "(", "'actstream/following.html'", ",", "{", "'following'", ":", "models", ".", "fo...
returns a list of actors that the user identified by user_id is following .
train
false
30,826
def get_date_from_file(filepath, sep):
    """Return the text before the first *sep* on the file's first line.

    NOTE(review): str.split always yields at least one element, so the
    IndexError branch appears unreachable; it is kept for exact parity
    with the original behavior.
    """
    with open(filepath) as handle:
        first_line = handle.readline()
    try:
        return first_line.split(sep)[0]
    except IndexError:
        return None
[ "def", "get_date_from_file", "(", "filepath", ",", "sep", ")", ":", "with", "open", "(", "filepath", ")", "as", "f", ":", "line", "=", "f", ".", "readline", "(", ")", "try", ":", "return", "line", ".", "split", "(", "sep", ")", "[", "0", "]", "ex...
get the date from the file .
train
false
30,827
def import_statsd(): try: import django_statsd is_wolphs_statsd = (hasattr(django_statsd, 'start') and hasattr(django_statsd, 'stop')) if (not is_wolphs_statsd): django_statsd = None except ImportError: django_statsd = None return django_statsd
[ "def", "import_statsd", "(", ")", ":", "try", ":", "import", "django_statsd", "is_wolphs_statsd", "=", "(", "hasattr", "(", "django_statsd", ",", "'start'", ")", "and", "hasattr", "(", "django_statsd", ",", "'stop'", ")", ")", "if", "(", "not", "is_wolphs_st...
import only the statd by wolph not the mozilla statsd todo: move to mozilla statds which is more widely used .
train
false
30,828
def varcorrection_unbalanced(nobs_all, srange=False):
    """Correction factor for variance with unequal sample sizes.

    This is just a harmonic-mean style correction.

    Parameters
    ----------
    nobs_all : array_like
        Number of observations for each sample.
    srange : bool
        If True, the sum of reciprocals is normalized by the number of
        samples.
    """
    counts = np.asarray(nobs_all)
    reciprocal_sum = (1.0 / counts).sum()
    if srange:
        return reciprocal_sum / len(counts)
    return reciprocal_sum
[ "def", "varcorrection_unbalanced", "(", "nobs_all", ",", "srange", "=", "False", ")", ":", "nobs_all", "=", "np", ".", "asarray", "(", "nobs_all", ")", "if", "(", "not", "srange", ")", ":", "return", "(", "1.0", "/", "nobs_all", ")", ".", "sum", "(", ...
correction factor for variance with unequal sample sizes this is just a harmonic mean parameters nobs_all : array_like the number of observations for each sample srange : bool if true .
train
false
30,830
def fix_lines(source_lines, options, filename=u''): original_newline = find_newline(source_lines) tmp_source = u''.join(normalize_line_endings(source_lines, u'\n')) previous_hashes = set() if options.line_range: fixed_source = tmp_source else: fixed_source = apply_global_fixes(tmp_source, options, filename=filename) passes = 0 long_line_ignore_cache = set() while (hash(fixed_source) not in previous_hashes): if ((options.pep8_passes >= 0) and (passes > options.pep8_passes)): break passes += 1 previous_hashes.add(hash(fixed_source)) tmp_source = copy.copy(fixed_source) fix = FixPEP8(filename, options, contents=tmp_source, long_line_ignore_cache=long_line_ignore_cache) fixed_source = fix.fix() sio = io.StringIO(fixed_source) return u''.join(normalize_line_endings(sio.readlines(), original_newline))
[ "def", "fix_lines", "(", "source_lines", ",", "options", ",", "filename", "=", "u''", ")", ":", "original_newline", "=", "find_newline", "(", "source_lines", ")", "tmp_source", "=", "u''", ".", "join", "(", "normalize_line_endings", "(", "source_lines", ",", "...
return fixed source code .
train
true
30,831
def _sso_params(member): try: auth_provider = AuthProvider.objects.get(organization=member.organization_id) except AuthProvider.DoesNotExist: sso_is_valid = True requires_sso = False else: if auth_provider.flags.allow_unlinked: requires_sso = False sso_is_valid = True else: requires_sso = True try: auth_identity = AuthIdentity.objects.get(auth_provider=auth_provider, user=member.user_id) except AuthIdentity.DoesNotExist: sso_is_valid = False else: sso_is_valid = auth_identity.is_valid(member) return (requires_sso, sso_is_valid)
[ "def", "_sso_params", "(", "member", ")", ":", "try", ":", "auth_provider", "=", "AuthProvider", ".", "objects", ".", "get", "(", "organization", "=", "member", ".", "organization_id", ")", "except", "AuthProvider", ".", "DoesNotExist", ":", "sso_is_valid", "=...
return a tuple of for a given member .
train
false
30,832
def get_precreated_dataset(precreated_datasets, name):
    """Pop and return the first dataset whose ``name`` matches *name*.

    Args:
        precreated_datasets: list of objects with a ``name`` attribute;
            the matching element is removed from this list (side effect,
            exactly as in the original).
        name: name to match.

    Returns:
        The removed dataset, or None if no name matches.
    """
    # Single pass with enumerate instead of building a parallel name list
    # and scanning it twice (count + index).
    for i, dataset in enumerate(precreated_datasets):
        if dataset.name == name:
            return precreated_datasets.pop(i)
    return None
[ "def", "get_precreated_dataset", "(", "precreated_datasets", ",", "name", ")", ":", "names", "=", "[", "d", ".", "name", "for", "d", "in", "precreated_datasets", "]", "if", "(", "names", ".", "count", "(", "name", ")", ">", "0", ")", ":", "return", "pr...
return a dataset matching a name from the list of precreated datasets .
train
false
30,833
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) @utils.arg('secgroup', metavar='<secgroup>', help=_('Name or ID of Security Group.')) def do_add_secgroup(cs, args): server = _find_server(cs, args.server) server.add_security_group(args.secgroup)
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "@", "utils", ".", "arg", "(", "'secgroup'", ",", "metavar", "=", "'<secgroup>'", ",", "help", "=", "_", "(",...
add a security group to a server .
train
false
30,835
def check_win_maxpath(folder): if sabnzbd.WIN32: for p in os.listdir(folder): if (len(os.path.join(folder, p)) > 259): return False return True
[ "def", "check_win_maxpath", "(", "folder", ")", ":", "if", "sabnzbd", ".", "WIN32", ":", "for", "p", "in", "os", ".", "listdir", "(", "folder", ")", ":", "if", "(", "len", "(", "os", ".", "path", ".", "join", "(", "folder", ",", "p", ")", ")", ...
return false if any file path in folder exceeds the windows maximum .
train
false