id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
25,962
def config_with_defaults(config=None): defaults_copy = configobj.ConfigObj(constants.RENEWER_DEFAULTS) defaults_copy.merge((config if (config is not None) else configobj.ConfigObj())) return defaults_copy
[ "def", "config_with_defaults", "(", "config", "=", "None", ")", ":", "defaults_copy", "=", "configobj", ".", "ConfigObj", "(", "constants", ".", "RENEWER_DEFAULTS", ")", "defaults_copy", ".", "merge", "(", "(", "config", "if", "(", "config", "is", "not", "No...
merge supplied config .
train
false
25,965
def tanhPrime(x): tmp = tanh(x) return (1 - (tmp * tmp))
[ "def", "tanhPrime", "(", "x", ")", ":", "tmp", "=", "tanh", "(", "x", ")", "return", "(", "1", "-", "(", "tmp", "*", "tmp", ")", ")" ]
derivative of tanh .
train
false
25,966
def rgb2hex(rgb): return ('#%02x%02x%02x' % tuple([round((val * 255)) for val in rgb]))
[ "def", "rgb2hex", "(", "rgb", ")", ":", "return", "(", "'#%02x%02x%02x'", "%", "tuple", "(", "[", "round", "(", "(", "val", "*", "255", ")", ")", "for", "val", "in", "rgb", "]", ")", ")" ]
given a len 3 rgb tuple of 0-1 floats .
train
false
25,967
def uncompyle_file(filename, outstream=None, showasm=0, showast=0, deob=0): (version, co) = _load_module(filename) uncompyle(version, co, outstream, showasm, showast, deob) co = None
[ "def", "uncompyle_file", "(", "filename", ",", "outstream", "=", "None", ",", "showasm", "=", "0", ",", "showast", "=", "0", ",", "deob", "=", "0", ")", ":", "(", "version", ",", "co", ")", "=", "_load_module", "(", "filename", ")", "uncompyle", "(",...
decompile python byte-code file .
train
false
25,968
def debugx(expr, pre_msg=''): cf = sys._getframe(1) print ('[DBG:%s] %s%s -> %r' % (cf.f_code.co_name, pre_msg, expr, eval(expr, cf.f_globals, cf.f_locals)))
[ "def", "debugx", "(", "expr", ",", "pre_msg", "=", "''", ")", ":", "cf", "=", "sys", ".", "_getframe", "(", "1", ")", "print", "(", "'[DBG:%s] %s%s -> %r'", "%", "(", "cf", ".", "f_code", ".", "co_name", ",", "pre_msg", ",", "expr", ",", "eval", "(...
print the value of an expression from the callers frame .
train
true
25,969
def has_extended_length(flags): return ((flags & _BGP_PA_EXTENDED_LENGTH) == _BGP_PA_EXTENDED_LENGTH)
[ "def", "has_extended_length", "(", "flags", ")", ":", "return", "(", "(", "flags", "&", "_BGP_PA_EXTENDED_LENGTH", ")", "==", "_BGP_PA_EXTENDED_LENGTH", ")" ]
used in bgppathattr to check if the extended-length flag is set .
train
false
25,970
def split_mongo_store_config(data_dir): modulestore_options = {'default_class': 'xmodule.raw_module.RawDescriptor', 'fs_root': data_dir, 'render_template': 'edxmako.shortcuts.render_to_string'} store = {'default': {'NAME': 'draft', 'ENGINE': 'xmodule.modulestore.split_mongo.split_draft.DraftVersioningModuleStore', 'DOC_STORE_CONFIG': {'host': MONGO_HOST, 'port': MONGO_PORT_NUM, 'db': 'test_xmodule_{}'.format(os.getpid()), 'collection': 'modulestore'}, 'OPTIONS': modulestore_options}} return store
[ "def", "split_mongo_store_config", "(", "data_dir", ")", ":", "modulestore_options", "=", "{", "'default_class'", ":", "'xmodule.raw_module.RawDescriptor'", ",", "'fs_root'", ":", "data_dir", ",", "'render_template'", ":", "'edxmako.shortcuts.render_to_string'", "}", "store...
defines split module store .
train
false
25,971
def StringIO(*args, **kw): global StringIO try: from cStringIO import StringIO except ImportError: from StringIO import StringIO return StringIO(*args, **kw)
[ "def", "StringIO", "(", "*", "args", ",", "**", "kw", ")", ":", "global", "StringIO", "try", ":", "from", "cStringIO", "import", "StringIO", "except", "ImportError", ":", "from", "StringIO", "import", "StringIO", "return", "StringIO", "(", "*", "args", ","...
thunk to load the real stringio on demand .
train
true
25,972
def Pmfs(pmfs, **options): for pmf in pmfs: Pmf(pmf, **options)
[ "def", "Pmfs", "(", "pmfs", ",", "**", "options", ")", ":", "for", "pmf", "in", "pmfs", ":", "Pmf", "(", "pmf", ",", "**", "options", ")" ]
plots a sequence of pmfs .
train
false
25,973
def ne(a, b): return (a != b)
[ "def", "ne", "(", "a", ",", "b", ")", ":", "return", "(", "a", "!=", "b", ")" ]
this just assumes that words in all caps or titles are named entities .
train
false
25,974
@utils.arg('--cell', metavar='<cell-name>', help=_('Name of the cell to get the capacities.'), default=None) def do_cell_capacities(cs, args): cell = cs.cells.capacities(args.cell) print((_('Ram Available: %s MB') % cell.capacities['ram_free']['total_mb'])) utils.print_dict(cell.capacities['ram_free']['units_by_mb'], dict_property='Ram(MB)', dict_value='Units') print((_('\nDisk Available: %s MB') % cell.capacities['disk_free']['total_mb'])) utils.print_dict(cell.capacities['disk_free']['units_by_mb'], dict_property='Disk(MB)', dict_value='Units')
[ "@", "utils", ".", "arg", "(", "'--cell'", ",", "metavar", "=", "'<cell-name>'", ",", "help", "=", "_", "(", "'Name of the cell to get the capacities.'", ")", ",", "default", "=", "None", ")", "def", "do_cell_capacities", "(", "cs", ",", "args", ")", ":", ...
get cell capacities for all cells or a given cell .
train
false
25,975
def shortcode_role(name, rawtext, text, lineno, inliner, options={}, content=[]): return ([docutils.nodes.raw(u'', text, format=u'html')], [])
[ "def", "shortcode_role", "(", "name", ",", "rawtext", ",", "text", ",", "lineno", ",", "inliner", ",", "options", "=", "{", "}", ",", "content", "=", "[", "]", ")", ":", "return", "(", "[", "docutils", ".", "nodes", ".", "raw", "(", "u''", ",", "...
a shortcode role that passes through raw inline html .
train
false
25,976
def do_populate(clear=False): scope_dict = oauth_scopes.public_scopes if clear: ApiOAuth2Scope.remove() for (name, scope) in scope_dict.iteritems(): if (scope.is_public is True): get_or_create(name, scope.description, save=True) else: logger.info('{} is not a publicly advertised scope; did not load into database'.format(name))
[ "def", "do_populate", "(", "clear", "=", "False", ")", ":", "scope_dict", "=", "oauth_scopes", ".", "public_scopes", "if", "clear", ":", "ApiOAuth2Scope", ".", "remove", "(", ")", "for", "(", "name", ",", "scope", ")", "in", "scope_dict", ".", "iteritems",...
given a dictionary of scope definitions .
train
false
25,978
def typed_dict(key_type, value_type, default): return (lambda setting: {key_type(StringConverter(key)): (value_type(StringConverter(value)) if (value != '') else default) for (key, value) in dict(setting).items()})
[ "def", "typed_dict", "(", "key_type", ",", "value_type", ",", "default", ")", ":", "return", "(", "lambda", "setting", ":", "{", "key_type", "(", "StringConverter", "(", "key", ")", ")", ":", "(", "value_type", "(", "StringConverter", "(", "value", ")", ...
creates a function that converts a setting into a dict with the given types .
train
false
25,979
def parse_hosts_line(line): (host, keytype, key) = line.strip().split(None, 3)[:3] fp_plain = hashlib.md5(b64decode(key)).hexdigest() fingerprint = u':'.join([(a + b) for (a, b) in zip(fp_plain[::2], fp_plain[1::2])]) if host.startswith(u'|1|'): host = _(u'[hostname hashed]') return (host, keytype, fingerprint)
[ "def", "parse_hosts_line", "(", "line", ")", ":", "(", "host", ",", "keytype", ",", "key", ")", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "None", ",", "3", ")", "[", ":", "3", "]", "fp_plain", "=", "hashlib", ".", "md5", "(", "b64...
parses single hosts line into tuple host .
train
false
25,980
def prettydt(dt): if (dt.utcoffset() >= timedelta(0)): offset = ('+%s' % (dt.utcoffset(),)) else: offset = ('-%s' % (((-1) * dt.utcoffset()),)) return ('%04d-%02d-%02d %02d:%02d:%02d %s %s' % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.tzname(), offset))
[ "def", "prettydt", "(", "dt", ")", ":", "if", "(", "dt", ".", "utcoffset", "(", ")", ">=", "timedelta", "(", "0", ")", ")", ":", "offset", "=", "(", "'+%s'", "%", "(", "dt", ".", "utcoffset", "(", ")", ",", ")", ")", "else", ":", "offset", "=...
datetime as a string using a known format .
train
false
25,982
def CDLSTICKSANDWICH(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLSTICKSANDWICH)
[ "def", "CDLSTICKSANDWICH", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLSTICKSANDWICH", ")" ]
stick sandwich .
train
false
25,984
def _get_size_tput(): try: import subprocess proc = subprocess.Popen(['tput', 'cols'], stdin=subprocess.PIPE, stdout=subprocess.PIPE) output = proc.communicate(input=None) cols = int(output[0]) proc = subprocess.Popen(['tput', 'lines'], stdin=subprocess.PIPE, stdout=subprocess.PIPE) output = proc.communicate(input=None) rows = int(output[0]) return (cols, rows) except: return None
[ "def", "_get_size_tput", "(", ")", ":", "try", ":", "import", "subprocess", "proc", "=", "subprocess", ".", "Popen", "(", "[", "'tput'", ",", "'cols'", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ")...
attempt to discover the dimensions of a terminal window .
train
true
25,985
def test_disarmed_weird(): nikola.utils.USE_SLUGIFY = False o = nikola.utils.slugify(u'Za\u017c\xf3\u0142\u0107 g\u0119\u015bl\u0105 ja\u017a\u0144!-123.456 "Hello World"?#H<e>l/l\\o:W\'o\rr*l DCTB d|!\n', lang=u'pl') assert (o == u'Za\u017c\xf3\u0142\u0107 g\u0119\u015bl\u0105 ja\u017a\u0144!-123.456 -Hello World---H-e-l-l-o-W-o-r-l-d-!-') assert isinstance(o, nikola.utils.unicode_str) nikola.utils.USE_SLUGIFY = True
[ "def", "test_disarmed_weird", "(", ")", ":", "nikola", ".", "utils", ".", "USE_SLUGIFY", "=", "False", "o", "=", "nikola", ".", "utils", ".", "slugify", "(", "u'Za\\u017c\\xf3\\u0142\\u0107 g\\u0119\\u015bl\\u0105 ja\\u017a\\u0144!-123.456 \"Hello World\"?#H<e>l/l\\\\o:W\\'o\...
test disarmed slugify with banned characters .
train
false
25,987
def cert_from_key_info_dict(key_info, ignore_age=False): res = [] if (not ('x509_data' in key_info)): return res for x509_data in key_info['x509_data']: x509_certificate = x509_data['x509_certificate'] cert = x509_certificate['text'].strip() cert = '\n'.join(split_len(''.join([s.strip() for s in cert.split()]), 64)) if (ignore_age or active_cert(cert)): res.append(cert) else: logger.info('Inactive cert') return res
[ "def", "cert_from_key_info_dict", "(", "key_info", ",", "ignore_age", "=", "False", ")", ":", "res", "=", "[", "]", "if", "(", "not", "(", "'x509_data'", "in", "key_info", ")", ")", ":", "return", "res", "for", "x509_data", "in", "key_info", "[", "'x509_...
get all x509 certs from a keyinfo dictionary .
train
true
25,988
def postorder_unrooted(graph): allnodes = set(graph.nodes()) visited = set() orders = [] def dfs_walk(node): visited.add(node) for succ in graph.successors(node): if (not (succ in visited)): dfs_walk(succ) orders[(-1)].append(node) while (len(allnodes) > len(visited)): remaining = (allnodes - visited) root = remaining.pop() orders.append([]) dfs_walk(root) return orders
[ "def", "postorder_unrooted", "(", "graph", ")", ":", "allnodes", "=", "set", "(", "graph", ".", "nodes", "(", ")", ")", "visited", "=", "set", "(", ")", "orders", "=", "[", "]", "def", "dfs_walk", "(", "node", ")", ":", "visited", ".", "add", "(", ...
unrooted post-order traversal of a graph .
train
false
25,989
def ax_normed_data(x1list, y1list, ax_ymax): unit = (ax_ymax / max(abs(y1list))) nxlist = [] nylist = [] for i in range(len(y1list)): if (y1list[i] != 0): nxlist.append(x1list[i]) nylist.append(y1list[i]) nylist = np.abs((np.array(nylist) * unit)) return (nxlist, nylist)
[ "def", "ax_normed_data", "(", "x1list", ",", "y1list", ",", "ax_ymax", ")", ":", "unit", "=", "(", "ax_ymax", "/", "max", "(", "abs", "(", "y1list", ")", ")", ")", "nxlist", "=", "[", "]", "nylist", "=", "[", "]", "for", "i", "in", "range", "(", ...
docstring for normed_data .
train
false
25,990
def _ensure_dir(directory): try: os.makedirs(directory) except OSError as exc: if (exc.errno == errno.EEXIST): pass else: raise
[ "def", "_ensure_dir", "(", "directory", ")", ":", "try", ":", "os", ".", "makedirs", "(", "directory", ")", "except", "OSError", "as", "exc", ":", "if", "(", "exc", ".", "errno", "==", "errno", ".", "EEXIST", ")", ":", "pass", "else", ":", "raise" ]
ensure that directory exists .
train
false
25,992
def saveit(func): name = ('_' + func.__name__) def _wrapper(self, *args, **kwds): if (not hasattr(self, name)): setattr(self, name, func(self, *args, **kwds)) return getattr(self, name) return _wrapper
[ "def", "saveit", "(", "func", ")", ":", "name", "=", "(", "'_'", "+", "func", ".", "__name__", ")", "def", "_wrapper", "(", "self", ",", "*", "args", ",", "**", "kwds", ")", ":", "if", "(", "not", "hasattr", "(", "self", ",", "name", ")", ")", ...
a decorator that caches the return value of a function .
train
true
25,993
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
returns a form for a new genericimagedatasetjob .
train
false
25,994
def interpret_sents(inputs, grammar, semkey=u'SEM', trace=0): return [[(syn, root_semrep(syn, semkey)) for syn in syntrees] for syntrees in parse_sents(inputs, grammar, trace=trace)]
[ "def", "interpret_sents", "(", "inputs", ",", "grammar", ",", "semkey", "=", "u'SEM'", ",", "trace", "=", "0", ")", ":", "return", "[", "[", "(", "syn", ",", "root_semrep", "(", "syn", ",", "semkey", ")", ")", "for", "syn", "in", "syntrees", "]", "...
add the semantic representation to each syntactic parse tree of each input sentence .
train
false
25,996
def intTo8ByteArray(inValue): values = (((inValue >> 56) & 255), ((inValue >> 48) & 255), ((inValue >> 40) & 255), ((inValue >> 32) & 255), ((inValue >> 24) & 255), ((inValue >> 16) & 255), ((inValue >> 8) & 255), (inValue & 255)) s = struct.Struct('B B B B B B B B') packed_data = s.pack(*values) return packed_data
[ "def", "intTo8ByteArray", "(", "inValue", ")", ":", "values", "=", "(", "(", "(", "inValue", ">>", "56", ")", "&", "255", ")", ",", "(", "(", "inValue", ">>", "48", ")", "&", "255", ")", ",", "(", "(", "inValue", ">>", "40", ")", "&", "255", ...
converts an int to a packed byte array .
train
false
25,997
@docfiller def median_filter(input, size=None, footprint=None, output=None, mode='reflect', cval=0.0, origin=0): return _rank_filter(input, 0, size, footprint, output, mode, cval, origin, 'median')
[ "@", "docfiller", "def", "median_filter", "(", "input", ",", "size", "=", "None", ",", "footprint", "=", "None", ",", "output", "=", "None", ",", "mode", "=", "'reflect'", ",", "cval", "=", "0.0", ",", "origin", "=", "0", ")", ":", "return", "_rank_f...
calculates a multidimensional median filter .
train
false
25,998
def cmd_line(args): args = [str(x) for x in args] return ' '.join((pipes.quote(x) for x in args))
[ "def", "cmd_line", "(", "args", ")", ":", "args", "=", "[", "str", "(", "x", ")", "for", "x", "in", "args", "]", "return", "' '", ".", "join", "(", "(", "pipes", ".", "quote", "(", "x", ")", "for", "x", "in", "args", ")", ")" ]
build a command line that works in a shell .
train
false
26,000
def get_navigator_audit_log_dir(): return get_conf().get(_CONF_NAVIGATOR_AUDIT_LOG_DIR, '')
[ "def", "get_navigator_audit_log_dir", "(", ")", ":", "return", "get_conf", "(", ")", ".", "get", "(", "_CONF_NAVIGATOR_AUDIT_LOG_DIR", ",", "''", ")" ]
returns audit_event_log_dir .
train
false
26,001
def empty_asset_finder(): return tmp_asset_finder(equities=None)
[ "def", "empty_asset_finder", "(", ")", ":", "return", "tmp_asset_finder", "(", "equities", "=", "None", ")" ]
context manager for creating an empty asset finder .
train
false
26,003
def load_average(): return _load_average()
[ "def", "load_average", "(", ")", ":", "return", "_load_average", "(", ")" ]
return system load average as a triple .
train
false
26,004
def fileno(): if (not _state): raise RuntimeError('no active input()') return _state.fileno()
[ "def", "fileno", "(", ")", ":", "if", "(", "not", "_state", ")", ":", "raise", "RuntimeError", "(", "'no active input()'", ")", "return", "_state", ".", "fileno", "(", ")" ]
return the file number of the current file .
train
false
26,005
def from_rdata_list(name, ttl, rdatas): if isinstance(name, (str, unicode)): name = dns.name.from_text(name, None) if (len(rdatas) == 0): raise ValueError('rdata list must not be empty') r = None for rd in rdatas: if (r is None): r = RRset(name, rd.rdclass, rd.rdtype) r.update_ttl(ttl) first_time = False r.add(rd) return r
[ "def", "from_rdata_list", "(", "name", ",", "ttl", ",", "rdatas", ")", ":", "if", "isinstance", "(", "name", ",", "(", "str", ",", "unicode", ")", ")", ":", "name", "=", "dns", ".", "name", ".", "from_text", "(", "name", ",", "None", ")", "if", "...
create an rdataset with the specified ttl .
train
true
26,006
def filter_definition_names(names, origin, position=None): if (not names): return [] stmt = names[0].get_definition() scope = stmt.get_parent_scope() if (not (isinstance(scope, er.FunctionExecution) and isinstance(scope.base, er.LambdaWrapper))): names = filter_after_position(names, position) names = [name for name in names if name.is_definition()] for name in list(names): if (name.value.startswith('__') and (not name.value.endswith('__'))): if filter_private_variable(scope, origin): names.remove(name) return names
[ "def", "filter_definition_names", "(", "names", ",", "origin", ",", "position", "=", "None", ")", ":", "if", "(", "not", "names", ")", ":", "return", "[", "]", "stmt", "=", "names", "[", "0", "]", ".", "get_definition", "(", ")", "scope", "=", "stmt"...
filter names that are actual definitions in a scope .
train
false
26,007
def previous_week_day(base_date, weekday): day = (base_date - timedelta(days=1)) while (day.weekday() != weekday): day = (day - timedelta(days=1)) return day
[ "def", "previous_week_day", "(", "base_date", ",", "weekday", ")", ":", "day", "=", "(", "base_date", "-", "timedelta", "(", "days", "=", "1", ")", ")", "while", "(", "day", ".", "weekday", "(", ")", "!=", "weekday", ")", ":", "day", "=", "(", "day...
finds previous weekday .
train
true
26,008
def arg_parser(argv, globals): doc = globals['__doc__'] (description, epilog) = doc.split('\n', 1) parser = argparse.ArgumentParser(description=description, epilog=epilog, formatter_class=argparse.RawTextHelpFormatter) return parser
[ "def", "arg_parser", "(", "argv", ",", "globals", ")", ":", "doc", "=", "globals", "[", "'__doc__'", "]", "(", "description", ",", "epilog", ")", "=", "doc", ".", "split", "(", "'\\n'", ",", "1", ")", "parser", "=", "argparse", ".", "ArgumentParser", ...
build an argparser for this cli tool .
train
false
26,009
def has_previous_repository_reviews(app, repository, changeset_revision): repo = hg_util.get_repo_for_repository(app, repository=repository, repo_path=None, create=False) reviewed_revision_hashes = [review.changeset_revision for review in repository.reviews] for changeset in hg_util.reversed_upper_bounded_changelog(repo, changeset_revision): previous_changeset_revision = str(repo.changectx(changeset)) if (previous_changeset_revision in reviewed_revision_hashes): return True return False
[ "def", "has_previous_repository_reviews", "(", "app", ",", "repository", ",", "changeset_revision", ")", ":", "repo", "=", "hg_util", ".", "get_repo_for_repository", "(", "app", ",", "repository", "=", "repository", ",", "repo_path", "=", "None", ",", "create", ...
determine if a repository has a changeset revision review prior to the received changeset revision .
train
false
26,010
@doctest_depends_on(modules=('lxml',)) def apply_xsl(mml, xsl): from lxml import etree s = etree.XML(get_resource(xsl).read()) transform = etree.XSLT(s) doc = etree.XML(mml) result = transform(doc) s = str(result) return s
[ "@", "doctest_depends_on", "(", "modules", "=", "(", "'lxml'", ",", ")", ")", "def", "apply_xsl", "(", "mml", ",", "xsl", ")", ":", "from", "lxml", "import", "etree", "s", "=", "etree", ".", "XML", "(", "get_resource", "(", "xsl", ")", ".", "read", ...
apply a xsl to a mathml string .
train
false
26,012
def add_cli_options(): CONF.unregister_opt(sql_connection_opt) CONF.register_cli_opt(sql_connection_opt)
[ "def", "add_cli_options", "(", ")", ":", "CONF", ".", "unregister_opt", "(", "sql_connection_opt", ")", "CONF", ".", "register_cli_opt", "(", "sql_connection_opt", ")" ]
adds any configuration options that the db layer might have .
train
false
26,013
def _ps(osdata): grains = {} bsd_choices = ('FreeBSD', 'NetBSD', 'OpenBSD', 'MacOS') if (osdata['os'] in bsd_choices): grains['ps'] = 'ps auxwww' elif (osdata['os_family'] == 'Solaris'): grains['ps'] = '/usr/ucb/ps auxwww' elif (osdata['os'] == 'Windows'): grains['ps'] = 'tasklist.exe' elif (osdata.get('virtual', '') == 'openvzhn'): grains['ps'] = 'ps -fH -p $(grep -l "^envID:[[:space:]]*0\\$" /proc/[0-9]*/status | sed -e "s=/proc/\\([0-9]*\\)/.*=\\1=") | awk \'{ $7=""; print }\'' else: grains['ps'] = 'ps -efHww' return grains
[ "def", "_ps", "(", "osdata", ")", ":", "grains", "=", "{", "}", "bsd_choices", "=", "(", "'FreeBSD'", ",", "'NetBSD'", ",", "'OpenBSD'", ",", "'MacOS'", ")", "if", "(", "osdata", "[", "'os'", "]", "in", "bsd_choices", ")", ":", "grains", "[", "'ps'",...
return the ps grain .
train
true
26,014
def get_sessions_to_clear(user=None, keep_current=False, device=None): if (not user): user = frappe.session.user if (not device): device = (frappe.session.data.device or u'desktop') limit = 0 if (user == frappe.session.user): simultaneous_sessions = (frappe.db.get_value(u'User', user, u'simultaneous_sessions') or 1) limit = (simultaneous_sessions - 1) condition = u'' if keep_current: condition = u' and sid != "{0}"'.format(frappe.db.escape(frappe.session.sid)) return frappe.db.sql_list(u'select sid from tabSessions\n DCTB DCTB where user=%s and device=%s {condition}\n DCTB DCTB order by lastupdate desc limit {limit}, 100'.format(condition=condition, limit=limit), (user, device))
[ "def", "get_sessions_to_clear", "(", "user", "=", "None", ",", "keep_current", "=", "False", ",", "device", "=", "None", ")", ":", "if", "(", "not", "user", ")", ":", "user", "=", "frappe", ".", "session", ".", "user", "if", "(", "not", "device", ")"...
returns sessions of the current user .
train
false
26,015
def clean_partial_pipeline(request): name = setting('SOCIAL_AUTH_PARTIAL_PIPELINE_KEY', 'partial_pipeline') if (name in request.session): request.session.pop(name, None)
[ "def", "clean_partial_pipeline", "(", "request", ")", ":", "name", "=", "setting", "(", "'SOCIAL_AUTH_PARTIAL_PIPELINE_KEY'", ",", "'partial_pipeline'", ")", "if", "(", "name", "in", "request", ".", "session", ")", ":", "request", ".", "session", ".", "pop", "...
cleans any data for partial pipeline .
train
false
26,016
def test_reader_macro_error(): try: macroexpand(tokenize("(dispatch_reader_macro '- '())")[0], HyASTCompiler(__name__)) except HyTypeError as e: assert ('with the character `-`' in str(e))
[ "def", "test_reader_macro_error", "(", ")", ":", "try", ":", "macroexpand", "(", "tokenize", "(", "\"(dispatch_reader_macro '- '())\"", ")", "[", "0", "]", ",", "HyASTCompiler", "(", "__name__", ")", ")", "except", "HyTypeError", "as", "e", ":", "assert", "(",...
check if we get correct error with wrong dispatch character .
train
false
26,017
def current(sam=False): try: if sam: user_name = win32api.GetUserNameEx(win32con.NameSamCompatible) else: user_name = win32api.GetUserName() except pywintypes.error as exc: (number, context, message) = exc log.error('Failed to get current user') log.error('nbr: {0}'.format(number)) log.error('ctx: {0}'.format(context)) log.error('msg: {0}'.format(message)) return False if (not user_name): return False return user_name
[ "def", "current", "(", "sam", "=", "False", ")", ":", "try", ":", "if", "sam", ":", "user_name", "=", "win32api", ".", "GetUserNameEx", "(", "win32con", ".", "NameSamCompatible", ")", "else", ":", "user_name", "=", "win32api", ".", "GetUserName", "(", ")...
retrieve information on the logged-in user from plotly .
train
false
26,018
def native_stringify_dict(dct_or_tuples, encoding='utf-8', keys_only=True): d = {} for (k, v) in iteritems(dict(dct_or_tuples)): k = _to_native_str(k, encoding) if (not keys_only): if isinstance(v, dict): v = native_stringify_dict(v, encoding=encoding, keys_only=keys_only) elif isinstance(v, list): v = [_to_native_str(e, encoding) for e in v] else: v = _to_native_str(v, encoding) d[k] = v return d
[ "def", "native_stringify_dict", "(", "dct_or_tuples", ",", "encoding", "=", "'utf-8'", ",", "keys_only", "=", "True", ")", ":", "d", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "iteritems", "(", "dict", "(", "dct_or_tuples", ")", ")", ":", "...
return a dict with unicode keys of the given dict converted to strings .
train
false
26,019
def _get_tmp(): userdir = os.path.expanduser('~') for testdir in [tempfile.gettempdir(), os.path.join(userdir, '.cache'), os.path.join(userdir, '.tmp'), userdir]: if ((not os.path.exists(testdir)) or (not (_run_shell_command('echo success', testdir) == 'success'))): continue return testdir return ''
[ "def", "_get_tmp", "(", ")", ":", "userdir", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "for", "testdir", "in", "[", "tempfile", ".", "gettempdir", "(", ")", ",", "os", ".", "path", ".", "join", "(", "userdir", ",", "'.cache'", ")"...
find an executable tmp directory .
train
false
26,020
def get_serv_parms(service): import _winreg value = [] try: key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, (_SERVICE_KEY + service)) for n in xrange(_winreg.QueryInfoKey(key)[1]): (name, value, _val_type) = _winreg.EnumValue(key, n) if (name == _SERVICE_PARM): break _winreg.CloseKey(key) except WindowsError: pass for n in xrange(len(value)): value[n] = value[n] return value
[ "def", "get_serv_parms", "(", "service", ")", ":", "import", "_winreg", "value", "=", "[", "]", "try", ":", "key", "=", "_winreg", ".", "OpenKey", "(", "_winreg", ".", "HKEY_LOCAL_MACHINE", ",", "(", "_SERVICE_KEY", "+", "service", ")", ")", "for", "n", ...
get the service command line parameters from registry .
train
false
26,022
def read_actions(): while True: key = get_key() if (key in (const.KEY_UP, 'k')): (yield const.ACTION_PREVIOUS) elif (key in (const.KEY_DOWN, 'j')): (yield const.ACTION_NEXT) elif (key in (const.KEY_CTRL_C, 'q')): (yield const.ACTION_ABORT) elif (key in ('\n', '\r')): (yield const.ACTION_SELECT)
[ "def", "read_actions", "(", ")", ":", "while", "True", ":", "key", "=", "get_key", "(", ")", "if", "(", "key", "in", "(", "const", ".", "KEY_UP", ",", "'k'", ")", ")", ":", "(", "yield", "const", ".", "ACTION_PREVIOUS", ")", "elif", "(", "key", "...
yields actions for pressed keys .
train
true
26,023
def full2sparse(vec, eps=1e-09): vec = np.asarray(vec, dtype=float) nnz = np.nonzero((abs(vec) > eps))[0] return list(zip(nnz, vec.take(nnz)))
[ "def", "full2sparse", "(", "vec", ",", "eps", "=", "1e-09", ")", ":", "vec", "=", "np", ".", "asarray", "(", "vec", ",", "dtype", "=", "float", ")", "nnz", "=", "np", ".", "nonzero", "(", "(", "abs", "(", "vec", ")", ">", "eps", ")", ")", "["...
convert a dense np array into the sparse document format .
train
false
26,027
def _require_crypto_or_die(): if (not HAS_CRYPTO): raise CryptoUnavailableError('No crypto library available')
[ "def", "_require_crypto_or_die", "(", ")", ":", "if", "(", "not", "HAS_CRYPTO", ")", ":", "raise", "CryptoUnavailableError", "(", "'No crypto library available'", ")" ]
ensure we have a crypto library .
train
false
26,031
def remove_entrance_exam_graders(course_key, user): grading_model = CourseGradingModel.fetch(course_key) graders = grading_model.graders for (i, grader) in enumerate(graders): if (grader['type'] == GRADER_TYPES['ENTRANCE_EXAM']): CourseGradingModel.delete_grader(course_key, i, user)
[ "def", "remove_entrance_exam_graders", "(", "course_key", ",", "user", ")", ":", "grading_model", "=", "CourseGradingModel", ".", "fetch", "(", "course_key", ")", "graders", "=", "grading_model", ".", "graders", "for", "(", "i", ",", "grader", ")", "in", "enum...
removes existing entrance exam graders attached to the specified course typically used when adding/removing an entrance exam .
train
false
26,032
def credential(): s3.filter = (FS('person_id$human_resource.type') == 2) return s3db.hrm_credential_controller()
[ "def", "credential", "(", ")", ":", "s3", ".", "filter", "=", "(", "FS", "(", "'person_id$human_resource.type'", ")", "==", "2", ")", "return", "s3db", ".", "hrm_credential_controller", "(", ")" ]
credentials controller .
train
false
26,033
def compute_canonical_form(property_name, value): if isinstance(value, basestring): for (canonical_form, rexps) in properties_rexps[property_name].items(): for rexp in rexps: if rexp.match(value): return canonical_form return None
[ "def", "compute_canonical_form", "(", "property_name", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "basestring", ")", ":", "for", "(", "canonical_form", ",", "rexps", ")", "in", "properties_rexps", "[", "property_name", "]", ".", "items", ...
return the canonical form of a property given its type if it is a valid one .
train
false
26,034
def reset_stats(lbn, profile='default'): cmd = {'cmd': 'reset', 'mime': 'prop', 'w': lbn} return (_do_http(cmd, profile)['worker.result.type'] == 'OK')
[ "def", "reset_stats", "(", "lbn", ",", "profile", "=", "'default'", ")", ":", "cmd", "=", "{", "'cmd'", ":", "'reset'", ",", "'mime'", ":", "'prop'", ",", "'w'", ":", "lbn", "}", "return", "(", "_do_http", "(", "cmd", ",", "profile", ")", "[", "'wo...
reset all runtime statistics for the load balancer cli examples: .
train
false
26,035
def _matching_loader_thinks_module_is_package(loader, mod_name): if hasattr(loader, 'is_package'): return loader.is_package(mod_name) elif ((loader.__class__.__module__ == '_frozen_importlib') and (loader.__class__.__name__ == 'NamespaceLoader')): return True raise AttributeError(('%s.is_package() method is missing but is required by Flask of PEP 302 import hooks. If you do not use import hooks and you encounter this error please file a bug against Flask.' % loader.__class__.__name__))
[ "def", "_matching_loader_thinks_module_is_package", "(", "loader", ",", "mod_name", ")", ":", "if", "hasattr", "(", "loader", ",", "'is_package'", ")", ":", "return", "loader", ".", "is_package", "(", "mod_name", ")", "elif", "(", "(", "loader", ".", "__class_...
given the loader that loaded a module and the module this function attempts to figure out if the given module is actually a package .
train
false
26,036
def set_container_agent_enabled_on_node(node, enabled): if enabled: d = node.run_script('enable_service', 'flocker-container-agent') else: d = node.run_script('disable_service', 'flocker-container-agent') if (not enabled): d.addCallback((lambda _: node.reboot())) d.addCallback((lambda _: deferLater(reactor, 20, (lambda : None)))) d = d.addCallback((lambda _: verify_socket(node.public_address, 22))) d.addCallback((lambda _: loop_until(reactor, (lambda : is_process_running(node, 'flocker-dataset-agent'))))) d.addCallback((lambda _: node.run_script('disable_service', 'flocker-dataset-agent'))) d.addCallback((lambda _: node.run_script('enable_service', 'flocker-dataset-agent'))) d.addCallback((lambda _: loop_until(reactor, (lambda : is_process_running(node, 'flocker-dataset-agent'))))) d.addCallback((lambda _: None)) return d
[ "def", "set_container_agent_enabled_on_node", "(", "node", ",", "enabled", ")", ":", "if", "enabled", ":", "d", "=", "node", ".", "run_script", "(", "'enable_service'", ",", "'flocker-container-agent'", ")", "else", ":", "d", "=", "node", ".", "run_script", "(...
ensure the container agent is enabled/disabled as specified .
train
false
26,037
def var_propagate1_pre36(a, b): c = ((a if (a > b) else b) + 5) return c
[ "def", "var_propagate1_pre36", "(", "a", ",", "b", ")", ":", "c", "=", "(", "(", "a", "if", "(", "a", ">", "b", ")", "else", "b", ")", "+", "5", ")", "return", "c" ]
label 0: a = arg [a] b = arg [b] $0 .
train
false
26,039
def expandServices(service_elements): expanded = [] for service_element in service_elements: expanded.extend(expandService(service_element)) return expanded
[ "def", "expandServices", "(", "service_elements", ")", ":", "expanded", "=", "[", "]", "for", "service_element", "in", "service_elements", ":", "expanded", ".", "extend", "(", "expandService", "(", "service_element", ")", ")", "return", "expanded" ]
take a sorted iterator of service elements and expand it into a sorted iterator of: there may be more than one item in the resulting list for each service element if there is more than one uri or type for a service .
train
true
26,040
def get_logger_for_python_runner_action(action_name): logger_name = ('actions.python.%s' % action_name) logger = logging.getLogger(logger_name) console = stdlib_logging.StreamHandler() console.setLevel(stdlib_logging.DEBUG) formatter = stdlib_logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') console.setFormatter(formatter) logger.addHandler(console) logger.setLevel(stdlib_logging.DEBUG) return logger
[ "def", "get_logger_for_python_runner_action", "(", "action_name", ")", ":", "logger_name", "=", "(", "'actions.python.%s'", "%", "action_name", ")", "logger", "=", "logging", ".", "getLogger", "(", "logger_name", ")", "console", "=", "stdlib_logging", ".", "StreamHa...
set up a logger which logs all the messages with level debug and above to stderr .
train
false
26,041
def project_post_save(sender, instance, created, **kwargs): if (not created): return if instance._importing: return template = getattr(instance, 'creation_template', None) if (template is None): ProjectTemplate = apps.get_model('projects', 'ProjectTemplate') template = ProjectTemplate.objects.get(slug=settings.DEFAULT_PROJECT_TEMPLATE) if instance.tags: template.tags = instance.tags if instance.tags_colors: template.tags_colors = instance.tags_colors template.apply_to_project(instance) instance.save() Role = apps.get_model('users', 'Role') try: owner_role = instance.roles.get(slug=template.default_owner_role) except Role.DoesNotExist: owner_role = instance.roles.first() if owner_role: Membership = apps.get_model('projects', 'Membership') Membership.objects.create(user=instance.owner, project=instance, role=owner_role, is_admin=True, email=instance.owner.email)
[ "def", "project_post_save", "(", "sender", ",", "instance", ",", "created", ",", "**", "kwargs", ")", ":", "if", "(", "not", "created", ")", ":", "return", "if", "instance", ".", "_importing", ":", "return", "template", "=", "getattr", "(", "instance", "...
populate new project dependen default data .
train
false
26,042
def test_utils(): tmp_dir = _TempDir() tmp_fname = op.join(tmp_dir, 'test_gen_mne_locs.csv') _generate_mne_locs_file(tmp_fname) installed_locs = _load_mne_locs() generated_locs = _load_mne_locs(tmp_fname) assert_equal(set(installed_locs.keys()), set(generated_locs.keys())) for key in installed_locs.keys(): assert_allclose(installed_locs[key], generated_locs[key], atol=1e-07)
[ "def", "test_utils", "(", ")", ":", "tmp_dir", "=", "_TempDir", "(", ")", "tmp_fname", "=", "op", ".", "join", "(", "tmp_dir", ",", "'test_gen_mne_locs.csv'", ")", "_generate_mne_locs_file", "(", "tmp_fname", ")", "installed_locs", "=", "_load_mne_locs", "(", ...
test artemis123 utils .
train
false
26,043
def test_choose_port_returns_an_open_port(): app = OnionShare() app.choose_port() socket.socket().bind(('127.0.0.1', app.port))
[ "def", "test_choose_port_returns_an_open_port", "(", ")", ":", "app", "=", "OnionShare", "(", ")", "app", ".", "choose_port", "(", ")", "socket", ".", "socket", "(", ")", ".", "bind", "(", "(", "'127.0.0.1'", ",", "app", ".", "port", ")", ")" ]
choose_port() returns an open port .
train
false
26,044
def generate_package(target, source, env): target = str(target[0]) sources_dir = (target + '.sources') suffix = env['PACKAGESUFFIX'] if (suffix == 'zip'): return _generate_zip_package(target, source, sources_dir) else: return _generate_tar_package(target, source, sources_dir, suffix)
[ "def", "generate_package", "(", "target", ",", "source", ",", "env", ")", ":", "target", "=", "str", "(", "target", "[", "0", "]", ")", "sources_dir", "=", "(", "target", "+", "'.sources'", ")", "suffix", "=", "env", "[", "'PACKAGESUFFIX'", "]", "if", ...
generate a package containing all of the source files .
train
false
26,045
def _activities_from_datasets_followed_by_user_query(user_id, limit): import ckan.model as model follower_objects = model.UserFollowingDataset.followee_list(user_id) if (not follower_objects): return model.Session.query(model.Activity).filter('0=1') return _activities_union_all(*[_activities_limit(_package_activity_query(follower.object_id), limit) for follower in follower_objects])
[ "def", "_activities_from_datasets_followed_by_user_query", "(", "user_id", ",", "limit", ")", ":", "import", "ckan", ".", "model", "as", "model", "follower_objects", "=", "model", ".", "UserFollowingDataset", ".", "followee_list", "(", "user_id", ")", "if", "(", "...
return a query for all activities from datasets that user_id follows .
train
false
26,046
@handle_response_format @treeio_login_required @module_admin_required() def index_modules(request, response_format='html'): modules = Module.objects.all().order_by('title') return render_to_response('core/administration/index_modules', {'modules': modules}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "module_admin_required", "(", ")", "def", "index_modules", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "modules", "=", "Module", ".", "objects", ".", "all", "(", ")", ".", "...
return list of all exploits modules .
train
false
26,048
def transform_params_constraint(params, Sinv, R, q): rsr = R.dot(Sinv).dot(R.T) reduction = Sinv.dot(R.T).dot(np.linalg.solve(rsr, (R.dot(params) - q))) return (params - reduction)
[ "def", "transform_params_constraint", "(", "params", ",", "Sinv", ",", "R", ",", "q", ")", ":", "rsr", "=", "R", ".", "dot", "(", "Sinv", ")", ".", "dot", "(", "R", ".", "T", ")", "reduction", "=", "Sinv", ".", "dot", "(", "R", ".", "T", ")", ...
find the parameters that statisfy linear constraint from unconstraint the linear constraint r params = q is imposed .
train
false
26,050
def is_blkdev(name): name = os.path.expanduser(name) stat_structure = None try: stat_structure = os.stat(name) except OSError as exc: if (exc.errno == errno.ENOENT): return False else: raise return stat.S_ISBLK(stat_structure.st_mode)
[ "def", "is_blkdev", "(", "name", ")", ":", "name", "=", "os", ".", "path", ".", "expanduser", "(", "name", ")", "stat_structure", "=", "None", "try", ":", "stat_structure", "=", "os", ".", "stat", "(", "name", ")", "except", "OSError", "as", "exc", "...
check if a file exists and is a block device .
train
true
26,051
@bdd.given('I clean up open tabs') def clean_open_tabs(quteproc): quteproc.set_setting('tabs', 'last-close', 'blank') quteproc.send_cmd(':window-only') quteproc.send_cmd(':tab-only') quteproc.send_cmd(':tab-close')
[ "@", "bdd", ".", "given", "(", "'I clean up open tabs'", ")", "def", "clean_open_tabs", "(", "quteproc", ")", ":", "quteproc", ".", "set_setting", "(", "'tabs'", ",", "'last-close'", ",", "'blank'", ")", "quteproc", ".", "send_cmd", "(", "':window-only'", ")",...
clean up open windows and tabs .
train
false
26,052
def _media_file_to_dict(mf, d): for fld in mf.readable_fields(): if ('art' == fld): continue val = getattr(mf, fld) if (val is None): val = '' if _verify_var_type(val): d[('$' + fld)] = _as_str(val)
[ "def", "_media_file_to_dict", "(", "mf", ",", "d", ")", ":", "for", "fld", "in", "mf", ".", "readable_fields", "(", ")", ":", "if", "(", "'art'", "==", "fld", ")", ":", "continue", "val", "=", "getattr", "(", "mf", ",", "fld", ")", "if", "(", "va...
populate dict with tags read from media file .
train
false
26,053
def getsid(name): return info(name)['sid']
[ "def", "getsid", "(", "name", ")", ":", "return", "info", "(", "name", ")", "[", "'sid'", "]" ]
return the sid for this windows service args: name : the name of the service for which to return the sid returns: str: a string representing the sid for the service cli example: .
train
false
26,054
def dmp_ff_prs_gcd(f, g, u, K): if (not u): return dup_ff_prs_gcd(f, g, K) result = _dmp_ff_trivial_gcd(f, g, u, K) if (result is not None): return result (fc, F) = dmp_primitive(f, u, K) (gc, G) = dmp_primitive(g, u, K) h = dmp_subresultants(F, G, u, K)[(-1)] (c, _, _) = dmp_ff_prs_gcd(fc, gc, (u - 1), K) (_, h) = dmp_primitive(h, u, K) h = dmp_mul_term(h, c, 0, u, K) h = dmp_ground_monic(h, u, K) cff = dmp_quo(f, h, u, K) cfg = dmp_quo(g, h, u, K) return (h, cff, cfg)
[ "def", "dmp_ff_prs_gcd", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_ff_prs_gcd", "(", "f", ",", "g", ",", "K", ")", "result", "=", "_dmp_ff_trivial_gcd", "(", "f", ",", "g", ",", "u", ",", ...
computes polynomial gcd using subresultants over a field .
train
false
26,057
def _get_full_stream(stream_name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) r = {} stream = _get_basic_stream(stream_name, conn)['result'] full_stream = stream while stream['StreamDescription']['HasMoreShards']: stream = _execute_with_retries(conn, 'describe_stream', StreamName=stream_name, ExclusiveStartShardId=stream['StreamDescription']['Shards'][(-1)]['ShardId']) stream = stream['result'] full_stream['StreamDescription']['Shards'] += stream['StreamDescription']['Shards'] r['result'] = full_stream return r
[ "def", "_get_full_stream", "(", "stream_name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",",...
get complete stream info from aws .
train
true
26,058
def provision_as_root(node, package_source, variants=()): commands = [] commands.append(run_remotely(username='root', address=node.address, commands=provision(package_source=package_source, distribution=node.distribution, variants=variants))) return sequence(commands)
[ "def", "provision_as_root", "(", "node", ",", "package_source", ",", "variants", "=", "(", ")", ")", ":", "commands", "=", "[", "]", "commands", ".", "append", "(", "run_remotely", "(", "username", "=", "'root'", ",", "address", "=", "node", ".", "addres...
provision flocker on a node using the root user .
train
false
26,060
def has_labels(dataset_dir, filename=LABELS_FILENAME): return tf.gfile.Exists(os.path.join(dataset_dir, filename))
[ "def", "has_labels", "(", "dataset_dir", ",", "filename", "=", "LABELS_FILENAME", ")", ":", "return", "tf", ".", "gfile", ".", "Exists", "(", "os", ".", "path", ".", "join", "(", "dataset_dir", ",", "filename", ")", ")" ]
specifies whether or not the dataset directory contains a label map file .
train
false
26,061
def page2hidden_page(page): page.status = 'hidden' return page
[ "def", "page2hidden_page", "(", "page", ")", ":", "page", ".", "status", "=", "'hidden'", "return", "page" ]
transform a page to a hidden page .
train
false
26,062
def getPlusMinusSign(number): if (number >= 0.0): return 1.0 return (-1.0)
[ "def", "getPlusMinusSign", "(", "number", ")", ":", "if", "(", "number", ">=", "0.0", ")", ":", "return", "1.0", "return", "(", "-", "1.0", ")" ]
get one if the number is zero or positive else negative one .
train
false
26,063
def _ls_spark_task_logs(fs, log_dir_stream, application_id=None, job_id=None): stderr_logs = [] key_to_stdout_log = {} for match in _ls_logs(fs, log_dir_stream, _match_task_log_path, application_id=application_id, job_id=job_id): if (match['log_type'] == 'stderr'): stderr_logs.append(match) elif (match['log_type'] == 'stdout'): key_to_stdout_log[_log_key(match)] = match for stderr_log in stderr_logs: stdout_log = key_to_stdout_log.get(_log_key(stderr_log)) if stdout_log: stderr_log['stdout'] = stdout_log return stderr_logs
[ "def", "_ls_spark_task_logs", "(", "fs", ",", "log_dir_stream", ",", "application_id", "=", "None", ",", "job_id", "=", "None", ")", ":", "stderr_logs", "=", "[", "]", "key_to_stdout_log", "=", "{", "}", "for", "match", "in", "_ls_logs", "(", "fs", ",", ...
yield matching spark logs .
train
false
26,064
def assemble_results(input_averages, input_variances, baseline_error, errortype, ntree): results = ['Model Random Forests', ('Error type %s' % errortype)] ave_error = float(mean(input_averages)) if (errortype in ['cv5', 'cv10']): ave_stdev = pooled_standard_deviation(input_variances) est_error = ('%s +/- %s' % (ave_error, ave_stdev)) est_error_line = ' DCTB '.join(['Estimated Error (mean +/- s.d)', est_error]) elif (errortype in ['oob', 'loo']): est_error_line = ' DCTB '.join(['Estimated Error (mean)', str(ave_error)]) results.append(est_error_line) results.append(' DCTB '.join(['Baseline Error (for random guessing', str(baseline_error)])) ratio = calc_baseline_error_to_observed_error(baseline_error, ave_error) results.append(' DCTB '.join(['Ratio baseline error to observed error', str(ratio)])) results.append(' DCTB '.join(['Number of trees', str(ntree)])) return results
[ "def", "assemble_results", "(", "input_averages", ",", "input_variances", ",", "baseline_error", ",", "errortype", ",", "ntree", ")", ":", "results", "=", "[", "'Model Random Forests'", ",", "(", "'Error type %s'", "%", "errortype", ")", "]", "ave_error", "="...
the summary format below is done on the r backend .
train
false
26,065
def units_func(f): return double_output(f, [c_void_p, POINTER(c_char_p)], strarg=True)
[ "def", "units_func", "(", "f", ")", ":", "return", "double_output", "(", "f", ",", "[", "c_void_p", ",", "POINTER", "(", "c_char_p", ")", "]", ",", "strarg", "=", "True", ")" ]
creates a ctypes function prototype for osr units functions .
train
false
26,066
@must_have_permission(ADMIN) @must_be_branched_from_node def edit_draft_registration_page(auth, node, draft, **kwargs): check_draft_state(draft) ret = project_utils.serialize_node(node, auth, primary=True) ret['draft'] = serialize_draft_registration(draft, auth) return ret
[ "@", "must_have_permission", "(", "ADMIN", ")", "@", "must_be_branched_from_node", "def", "edit_draft_registration_page", "(", "auth", ",", "node", ",", "draft", ",", "**", "kwargs", ")", ":", "check_draft_state", "(", "draft", ")", "ret", "=", "project_utils", ...
draft registration editor :return: serialized draftregistration :rtype: dict .
train
false
26,069
def test_change_mutable_default(): class MutableTester(XBlock, ): 'Test class with mutable fields.' list_field = List() field_data_a = DictFieldData({}) mutable_test_a = MutableTester(TestRuntime(services={'field-data': field_data_a}), scope_ids=Mock(spec=ScopeIds)) field_data_b = DictFieldData({}) mutable_test_b = MutableTester(TestRuntime(services={'field-data': field_data_b}), scope_ids=Mock(spec=ScopeIds)) mutable_test_a.list_field mutable_test_a.save() with assert_raises(KeyError): field_data_a.get(mutable_test_a, 'list_field') mutable_test_a.list_field.append(1) mutable_test_a.save() assert_equals([1], field_data_a.get(mutable_test_a, 'list_field')) with assert_raises(KeyError): field_data_b.get(mutable_test_b, 'list_field')
[ "def", "test_change_mutable_default", "(", ")", ":", "class", "MutableTester", "(", "XBlock", ",", ")", ":", "list_field", "=", "List", "(", ")", "field_data_a", "=", "DictFieldData", "(", "{", "}", ")", "mutable_test_a", "=", "MutableTester", "(", "TestRuntim...
ensure that mutating the default value for a field causes the changes to be saved .
train
false
26,071
def inception_v1(inputs, num_classes=1000, is_training=True, dropout_keep_prob=0.8, prediction_fn=slim.softmax, spatial_squeeze=True, reuse=None, scope='InceptionV1'): with tf.variable_scope(scope, 'InceptionV1', [inputs, num_classes], reuse=reuse) as scope: with slim.arg_scope([slim.batch_norm, slim.dropout], is_training=is_training): (net, end_points) = inception_v1_base(inputs, scope=scope) with tf.variable_scope('Logits'): net = slim.avg_pool2d(net, [7, 7], stride=1, scope='MaxPool_0a_7x7') net = slim.dropout(net, dropout_keep_prob, scope='Dropout_0b') logits = slim.conv2d(net, num_classes, [1, 1], activation_fn=None, normalizer_fn=None, scope='Conv2d_0c_1x1') if spatial_squeeze: logits = tf.squeeze(logits, [1, 2], name='SpatialSqueeze') end_points['Logits'] = logits end_points['Predictions'] = prediction_fn(logits, scope='Predictions') return (logits, end_points)
[ "def", "inception_v1", "(", "inputs", ",", "num_classes", "=", "1000", ",", "is_training", "=", "True", ",", "dropout_keep_prob", "=", "0.8", ",", "prediction_fn", "=", "slim", ".", "softmax", ",", "spatial_squeeze", "=", "True", ",", "reuse", "=", "None", ...
defines the inception v1 architecture .
train
false
26,072
@register.filter(is_safe=True) def apnumber(value): try: value = int(value) except (TypeError, ValueError): return value if (not (0 < value < 10)): return value return (_(u'one'), _(u'two'), _(u'three'), _(u'four'), _(u'five'), _(u'six'), _(u'seven'), _(u'eight'), _(u'nine'))[(value - 1)]
[ "@", "register", ".", "filter", "(", "is_safe", "=", "True", ")", "def", "apnumber", "(", "value", ")", ":", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "value", "if", "(", ...
for numbers 1-9 .
train
false
26,074
def mxlookup(name): if (Base.defaults['server'] == []): Base.DiscoverNameServers() a = Base.DnsRequest(name, qtype='mx').req().answers l = sorted(map((lambda x: x['data']), a)) return l
[ "def", "mxlookup", "(", "name", ")", ":", "if", "(", "Base", ".", "defaults", "[", "'server'", "]", "==", "[", "]", ")", ":", "Base", ".", "DiscoverNameServers", "(", ")", "a", "=", "Base", ".", "DnsRequest", "(", "name", ",", "qtype", "=", "'mx'",...
convenience routine for doing an mx lookup of a name .
train
false
26,075
def time_utcnow(): return datetime.now(UTC)
[ "def", "time_utcnow", "(", ")", ":", "return", "datetime", ".", "now", "(", "UTC", ")" ]
returns a timezone aware utc timestamp .
train
false
26,076
def calculate_username(email): email = email.split('@')[0] username = re.sub('[^\\w.@+-]', '-', email) username = username[:settings.USERNAME_MAX_LENGTH] suggested_username = username count = 0 while User.objects.filter(username=suggested_username).exists(): count += 1 suggested_username = ('%s%d' % (username, count)) if (len(suggested_username) > settings.USERNAME_MAX_LENGTH): return base64.urlsafe_b64encode(hashlib.sha1(email).digest()).rstrip('=') return suggested_username
[ "def", "calculate_username", "(", "email", ")", ":", "email", "=", "email", ".", "split", "(", "'@'", ")", "[", "0", "]", "username", "=", "re", ".", "sub", "(", "'[^\\\\w.@+-]'", ",", "'-'", ",", "email", ")", "username", "=", "username", "[", ":", ...
calculate username from email address .
train
false
26,077
def collect_set(option, opt_str, value, parser): assert (value is None) value = set([]) for arg in parser.rargs: if (arg[:1] == '-'): break value.add(arg) del parser.rargs[:len(value)] setattr(parser.values, option.dest, value)
[ "def", "collect_set", "(", "option", ",", "opt_str", ",", "value", ",", "parser", ")", ":", "assert", "(", "value", "is", "None", ")", "value", "=", "set", "(", "[", "]", ")", "for", "arg", "in", "parser", ".", "rargs", ":", "if", "(", "arg", "["...
collect multiple option values into a single set .
train
false
26,079
@pytest.mark.parametrize('scorer,processor', scorers_processors()) @given(data=st.data()) @settings(max_examples=100) def test_identical_strings_extracted(scorer, processor, data): strings = data.draw(st.lists(st.text(min_size=10, max_size=100), min_size=1, max_size=50)) choiceidx = data.draw(st.integers(min_value=0, max_value=(len(strings) - 1))) choice = strings[choiceidx] assume((processor(choice) != '')) result = process.extractBests(choice, strings, scorer=scorer, processor=processor, score_cutoff=100, limit=None) assert (result != []) assert ((choice, 100) in result)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'scorer,processor'", ",", "scorers_processors", "(", ")", ")", "@", "given", "(", "data", "=", "st", ".", "data", "(", ")", ")", "@", "settings", "(", "max_examples", "=", "100", ")", "def", "test_i...
test that identical strings will always return a perfect match .
train
false
26,080
def _conf_save_filters(): config = __salt__['config.option']('iptables.save_filters', []) return config
[ "def", "_conf_save_filters", "(", ")", ":", "config", "=", "__salt__", "[", "'config.option'", "]", "(", "'iptables.save_filters'", ",", "[", "]", ")", "return", "config" ]
return array of strings from save_filters in config .
train
false
26,081
def filter_headers_key(remove_headers): def filter(headers): return filter_headers(headers, remove_headers) return filter
[ "def", "filter_headers_key", "(", "remove_headers", ")", ":", "def", "filter", "(", "headers", ")", ":", "return", "filter_headers", "(", "headers", ",", "remove_headers", ")", "return", "filter" ]
returns a key function that produces a key by removing headers from a dict .
train
false
26,082
def NewClientLogUrl(client, obj_store, user_id, device_id, request, callback): kwargs = {'user_id': user_id, 'device_id': device_id, 'timestamp': request['timestamp'], 'client_log_id': request['client_log_id']} if ('content_type' in request): kwargs['content_type'] = request['content_type'] if ('content_md5' in request): kwargs['content_md5'] = request['content_md5'] if ('num_bytes' in request): kwargs['max_bytes'] = request['num_bytes'] logging.info(('GET NEW CLIENT LOG URL: user: %d, device: %d, client log id: %s' % (user_id, device_id, request['client_log_id']))) response = {'client_log_put_url': ClientLog.GetPutUrl(**kwargs)} callback(response)
[ "def", "NewClientLogUrl", "(", "client", ",", "obj_store", ",", "user_id", ",", "device_id", ",", "request", ",", "callback", ")", ":", "kwargs", "=", "{", "'user_id'", ":", "user_id", ",", "'device_id'", ":", "device_id", ",", "'timestamp'", ":", "request",...
gets an s3 put url for clients to write mobile device logs .
train
false
26,083
def write_AlignIO_protein(): assert (1 == AlignIO.convert('Clustalw/hedgehog.aln', 'clustal', 'Phylip/hedgehog.phy', 'phylip'))
[ "def", "write_AlignIO_protein", "(", ")", ":", "assert", "(", "1", "==", "AlignIO", ".", "convert", "(", "'Clustalw/hedgehog.aln'", ",", "'clustal'", ",", "'Phylip/hedgehog.phy'", ",", "'phylip'", ")", ")" ]
convert hedgehog .
train
false
26,084
def HandleException(desc, exception): args = (((str(desc) + '\n') + str(exception)), 'An exception occurred!') thread.start_new_thread(Forms.MessageBox.Show, args)
[ "def", "HandleException", "(", "desc", ",", "exception", ")", ":", "args", "=", "(", "(", "(", "str", "(", "desc", ")", "+", "'\\n'", ")", "+", "str", "(", "exception", ")", ")", ",", "'An exception occurred!'", ")", "thread", ".", "start_new_thread", ...
this pops up windows .
train
false
26,085
def GetRevisionsSample(): client = CreateClient() for entry in client.GetResources(limit=55).entry: revisions = client.GetRevisions(entry) for revision in revisions.entry: print revision.publish, revision.GetPublishLink()
[ "def", "GetRevisionsSample", "(", ")", ":", "client", "=", "CreateClient", "(", ")", "for", "entry", "in", "client", ".", "GetResources", "(", "limit", "=", "55", ")", ".", "entry", ":", "revisions", "=", "client", ".", "GetRevisions", "(", "entry", ")",...
get the revision history for resources .
train
false
26,087
def ping_default_gateway(): network = open('/etc/sysconfig/network') m = re.search('GATEWAY=(\\S+)', network.read()) if m: gw = m.group(1) cmd = ('ping %s -c 5 > /dev/null' % gw) return utils.system(cmd, ignore_status=True) raise error.TestError('Unable to find default gateway')
[ "def", "ping_default_gateway", "(", ")", ":", "network", "=", "open", "(", "'/etc/sysconfig/network'", ")", "m", "=", "re", ".", "search", "(", "'GATEWAY=(\\\\S+)'", ",", "network", ".", "read", "(", ")", ")", "if", "m", ":", "gw", "=", "m", ".", "grou...
ping the default gateway .
train
false
26,088
def S_white(x, d): r = np.ones(d.shape[1]) return aggregate_cov(x, d, r=r, weights=None)
[ "def", "S_white", "(", "x", ",", "d", ")", ":", "r", "=", "np", ".", "ones", "(", "d", ".", "shape", "[", "1", "]", ")", "return", "aggregate_cov", "(", "x", ",", "d", ",", "r", "=", "r", ",", "weights", "=", "None", ")" ]
simple white heteroscedasticity robust covariance note: calculating this way is very inefficient .
train
false
26,089
def connect_codedeploy(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): from boto.codedeploy.layer1 import CodeDeployConnection return CodeDeployConnection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, **kwargs)
[ "def", "connect_codedeploy", "(", "aws_access_key_id", "=", "None", ",", "aws_secret_access_key", "=", "None", ",", "**", "kwargs", ")", ":", "from", "boto", ".", "codedeploy", ".", "layer1", "import", "CodeDeployConnection", "return", "CodeDeployConnection", "(", ...
connect to aws codedeploy :type aws_access_key_id: string .
train
false
26,090
@_ignore_inferred def infer_returned_object(pyfunction, args): object_info = pyfunction.pycore.object_info result = object_info.get_exact_returned(pyfunction, args) if (result is not None): return result result = _infer_returned(pyfunction, args) if (result is not None): if (args and (pyfunction.get_module().get_resource() is not None)): params = args.get_arguments(pyfunction.get_param_names(special_args=False)) object_info.function_called(pyfunction, params, result) return result result = object_info.get_returned(pyfunction, args) if (result is not None): return result type_ = hint_return(pyfunction) if (type_ is not None): return rope.base.pyobjects.PyObject(type_)
[ "@", "_ignore_inferred", "def", "infer_returned_object", "(", "pyfunction", ",", "args", ")", ":", "object_info", "=", "pyfunction", ".", "pycore", ".", "object_info", "result", "=", "object_info", ".", "get_exact_returned", "(", "pyfunction", ",", "args", ")", ...
infer the pyobject this pyfunction returns after calling .
train
true
26,092
def _brick_get_connector_properties_error(multipath=False, enforce_multipath=False): connector = dict(DEFAULT_CONNECTOR) del connector['wwpns'] return connector
[ "def", "_brick_get_connector_properties_error", "(", "multipath", "=", "False", ",", "enforce_multipath", "=", "False", ")", ":", "connector", "=", "dict", "(", "DEFAULT_CONNECTOR", ")", "del", "connector", "[", "'wwpns'", "]", "return", "connector" ]
return an incomplete connector object .
train
false