Dataset columns (each record below lists these fields in this order):
id_within_dataset: int64, values 1 to 55.5k
snippet: string, lengths 19 to 14.2k
tokens: list, lengths 6 to 1.63k
nl: string, lengths 6 to 352
split_within_dataset: string, 1 value
is_duplicated: bool, 2 classes
49,657
def _path_info(): return {'config': standarddir.config(), 'data': standarddir.data(), 'system_data': standarddir.system_data(), 'cache': standarddir.cache(), 'download': standarddir.download(), 'runtime': standarddir.runtime()}
[ "def", "_path_info", "(", ")", ":", "return", "{", "'config'", ":", "standarddir", ".", "config", "(", ")", ",", "'data'", ":", "standarddir", ".", "data", "(", ")", ",", "'system_data'", ":", "standarddir", ".", "system_data", "(", ")", ",", "'cache'", ":", "standarddir", ".", "cache", "(", ")", ",", "'download'", ":", "standarddir", ".", "download", "(", ")", ",", "'runtime'", ":", "standarddir", ".", "runtime", "(", ")", "}" ]
get info about important path names .
train
false
49,658
def test_ast_valid_unary_op(): can_compile(u'(not 2)') can_compile(u'(~ 1)')
[ "def", "test_ast_valid_unary_op", "(", ")", ":", "can_compile", "(", "u'(not 2)'", ")", "can_compile", "(", "u'(~ 1)'", ")" ]
make sure ast can compile valid unary operator .
train
false
49,659
def checkIntegrity(): logger.debug('running code integrity check') retVal = True for (checksum, _) in (re.split('\\s+', _) for _ in getFileItems(paths.CHECKSUM_MD5)): path = os.path.normpath(os.path.join(paths.SQLMAP_ROOT_PATH, _)) if (not os.path.isfile(path)): logger.error(("missing file detected '%s'" % path)) retVal = False elif (hashlib.md5(open(path, 'rb').read()).hexdigest() != checksum): logger.error(("wrong checksum of file '%s' detected" % path)) retVal = False return retVal
[ "def", "checkIntegrity", "(", ")", ":", "logger", ".", "debug", "(", "'running code integrity check'", ")", "retVal", "=", "True", "for", "(", "checksum", ",", "_", ")", "in", "(", "re", ".", "split", "(", "'\\\\s+'", ",", "_", ")", "for", "_", "in", "getFileItems", "(", "paths", ".", "CHECKSUM_MD5", ")", ")", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "paths", ".", "SQLMAP_ROOT_PATH", ",", "_", ")", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "path", ")", ")", ":", "logger", ".", "error", "(", "(", "\"missing file detected '%s'\"", "%", "path", ")", ")", "retVal", "=", "False", "elif", "(", "hashlib", ".", "md5", "(", "open", "(", "path", ",", "'rb'", ")", ".", "read", "(", ")", ")", ".", "hexdigest", "(", ")", "!=", "checksum", ")", ":", "logger", ".", "error", "(", "(", "\"wrong checksum of file '%s' detected\"", "%", "path", ")", ")", "retVal", "=", "False", "return", "retVal" ]
checks the integrity of code files when handling unhandled exceptions .
train
false
49,660
def email_alerts(action): if action: return __execute_cmd('config -g cfgEmailAlert -o cfgEmailAlertEnable -i 1 1') else: return __execute_cmd('config -g cfgEmailAlert -o cfgEmailAlertEnable -i 1 0')
[ "def", "email_alerts", "(", "action", ")", ":", "if", "action", ":", "return", "__execute_cmd", "(", "'config -g cfgEmailAlert -o cfgEmailAlertEnable -i 1 1'", ")", "else", ":", "return", "__execute_cmd", "(", "'config -g cfgEmailAlert -o cfgEmailAlertEnable -i 1 0'", ")" ]
enable/disable email alerts .
train
false
49,661
def _do(name, fun, path=None): host = find_guest(name, quiet=True, path=path) if (not host): return False client = salt.client.get_local_client(__opts__['conf_file']) cmd_ret = client.cmd_iter(host, 'lxc.{0}'.format(fun), [name], kwarg={'path': path}, timeout=60) data = next(cmd_ret) data = data.get(host, {}).get('ret', None) if data: data = {host: data} return data
[ "def", "_do", "(", "name", ",", "fun", ",", "path", "=", "None", ")", ":", "host", "=", "find_guest", "(", "name", ",", "quiet", "=", "True", ",", "path", "=", "path", ")", "if", "(", "not", "host", ")", ":", "return", "False", "client", "=", "salt", ".", "client", ".", "get_local_client", "(", "__opts__", "[", "'conf_file'", "]", ")", "cmd_ret", "=", "client", ".", "cmd_iter", "(", "host", ",", "'lxc.{0}'", ".", "format", "(", "fun", ")", ",", "[", "name", "]", ",", "kwarg", "=", "{", "'path'", ":", "path", "}", ",", "timeout", "=", "60", ")", "data", "=", "next", "(", "cmd_ret", ")", "data", "=", "data", ".", "get", "(", "host", ",", "{", "}", ")", ".", "get", "(", "'ret'", ",", "None", ")", "if", "data", ":", "data", "=", "{", "host", ":", "data", "}", "return", "data" ]
worker function for the others to use ; this handles all the gsettings magic .
train
true
49,663
def get_interfaces(zone, permanent=True): cmd = '--zone={0} --list-interfaces'.format(zone) if permanent: cmd += ' --permanent' return __firewall_cmd(cmd).split()
[ "def", "get_interfaces", "(", "zone", ",", "permanent", "=", "True", ")", ":", "cmd", "=", "'--zone={0} --list-interfaces'", ".", "format", "(", "zone", ")", "if", "permanent", ":", "cmd", "+=", "' --permanent'", "return", "__firewall_cmd", "(", "cmd", ")", ".", "split", "(", ")" ]
returns a list of interfaces bound to a zone .
train
true
49,664
def get_unsubscribed(mailchimp, list_id): return get_members(mailchimp, list_id, 'unsubscribed')
[ "def", "get_unsubscribed", "(", "mailchimp", ",", "list_id", ")", ":", "return", "get_members", "(", "mailchimp", ",", "list_id", ",", "'unsubscribed'", ")" ]
returns a set of email addresses that have unsubscribed from list_id .
train
false
49,665
def DNSServiceRemoveRecord(sdRef, RecordRef, flags=0): _global_lock.acquire() try: _DNSServiceRemoveRecord(sdRef, RecordRef, flags) finally: _global_lock.release() RecordRef._invalidate()
[ "def", "DNSServiceRemoveRecord", "(", "sdRef", ",", "RecordRef", ",", "flags", "=", "0", ")", ":", "_global_lock", ".", "acquire", "(", ")", "try", ":", "_DNSServiceRemoveRecord", "(", "sdRef", ",", "RecordRef", ",", "flags", ")", "finally", ":", "_global_lock", ".", "release", "(", ")", "RecordRef", ".", "_invalidate", "(", ")" ]
remove a record previously added to a service record set via dnsserviceaddrecord() .
train
false
49,666
def format_plural(number, possessive=False): if (not possessive): return (u'' if (number == 1) else u's') return (u"'s" if (number == 1) else u"s'")
[ "def", "format_plural", "(", "number", ",", "possessive", "=", "False", ")", ":", "if", "(", "not", "possessive", ")", ":", "return", "(", "u''", "if", "(", "number", "==", "1", ")", "else", "u's'", ")", "return", "(", "u\"'s\"", "if", "(", "number", "==", "1", ")", "else", "u\"s'\"", ")" ]
cosmetic ditty to provide the proper string formatting variable to handle singular/plural situations .
train
false
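A minimal usage sketch of the format_plural snippet above, re-declared here so it runs standalone:

# Usage sketch for format_plural (re-declared to be self-contained).
def format_plural(number, possessive=False):
    if not possessive:
        return u'' if number == 1 else u's'
    return u"'s" if number == 1 else u"s'"

print(u'1 file%s' % format_plural(1))                   # 1 file
print(u'3 file%s' % format_plural(3))                   # 3 files
print(u'the user%s profile' % format_plural(1, True))   # the user's profile
print(u'the user%s profiles' % format_plural(5, True))  # the users' profiles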
49,667
def multistep_change_log(parent, children): def verify(self, logger): [parent_action] = LoggedAction.of_type(logger.messages, parent) children_actions = [LoggedAction.of_type(logger.messages, child_action)[0] for child_action in children] self.assertEqual(children_actions, parent_action.children) return verify
[ "def", "multistep_change_log", "(", "parent", ",", "children", ")", ":", "def", "verify", "(", "self", ",", "logger", ")", ":", "[", "parent_action", "]", "=", "LoggedAction", ".", "of_type", "(", "logger", ".", "messages", ",", "parent", ")", "children_actions", "=", "[", "LoggedAction", ".", "of_type", "(", "logger", ".", "messages", ",", "child_action", ")", "[", "0", "]", "for", "child_action", "in", "children", "]", "self", ".", "assertEqual", "(", "children_actions", ",", "parent_action", ".", "children", ")", "return", "verify" ]
create an eliot logging validation function which asserts that the given parent action is logged with the given children actions .
train
false
49,668
def app_name(request): context = {} name = get_app_name(request) if (name is not None): context.update({'app_name': name}) return context
[ "def", "app_name", "(", "request", ")", ":", "context", "=", "{", "}", "name", "=", "get_app_name", "(", "request", ")", "if", "(", "name", "is", "not", "None", ")", ":", "context", ".", "update", "(", "{", "'app_name'", ":", "name", "}", ")", "return", "context" ]
add the name of the app to the template context .
train
false
49,670
def formatExceptionInfo(): (cla, exc) = sys.exc_info()[:2] return (cla.__name__, str(exc))
[ "def", "formatExceptionInfo", "(", ")", ":", "(", "cla", ",", "exc", ")", "=", "sys", ".", "exc_info", "(", ")", "[", ":", "2", "]", "return", "(", "cla", ".", "__name__", ",", "str", "(", "exc", ")", ")" ]
consistently format exception information .
train
false
49,673
def axis_ticklabels_overlap(labels): if (not labels): return False try: bboxes = [l.get_window_extent() for l in labels] overlaps = [b.count_overlaps(bboxes) for b in bboxes] return (max(overlaps) > 1) except RuntimeError: return False
[ "def", "axis_ticklabels_overlap", "(", "labels", ")", ":", "if", "(", "not", "labels", ")", ":", "return", "False", "try", ":", "bboxes", "=", "[", "l", ".", "get_window_extent", "(", ")", "for", "l", "in", "labels", "]", "overlaps", "=", "[", "b", ".", "count_overlaps", "(", "bboxes", ")", "for", "b", "in", "bboxes", "]", "return", "(", "max", "(", "overlaps", ")", ">", "1", ")", "except", "RuntimeError", ":", "return", "False" ]
return a boolean for whether the list of ticklabels has overlaps .
train
false
49,675
def join_contexts(s1, s2): if s1: if s2: return ('%s --> %s' % (s1, s2)) else: return s1 else: return s2
[ "def", "join_contexts", "(", "s1", ",", "s2", ")", ":", "if", "s1", ":", "if", "s2", ":", "return", "(", "'%s --> %s'", "%", "(", "s1", ",", "s2", ")", ")", "else", ":", "return", "s1", "else", ":", "return", "s2" ]
join two context strings .
train
false
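A quick standalone sketch showing the joining behavior of join_contexts above:

# join_contexts links two context strings with ' --> ', falling back to
# whichever one is non-empty.
def join_contexts(s1, s2):
    if s1:
        return '%s --> %s' % (s1, s2) if s2 else s1
    return s2

print(join_contexts('parse', 'validate'))  # parse --> validate
print(join_contexts('', 'validate'))       # validate
print(join_contexts('parse', ''))          # parse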
49,676
def read_plain_double(file_obj, count): return struct.unpack('<{0}d'.format(count).encode(u'utf-8'), file_obj.read((8 * count)))
[ "def", "read_plain_double", "(", "file_obj", ",", "count", ")", ":", "return", "struct", ".", "unpack", "(", "'<{0}d'", ".", "format", "(", "count", ")", ".", "encode", "(", "u'utf-8'", ")", ",", "file_obj", ".", "read", "(", "(", "8", "*", "count", ")", ")", ")" ]
read count 64-bit floats using the plain encoding .
train
true
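What read_plain_double unpacks can be reproduced with io.BytesIO standing in for file_obj; a sketch (the .encode(u'utf-8') in the original is a Python 2/3 compatibility detail, omitted here):

import io
import struct

# Unpack `count` little-endian 64-bit floats ('<{count}d'), 8 bytes each.
count = 3
file_obj = io.BytesIO(struct.pack('<3d', 1.0, 2.5, -4.0))
values = struct.unpack('<{0}d'.format(count), file_obj.read(8 * count))
print(values)  # (1.0, 2.5, -4.0)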
49,677
def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False): if (name not in networks_map): raise ValueError(('Name of network unknown %s' % name)) arg_scope = arg_scopes_map[name](weight_decay=weight_decay) func = networks_map[name] @functools.wraps(func) def network_fn(images): with slim.arg_scope(arg_scope): return func(images, num_classes, is_training=is_training) if hasattr(func, 'default_image_size'): network_fn.default_image_size = func.default_image_size return network_fn
[ "def", "get_network_fn", "(", "name", ",", "num_classes", ",", "weight_decay", "=", "0.0", ",", "is_training", "=", "False", ")", ":", "if", "(", "name", "not", "in", "networks_map", ")", ":", "raise", "ValueError", "(", "(", "'Name of network unknown %s'", "%", "name", ")", ")", "arg_scope", "=", "arg_scopes_map", "[", "name", "]", "(", "weight_decay", "=", "weight_decay", ")", "func", "=", "networks_map", "[", "name", "]", "@", "functools", ".", "wraps", "(", "func", ")", "def", "network_fn", "(", "images", ")", ":", "with", "slim", ".", "arg_scope", "(", "arg_scope", ")", ":", "return", "func", "(", "images", ",", "num_classes", ",", "is_training", "=", "is_training", ")", "if", "hasattr", "(", "func", ",", "'default_image_size'", ")", ":", "network_fn", ".", "default_image_size", "=", "func", ".", "default_image_size", "return", "network_fn" ]
returns a network_fn such as logits .
train
false
49,678
@deprecated_network def do_secgroup_list_default_rules(cs, args): _print_secgroup_rules(cs.security_group_default_rules.list(), show_source_group=False)
[ "@", "deprecated_network", "def", "do_secgroup_list_default_rules", "(", "cs", ",", "args", ")", ":", "_print_secgroup_rules", "(", "cs", ".", "security_group_default_rules", ".", "list", "(", ")", ",", "show_source_group", "=", "False", ")" ]
list rules that will be added to the default security group for new tenants .
train
false
49,679
def set_version(): fnull = open(os.devnull, 'w') process = subprocess.call(['/usr/bin/dpkg-query', '-l', 'rbp-mediacenter-osmc'], stderr=fnull, stdout=fnull) fnull.close() if (process == 0): version_string = 'Unsupported OSMC Alpha release' else: version = [] with open('/etc/os-release', 'r') as f: tags = ['NAME=', 'VERSION=', 'VERSION_ID='] lines = f.readlines() for line in lines: for tag in tags: if line.startswith(tag): version.append(line[len(tag):].replace('"', '').replace('\n', '')) version_string = ' '.join(version) log(('Current Version: %s' % version_string)) WINDOW.setProperty('osmc_version', version_string)
[ "def", "set_version", "(", ")", ":", "fnull", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "process", "=", "subprocess", ".", "call", "(", "[", "'/usr/bin/dpkg-query'", ",", "'-l'", ",", "'rbp-mediacenter-osmc'", "]", ",", "stderr", "=", "fnull", ",", "stdout", "=", "fnull", ")", "fnull", ".", "close", "(", ")", "if", "(", "process", "==", "0", ")", ":", "version_string", "=", "'Unsupported OSMC Alpha release'", "else", ":", "version", "=", "[", "]", "with", "open", "(", "'/etc/os-release'", ",", "'r'", ")", "as", "f", ":", "tags", "=", "[", "'NAME='", ",", "'VERSION='", ",", "'VERSION_ID='", "]", "lines", "=", "f", ".", "readlines", "(", ")", "for", "line", "in", "lines", ":", "for", "tag", "in", "tags", ":", "if", "line", ".", "startswith", "(", "tag", ")", ":", "version", ".", "append", "(", "line", "[", "len", "(", "tag", ")", ":", "]", ".", "replace", "(", "'\"'", ",", "''", ")", ".", "replace", "(", "'\\n'", ",", "''", ")", ")", "version_string", "=", "' '", ".", "join", "(", "version", ")", "log", "(", "(", "'Current Version: %s'", "%", "version_string", ")", ")", "WINDOW", ".", "setProperty", "(", "'osmc_version'", ",", "version_string", ")" ]
loads the current osmc version into the home window for display in myosmc .
train
false
49,684
@task @needs('pavelib.prereqs.install_python_prereqs') @cmdopts([('system=', 's', 'System to act on')]) @timed def find_fixme(options): num_fixme = 0 systems = getattr(options, 'system', ALL_SYSTEMS).split(',') for system in systems: report_dir = (Env.REPORT_DIR / system).makedirs_p() apps_list = ' '.join(top_python_dirs(system)) pythonpath_prefix = 'PYTHONPATH={system}/djangoapps:common/djangoapps:common/lib'.format(system=system) sh('{pythonpath_prefix} pylint --disable R,C,W,E --enable=fixme --msg-template={msg_template} {apps} | tee {report_dir}/pylint_fixme.report'.format(pythonpath_prefix=pythonpath_prefix, msg_template='"{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"', apps=apps_list, report_dir=report_dir)) num_fixme += _count_pylint_violations('{report_dir}/pylint_fixme.report'.format(report_dir=report_dir)) print ('Number of pylint fixmes: ' + str(num_fixme))
[ "@", "task", "@", "needs", "(", "'pavelib.prereqs.install_python_prereqs'", ")", "@", "cmdopts", "(", "[", "(", "'system='", ",", "'s'", ",", "'System to act on'", ")", "]", ")", "@", "timed", "def", "find_fixme", "(", "options", ")", ":", "num_fixme", "=", "0", "systems", "=", "getattr", "(", "options", ",", "'system'", ",", "ALL_SYSTEMS", ")", ".", "split", "(", "','", ")", "for", "system", "in", "systems", ":", "report_dir", "=", "(", "Env", ".", "REPORT_DIR", "/", "system", ")", ".", "makedirs_p", "(", ")", "apps_list", "=", "' '", ".", "join", "(", "top_python_dirs", "(", "system", ")", ")", "pythonpath_prefix", "=", "'PYTHONPATH={system}/djangoapps:common/djangoapps:common/lib'", ".", "format", "(", "system", "=", "system", ")", "sh", "(", "'{pythonpath_prefix} pylint --disable R,C,W,E --enable=fixme --msg-template={msg_template} {apps} | tee {report_dir}/pylint_fixme.report'", ".", "format", "(", "pythonpath_prefix", "=", "pythonpath_prefix", ",", "msg_template", "=", "'\"{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}\"'", ",", "apps", "=", "apps_list", ",", "report_dir", "=", "report_dir", ")", ")", "num_fixme", "+=", "_count_pylint_violations", "(", "'{report_dir}/pylint_fixme.report'", ".", "format", "(", "report_dir", "=", "report_dir", ")", ")", "print", "(", "'Number of pylint fixmes: '", "+", "str", "(", "num_fixme", ")", ")" ]
run pylint on system code .
train
false
49,685
def calcNXX(lens, percent): lenSum = sum(lens) threshold = ((float(percent) / 100) * lenSum) runningSum = 0 nxx = 0 nxxLen = 0 for i in range((len(lens) - 1), (-1), (-1)): myLen = lens[i] nxx += 1 runningSum += myLen if (runningSum >= threshold): nxxLen = myLen break return (nxx, nxxLen)
[ "def", "calcNXX", "(", "lens", ",", "percent", ")", ":", "lenSum", "=", "sum", "(", "lens", ")", "threshold", "=", "(", "(", "float", "(", "percent", ")", "/", "100", ")", "*", "lenSum", ")", "runningSum", "=", "0", "nxx", "=", "0", "nxxLen", "=", "0", "for", "i", "in", "range", "(", "(", "len", "(", "lens", ")", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "myLen", "=", "lens", "[", "i", "]", "nxx", "+=", "1", "runningSum", "+=", "myLen", "if", "(", "runningSum", ">=", "threshold", ")", ":", "nxxLen", "=", "myLen", "break", "return", "(", "nxx", ",", "nxxLen", ")" ]
calculates any nxx statistic .
train
false
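calcNXX walks lens from the last element toward the first, so it assumes an ascending sort; a hand-worked N50 sketch of the same logic:

# N50 by calcNXX's method: add lengths from the largest down until the
# running sum reaches 50% of the total. Assumes lens is sorted ascending.
lens = sorted([2, 3, 4, 5, 6, 7, 8])   # total = 35
threshold = (50 / 100.0) * sum(lens)   # 17.5
running = 0
for my_len in reversed(lens):
    running += my_len
    if running >= threshold:
        print('N50 length:', my_len)   # 6  (8 + 7 + 6 = 21 >= 17.5)
        break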
49,687
def field(name, type=STRING, **kwargs): (default, index, optional) = (kwargs.get('default', (((type == DATE) and NOW) or None)), kwargs.get('index', False), kwargs.get('optional', True)) if (type == STRING): type = STRING() if (type == FLOAT): type = 'real' if (type == BOOLEAN): type = 'tinyint(1)' if (type == DATE): type = 'timestamp' if (str(index) in '01'): index = bool(int(index)) if (str(optional) in '01'): optional = bool(int(optional)) return (name, type, default, index, optional)
[ "def", "field", "(", "name", ",", "type", "=", "STRING", ",", "**", "kwargs", ")", ":", "(", "default", ",", "index", ",", "optional", ")", "=", "(", "kwargs", ".", "get", "(", "'default'", ",", "(", "(", "(", "type", "==", "DATE", ")", "and", "NOW", ")", "or", "None", ")", ")", ",", "kwargs", ".", "get", "(", "'index'", ",", "False", ")", ",", "kwargs", ".", "get", "(", "'optional'", ",", "True", ")", ")", "if", "(", "type", "==", "STRING", ")", ":", "type", "=", "STRING", "(", ")", "if", "(", "type", "==", "FLOAT", ")", ":", "type", "=", "'real'", "if", "(", "type", "==", "BOOLEAN", ")", ":", "type", "=", "'tinyint(1)'", "if", "(", "type", "==", "DATE", ")", ":", "type", "=", "'timestamp'", "if", "(", "str", "(", "index", ")", "in", "'01'", ")", ":", "index", "=", "bool", "(", "int", "(", "index", ")", ")", "if", "(", "str", "(", "optional", ")", "in", "'01'", ")", ":", "optional", "=", "bool", "(", "int", "(", "optional", ")", ")", "return", "(", "name", ",", "type", ",", "default", ",", "index", ",", "optional", ")" ]
returns a table field definition that can be passed to database .
train
false
49,688
def robins_alexander_clustering(G): if ((G.order() < 4) or (G.size() < 3)): return 0 L_3 = _threepaths(G) if (L_3 == 0): return 0 C_4 = _four_cycles(G) return ((4.0 * C_4) / L_3)
[ "def", "robins_alexander_clustering", "(", "G", ")", ":", "if", "(", "(", "G", ".", "order", "(", ")", "<", "4", ")", "or", "(", "G", ".", "size", "(", ")", "<", "3", ")", ")", ":", "return", "0", "L_3", "=", "_threepaths", "(", "G", ")", "if", "(", "L_3", "==", "0", ")", ":", "return", "0", "C_4", "=", "_four_cycles", "(", "G", ")", "return", "(", "(", "4.0", "*", "C_4", ")", "/", "L_3", ")" ]
compute the bipartite clustering of g .
train
false
49,690
def exec_command(*cmdargs, **kwargs): encoding = kwargs.pop('encoding', None) out = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, **kwargs).communicate()[0] if is_py3: if encoding: out = out.decode(encoding) else: out = os.fsdecode(out) return out
[ "def", "exec_command", "(", "*", "cmdargs", ",", "**", "kwargs", ")", ":", "encoding", "=", "kwargs", ".", "pop", "(", "'encoding'", ",", "None", ")", "out", "=", "subprocess", ".", "Popen", "(", "cmdargs", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "**", "kwargs", ")", ".", "communicate", "(", ")", "[", "0", "]", "if", "is_py3", ":", "if", "encoding", ":", "out", "=", "out", ".", "decode", "(", "encoding", ")", "else", ":", "out", "=", "os", ".", "fsdecode", "(", "out", ")", "return", "out" ]
run the command specified by the passed positional arguments .
train
false
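The core of exec_command is a Popen/communicate pair; a minimal standalone sketch (assumes a POSIX 'echo' on PATH):

import subprocess

# Capture a command's stdout the way exec_command does, then decode it.
out = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE).communicate()[0]
print(out.decode('utf-8'))  # hello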
49,692
def is_main_process(): return (multiprocessing.current_process().name == 'MainProcess')
[ "def", "is_main_process", "(", ")", ":", "return", "(", "multiprocessing", ".", "current_process", "(", ")", ".", "name", "==", "'MainProcess'", ")" ]
good for wrapping some code sections which you dont want to run during the import of a sub-process .
train
false
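A typical guard built on is_main_process, sketched standalone; useful because child processes re-import the parent module under some multiprocessing start methods:

import multiprocessing

def is_main_process():
    return multiprocessing.current_process().name == 'MainProcess'

# Skip setup that should only ever run once, in the parent process.
if is_main_process():
    print('running import-time setup in the main process')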
49,693
def get_course_with_access(user, action, course_key, depth=0, check_if_enrolled=False): course = get_course_by_id(course_key, depth) check_course_access(course, user, action, check_if_enrolled) return course
[ "def", "get_course_with_access", "(", "user", ",", "action", ",", "course_key", ",", "depth", "=", "0", ",", "check_if_enrolled", "=", "False", ")", ":", "course", "=", "get_course_by_id", "(", "course_key", ",", "depth", ")", "check_course_access", "(", "course", ",", "user", ",", "action", ",", "check_if_enrolled", ")", "return", "course" ]
given a course_key .
train
false
49,694
def _is_leap(year): return (((year % 4) == 0) and (((year % 100) != 0) or ((year % 400) == 0)))
[ "def", "_is_leap", "(", "year", ")", ":", "return", "(", "(", "(", "year", "%", "4", ")", "==", "0", ")", "and", "(", "(", "(", "year", "%", "100", ")", "!=", "0", ")", "or", "(", "(", "year", "%", "400", ")", "==", "0", ")", ")", ")" ]
year -> 1 if leap year .
train
false
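_is_leap encodes the Gregorian rule: divisible by 4, except century years, which must also be divisible by 400. A few checks:

def _is_leap(year):
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

print(_is_leap(2024))  # True  (divisible by 4)
print(_is_leap(1900))  # False (century year not divisible by 400)
print(_is_leap(2000))  # True  (century year divisible by 400)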
49,695
def _get_cachedir(): return _get_config_or_cache_dir(_get_xdg_cache_dir())
[ "def", "_get_cachedir", "(", ")", ":", "return", "_get_config_or_cache_dir", "(", "_get_xdg_cache_dir", "(", ")", ")" ]
return the location of the cache directory .
train
false
49,696
def test_lambda_list_keywords_rest(): can_compile(u'(fn (x &rest xs) (print xs))') cant_compile(u'(fn (x &rest xs &rest ys) (print xs))') can_compile(u'(fn (&optional a &rest xs) (print xs))')
[ "def", "test_lambda_list_keywords_rest", "(", ")", ":", "can_compile", "(", "u'(fn (x &rest xs) (print xs))'", ")", "cant_compile", "(", "u'(fn (x &rest xs &rest ys) (print xs))'", ")", "can_compile", "(", "u'(fn (&optional a &rest xs) (print xs))'", ")" ]
ensure we can compile functions with lambda list keywords .
train
false
49,698
def migrating_cached_query(model, filter_fn=filter_identity): decorator = cached_query(model, filter_fn) def migrating_cached_query_decorator(fn): wrapped = decorator(fn) def migrating_cached_query_wrapper(*args): new_query = wrapped(*args) old_query = make_results(new_query.query, filter_fn) old_query.new_query = new_query return old_query return migrating_cached_query_wrapper return migrating_cached_query_decorator
[ "def", "migrating_cached_query", "(", "model", ",", "filter_fn", "=", "filter_identity", ")", ":", "decorator", "=", "cached_query", "(", "model", ",", "filter_fn", ")", "def", "migrating_cached_query_decorator", "(", "fn", ")", ":", "wrapped", "=", "decorator", "(", "fn", ")", "def", "migrating_cached_query_wrapper", "(", "*", "args", ")", ":", "new_query", "=", "wrapped", "(", "*", "args", ")", "old_query", "=", "make_results", "(", "new_query", ".", "query", ",", "filter_fn", ")", "old_query", ".", "new_query", "=", "new_query", "return", "old_query", "return", "migrating_cached_query_wrapper", "return", "migrating_cached_query_decorator" ]
returns a cachedresults object that has a new-style cached query attached as "new_query" .
train
false
49,700
def agent_maintenance(consul_url=None, **kwargs): ret = {} query_params = {} if (not consul_url): consul_url = _get_config() if (not consul_url): log.error('No Consul URL found.') ret['message'] = 'No Consul URL found.' ret['res'] = False return ret if ('enable' in kwargs): query_params['enable'] = kwargs['enable'] else: ret['message'] = 'Required parameter "enable" is missing.' ret['res'] = False return ret if ('reason' in kwargs): query_params['reason'] = kwargs['reason'] function = 'agent/maintenance' res = _query(consul_url=consul_url, function=function, method='GET', query_params=query_params) if res['res']: ret['res'] = True ret['message'] = 'Agent maintenance mode {0}ed.'.format(kwargs['enable']) else: ret['res'] = True ret['message'] = 'Unable to change maintenance mode for agent.' return ret
[ "def", "agent_maintenance", "(", "consul_url", "=", "None", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "}", "query_params", "=", "{", "}", "if", "(", "not", "consul_url", ")", ":", "consul_url", "=", "_get_config", "(", ")", "if", "(", "not", "consul_url", ")", ":", "log", ".", "error", "(", "'No Consul URL found.'", ")", "ret", "[", "'message'", "]", "=", "'No Consul URL found.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "(", "'enable'", "in", "kwargs", ")", ":", "query_params", "[", "'enable'", "]", "=", "kwargs", "[", "'enable'", "]", "else", ":", "ret", "[", "'message'", "]", "=", "'Required parameter \"enable\" is missing.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "(", "'reason'", "in", "kwargs", ")", ":", "query_params", "[", "'reason'", "]", "=", "kwargs", "[", "'reason'", "]", "function", "=", "'agent/maintenance'", "res", "=", "_query", "(", "consul_url", "=", "consul_url", ",", "function", "=", "function", ",", "method", "=", "'GET'", ",", "query_params", "=", "query_params", ")", "if", "res", "[", "'res'", "]", ":", "ret", "[", "'res'", "]", "=", "True", "ret", "[", "'message'", "]", "=", "'Agent maintenance mode {0}ed.'", ".", "format", "(", "kwargs", "[", "'enable'", "]", ")", "else", ":", "ret", "[", "'res'", "]", "=", "True", "ret", "[", "'message'", "]", "=", "'Unable to change maintenance mode for agent.'", "return", "ret" ]
manages node maintenance mode .
train
true
49,705
def __get_aliases_filename(): return __salt__['config.option']('aliases.file')
[ "def", "__get_aliases_filename", "(", ")", ":", "return", "__salt__", "[", "'config.option'", "]", "(", "'aliases.file'", ")" ]
return the path to the appropriate aliases file .
train
false
49,706
def setLocalAttribute(elementNode): if (elementNode.xmlObject != None): return for key in elementNode.attributes: if key[:1].isalpha(): value = getEvaluatorSplitWords(getLocalAttributeValueString(key, elementNode.attributes[key].strip())) elementNode.xmlObject = KeyValue(key, value) return elementNode.xmlObject = KeyValue()
[ "def", "setLocalAttribute", "(", "elementNode", ")", ":", "if", "(", "elementNode", ".", "xmlObject", "!=", "None", ")", ":", "return", "for", "key", "in", "elementNode", ".", "attributes", ":", "if", "key", "[", ":", "1", "]", ".", "isalpha", "(", ")", ":", "value", "=", "getEvaluatorSplitWords", "(", "getLocalAttributeValueString", "(", "key", ",", "elementNode", ".", "attributes", "[", "key", "]", ".", "strip", "(", ")", ")", ")", "elementNode", ".", "xmlObject", "=", "KeyValue", "(", "key", ",", "value", ")", "return", "elementNode", ".", "xmlObject", "=", "KeyValue", "(", ")" ]
set the local attribute if any .
train
false
49,707
def _split_namespace(tag): try: return tag[1:].split('}', 1) except: return ('', tag)
[ "def", "_split_namespace", "(", "tag", ")", ":", "try", ":", "return", "tag", "[", "1", ":", "]", ".", "split", "(", "'}'", ",", "1", ")", "except", ":", "return", "(", "''", ",", "tag", ")" ]
split a tag into namespace and local tag strings .
train
false
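The tag format being split is ElementTree's Clark notation, {uri}local; a standalone sketch (note the quirk that a tag without a namespace loses its first character, since it is assumed to be '{'):

def _split_namespace(tag):
    try:
        return tag[1:].split('}', 1)
    except Exception:
        return ('', tag)

print(_split_namespace('{http://www.w3.org/2005/Atom}entry'))
# ['http://www.w3.org/2005/Atom', 'entry']
print(_split_namespace('plain'))  # ['lain'] - leading char dropped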
49,708
def safe_translation(f): @wraps(f) def wrapper(locale, *args, **kwargs): try: with translation.override(locale): return f(locale, *args, **kwargs) except (TypeError, KeyError, ValueError, IndexError) as e: log.error('Bad translation in locale "%s": %s', locale, e) with translation.override(settings.WIKI_DEFAULT_LANGUAGE): return f(settings.WIKI_DEFAULT_LANGUAGE, *args, **kwargs) return wrapper
[ "def", "safe_translation", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "locale", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "with", "translation", ".", "override", "(", "locale", ")", ":", "return", "f", "(", "locale", ",", "*", "args", ",", "**", "kwargs", ")", "except", "(", "TypeError", ",", "KeyError", ",", "ValueError", ",", "IndexError", ")", "as", "e", ":", "log", ".", "error", "(", "'Bad translation in locale \"%s\": %s'", ",", "locale", ",", "e", ")", "with", "translation", ".", "override", "(", "settings", ".", "WIKI_DEFAULT_LANGUAGE", ")", ":", "return", "f", "(", "settings", ".", "WIKI_DEFAULT_LANGUAGE", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
call f which has first argument locale .
train
false
49,710
def grant_admin_privileges(name, **client_args): client = _client(**client_args) client.grant_admin_privileges(name) return True
[ "def", "grant_admin_privileges", "(", "name", ",", "**", "client_args", ")", ":", "client", "=", "_client", "(", "**", "client_args", ")", "client", ".", "grant_admin_privileges", "(", "name", ")", "return", "True" ]
grant cluster administration privileges to a user .
train
true
49,711
def validate_app_structure(value): try: if any(((int(v) < 0) for v in value.viewvalues())): raise ValueError(u'Must be greater than or equal to zero') except ValueError as err: raise ValidationError(err)
[ "def", "validate_app_structure", "(", "value", ")", ":", "try", ":", "if", "any", "(", "(", "(", "int", "(", "v", ")", "<", "0", ")", "for", "v", "in", "value", ".", "viewvalues", "(", ")", ")", ")", ":", "raise", "ValueError", "(", "u'Must be greater than or equal to zero'", ")", "except", "ValueError", "as", "err", ":", "raise", "ValidationError", "(", "err", ")" ]
error if the dict values arent ints >= 0 .
train
false
49,714
def lena(): raise RuntimeError('lena() is no longer included in SciPy, please use ascent() or face() instead')
[ "def", "lena", "(", ")", ":", "raise", "RuntimeError", "(", "'lena() is no longer included in SciPy, please use ascent() or face() instead'", ")" ]
function that previously returned an example image .
train
false
49,715
def _TopologicallySortedEnvVarKeys(env): regex = re.compile('\\$\\{([a-zA-Z0-9\\-_]+)\\}') def GetEdges(node): matches = set([v for v in regex.findall(env[node]) if (v in env)]) for dependee in matches: assert ('${' not in dependee), ('Nested variables not supported: ' + dependee) return matches try: order = gyp.common.TopologicallySorted(env.keys(), GetEdges) order.reverse() return order except gyp.common.CycleError as e: raise GypError(('Xcode environment variables are cyclically dependent: ' + str(e.nodes)))
[ "def", "_TopologicallySortedEnvVarKeys", "(", "env", ")", ":", "regex", "=", "re", ".", "compile", "(", "'\\\\$\\\\{([a-zA-Z0-9\\\\-_]+)\\\\}'", ")", "def", "GetEdges", "(", "node", ")", ":", "matches", "=", "set", "(", "[", "v", "for", "v", "in", "regex", ".", "findall", "(", "env", "[", "node", "]", ")", "if", "(", "v", "in", "env", ")", "]", ")", "for", "dependee", "in", "matches", ":", "assert", "(", "'${'", "not", "in", "dependee", ")", ",", "(", "'Nested variables not supported: '", "+", "dependee", ")", "return", "matches", "try", ":", "order", "=", "gyp", ".", "common", ".", "TopologicallySorted", "(", "env", ".", "keys", "(", ")", ",", "GetEdges", ")", "order", ".", "reverse", "(", ")", "return", "order", "except", "gyp", ".", "common", ".", "CycleError", "as", "e", ":", "raise", "GypError", "(", "(", "'Xcode environment variables are cyclically dependent: '", "+", "str", "(", "e", ".", "nodes", ")", ")", ")" ]
takes a dict |env| whose values are strings that can refer to other keys .
train
false
49,716
@register def validate_fun_facts(): for fact in FunFact.objects.published(): try: _validate_query(fact.number) if fact.divisor: _validate_query(fact.divisor) except ValidationError: logger.error(('Unpublishing fact "%s"' % fact.name)) fact.published = False fact.save()
[ "@", "register", "def", "validate_fun_facts", "(", ")", ":", "for", "fact", "in", "FunFact", ".", "objects", ".", "published", "(", ")", ":", "try", ":", "_validate_query", "(", "fact", ".", "number", ")", "if", "fact", ".", "divisor", ":", "_validate_query", "(", "fact", ".", "divisor", ")", "except", "ValidationError", ":", "logger", ".", "error", "(", "(", "'Unpublishing fact \"%s\"'", "%", "fact", ".", "name", ")", ")", "fact", ".", "published", "=", "False", "fact", ".", "save", "(", ")" ]
validate all published facts and unpublish those failing .
train
false
49,717
def _GetUniquePlatforms(spec): platforms = OrderedSet() for configuration in spec['configurations']: platforms.add(_ConfigPlatform(spec['configurations'][configuration])) platforms = list(platforms) return platforms
[ "def", "_GetUniquePlatforms", "(", "spec", ")", ":", "platforms", "=", "OrderedSet", "(", ")", "for", "configuration", "in", "spec", "[", "'configurations'", "]", ":", "platforms", ".", "add", "(", "_ConfigPlatform", "(", "spec", "[", "'configurations'", "]", "[", "configuration", "]", ")", ")", "platforms", "=", "list", "(", "platforms", ")", "return", "platforms" ]
returns the list of unique platforms for this spec .
train
false
49,718
def chunkiter(fp, chunk_size=65536): while True: chunk = fp.read(chunk_size) if chunk: (yield chunk) else: break
[ "def", "chunkiter", "(", "fp", ",", "chunk_size", "=", "65536", ")", ":", "while", "True", ":", "chunk", "=", "fp", ".", "read", "(", "chunk_size", ")", "if", "chunk", ":", "(", "yield", "chunk", ")", "else", ":", "break" ]
return an iterator to a file-like obj which yields fixed size chunks .
train
false
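chunkiter in action against an in-memory buffer, a standalone sketch:

import io

def chunkiter(fp, chunk_size=65536):
    while True:
        chunk = fp.read(chunk_size)
        if chunk:
            yield chunk
        else:
            break

# The final chunk may be shorter than chunk_size.
print(list(chunkiter(io.BytesIO(b'abcdefghij'), chunk_size=4)))
# [b'abcd', b'efgh', b'ij']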
49,719
def _privileged(original): @wraps(original) def permissionChecker(self, *args, **kwargs): if (original.__name__ not in self.permissions): raise IOError(EPERM, 'Operation not permitted') return original(self, *args, **kwargs) return permissionChecker
[ "def", "_privileged", "(", "original", ")", ":", "@", "wraps", "(", "original", ")", "def", "permissionChecker", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "original", ".", "__name__", "not", "in", "self", ".", "permissions", ")", ":", "raise", "IOError", "(", "EPERM", ",", "'Operation not permitted'", ")", "return", "original", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "permissionChecker" ]
wrap a l{memoryiosystem} method with permission-checking logic .
train
false
49,720
def is_task_stub_mutable(context, task_stub): if context.is_admin: return True if (context.owner is None): return False return (task_stub.owner == context.owner)
[ "def", "is_task_stub_mutable", "(", "context", ",", "task_stub", ")", ":", "if", "context", ".", "is_admin", ":", "return", "True", "if", "(", "context", ".", "owner", "is", "None", ")", ":", "return", "False", "return", "(", "task_stub", ".", "owner", "==", "context", ".", "owner", ")" ]
return true if the task stub is mutable in this context .
train
false
49,721
def medianabs(x1, x2, axis=0): x1 = np.asanyarray(x1) x2 = np.asanyarray(x2) return np.median(np.abs((x1 - x2)), axis=axis)
[ "def", "medianabs", "(", "x1", ",", "x2", ",", "axis", "=", "0", ")", ":", "x1", "=", "np", ".", "asanyarray", "(", "x1", ")", "x2", "=", "np", ".", "asanyarray", "(", "x2", ")", "return", "np", ".", "median", "(", "np", ".", "abs", "(", "(", "x1", "-", "x2", ")", ")", ",", "axis", "=", "axis", ")" ]
median absolute error between x1 and x2 .
train
false
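medianabs is the median of the elementwise absolute differences; a worked sketch with numpy:

import numpy as np

x1 = np.array([1.0, 2.0, 3.0, 10.0])
x2 = np.array([1.5, 2.0, 2.0, 4.0])
# |x1 - x2| = [0.5, 0.0, 1.0, 6.0]; the median of those is 0.75.
print(np.median(np.abs(x1 - x2)))  # 0.75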
49,724
@core_helper def user_in_org_or_group(group_id): if (not c.userobj): return False if c.userobj.sysadmin: return True query = model.Session.query(model.Member).filter((model.Member.state == 'active')).filter((model.Member.table_name == 'user')).filter((model.Member.group_id == group_id)).filter((model.Member.table_id == c.userobj.id)) return (len(query.all()) != 0)
[ "@", "core_helper", "def", "user_in_org_or_group", "(", "group_id", ")", ":", "if", "(", "not", "c", ".", "userobj", ")", ":", "return", "False", "if", "c", ".", "userobj", ".", "sysadmin", ":", "return", "True", "query", "=", "model", ".", "Session", ".", "query", "(", "model", ".", "Member", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "state", "==", "'active'", ")", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "table_name", "==", "'user'", ")", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "group_id", "==", "group_id", ")", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "table_id", "==", "c", ".", "userobj", ".", "id", ")", ")", "return", "(", "len", "(", "query", ".", "all", "(", ")", ")", "!=", "0", ")" ]
check if user is in a group or organization .
train
false
49,725
def Hist(hist, **options): (xs, ys) = hist.Render() if ('width' not in options): try: options['width'] = (0.9 * np.diff(xs).min()) except TypeError: warnings.warn("Hist: Can't compute bar width automatically.Check for non-numeric types in Hist.Or try providing width option.") options = _Underride(options, label=hist.label) options = _Underride(options, align='center') if (options['align'] == 'left'): options['align'] = 'edge' elif (options['align'] == 'right'): options['align'] = 'edge' options['width'] *= (-1) Bar(xs, ys, **options)
[ "def", "Hist", "(", "hist", ",", "**", "options", ")", ":", "(", "xs", ",", "ys", ")", "=", "hist", ".", "Render", "(", ")", "if", "(", "'width'", "not", "in", "options", ")", ":", "try", ":", "options", "[", "'width'", "]", "=", "(", "0.9", "*", "np", ".", "diff", "(", "xs", ")", ".", "min", "(", ")", ")", "except", "TypeError", ":", "warnings", ".", "warn", "(", "\"Hist: Can't compute bar width automatically.Check for non-numeric types in Hist.Or try providing width option.\"", ")", "options", "=", "_Underride", "(", "options", ",", "label", "=", "hist", ".", "label", ")", "options", "=", "_Underride", "(", "options", ",", "align", "=", "'center'", ")", "if", "(", "options", "[", "'align'", "]", "==", "'left'", ")", ":", "options", "[", "'align'", "]", "=", "'edge'", "elif", "(", "options", "[", "'align'", "]", "==", "'right'", ")", ":", "options", "[", "'align'", "]", "=", "'edge'", "options", "[", "'width'", "]", "*=", "(", "-", "1", ")", "Bar", "(", "xs", ",", "ys", ",", "**", "options", ")" ]
plots a pmf or hist with a bar plot .
train
false
49,726
def test_lazy_roles(): @roles('r1') def command(): pass eq_hosts(command, ['a', 'b'], env={'roledefs': lazy_role})
[ "def", "test_lazy_roles", "(", ")", ":", "@", "roles", "(", "'r1'", ")", "def", "command", "(", ")", ":", "pass", "eq_hosts", "(", "command", ",", "[", "'a'", ",", "'b'", "]", ",", "env", "=", "{", "'roledefs'", ":", "lazy_role", "}", ")" ]
roles may be callables returning lists .
train
false
49,727
def _logging(original): @wraps(original) def logger(self, request, **routeArguments): logger = _get_logger(self) action = REQUEST(logger, request_path=request.path, method=request.method) incidentIdentifier = action.serialize_task_id() with action.context(): d = DeferredContext(original(self, request, **routeArguments)) def failure(reason): if reason.check(BadRequest): code = reason.value.code result = reason.value.result else: writeFailure(reason, logger, LOG_SYSTEM) code = INTERNAL_SERVER_ERROR result = incidentIdentifier request.setResponseCode(code) request.responseHeaders.setRawHeaders('content-type', ['application/json']) return dumps(result) d.addErrback(failure) d.addActionFinish() return d.result return logger
[ "def", "_logging", "(", "original", ")", ":", "@", "wraps", "(", "original", ")", "def", "logger", "(", "self", ",", "request", ",", "**", "routeArguments", ")", ":", "logger", "=", "_get_logger", "(", "self", ")", "action", "=", "REQUEST", "(", "logger", ",", "request_path", "=", "request", ".", "path", ",", "method", "=", "request", ".", "method", ")", "incidentIdentifier", "=", "action", ".", "serialize_task_id", "(", ")", "with", "action", ".", "context", "(", ")", ":", "d", "=", "DeferredContext", "(", "original", "(", "self", ",", "request", ",", "**", "routeArguments", ")", ")", "def", "failure", "(", "reason", ")", ":", "if", "reason", ".", "check", "(", "BadRequest", ")", ":", "code", "=", "reason", ".", "value", ".", "code", "result", "=", "reason", ".", "value", ".", "result", "else", ":", "writeFailure", "(", "reason", ",", "logger", ",", "LOG_SYSTEM", ")", "code", "=", "INTERNAL_SERVER_ERROR", "result", "=", "incidentIdentifier", "request", ".", "setResponseCode", "(", "code", ")", "request", ".", "responseHeaders", ".", "setRawHeaders", "(", "'content-type'", ",", "[", "'application/json'", "]", ")", "return", "dumps", "(", "result", ")", "d", ".", "addErrback", "(", "failure", ")", "d", ".", "addActionFinish", "(", ")", "return", "d", ".", "result", "return", "logger" ]
decorate a method which implements an api endpoint to add eliot-based logging .
train
false
49,728
def test_improve_memory_error_message(): try: improve_memory_error_message(MemoryError(), 'test') except MemoryError as e: assert len(str(e)) try: improve_memory_error_message(MemoryError('test'), 'should not') except MemoryError as e: assert (str(e) == 'test')
[ "def", "test_improve_memory_error_message", "(", ")", ":", "try", ":", "improve_memory_error_message", "(", "MemoryError", "(", ")", ",", "'test'", ")", "except", "MemoryError", "as", "e", ":", "assert", "len", "(", "str", "(", "e", ")", ")", "try", ":", "improve_memory_error_message", "(", "MemoryError", "(", "'test'", ")", ",", "'should not'", ")", "except", "MemoryError", "as", "e", ":", "assert", "(", "str", "(", "e", ")", "==", "'test'", ")" ]
tests that the memoryerrors message is improved correctly .
train
false
49,730
def get_messages(request): return getattr(request, '_messages', [])
[ "def", "get_messages", "(", "request", ")", ":", "return", "getattr", "(", "request", ",", "'_messages'", ",", "[", "]", ")" ]
return a queryset with the message data .
train
false
49,731
def get_filelist(htmlfile, dir, opts, log): log.info('Building file list...') filelist = traverse(htmlfile, max_levels=int(opts.max_levels), verbose=opts.verbose, encoding=opts.input_encoding)[(0 if opts.breadth_first else 1)] if opts.verbose: log.debug('\tFound files...') for f in filelist: log.debug('\t\t', f) return filelist
[ "def", "get_filelist", "(", "htmlfile", ",", "dir", ",", "opts", ",", "log", ")", ":", "log", ".", "info", "(", "'Building file list...'", ")", "filelist", "=", "traverse", "(", "htmlfile", ",", "max_levels", "=", "int", "(", "opts", ".", "max_levels", ")", ",", "verbose", "=", "opts", ".", "verbose", ",", "encoding", "=", "opts", ".", "input_encoding", ")", "[", "(", "0", "if", "opts", ".", "breadth_first", "else", "1", ")", "]", "if", "opts", ".", "verbose", ":", "log", ".", "debug", "(", "'\\tFound files...'", ")", "for", "f", "in", "filelist", ":", "log", ".", "debug", "(", "'\\t\\t'", ",", "f", ")", "return", "filelist" ]
build list of files referenced by html file or try to detect and use an opf file instead .
train
false
49,732
def _is_png(filename): return ('.png' in filename)
[ "def", "_is_png", "(", "filename", ")", ":", "return", "(", "'.png'", "in", "filename", ")" ]
determine if a file contains a png format image .
train
false
49,733
def validate_external_credential(external_credential): if ((not external_credential) or ('#' not in external_credential)): return False (profile_name, technical_id) = external_credential.split('#', 1) if (profile_name not in settings.EXTERNAL_IDENTITY_PROFILE): return False if (len(technical_id) <= 0): return False provider = settings.EXTERNAL_IDENTITY_PROFILE[profile_name] return {'provider': provider, 'id': technical_id}
[ "def", "validate_external_credential", "(", "external_credential", ")", ":", "if", "(", "(", "not", "external_credential", ")", "or", "(", "'#'", "not", "in", "external_credential", ")", ")", ":", "return", "False", "(", "profile_name", ",", "technical_id", ")", "=", "external_credential", ".", "split", "(", "'#'", ",", "1", ")", "if", "(", "profile_name", "not", "in", "settings", ".", "EXTERNAL_IDENTITY_PROFILE", ")", ":", "return", "False", "if", "(", "len", "(", "technical_id", ")", "<=", "0", ")", ":", "return", "False", "provider", "=", "settings", ".", "EXTERNAL_IDENTITY_PROFILE", "[", "profile_name", "]", "return", "{", "'provider'", ":", "provider", ",", "'id'", ":", "technical_id", "}" ]
validate the external credential .
train
false
49,734
@app.before_request def before_remote_request(): if ((request.endpoint in ('namespace_api.message_search_api', 'namespace_api.thread_search_api', 'namespace_api.message_streaming_search_api', 'namespace_api.thread_streaming_search_api')) or (request.method in ('POST', 'PUT', 'PATCH', 'DELETE'))): if g.namespace: request.environ['log_context']['provider'] = g.namespace.account.provider valid_account(g.namespace)
[ "@", "app", ".", "before_request", "def", "before_remote_request", "(", ")", ":", "if", "(", "(", "request", ".", "endpoint", "in", "(", "'namespace_api.message_search_api'", ",", "'namespace_api.thread_search_api'", ",", "'namespace_api.message_streaming_search_api'", ",", "'namespace_api.thread_streaming_search_api'", ")", ")", "or", "(", "request", ".", "method", "in", "(", "'POST'", ",", "'PUT'", ",", "'PATCH'", ",", "'DELETE'", ")", ")", ")", ":", "if", "g", ".", "namespace", ":", "request", ".", "environ", "[", "'log_context'", "]", "[", "'provider'", "]", "=", "g", ".", "namespace", ".", "account", ".", "provider", "valid_account", "(", "g", ".", "namespace", ")" ]
verify the validity of the accounts credentials before performing a request to the remote server .
train
false
49,735
@task(queue='web') def clear_artifacts(version_pk): version = Version.objects.get(pk=version_pk) clear_pdf_artifacts(version) clear_epub_artifacts(version) clear_htmlzip_artifacts(version) clear_html_artifacts(version)
[ "@", "task", "(", "queue", "=", "'web'", ")", "def", "clear_artifacts", "(", "version_pk", ")", ":", "version", "=", "Version", ".", "objects", ".", "get", "(", "pk", "=", "version_pk", ")", "clear_pdf_artifacts", "(", "version", ")", "clear_epub_artifacts", "(", "version", ")", "clear_htmlzip_artifacts", "(", "version", ")", "clear_html_artifacts", "(", "version", ")" ]
remove artifacts from the web servers .
train
false
49,737
def _init_multiprocessing(): global _buffer global _n if _multiprocessing: print('[i] preparing capture buffer...') try: _buffer = mmap.mmap((-1), config.CAPTURE_BUFFER) _ = ('\x00' * MMAP_ZFILL_CHUNK_LENGTH) for i in xrange((config.CAPTURE_BUFFER / MMAP_ZFILL_CHUNK_LENGTH)): _buffer.write(_) _buffer.seek(0) except KeyboardInterrupt: raise except: exit("[!] unable to allocate network capture buffer. Please adjust value of 'CAPTURE_BUFFER'") print(('[i] creating %d more processes (out of total %d)' % ((config.PROCESS_COUNT - 1), config.PROCESS_COUNT))) _n = _multiprocessing.Value('L', lock=False) for i in xrange((config.PROCESS_COUNT - 1)): process = _multiprocessing.Process(target=worker, name=str(i), args=(_buffer, _n, i, (config.PROCESS_COUNT - 1), _process_packet)) process.daemon = True process.start()
[ "def", "_init_multiprocessing", "(", ")", ":", "global", "_buffer", "global", "_n", "if", "_multiprocessing", ":", "print", "(", "'[i] preparing capture buffer...'", ")", "try", ":", "_buffer", "=", "mmap", ".", "mmap", "(", "(", "-", "1", ")", ",", "config", ".", "CAPTURE_BUFFER", ")", "_", "=", "(", "'\\x00'", "*", "MMAP_ZFILL_CHUNK_LENGTH", ")", "for", "i", "in", "xrange", "(", "(", "config", ".", "CAPTURE_BUFFER", "/", "MMAP_ZFILL_CHUNK_LENGTH", ")", ")", ":", "_buffer", ".", "write", "(", "_", ")", "_buffer", ".", "seek", "(", "0", ")", "except", "KeyboardInterrupt", ":", "raise", "except", ":", "exit", "(", "\"[!] unable to allocate network capture buffer. Please adjust value of 'CAPTURE_BUFFER'\"", ")", "print", "(", "(", "'[i] creating %d more processes (out of total %d)'", "%", "(", "(", "config", ".", "PROCESS_COUNT", "-", "1", ")", ",", "config", ".", "PROCESS_COUNT", ")", ")", ")", "_n", "=", "_multiprocessing", ".", "Value", "(", "'L'", ",", "lock", "=", "False", ")", "for", "i", "in", "xrange", "(", "(", "config", ".", "PROCESS_COUNT", "-", "1", ")", ")", ":", "process", "=", "_multiprocessing", ".", "Process", "(", "target", "=", "worker", ",", "name", "=", "str", "(", "i", ")", ",", "args", "=", "(", "_buffer", ",", "_n", ",", "i", ",", "(", "config", ".", "PROCESS_COUNT", "-", "1", ")", ",", "_process_packet", ")", ")", "process", ".", "daemon", "=", "True", "process", ".", "start", "(", ")" ]
inits worker processes used in multiprocessing mode .
train
false
49,738
def _abs_auto(ms): num_acs = min(11, len(ms)) if num_acs: auto_corrs = [numpy.corrcoef(ms, numpy.append(ms[i:], ms[:i]))[1][0] for i in range(1, num_acs)] return map(abs, auto_corrs)
[ "def", "_abs_auto", "(", "ms", ")", ":", "num_acs", "=", "min", "(", "11", ",", "len", "(", "ms", ")", ")", "if", "num_acs", ":", "auto_corrs", "=", "[", "numpy", ".", "corrcoef", "(", "ms", ",", "numpy", ".", "append", "(", "ms", "[", "i", ":", "]", ",", "ms", "[", ":", "i", "]", ")", ")", "[", "1", "]", "[", "0", "]", "for", "i", "in", "range", "(", "1", ",", "num_acs", ")", "]", "return", "map", "(", "abs", ",", "auto_corrs", ")" ]
return absolute value of auto-correlations for lags 1 to 10 .
train
false
49,739
def guess_mode(args, config): compose = True export = False if ((args.decrypt is not False) or (args.encrypt is not False) or (args.export is not False) or any((args.short, args.tags, args.edit))): compose = False export = True elif any((args.start_date, args.end_date, args.on_date, args.limit, args.strict, args.starred)): compose = False elif (args.text and all(((word[0] in config[u'tagsymbols']) for word in u' '.join(args.text).split()))): compose = False return (compose, export)
[ "def", "guess_mode", "(", "args", ",", "config", ")", ":", "compose", "=", "True", "export", "=", "False", "if", "(", "(", "args", ".", "decrypt", "is", "not", "False", ")", "or", "(", "args", ".", "encrypt", "is", "not", "False", ")", "or", "(", "args", ".", "export", "is", "not", "False", ")", "or", "any", "(", "(", "args", ".", "short", ",", "args", ".", "tags", ",", "args", ".", "edit", ")", ")", ")", ":", "compose", "=", "False", "export", "=", "True", "elif", "any", "(", "(", "args", ".", "start_date", ",", "args", ".", "end_date", ",", "args", ".", "on_date", ",", "args", ".", "limit", ",", "args", ".", "strict", ",", "args", ".", "starred", ")", ")", ":", "compose", "=", "False", "elif", "(", "args", ".", "text", "and", "all", "(", "(", "(", "word", "[", "0", "]", "in", "config", "[", "u'tagsymbols'", "]", ")", "for", "word", "in", "u' '", ".", "join", "(", "args", ".", "text", ")", ".", "split", "(", ")", ")", ")", ")", ":", "compose", "=", "False", "return", "(", "compose", ",", "export", ")" ]
guesses the mode from the given arguments .
train
false
49,740
def buildHL0bTrainingSet(numOnes=5): numPatterns = 23 p = getSimplePatterns(numOnes, numPatterns) s = [] s.append(p[rgen.randint(5, numPatterns)]) for i in xrange(50): r = rgen.randint(5, numPatterns) print r, s.append(p[r]) if (rgen.binomial(1, 0.5) > 0): print 'S1', s.append(p[0]) s.append(p[1]) s.append(p[2]) s.append(p[4]) else: print 'S2', s.append(p[1]) s.append(p[2]) s.append(p[3]) r = rgen.randint(5, numPatterns) s.append(p[r]) print r, print return ([s], [[p[0], p[1], p[2], p[4]], [p[1], p[2], p[3]]])
[ "def", "buildHL0bTrainingSet", "(", "numOnes", "=", "5", ")", ":", "numPatterns", "=", "23", "p", "=", "getSimplePatterns", "(", "numOnes", ",", "numPatterns", ")", "s", "=", "[", "]", "s", ".", "append", "(", "p", "[", "rgen", ".", "randint", "(", "5", ",", "numPatterns", ")", "]", ")", "for", "i", "in", "xrange", "(", "50", ")", ":", "r", "=", "rgen", ".", "randint", "(", "5", ",", "numPatterns", ")", "print", "r", ",", "s", ".", "append", "(", "p", "[", "r", "]", ")", "if", "(", "rgen", ".", "binomial", "(", "1", ",", "0.5", ")", ">", "0", ")", ":", "print", "'S1'", ",", "s", ".", "append", "(", "p", "[", "0", "]", ")", "s", ".", "append", "(", "p", "[", "1", "]", ")", "s", ".", "append", "(", "p", "[", "2", "]", ")", "s", ".", "append", "(", "p", "[", "4", "]", ")", "else", ":", "print", "'S2'", ",", "s", ".", "append", "(", "p", "[", "1", "]", ")", "s", ".", "append", "(", "p", "[", "2", "]", ")", "s", ".", "append", "(", "p", "[", "3", "]", ")", "r", "=", "rgen", ".", "randint", "(", "5", ",", "numPatterns", ")", "s", ".", "append", "(", "p", "[", "r", "]", ")", "print", "r", ",", "print", "return", "(", "[", "s", "]", ",", "[", "[", "p", "[", "0", "]", ",", "p", "[", "1", "]", ",", "p", "[", "2", "]", ",", "p", "[", "4", "]", "]", ",", "[", "p", "[", "1", "]", ",", "p", "[", "2", "]", ",", "p", "[", "3", "]", "]", "]", ")" ]
simple sequences for hl0b .
train
false
49,741
def kit_item(): s3.prep = (lambda r: (r.representation == 's3json')) return s3_rest_controller()
[ "def", "kit_item", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", "return", "s3_rest_controller", "(", ")" ]
rest controller to retrieve budget_kit_item field options .
train
false
49,743
def key_has_dot_or_dollar(d): for (k, v) in d.items(): if ((('.' in k) or ('$' in k)) or (isinstance(v, dict) and key_has_dot_or_dollar(v))): return True
[ "def", "key_has_dot_or_dollar", "(", "d", ")", ":", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", ":", "if", "(", "(", "(", "'.'", "in", "k", ")", "or", "(", "'$'", "in", "k", ")", ")", "or", "(", "isinstance", "(", "v", ",", "dict", ")", "and", "key_has_dot_or_dollar", "(", "v", ")", ")", ")", ":", "return", "True" ]
helper function to recursively determine if any key in a dictionary contains a dot or a dollar sign .
train
false
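MongoDB rejects field names containing '.' or '$', which is what this recursive screen checks before insertion; a standalone sketch (note it returns None, not False, when nothing matches):

def key_has_dot_or_dollar(d):
    for k, v in d.items():
        if '.' in k or '$' in k or (isinstance(v, dict) and key_has_dot_or_dollar(v)):
            return True

print(key_has_dot_or_dollar({'user': {'a.b': 1}}))  # True (nested dotted key)
print(key_has_dot_or_dollar({'$set': {'a': 1}}))    # True (top-level dollar key)
print(key_has_dot_or_dollar({'user': {'a': 1}}))    # None (falsy)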
49,745
def doctest_bad(x, y=1, **k): print ('x:', x) print ('y:', y) print ('k:', k)
[ "def", "doctest_bad", "(", "x", ",", "y", "=", "1", ",", "**", "k", ")", ":", "print", "(", "'x:'", ",", "x", ")", "print", "(", "'y:'", ",", "y", ")", "print", "(", "'k:'", ",", "k", ")" ]
a function whose doctest we need to skip .
train
false
49,746
def write_datavalidation(worksheet): required_dvs = [x for x in worksheet._data_validations if (len(x.cells) or len(x.ranges))] if (not required_dvs): return dvs = Element('dataValidations', count=str(len(required_dvs))) for dv in required_dvs: dvs.append(dv.to_tree()) return dvs
[ "def", "write_datavalidation", "(", "worksheet", ")", ":", "required_dvs", "=", "[", "x", "for", "x", "in", "worksheet", ".", "_data_validations", "if", "(", "len", "(", "x", ".", "cells", ")", "or", "len", "(", "x", ".", "ranges", ")", ")", "]", "if", "(", "not", "required_dvs", ")", ":", "return", "dvs", "=", "Element", "(", "'dataValidations'", ",", "count", "=", "str", "(", "len", "(", "required_dvs", ")", ")", ")", "for", "dv", "in", "required_dvs", ":", "dvs", ".", "append", "(", "dv", ".", "to_tree", "(", ")", ")", "return", "dvs" ]
write data validation(s) to xml .
train
false
49,747
def getimage(name): try: sizes = imgfile.getsizes(name) except imgfile.error: name = get_qualified_path(name) sizes = imgfile.getsizes(name) if verbose: print ('imgfile opening test image: %s, sizes: %s' % (name, str(sizes))) image = imgfile.read(name) return (image, sizes[0], sizes[1])
[ "def", "getimage", "(", "name", ")", ":", "try", ":", "sizes", "=", "imgfile", ".", "getsizes", "(", "name", ")", "except", "imgfile", ".", "error", ":", "name", "=", "get_qualified_path", "(", "name", ")", "sizes", "=", "imgfile", ".", "getsizes", "(", "name", ")", "if", "verbose", ":", "print", "(", "'imgfile opening test image: %s, sizes: %s'", "%", "(", "name", ",", "str", "(", "sizes", ")", ")", ")", "image", "=", "imgfile", ".", "read", "(", "name", ")", "return", "(", "image", ",", "sizes", "[", "0", "]", ",", "sizes", "[", "1", "]", ")" ]
return a tuple consisting of the image and its width and height .
train
false
49,748
def efuse_perform_write(esp): esp.write_reg(EFUSE_REG_CONF, EFUSE_CONF_WRITE) esp.write_reg(EFUSE_REG_CMD, EFUSE_CMD_WRITE) def wait_idle(): for _ in range(10): if (esp.read_reg(EFUSE_REG_CMD) == 0): return raise esptool.FatalError('Timed out waiting for Efuse controller command to complete') wait_idle() esp.write_reg(EFUSE_REG_CONF, EFUSE_CONF_READ) esp.write_reg(EFUSE_REG_CMD, EFUSE_CMD_READ) wait_idle()
[ "def", "efuse_perform_write", "(", "esp", ")", ":", "esp", ".", "write_reg", "(", "EFUSE_REG_CONF", ",", "EFUSE_CONF_WRITE", ")", "esp", ".", "write_reg", "(", "EFUSE_REG_CMD", ",", "EFUSE_CMD_WRITE", ")", "def", "wait_idle", "(", ")", ":", "for", "_", "in", "range", "(", "10", ")", ":", "if", "(", "esp", ".", "read_reg", "(", "EFUSE_REG_CMD", ")", "==", "0", ")", ":", "return", "raise", "esptool", ".", "FatalError", "(", "'Timed out waiting for Efuse controller command to complete'", ")", "wait_idle", "(", ")", "esp", ".", "write_reg", "(", "EFUSE_REG_CONF", ",", "EFUSE_CONF_READ", ")", "esp", ".", "write_reg", "(", "EFUSE_REG_CMD", ",", "EFUSE_CMD_READ", ")", "wait_idle", "(", ")" ]
write the values in the efuse write registers to the efuse hardware .
train
false
49,749
def pickleStringO(stringo): 'support function for copy_reg to pickle StringIO.OutputTypes' return (unpickleStringO, (stringo.getvalue(), stringo.tell()))
[ "def", "pickleStringO", "(", "stringo", ")", ":", "return", "(", "unpickleStringO", ",", "(", "stringo", ".", "getvalue", "(", ")", ",", "stringo", ".", "tell", "(", ")", ")", ")" ]
reduce the given cstringo .
train
false
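A minimal sketch of how such a reducer is registered, assuming a counterpart unpickleStringO that rebuilds the stream from the saved value and position (io.StringIO stands in here for the old cStringIO output type):

import copyreg
import io

def unpickleStringO(value, pos):
    # assumed inverse: recreate the stream and restore the write position
    s = io.StringIO(value)
    s.seek(pos)
    return s

def pickleStringO(stringo):
    return (unpickleStringO, (stringo.getvalue(), stringo.tell()))

copyreg.pickle(io.StringIO, pickleStringO)  # pickle now knows how to reduce it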
49,751
def beacon(config): ret = [] changes = {} txt = {} global LAST_GRAINS _validate = __validate__(config) if (not _validate[0]): log.warning('Beacon {0} configuration invalid, not adding. {1}'.format(__virtualname__, _validate[1])) return ret if ('servicename' in config): servicename = config['servicename'] else: servicename = __grains__['host'] for item in config['txt']: if config['txt'][item].startswith('grains.'): grain = config['txt'][item][7:] grain_index = None square_bracket = grain.find('[') if ((square_bracket != (-1)) and (grain[(-1)] == ']')): grain_index = int(grain[(square_bracket + 1):(-1)]) grain = grain[:square_bracket] grain_value = __grains__.get(grain, '') if isinstance(grain_value, list): if (grain_index is not None): grain_value = grain_value[grain_index] else: grain_value = ','.join(grain_value) txt[item] = grain_value if (LAST_GRAINS and (LAST_GRAINS.get(grain, '') != __grains__.get(grain, ''))): changes[str(('txt.' + item))] = txt[item] else: txt[item] = config['txt'][item] if (not LAST_GRAINS): changes[str(('txt.' + item))] = txt[item] if changes: if (not LAST_GRAINS): changes['servicename'] = servicename changes['servicetype'] = config['servicetype'] changes['port'] = config['port'] GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0), servicename, config['servicetype'], '', '', dbus.UInt16(config['port']), avahi.dict_to_txt_array(txt)) GROUP.Commit() elif config.get('reset_on_change', False): GROUP.Reset() reset_wait = config.get('reset_wait', 0) if (reset_wait > 0): time.sleep(reset_wait) GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0), servicename, config['servicetype'], '', '', dbus.UInt16(config['port']), avahi.dict_to_txt_array(txt)) GROUP.Commit() else: GROUP.UpdateServiceTxt(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0), servicename, config['servicetype'], '', avahi.dict_to_txt_array(txt)) ret.append({'tag': 'result', 'changes': changes}) if config.get('copy_grains', False): LAST_GRAINS = __grains__.copy() else: LAST_GRAINS = __grains__ return ret
[ "def", "beacon", "(", "config", ")", ":", "ret", "=", "[", "]", "changes", "=", "{", "}", "txt", "=", "{", "}", "global", "LAST_GRAINS", "_validate", "=", "__validate__", "(", "config", ")", "if", "(", "not", "_validate", "[", "0", "]", ")", ":", "log", ".", "warning", "(", "'Beacon {0} configuration invalid, not adding. {1}'", ".", "format", "(", "__virtualname__", ",", "_validate", "[", "1", "]", ")", ")", "return", "ret", "if", "(", "'servicename'", "in", "config", ")", ":", "servicename", "=", "config", "[", "'servicename'", "]", "else", ":", "servicename", "=", "__grains__", "[", "'host'", "]", "for", "item", "in", "config", "[", "'txt'", "]", ":", "if", "config", "[", "'txt'", "]", "[", "item", "]", ".", "startswith", "(", "'grains.'", ")", ":", "grain", "=", "config", "[", "'txt'", "]", "[", "item", "]", "[", "7", ":", "]", "grain_index", "=", "None", "square_bracket", "=", "grain", ".", "find", "(", "'['", ")", "if", "(", "(", "square_bracket", "!=", "(", "-", "1", ")", ")", "and", "(", "grain", "[", "(", "-", "1", ")", "]", "==", "']'", ")", ")", ":", "grain_index", "=", "int", "(", "grain", "[", "(", "square_bracket", "+", "1", ")", ":", "(", "-", "1", ")", "]", ")", "grain", "=", "grain", "[", ":", "square_bracket", "]", "grain_value", "=", "__grains__", ".", "get", "(", "grain", ",", "''", ")", "if", "isinstance", "(", "grain_value", ",", "list", ")", ":", "if", "(", "grain_index", "is", "not", "None", ")", ":", "grain_value", "=", "grain_value", "[", "grain_index", "]", "else", ":", "grain_value", "=", "','", ".", "join", "(", "grain_value", ")", "txt", "[", "item", "]", "=", "grain_value", "if", "(", "LAST_GRAINS", "and", "(", "LAST_GRAINS", ".", "get", "(", "grain", ",", "''", ")", "!=", "__grains__", ".", "get", "(", "grain", ",", "''", ")", ")", ")", ":", "changes", "[", "str", "(", "(", "'txt.'", "+", "item", ")", ")", "]", "=", "txt", "[", "item", "]", "else", ":", "txt", "[", "item", "]", "=", "config", "[", "'txt'", "]", "[", "item", "]", "if", "(", "not", "LAST_GRAINS", ")", ":", "changes", "[", "str", "(", "(", "'txt.'", "+", "item", ")", ")", "]", "=", "txt", "[", "item", "]", "if", "changes", ":", "if", "(", "not", "LAST_GRAINS", ")", ":", "changes", "[", "'servicename'", "]", "=", "servicename", "changes", "[", "'servicetype'", "]", "=", "config", "[", "'servicetype'", "]", "changes", "[", "'port'", "]", "=", "config", "[", "'port'", "]", "GROUP", ".", "AddService", "(", "avahi", ".", "IF_UNSPEC", ",", "avahi", ".", "PROTO_UNSPEC", ",", "dbus", ".", "UInt32", "(", "0", ")", ",", "servicename", ",", "config", "[", "'servicetype'", "]", ",", "''", ",", "''", ",", "dbus", ".", "UInt16", "(", "config", "[", "'port'", "]", ")", ",", "avahi", ".", "dict_to_txt_array", "(", "txt", ")", ")", "GROUP", ".", "Commit", "(", ")", "elif", "config", ".", "get", "(", "'reset_on_change'", ",", "False", ")", ":", "GROUP", ".", "Reset", "(", ")", "reset_wait", "=", "config", ".", "get", "(", "'reset_wait'", ",", "0", ")", "if", "(", "reset_wait", ">", "0", ")", ":", "time", ".", "sleep", "(", "reset_wait", ")", "GROUP", ".", "AddService", "(", "avahi", ".", "IF_UNSPEC", ",", "avahi", ".", "PROTO_UNSPEC", ",", "dbus", ".", "UInt32", "(", "0", ")", ",", "servicename", ",", "config", "[", "'servicetype'", "]", ",", "''", ",", "''", ",", "dbus", ".", "UInt16", "(", "config", "[", "'port'", "]", ")", ",", "avahi", ".", "dict_to_txt_array", "(", "txt", ")", ")", "GROUP", ".", "Commit", "(", ")", "else", ":", "GROUP", ".", "UpdateServiceTxt", "(", "avahi", ".", "IF_UNSPEC", ",", "avahi", 
".", "PROTO_UNSPEC", ",", "dbus", ".", "UInt32", "(", "0", ")", ",", "servicename", ",", "config", "[", "'servicetype'", "]", ",", "''", ",", "avahi", ".", "dict_to_txt_array", "(", "txt", ")", ")", "ret", ".", "append", "(", "{", "'tag'", ":", "'result'", ",", "'changes'", ":", "changes", "}", ")", "if", "config", ".", "get", "(", "'copy_grains'", ",", "False", ")", ":", "LAST_GRAINS", "=", "__grains__", ".", "copy", "(", ")", "else", ":", "LAST_GRAINS", "=", "__grains__", "return", "ret" ]
return status for requested information .
train
false
49,752
def after_categorize(f): f.after = True return f
[ "def", "after_categorize", "(", "f", ")", ":", "f", ".", "after", "=", "True", "return", "f" ]
a decorator to mark a function to be run after categorization has happened .
train
false
49,754
def parse_playlist(res): return _playlist_schema.validate(res.text)
[ "def", "parse_playlist", "(", "res", ")", ":", "return", "_playlist_schema", ".", "validate", "(", "res", ".", "text", ")" ]
attempts to parse a jwplayer playlist in a http response body .
train
false
49,755
def _is_env_per_bucket(): buckets = _get_buckets() if isinstance(buckets, dict): return True elif isinstance(buckets, list): return False else: raise ValueError('Incorrect s3.buckets type given in config')
[ "def", "_is_env_per_bucket", "(", ")", ":", "buckets", "=", "_get_buckets", "(", ")", "if", "isinstance", "(", "buckets", ",", "dict", ")", ":", "return", "True", "elif", "isinstance", "(", "buckets", ",", "list", ")", ":", "return", "False", "else", ":", "raise", "ValueError", "(", "'Incorrect s3.buckets type given in config'", ")" ]
return the configuration mode .
train
true
49,756
def get_all_page_context_from_doctypes(): routes = frappe.cache().get_value(u'website_generator_routes') if (not routes): routes = get_page_info_from_doctypes() frappe.cache().set_value(u'website_generator_routes', routes) return routes
[ "def", "get_all_page_context_from_doctypes", "(", ")", ":", "routes", "=", "frappe", ".", "cache", "(", ")", ".", "get_value", "(", "u'website_generator_routes'", ")", "if", "(", "not", "routes", ")", ":", "routes", "=", "get_page_info_from_doctypes", "(", ")", "frappe", ".", "cache", "(", ")", ".", "set_value", "(", "u'website_generator_routes'", ",", "routes", ")", "return", "routes" ]
get all doctype generated routes .
train
false
49,757
def calculate_keys(p, q, nbits): phi_n = ((p - 1) * (q - 1)) e = 65537 try: d = rsa.common.inverse(e, phi_n) except ValueError: raise ValueError(('e (%d) and phi_n (%d) are not relatively prime' % (e, phi_n))) if (((e * d) % phi_n) != 1): raise ValueError(('e (%d) and d (%d) are not mult. inv. modulo phi_n (%d)' % (e, d, phi_n))) return (e, d)
[ "def", "calculate_keys", "(", "p", ",", "q", ",", "nbits", ")", ":", "phi_n", "=", "(", "(", "p", "-", "1", ")", "*", "(", "q", "-", "1", ")", ")", "e", "=", "65537", "try", ":", "d", "=", "rsa", ".", "common", ".", "inverse", "(", "e", ",", "phi_n", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "(", "'e (%d) and phi_n (%d) are not relatively prime'", "%", "(", "e", ",", "phi_n", ")", ")", ")", "if", "(", "(", "(", "e", "*", "d", ")", "%", "phi_n", ")", "!=", "1", ")", ":", "raise", "ValueError", "(", "(", "'e (%d) and d (%d) are not mult. inv. modulo phi_n (%d)'", "%", "(", "e", ",", "d", ",", "phi_n", ")", ")", ")", "return", "(", "e", ",", "d", ")" ]
calculates an encryption and a decryption key for p and q .
train
false
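A small worked instance of the same arithmetic with toy primes, using Python's built-in modular inverse in place of rsa.common.inverse (illustration only — real keys need large primes):

p, q = 61, 53
phi_n = (p - 1) * (q - 1)    # 3120
e = 65537
d = pow(e, -1, phi_n)        # modular inverse, Python 3.8+; here d == 2753
assert (e * d) % phi_n == 1  # the invariant the snippet re-checks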
49,758
def test_super_class(): class custDescr(object, ): def __get__(self, instance, owner): AreEqual(instance, None) return 'abc' class base(object, ): aProp = property((lambda self: 'foo')) aDescr = custDescr() class sub(base, ): def test1(cls): return super(sub, cls).aProp def test2(cls): return super(sub, cls).aDescr test1 = classmethod(test1) test2 = classmethod(test2) AreEqual(sub.test2(), 'abc') AreEqual(type(sub.test1()), property)
[ "def", "test_super_class", "(", ")", ":", "class", "custDescr", "(", "object", ",", ")", ":", "def", "__get__", "(", "self", ",", "instance", ",", "owner", ")", ":", "AreEqual", "(", "instance", ",", "None", ")", "return", "'abc'", "class", "base", "(", "object", ",", ")", ":", "aProp", "=", "property", "(", "(", "lambda", "self", ":", "'foo'", ")", ")", "aDescr", "=", "custDescr", "(", ")", "class", "sub", "(", "base", ",", ")", ":", "def", "test1", "(", "cls", ")", ":", "return", "super", "(", "sub", ",", "cls", ")", ".", "aProp", "def", "test2", "(", "cls", ")", ":", "return", "super", "(", "sub", ",", "cls", ")", ".", "aDescr", "test1", "=", "classmethod", "(", "test1", ")", "test2", "=", "classmethod", "(", "test2", ")", "AreEqual", "(", "sub", ".", "test2", "(", ")", ",", "'abc'", ")", "AreEqual", "(", "type", "(", "sub", ".", "test1", "(", ")", ")", ",", "property", ")" ]
verify super on a class passes none for the instance .
train
false
49,759
def add_host_keys(name, hostname): from fabtools.require.files import directory as _require_directory, file as _require_file ssh_dir = posixpath.join(home_directory(name), '.ssh') _require_directory(ssh_dir, mode='700', owner=name, use_sudo=True) known_hosts_filename = posixpath.join(ssh_dir, 'known_hosts') _require_file(known_hosts_filename, mode='644', owner=name, use_sudo=True) known_hosts = uncommented_lines(known_hosts_filename, use_sudo=True) with hide('running', 'stdout'): res = run(('ssh-keyscan -t rsa,dsa %s 2>/dev/null' % hostname)) for host_key in res.splitlines(): if (host_key not in known_hosts): sudo(('echo %s >>%s' % (quote(host_key), quote(known_hosts_filename))))
[ "def", "add_host_keys", "(", "name", ",", "hostname", ")", ":", "from", "fabtools", ".", "require", ".", "files", "import", "directory", "as", "_require_directory", ",", "file", "as", "_require_file", "ssh_dir", "=", "posixpath", ".", "join", "(", "home_directory", "(", "name", ")", ",", "'.ssh'", ")", "_require_directory", "(", "ssh_dir", ",", "mode", "=", "'700'", ",", "owner", "=", "name", ",", "use_sudo", "=", "True", ")", "known_hosts_filename", "=", "posixpath", ".", "join", "(", "ssh_dir", ",", "'known_hosts'", ")", "_require_file", "(", "known_hosts_filename", ",", "mode", "=", "'644'", ",", "owner", "=", "name", ",", "use_sudo", "=", "True", ")", "known_hosts", "=", "uncommented_lines", "(", "known_hosts_filename", ",", "use_sudo", "=", "True", ")", "with", "hide", "(", "'running'", ",", "'stdout'", ")", ":", "res", "=", "run", "(", "(", "'ssh-keyscan -t rsa,dsa %s 2>/dev/null'", "%", "hostname", ")", ")", "for", "host_key", "in", "res", ".", "splitlines", "(", ")", ":", "if", "(", "host_key", "not", "in", "known_hosts", ")", ":", "sudo", "(", "(", "'echo %s >>%s'", "%", "(", "quote", "(", "host_key", ")", ",", "quote", "(", "known_hosts_filename", ")", ")", ")", ")" ]
add all public keys of a host to the users ssh known hosts file .
train
false
49,760
def update_mtime(path): try: execute('touch', '-c', path, run_as_root=True) except processutils.ProcessExecutionError as exc: LOG.warning(_LW('Failed to update mtime on path %(path)s. Error: %(error)s'), {'path': path, 'error': exc})
[ "def", "update_mtime", "(", "path", ")", ":", "try", ":", "execute", "(", "'touch'", ",", "'-c'", ",", "path", ",", "run_as_root", "=", "True", ")", "except", "processutils", ".", "ProcessExecutionError", "as", "exc", ":", "LOG", ".", "warning", "(", "_LW", "(", "'Failed to update mtime on path %(path)s. Error: %(error)s'", ")", ",", "{", "'path'", ":", "path", ",", "'error'", ":", "exc", "}", ")" ]
touch a file without being the owner .
train
false
49,761
def chop(s): return ' '.join(s.split()[:(-1)])
[ "def", "chop", "(", "s", ")", ":", "return", "' '", ".", "join", "(", "s", ".", "split", "(", ")", "[", ":", "(", "-", "1", ")", "]", ")" ]
chop off the last bit of a file object repr .
train
false
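Since the helper splits on whitespace and drops the final token, it strips the varying address from a file object repr; a quick illustration:

def chop(s):
    return ' '.join(s.split()[:-1])

assert chop("<closed file 'x', mode 'r' at 0x1234>") == "<closed file 'x', mode 'r' at"
assert chop('a b c') == 'a b'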
49,762
def reset_db(): model.Session.close_all() model.repo.rebuild_db()
[ "def", "reset_db", "(", ")", ":", "model", ".", "Session", ".", "close_all", "(", ")", "model", ".", "repo", ".", "rebuild_db", "(", ")" ]
reset ckans database .
train
false
49,763
def _append_param_insert_pk(compiler, stmt, c, values, kw): if (((c.default is not None) and ((not c.default.is_sequence) or compiler.dialect.supports_sequences)) or ((c is stmt.table._autoincrement_column) and (compiler.dialect.supports_sequences or compiler.dialect.preexecute_autoincrement_sequences))): values.append((c, _create_insert_prefetch_bind_param(compiler, c))) elif ((c.default is None) and (c.server_default is None) and (not c.nullable)): _warn_pk_with_no_anticipated_value(c)
[ "def", "_append_param_insert_pk", "(", "compiler", ",", "stmt", ",", "c", ",", "values", ",", "kw", ")", ":", "if", "(", "(", "(", "c", ".", "default", "is", "not", "None", ")", "and", "(", "(", "not", "c", ".", "default", ".", "is_sequence", ")", "or", "compiler", ".", "dialect", ".", "supports_sequences", ")", ")", "or", "(", "(", "c", "is", "stmt", ".", "table", ".", "_autoincrement_column", ")", "and", "(", "compiler", ".", "dialect", ".", "supports_sequences", "or", "compiler", ".", "dialect", ".", "preexecute_autoincrement_sequences", ")", ")", ")", ":", "values", ".", "append", "(", "(", "c", ",", "_create_insert_prefetch_bind_param", "(", "compiler", ",", "c", ")", ")", ")", "elif", "(", "(", "c", ".", "default", "is", "None", ")", "and", "(", "c", ".", "server_default", "is", "None", ")", "and", "(", "not", "c", ".", "nullable", ")", ")", ":", "_warn_pk_with_no_anticipated_value", "(", "c", ")" ]
create a bound parameter in the insert statement to receive a prefetched default value .
train
false
49,765
def security_group_get_by_instance(context, instance_id): return IMPL.security_group_get_by_instance(context, instance_id)
[ "def", "security_group_get_by_instance", "(", "context", ",", "instance_id", ")", ":", "return", "IMPL", ".", "security_group_get_by_instance", "(", "context", ",", "instance_id", ")" ]
get security groups to which the instance is assigned .
train
false
49,766
def check_simple(ncfileobj): assert_equal(ncfileobj.history, 'Created for a test') time = ncfileobj.variables['time'] assert_equal(time.units, 'days since 2008-01-01') assert_equal(time.shape, (N_EG_ELS,)) assert_equal(time[(-1)], (N_EG_ELS - 1))
[ "def", "check_simple", "(", "ncfileobj", ")", ":", "assert_equal", "(", "ncfileobj", ".", "history", ",", "'Created for a test'", ")", "time", "=", "ncfileobj", ".", "variables", "[", "'time'", "]", "assert_equal", "(", "time", ".", "units", ",", "'days since 2008-01-01'", ")", "assert_equal", "(", "time", ".", "shape", ",", "(", "N_EG_ELS", ",", ")", ")", "assert_equal", "(", "time", "[", "(", "-", "1", ")", "]", ",", "(", "N_EG_ELS", "-", "1", ")", ")" ]
example fileobj tests .
train
false
49,767
def BasicTransactionHandler(application, factory): def basic_transaction(environ, start_response): conn = factory(environ) environ['paste.connection'] = conn should_commit = [500] def finalizer(exc_info=None): if exc_info: if isinstance(exc_info[1], HTTPException): should_commit.append(exc_info[1].code) if (should_commit.pop() < 400): conn.commit() else: try: conn.rollback() except: return conn.close() def basictrans_start_response(status, headers, exc_info=None): should_commit.append(int(status.split(' ')[0])) return start_response(status, headers, exc_info) return catch_errors(application, environ, basictrans_start_response, finalizer, finalizer) return basic_transaction
[ "def", "BasicTransactionHandler", "(", "application", ",", "factory", ")", ":", "def", "basic_transaction", "(", "environ", ",", "start_response", ")", ":", "conn", "=", "factory", "(", "environ", ")", "environ", "[", "'paste.connection'", "]", "=", "conn", "should_commit", "=", "[", "500", "]", "def", "finalizer", "(", "exc_info", "=", "None", ")", ":", "if", "exc_info", ":", "if", "isinstance", "(", "exc_info", "[", "1", "]", ",", "HTTPException", ")", ":", "should_commit", ".", "append", "(", "exc_info", "[", "1", "]", ".", "code", ")", "if", "(", "should_commit", ".", "pop", "(", ")", "<", "400", ")", ":", "conn", ".", "commit", "(", ")", "else", ":", "try", ":", "conn", ".", "rollback", "(", ")", "except", ":", "return", "conn", ".", "close", "(", ")", "def", "basictrans_start_response", "(", "status", ",", "headers", ",", "exc_info", "=", "None", ")", ":", "should_commit", ".", "append", "(", "int", "(", "status", ".", "split", "(", "' '", ")", "[", "0", "]", ")", ")", "return", "start_response", "(", "status", ",", "headers", ",", "exc_info", ")", "return", "catch_errors", "(", "application", ",", "environ", ",", "basictrans_start_response", ",", "finalizer", ",", "finalizer", ")", "return", "basic_transaction" ]
provides a simple mechanism for starting a transaction based on the factory , and for either committing or rolling back the transaction depending on the result .
train
false
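A hedged sketch of wiring this middleware into a WSGI stack — the sqlite factory and the app below are placeholders, not part of the original:

import sqlite3

def factory(environ):
    return sqlite3.connect('app.db')  # hypothetical: one connection per request

def app(environ, start_response):
    conn = environ['paste.connection']  # injected by the middleware
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']  # status < 400 commits; >= 400 or an exception rolls back

wrapped = BasicTransactionHandler(app, factory)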
49,768
def allretweet(): t = Twitter(auth=authen()) try: id = int(g['stuff'].split()[0]) except: printNicely(red("Sorry I can't understand.")) return tid = c['tweet_dict'][id] try: num = int(g['stuff'].split()[1]) except: num = c['RETWEETS_SHOW_NUM'] rt_ary = t.statuses.retweets(id=tid, count=num) if (not rt_ary): printNicely(magenta('This tweet has no retweet.')) return for tweet in reversed(rt_ary): draw(t=tweet) printNicely('')
[ "def", "allretweet", "(", ")", ":", "t", "=", "Twitter", "(", "auth", "=", "authen", "(", ")", ")", "try", ":", "id", "=", "int", "(", "g", "[", "'stuff'", "]", ".", "split", "(", ")", "[", "0", "]", ")", "except", ":", "printNicely", "(", "red", "(", "\"Sorry I can't understand.\"", ")", ")", "return", "tid", "=", "c", "[", "'tweet_dict'", "]", "[", "id", "]", "try", ":", "num", "=", "int", "(", "g", "[", "'stuff'", "]", ".", "split", "(", ")", "[", "1", "]", ")", "except", ":", "num", "=", "c", "[", "'RETWEETS_SHOW_NUM'", "]", "rt_ary", "=", "t", ".", "statuses", ".", "retweets", "(", "id", "=", "tid", ",", "count", "=", "num", ")", "if", "(", "not", "rt_ary", ")", ":", "printNicely", "(", "magenta", "(", "'This tweet has no retweet.'", ")", ")", "return", "for", "tweet", "in", "reversed", "(", "rt_ary", ")", ":", "draw", "(", "t", "=", "tweet", ")", "printNicely", "(", "''", ")" ]
list all retweets .
train
false
49,771
def test_uninstall_wheel(script, data): package = data.packages.join('simple.dist-0.1-py2.py3-none-any.whl') result = script.pip('install', package, '--no-index') dist_info_folder = (script.site_packages / 'simple.dist-0.1.dist-info') assert (dist_info_folder in result.files_created) result2 = script.pip('uninstall', 'simple.dist', '-y') assert_all_changes(result, result2, [])
[ "def", "test_uninstall_wheel", "(", "script", ",", "data", ")", ":", "package", "=", "data", ".", "packages", ".", "join", "(", "'simple.dist-0.1-py2.py3-none-any.whl'", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "package", ",", "'--no-index'", ")", "dist_info_folder", "=", "(", "script", ".", "site_packages", "/", "'simple.dist-0.1.dist-info'", ")", "assert", "(", "dist_info_folder", "in", "result", ".", "files_created", ")", "result2", "=", "script", ".", "pip", "(", "'uninstall'", ",", "'simple.dist'", ",", "'-y'", ")", "assert_all_changes", "(", "result", ",", "result2", ",", "[", "]", ")" ]
test uninstalling a wheel .
train
false
49,773
def test_linear(): xs = [1, 5, True, None, 'foo'] for x in xs: assert (x == activations.linear(x))
[ "def", "test_linear", "(", ")", ":", "xs", "=", "[", "1", ",", "5", ",", "True", ",", "None", ",", "'foo'", "]", "for", "x", "in", "xs", ":", "assert", "(", "x", "==", "activations", ".", "linear", "(", "x", ")", ")" ]
this function does no input validation .
train
false
49,774
def validate_dir_exists(root, value, default): p = sabnzbd.misc.real_path(root, value) if os.path.exists(p): return (None, value) else: return ((T('Folder "%s" does not exist') % p), None)
[ "def", "validate_dir_exists", "(", "root", ",", "value", ",", "default", ")", ":", "p", "=", "sabnzbd", ".", "misc", ".", "real_path", "(", "root", ",", "value", ")", "if", "os", ".", "path", ".", "exists", "(", "p", ")", ":", "return", "(", "None", ",", "value", ")", "else", ":", "return", "(", "(", "T", "(", "'Folder \"%s\" does not exist'", ")", "%", "p", ")", ",", "None", ")" ]
check if directory exists .
train
false
49,775
@domain_constructor(loss_target=0.398) def branin(): x = hp.uniform('x', (-5.0), 10.0) y = hp.uniform('y', 0.0, 15.0) pi = float(np.pi) loss = ((((((y - (old_div(5.1, (4 * (pi ** 2))) * (x ** 2))) + ((5 * x) / pi)) - 6) ** 2) + ((10 * (1 - old_div(1, (8 * pi)))) * scope.cos(x))) + 10) return {'loss': loss, 'loss_variance': 0, 'status': base.STATUS_OK}
[ "@", "domain_constructor", "(", "loss_target", "=", "0.398", ")", "def", "branin", "(", ")", ":", "x", "=", "hp", ".", "uniform", "(", "'x'", ",", "(", "-", "5.0", ")", ",", "10.0", ")", "y", "=", "hp", ".", "uniform", "(", "'y'", ",", "0.0", ",", "15.0", ")", "pi", "=", "float", "(", "np", ".", "pi", ")", "loss", "=", "(", "(", "(", "(", "(", "(", "y", "-", "(", "old_div", "(", "5.1", ",", "(", "4", "*", "(", "pi", "**", "2", ")", ")", ")", "*", "(", "x", "**", "2", ")", ")", ")", "+", "(", "(", "5", "*", "x", ")", "/", "pi", ")", ")", "-", "6", ")", "**", "2", ")", "+", "(", "(", "10", "*", "(", "1", "-", "old_div", "(", "1", ",", "(", "8", "*", "pi", ")", ")", ")", ")", "*", "scope", ".", "cos", "(", "x", ")", ")", ")", "+", "10", ")", "return", "{", "'loss'", ":", "loss", ",", "'loss_variance'", ":", "0", ",", "'status'", ":", "base", ".", "STATUS_OK", "}" ]
the branin function .
train
false
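The loss above is the classic Branin benchmark; written out, the expression the snippet builds is (its global minimum is about 0.397887, matching the 0.398 loss target):

f(x, y) = \left( y - \frac{5.1}{4\pi^2}\,x^2 + \frac{5x}{\pi} - 6 \right)^2 + 10\left(1 - \frac{1}{8\pi}\right)\cos x + 10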
49,776
def _load_connection_error(hostname, error): ret = {'code': None, 'content': 'Error: Unable to connect to the bigip device: {host}\n{error}'.format(host=hostname, error=error)} return ret
[ "def", "_load_connection_error", "(", "hostname", ",", "error", ")", ":", "ret", "=", "{", "'code'", ":", "None", ",", "'content'", ":", "'Error: Unable to connect to the bigip device: {host}\\n{error}'", ".", "format", "(", "host", "=", "hostname", ",", "error", "=", "error", ")", "}", "return", "ret" ]
format and return a connection error .
train
true
49,777
@utils.arg('id', metavar='<id>', help=_('ID of the agent-build.')) def do_agent_delete(cs, args): cs.agents.delete(args.id)
[ "@", "utils", ".", "arg", "(", "'id'", ",", "metavar", "=", "'<id>'", ",", "help", "=", "_", "(", "'ID of the agent-build.'", ")", ")", "def", "do_agent_delete", "(", "cs", ",", "args", ")", ":", "cs", ".", "agents", ".", "delete", "(", "args", ".", "id", ")" ]
delete existing agent build .
train
false
49,779
def search_object_by_tag(key=None, category=None): return ObjectDB.objects.get_by_tag(key=key, category=category)
[ "def", "search_object_by_tag", "(", "key", "=", "None", ",", "category", "=", "None", ")", ":", "return", "ObjectDB", ".", "objects", ".", "get_by_tag", "(", "key", "=", "key", ",", "category", "=", "category", ")" ]
find object based on tag or category .
train
false
49,780
def searchupwards(start, files=[], dirs=[]): start = os.path.abspath(start) parents = start.split(os.sep) exists = os.path.exists join = os.sep.join isdir = os.path.isdir while len(parents): candidate = (join(parents) + os.sep) allpresent = 1 for f in files: if (not exists(('%s%s' % (candidate, f)))): allpresent = 0 break if allpresent: for d in dirs: if (not isdir(('%s%s' % (candidate, d)))): allpresent = 0 break if allpresent: return candidate parents.pop((-1)) return None
[ "def", "searchupwards", "(", "start", ",", "files", "=", "[", "]", ",", "dirs", "=", "[", "]", ")", ":", "start", "=", "os", ".", "path", ".", "abspath", "(", "start", ")", "parents", "=", "start", ".", "split", "(", "os", ".", "sep", ")", "exists", "=", "os", ".", "path", ".", "exists", "join", "=", "os", ".", "sep", ".", "join", "isdir", "=", "os", ".", "path", ".", "isdir", "while", "len", "(", "parents", ")", ":", "candidate", "=", "(", "join", "(", "parents", ")", "+", "os", ".", "sep", ")", "allpresent", "=", "1", "for", "f", "in", "files", ":", "if", "(", "not", "exists", "(", "(", "'%s%s'", "%", "(", "candidate", ",", "f", ")", ")", ")", ")", ":", "allpresent", "=", "0", "break", "if", "allpresent", ":", "for", "d", "in", "dirs", ":", "if", "(", "not", "isdir", "(", "(", "'%s%s'", "%", "(", "candidate", ",", "d", ")", ")", ")", ")", ":", "allpresent", "=", "0", "break", "if", "allpresent", ":", "return", "candidate", "parents", ".", "pop", "(", "(", "-", "1", ")", ")", "return", "None" ]
walk upwards from start .
train
false
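A hedged usage sketch with hypothetical paths — the call returns the first ancestor of start that contains all the named files and dirs, or None:

# locate a project root that holds both setup.py and a docs/ directory
root = searchupwards('/home/user/src/proj/pkg/sub',
                     files=['setup.py'], dirs=['docs'])
print(root)  # e.g. '/home/user/src/proj/' (note the trailing separator) or None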
49,781
def dh_private_key(digit=10, seed=None): p = nextprime((2 ** digit)) g = primitive_root(p) randrange = _randrange(seed) a = randrange(2, p) return (p, g, a)
[ "def", "dh_private_key", "(", "digit", "=", "10", ",", "seed", "=", "None", ")", ":", "p", "=", "nextprime", "(", "(", "2", "**", "digit", ")", ")", "g", "=", "primitive_root", "(", "p", ")", "randrange", "=", "_randrange", "(", "seed", ")", "a", "=", "randrange", "(", "2", ",", "p", ")", "return", "(", "p", ",", "g", ",", "a", ")" ]
return a three integer tuple as the private key .
train
false
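The matching public value is g**a mod p; a short sketch of the exchange built on this helper (nextprime and primitive_root come from SymPy, as the snippet implies):

p, g, a = dh_private_key(digit=10)  # shared p, g; secret exponent a
A = pow(g, a, p)                    # public value sent to the peer
# the peer picks b and sends B = pow(g, b, p); both sides then agree on
# pow(B, a, p) == pow(A, b, p), the shared secret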
49,783
def cache_key_mangler(key): if six.PY2: key = key.encode('utf-8') return uuid.uuid5(CACHE_NAMESPACE, key).hex
[ "def", "cache_key_mangler", "(", "key", ")", ":", "if", "six", ".", "PY2", ":", "key", "=", "key", ".", "encode", "(", "'utf-8'", ")", "return", "uuid", ".", "uuid5", "(", "CACHE_NAMESPACE", ",", "key", ")", ".", "hex" ]
construct an opaque cache key .
train
false
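Because uuid5 is deterministic for a fixed namespace and name, the mangled key is stable across calls and processes; a quick sketch with an assumed namespace constant:

import uuid

CACHE_NAMESPACE = uuid.UUID('4a9e2c66-0000-0000-0000-000000000000')  # stand-in constant
k1 = uuid.uuid5(CACHE_NAMESPACE, 'tokens/abc').hex
assert k1 == uuid.uuid5(CACHE_NAMESPACE, 'tokens/abc').hex  # same opaque key every time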
49,785
def my_request_classifier(environ): request_method = REQUEST_METHOD(environ) if (request_method in _DAV_METHODS): return 'dav' useragent = USER_AGENT(environ) if useragent: for agent in _DAV_USERAGENTS: if (useragent.find(agent) != (-1)): return 'dav' if (request_method == 'POST'): if (CONTENT_TYPE(environ) == 'text/xml'): return 'xmlpost' elif (CONTENT_TYPE(environ) == 'application/soap+xml'): return 'soap' return 'browser'
[ "def", "my_request_classifier", "(", "environ", ")", ":", "request_method", "=", "REQUEST_METHOD", "(", "environ", ")", "if", "(", "request_method", "in", "_DAV_METHODS", ")", ":", "return", "'dav'", "useragent", "=", "USER_AGENT", "(", "environ", ")", "if", "useragent", ":", "for", "agent", "in", "_DAV_USERAGENTS", ":", "if", "(", "useragent", ".", "find", "(", "agent", ")", "!=", "(", "-", "1", ")", ")", ":", "return", "'dav'", "if", "(", "request_method", "==", "'POST'", ")", ":", "if", "(", "CONTENT_TYPE", "(", "environ", ")", "==", "'text/xml'", ")", ":", "return", "'xmlpost'", "elif", "(", "CONTENT_TYPE", "(", "environ", ")", "==", "'application/soap+xml'", ")", ":", "return", "'soap'", "return", "'browser'" ]
returns one of the classifiers dav , xmlpost , soap or browser .
train
true
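A minimal illustration of the four outcomes; the environ dict is hand-built, and REQUEST_METHOD/CONTENT_TYPE/USER_AGENT are assumed to be the usual paste.httpheaders accessors reading the matching WSGI keys:

# POST + text/xml                                  -> 'xmlpost'
# POST + application/soap+xml                      -> 'soap'
# a method in _DAV_METHODS, or a DAV user agent    -> 'dav'
# anything else                                    -> 'browser'
my_request_classifier({'REQUEST_METHOD': 'POST', 'CONTENT_TYPE': 'text/xml'})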
49,786
def notBefore(cert_path): return _notAfterBefore(cert_path, OpenSSL.crypto.X509.get_notBefore)
[ "def", "notBefore", "(", "cert_path", ")", ":", "return", "_notAfterBefore", "(", "cert_path", ",", "OpenSSL", ".", "crypto", ".", "X509", ".", "get_notBefore", ")" ]
when does the cert at cert_path start being valid ?
train
false
49,787
def format_preserving_redirect(request, target, get_dict=None): my_get_dict = QueryDict('', mutable=True) if get_dict: my_get_dict.update(get_dict) if is_jframe_request(request): logging.info(('JFrame redirection' + target)) my_get_dict['format'] = 'embed' elif request.ajax: my_get_dict['format'] = 'json' param = my_get_dict.urlencode() if param: if ('?' not in target): param = ('?' + param) else: param = ('&' + param) return HttpResponseRedirect((target + param))
[ "def", "format_preserving_redirect", "(", "request", ",", "target", ",", "get_dict", "=", "None", ")", ":", "my_get_dict", "=", "QueryDict", "(", "''", ",", "mutable", "=", "True", ")", "if", "get_dict", ":", "my_get_dict", ".", "update", "(", "get_dict", ")", "if", "is_jframe_request", "(", "request", ")", ":", "logging", ".", "info", "(", "(", "'JFrame redirection'", "+", "target", ")", ")", "my_get_dict", "[", "'format'", "]", "=", "'embed'", "elif", "request", ".", "ajax", ":", "my_get_dict", "[", "'format'", "]", "=", "'json'", "param", "=", "my_get_dict", ".", "urlencode", "(", ")", "if", "param", ":", "if", "(", "'?'", "not", "in", "target", ")", ":", "param", "=", "(", "'?'", "+", "param", ")", "else", ":", "param", "=", "(", "'&'", "+", "param", ")", "return", "HttpResponseRedirect", "(", "(", "target", "+", "param", ")", ")" ]
if request represents an ajax or embeddable "format" , preserve that format in the redirect .
train
false
49,788
def _asarray_2d_null_rows(x): x = np.asarray(x) if (x.ndim == 1): x = x[:, None] return np.any(isnull(x), axis=1)[:, None]
[ "def", "_asarray_2d_null_rows", "(", "x", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "if", "(", "x", ".", "ndim", "==", "1", ")", ":", "x", "=", "x", "[", ":", ",", "None", "]", "return", "np", ".", "any", "(", "isnull", "(", "x", ")", ",", "axis", "=", "1", ")", "[", ":", ",", "None", "]" ]
makes sure input is an array and is 2d .
train
false
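A quick sketch of the shape contract, assuming isnull is pandas' null test as the snippet suggests:

import numpy as np

rows = _asarray_2d_null_rows([1.0, np.nan, 3.0])  # 1-d input is promoted to 2-d
print(rows.ravel())  # [False  True False]; the result keeps shape (3, 1)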
49,789
def mm(value): return dpi2px(value, 'mm')
[ "def", "mm", "(", "value", ")", ":", "return", "dpi2px", "(", "value", ",", "'mm'", ")" ]
convert from millimeters to pixels .
train
false
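dpi2px itself is not shown; a hedged approximation of what such a helper does for millimetres, assuming a 96-dpi default that the real implementation may replace with the device's reported density:

def dpi2px(value, ext, dpi=96.0):
    # hypothetical: 1 inch = 25.4 mm, so mm -> px scales by dpi / 25.4
    if ext == 'mm':
        return value * dpi / 25.4
    raise ValueError('unsupported unit: %s' % ext)

print(dpi2px(10, 'mm'))  # ~37.8 px at 96 dpi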
49,790
def CompareEntityPbByKey(a, b): return cmp(datastore_types.Key._FromPb(a.key()), datastore_types.Key._FromPb(b.key()))
[ "def", "CompareEntityPbByKey", "(", "a", ",", "b", ")", ":", "return", "cmp", "(", "datastore_types", ".", "Key", ".", "_FromPb", "(", "a", ".", "key", "(", ")", ")", ",", "datastore_types", ".", "Key", ".", "_FromPb", "(", "b", ".", "key", "(", ")", ")", ")" ]
compare two entity protobufs by key .
train
false
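This is a Python 2 cmp-style comparator; under Python 3 it would plug into sorting via functools.cmp_to_key — a generic sketch of the pattern, since the entity protobufs themselves are App Engine internals:

import functools

def cmp(a, b):  # Python 2 builtin, recreated for the sketch
    return (a > b) - (a < b)

items = [3, 1, 2]
items.sort(key=functools.cmp_to_key(cmp))  # CompareEntityPbByKey fills the same slot
print(items)  # [1, 2, 3]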
49,791
@task def unindex_documents(ids, index_pk): cls = WikiDocumentType es = cls.get_connection('indexing') index = Index.objects.get(pk=index_pk) cls.bulk_delete(ids, es=es, index=index.prefixed_name)
[ "@", "task", "def", "unindex_documents", "(", "ids", ",", "index_pk", ")", ":", "cls", "=", "WikiDocumentType", "es", "=", "cls", ".", "get_connection", "(", "'indexing'", ")", "index", "=", "Index", ".", "objects", ".", "get", "(", "pk", "=", "index_pk", ")", "cls", ".", "bulk_delete", "(", "ids", ",", "es", "=", "es", ",", "index", "=", "index", ".", "prefixed_name", ")" ]
delete a list of documents from the provided index .
train
false
49,792
def copy_proto_go_source(target, source, env): shutil.copy2(str(source[0]), str(target[0])) return None
[ "def", "copy_proto_go_source", "(", "target", ",", "source", ",", "env", ")", ":", "shutil", ".", "copy2", "(", "str", "(", "source", "[", "0", "]", ")", ",", "str", "(", "target", "[", "0", "]", ")", ")", "return", "None" ]
copy go source file generated by protobuf into go standard directory .
train
false