id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
48,041
def request_repair(): path = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST) try: f = open(path, 'w') f.write('\n') f.close() except: pass
[ "def", "request_repair", "(", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "cfg", ".", "admin_dir", ".", "get_path", "(", ")", ",", "REPAIR_REQUEST", ")", "try", ":", "f", "=", "open", "(", "path", ",", "'w'", ")", "f", ".", "write", "(", "'\\n'", ")", "f", ".", "close", "(", ")", "except", ":", "pass" ]
request a full repair on next restart .
train
false
48,044
def send_new_session_organizer(email, event_name, link): message_settings = MessageSettings.query.filter_by(action=NEW_SESSION).first() if ((not message_settings) or (message_settings.mail_status == 1)): send_email(to=email, action=NEW_SESSION, subject=MAILS[NEW_SESSION]['subject'].format(event_name=event_name), html=MAILS[NEW_SESSION]['message'].format(email=str(email), event_name=str(event_name), link=link))
[ "def", "send_new_session_organizer", "(", "email", ",", "event_name", ",", "link", ")", ":", "message_settings", "=", "MessageSettings", ".", "query", ".", "filter_by", "(", "action", "=", "NEW_SESSION", ")", ".", "first", "(", ")", "if", "(", "(", "not", "message_settings", ")", "or", "(", "message_settings", ".", "mail_status", "==", "1", ")", ")", ":", "send_email", "(", "to", "=", "email", ",", "action", "=", "NEW_SESSION", ",", "subject", "=", "MAILS", "[", "NEW_SESSION", "]", "[", "'subject'", "]", ".", "format", "(", "event_name", "=", "event_name", ")", ",", "html", "=", "MAILS", "[", "NEW_SESSION", "]", "[", "'message'", "]", ".", "format", "(", "email", "=", "str", "(", "email", ")", ",", "event_name", "=", "str", "(", "event_name", ")", ",", "link", "=", "link", ")", ")" ]
send email after new sessions proposal .
train
false
48,045
def test_magic_magic(): ip = get_ipython() with capture_output() as captured: ip.magic('magic') stdout = captured.stdout nt.assert_in('%magic', stdout) nt.assert_in('IPython', stdout) nt.assert_in('Available', stdout)
[ "def", "test_magic_magic", "(", ")", ":", "ip", "=", "get_ipython", "(", ")", "with", "capture_output", "(", ")", "as", "captured", ":", "ip", ".", "magic", "(", "'magic'", ")", "stdout", "=", "captured", ".", "stdout", "nt", ".", "assert_in", "(", "'%magic'", ",", "stdout", ")", "nt", ".", "assert_in", "(", "'IPython'", ",", "stdout", ")", "nt", ".", "assert_in", "(", "'Available'", ",", "stdout", ")" ]
test %magic .
train
false
48,046
def func_name(x): typ = type(x) if str(typ).startswith("<type '"): typ = str(typ).split("'")[1].split("'")[0] return getattr(getattr(x, 'func', x), '__name__', typ)
[ "def", "func_name", "(", "x", ")", ":", "typ", "=", "type", "(", "x", ")", "if", "str", "(", "typ", ")", ".", "startswith", "(", "\"<type '\"", ")", ":", "typ", "=", "str", "(", "typ", ")", ".", "split", "(", "\"'\"", ")", "[", "1", "]", ".", "split", "(", "\"'\"", ")", "[", "0", "]", "return", "getattr", "(", "getattr", "(", "x", ",", "'func'", ",", "x", ")", ",", "'__name__'", ",", "typ", ")" ]
return function name of x else the type(x) .
train
false
48,047
def linear_assignment(X): indices = _hungarian(X).tolist() indices.sort() indices = np.array(indices, dtype=int) indices.shape = ((-1), 2) return indices
[ "def", "linear_assignment", "(", "X", ")", ":", "indices", "=", "_hungarian", "(", "X", ")", ".", "tolist", "(", ")", "indices", ".", "sort", "(", ")", "indices", "=", "np", ".", "array", "(", "indices", ",", "dtype", "=", "int", ")", "indices", ".", "shape", "=", "(", "(", "-", "1", ")", ",", "2", ")", "return", "indices" ]
solve the linear assignment problem using the hungarian algorithm .
train
false
48,048
def list_installed(): cmd = 'Get-WindowsFeature -ErrorAction SilentlyContinue -WarningAction SilentlyContinue | Select DisplayName,Name,Installed' features = _pshell_json(cmd) ret = {} for entry in features: if entry['Installed']: ret[entry['Name']] = entry['DisplayName'] return ret
[ "def", "list_installed", "(", ")", ":", "cmd", "=", "'Get-WindowsFeature -ErrorAction SilentlyContinue -WarningAction SilentlyContinue | Select DisplayName,Name,Installed'", "features", "=", "_pshell_json", "(", "cmd", ")", "ret", "=", "{", "}", "for", "entry", "in", "features", ":", "if", "entry", "[", "'Installed'", "]", ":", "ret", "[", "entry", "[", "'Name'", "]", "]", "=", "entry", "[", "'DisplayName'", "]", "return", "ret" ]
list installed features .
train
true
48,049
def force_leave_swarm(client): while True: try: return client.swarm.leave(force=True) except docker.errors.APIError as e: if (e.explanation == 'context deadline exceeded'): continue else: return
[ "def", "force_leave_swarm", "(", "client", ")", ":", "while", "True", ":", "try", ":", "return", "client", ".", "swarm", ".", "leave", "(", "force", "=", "True", ")", "except", "docker", ".", "errors", ".", "APIError", "as", "e", ":", "if", "(", "e", ".", "explanation", "==", "'context deadline exceeded'", ")", ":", "continue", "else", ":", "return" ]
actually force leave a swarm .
train
false
48,050
def split_double_braces(input): double_open_brace_re = re.compile('(.*?[\\[\\{\\(,])(\\s*)([\\[\\{\\(])') double_close_brace_re = re.compile('(.*?[\\]\\}\\)],?)(\\s*)([\\]\\}\\)])') masked_input = mask_quotes(input) masked_input = mask_comments(masked_input) (output, mask_output) = do_split(input, masked_input, double_open_brace_re) (output, mask_output) = do_split(output, mask_output, double_close_brace_re) return output
[ "def", "split_double_braces", "(", "input", ")", ":", "double_open_brace_re", "=", "re", ".", "compile", "(", "'(.*?[\\\\[\\\\{\\\\(,])(\\\\s*)([\\\\[\\\\{\\\\(])'", ")", "double_close_brace_re", "=", "re", ".", "compile", "(", "'(.*?[\\\\]\\\\}\\\\)],?)(\\\\s*)([\\\\]\\\\}\\\\)])'", ")", "masked_input", "=", "mask_quotes", "(", "input", ")", "masked_input", "=", "mask_comments", "(", "masked_input", ")", "(", "output", ",", "mask_output", ")", "=", "do_split", "(", "input", ",", "masked_input", ",", "double_open_brace_re", ")", "(", "output", ",", "mask_output", ")", "=", "do_split", "(", "output", ",", "mask_output", ",", "double_close_brace_re", ")", "return", "output" ]
masks out the quotes and comments .
train
false
48,051
def is_ssl(pkt): if (set(pkt) == set(['00', '00', '00', '08', '04', 'd2', '16', '2f'])): return True return False
[ "def", "is_ssl", "(", "pkt", ")", ":", "if", "(", "set", "(", "pkt", ")", "==", "set", "(", "[", "'00'", ",", "'00'", ",", "'00'", ",", "'08'", ",", "'04'", ",", "'d2'", ",", "'16'", ",", "'2f'", "]", ")", ")", ":", "return", "True", "return", "False" ]
check if the packet is an ssl request .
train
false
48,052
def get_registered_from(registration): if registration.registered_from: return registration.registered_from_id else: first_log_id = db['node'].find_one({'_id': registration._id})['logs'][0] log = NodeLog.load(first_log_id) return (log.params.get('node') or log.params.get('project'))
[ "def", "get_registered_from", "(", "registration", ")", ":", "if", "registration", ".", "registered_from", ":", "return", "registration", ".", "registered_from_id", "else", ":", "first_log_id", "=", "db", "[", "'node'", "]", ".", "find_one", "(", "{", "'_id'", ":", "registration", ".", "_id", "}", ")", "[", "'logs'", "]", "[", "0", "]", "log", "=", "NodeLog", ".", "load", "(", "first_log_id", ")", "return", "(", "log", ".", "params", ".", "get", "(", "'node'", ")", "or", "log", ".", "params", ".", "get", "(", "'project'", ")", ")" ]
gets node registration was registered from .
train
false
48,053
def parse_cookies(http_cookie): if ('"' in http_cookie): cookie = Cookie.SimpleCookie() try: cookie.load(http_cookie) except Cookie.CookieError: cookie = Cookie.SimpleCookie() for attr_value in http_cookie.split(';'): try: cookie.load(attr_value) except Cookie.CookieError: pass cookies = dict(((k, urllib.unquote(v.value)) for (k, v) in cookie.iteritems())) else: cookies = {} for key_value in http_cookie.split(';'): key_value = key_value.split('=', 1) if (len(key_value) == 2): (key, value) = key_value cookies[key.strip()] = urllib.unquote(value.strip()) return cookies
[ "def", "parse_cookies", "(", "http_cookie", ")", ":", "if", "(", "'\"'", "in", "http_cookie", ")", ":", "cookie", "=", "Cookie", ".", "SimpleCookie", "(", ")", "try", ":", "cookie", ".", "load", "(", "http_cookie", ")", "except", "Cookie", ".", "CookieError", ":", "cookie", "=", "Cookie", ".", "SimpleCookie", "(", ")", "for", "attr_value", "in", "http_cookie", ".", "split", "(", "';'", ")", ":", "try", ":", "cookie", ".", "load", "(", "attr_value", ")", "except", "Cookie", ".", "CookieError", ":", "pass", "cookies", "=", "dict", "(", "(", "(", "k", ",", "urllib", ".", "unquote", "(", "v", ".", "value", ")", ")", "for", "(", "k", ",", "v", ")", "in", "cookie", ".", "iteritems", "(", ")", ")", ")", "else", ":", "cookies", "=", "{", "}", "for", "key_value", "in", "http_cookie", ".", "split", "(", "';'", ")", ":", "key_value", "=", "key_value", ".", "split", "(", "'='", ",", "1", ")", "if", "(", "len", "(", "key_value", ")", "==", "2", ")", ":", "(", "key", ",", "value", ")", "=", "key_value", "cookies", "[", "key", ".", "strip", "(", ")", "]", "=", "urllib", ".", "unquote", "(", "value", ".", "strip", "(", ")", ")", "return", "cookies" ]
parse a http_cookie header and return dict of cookie names and decoded values .
train
false
48,054
def fanout_cast_to_server(conf, context, server_params, topic, msg): return rpc_amqp.fanout_cast_to_server(conf, context, server_params, topic, msg, rpc_amqp.get_connection_pool(conf, Connection))
[ "def", "fanout_cast_to_server", "(", "conf", ",", "context", ",", "server_params", ",", "topic", ",", "msg", ")", ":", "return", "rpc_amqp", ".", "fanout_cast_to_server", "(", "conf", ",", "context", ",", "server_params", ",", "topic", ",", "msg", ",", "rpc_amqp", ".", "get_connection_pool", "(", "conf", ",", "Connection", ")", ")" ]
sends a message on a fanout exchange to a specific server .
train
false
48,055
def task_install_docker_plugin(distribution=None, package_source=PackageSource()): return task_package_install('clusterhq-flocker-docker-plugin', distribution, package_source)
[ "def", "task_install_docker_plugin", "(", "distribution", "=", "None", ",", "package_source", "=", "PackageSource", "(", ")", ")", ":", "return", "task_package_install", "(", "'clusterhq-flocker-docker-plugin'", ",", "distribution", ",", "package_source", ")" ]
install flocker docker plugin on a distribution .
train
false
48,056
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
48,058
def default_navigator_url(): from metadata.metadata_sites import get_navigator_server_url return (get_navigator_server_url() + '/api')
[ "def", "default_navigator_url", "(", ")", ":", "from", "metadata", ".", "metadata_sites", "import", "get_navigator_server_url", "return", "(", "get_navigator_server_url", "(", ")", "+", "'/api'", ")" ]
get from usual main hue config directory .
train
false
48,061
def deepmap(func, *seqs): if isinstance(seqs[0], (list, Iterator)): return [deepmap(func, *items) for items in zip(*seqs)] else: return func(*seqs)
[ "def", "deepmap", "(", "func", ",", "*", "seqs", ")", ":", "if", "isinstance", "(", "seqs", "[", "0", "]", ",", "(", "list", ",", "Iterator", ")", ")", ":", "return", "[", "deepmap", "(", "func", ",", "*", "items", ")", "for", "items", "in", "zip", "(", "*", "seqs", ")", "]", "else", ":", "return", "func", "(", "*", "seqs", ")" ]
apply function inside nested lists .
train
false
48,063
def trg_create(uid, res_type, res_id, cr): return WorkflowService.new(cr, uid, res_type, res_id).create()
[ "def", "trg_create", "(", "uid", ",", "res_type", ",", "res_id", ",", "cr", ")", ":", "return", "WorkflowService", ".", "new", "(", "cr", ",", "uid", ",", "res_type", ",", "res_id", ")", ".", "create", "(", ")" ]
create a new workflow instance .
train
false
48,064
def format_commit_lines(web_url, repo, base, tip): if web_url: rev_base_url = (web_url.rstrip('/') + '/rev/') commit_summaries = [] for rev in range(base, tip): rev_node = repo.changelog.node(rev) rev_ctx = repo.changectx(rev_node) one_liner = rev_ctx.description().split('\n')[0] if web_url: summary_url = (rev_base_url + str(rev_ctx)) summary = '* [{summary}]({url})'.format(summary=one_liner, url=summary_url) else: summary = '* {summary}'.format(summary=one_liner) commit_summaries.append(summary) return '\n'.join((summary for summary in commit_summaries))
[ "def", "format_commit_lines", "(", "web_url", ",", "repo", ",", "base", ",", "tip", ")", ":", "if", "web_url", ":", "rev_base_url", "=", "(", "web_url", ".", "rstrip", "(", "'/'", ")", "+", "'/rev/'", ")", "commit_summaries", "=", "[", "]", "for", "rev", "in", "range", "(", "base", ",", "tip", ")", ":", "rev_node", "=", "repo", ".", "changelog", ".", "node", "(", "rev", ")", "rev_ctx", "=", "repo", ".", "changectx", "(", "rev_node", ")", "one_liner", "=", "rev_ctx", ".", "description", "(", ")", ".", "split", "(", "'\\n'", ")", "[", "0", "]", "if", "web_url", ":", "summary_url", "=", "(", "rev_base_url", "+", "str", "(", "rev_ctx", ")", ")", "summary", "=", "'* [{summary}]({url})'", ".", "format", "(", "summary", "=", "one_liner", ",", "url", "=", "summary_url", ")", "else", ":", "summary", "=", "'* {summary}'", ".", "format", "(", "summary", "=", "one_liner", ")", "commit_summaries", ".", "append", "(", "summary", ")", "return", "'\\n'", ".", "join", "(", "(", "summary", "for", "summary", "in", "commit_summaries", ")", ")" ]
format the per-commit information for the message .
train
false
48,067
def data_callback_factory(variable): def callback(data): variable.write(data) return return callback
[ "def", "data_callback_factory", "(", "variable", ")", ":", "def", "callback", "(", "data", ")", ":", "variable", ".", "write", "(", "data", ")", "return", "return", "callback" ]
returns a callback suitable for use by the ftp library .
train
false
48,068
def ipart(x): return np.modf(x)[1]
[ "def", "ipart", "(", "x", ")", ":", "return", "np", ".", "modf", "(", "x", ")", "[", "1", "]" ]
return integer part of given number .
train
false
48,069
@yield_once def _iter_alternatives(pattern): (start_pos, end_pos) = _boundary_of_alternatives_indices(pattern) if (None in (start_pos, end_pos)): (yield pattern) else: for choice in _iter_choices(pattern[start_pos:end_pos]): variant = ((pattern[:(start_pos - 1)] + choice) + pattern[(end_pos + 1):]) for glob_pattern in _iter_alternatives(variant): (yield glob_pattern)
[ "@", "yield_once", "def", "_iter_alternatives", "(", "pattern", ")", ":", "(", "start_pos", ",", "end_pos", ")", "=", "_boundary_of_alternatives_indices", "(", "pattern", ")", "if", "(", "None", "in", "(", "start_pos", ",", "end_pos", ")", ")", ":", "(", "yield", "pattern", ")", "else", ":", "for", "choice", "in", "_iter_choices", "(", "pattern", "[", "start_pos", ":", "end_pos", "]", ")", ":", "variant", "=", "(", "(", "pattern", "[", ":", "(", "start_pos", "-", "1", ")", "]", "+", "choice", ")", "+", "pattern", "[", "(", "end_pos", "+", "1", ")", ":", "]", ")", "for", "glob_pattern", "in", "_iter_alternatives", "(", "variant", ")", ":", "(", "yield", "glob_pattern", ")" ]
iterates through all glob patterns that can be obtaines by combination of all choices for each alternative .
train
false
48,070
def _fake_run_horcmgr(*args): return vsp_horcm._HORCM_RUNNING
[ "def", "_fake_run_horcmgr", "(", "*", "args", ")", ":", "return", "vsp_horcm", ".", "_HORCM_RUNNING" ]
assume cci is running .
train
false
48,072
def _submit_create_and_load(request, create_hql, table_name, path, load_data, database): on_success_params = QueryDict('', mutable=True) app_name = get_app_name(request) if (load_data == 'IMPORT'): on_success_params['table'] = table_name on_success_params['path'] = path on_success_url = ((reverse((app_name + ':load_after_create'), kwargs={'database': database}) + '?') + on_success_params.urlencode()) else: on_success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': table_name}) query = hql_query(create_hql, database=database) return execute_directly(request, query, on_success_url=on_success_url, on_success_params=on_success_params)
[ "def", "_submit_create_and_load", "(", "request", ",", "create_hql", ",", "table_name", ",", "path", ",", "load_data", ",", "database", ")", ":", "on_success_params", "=", "QueryDict", "(", "''", ",", "mutable", "=", "True", ")", "app_name", "=", "get_app_name", "(", "request", ")", "if", "(", "load_data", "==", "'IMPORT'", ")", ":", "on_success_params", "[", "'table'", "]", "=", "table_name", "on_success_params", "[", "'path'", "]", "=", "path", "on_success_url", "=", "(", "(", "reverse", "(", "(", "app_name", "+", "':load_after_create'", ")", ",", "kwargs", "=", "{", "'database'", ":", "database", "}", ")", "+", "'?'", ")", "+", "on_success_params", ".", "urlencode", "(", ")", ")", "else", ":", "on_success_url", "=", "reverse", "(", "'metastore:describe_table'", ",", "kwargs", "=", "{", "'database'", ":", "database", ",", "'table'", ":", "table_name", "}", ")", "query", "=", "hql_query", "(", "create_hql", ",", "database", "=", "database", ")", "return", "execute_directly", "(", "request", ",", "query", ",", "on_success_url", "=", "on_success_url", ",", "on_success_params", "=", "on_success_params", ")" ]
submit the table creation .
train
false
48,074
def _check_recursive(paths, reporter): num_warnings = 0 for path in api.iterSourceCode(paths): if path.endswith('.py'): num_warnings += api.checkPath(path, reporter) return num_warnings
[ "def", "_check_recursive", "(", "paths", ",", "reporter", ")", ":", "num_warnings", "=", "0", "for", "path", "in", "api", ".", "iterSourceCode", "(", "paths", ")", ":", "if", "path", ".", "endswith", "(", "'.py'", ")", ":", "num_warnings", "+=", "api", ".", "checkPath", "(", "path", ",", "reporter", ")", "return", "num_warnings" ]
the builtin recursive checker tries to check .
train
false
48,075
def _lint_js_files(node_path, jscs_path, config_jscsrc, files_to_lint, stdout, result): start_time = time.time() num_files_with_errors = 0 num_js_files = len(files_to_lint) if (not files_to_lint): result.put('') print 'There are no JavaScript files to lint.' return print 'Total js files: ', num_js_files jscs_cmd_args = [node_path, jscs_path, config_jscsrc] for (_, filename) in enumerate(files_to_lint): print 'Linting: ', filename proc_args = (jscs_cmd_args + [filename]) proc = subprocess.Popen(proc_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (linter_stdout, linter_stderr) = proc.communicate() if linter_stderr: print 'LINTER FAILED' print linter_stderr sys.exit(1) if linter_stdout: num_files_with_errors += 1 stdout.put(linter_stdout) if num_files_with_errors: result.put(('%s %s JavaScript files' % (_MESSAGE_TYPE_FAILED, num_files_with_errors))) else: result.put(('%s %s JavaScript files linted (%.1f secs)' % (_MESSAGE_TYPE_SUCCESS, num_js_files, (time.time() - start_time)))) print 'Js linting finished.'
[ "def", "_lint_js_files", "(", "node_path", ",", "jscs_path", ",", "config_jscsrc", ",", "files_to_lint", ",", "stdout", ",", "result", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "num_files_with_errors", "=", "0", "num_js_files", "=", "len", "(", "files_to_lint", ")", "if", "(", "not", "files_to_lint", ")", ":", "result", ".", "put", "(", "''", ")", "print", "'There are no JavaScript files to lint.'", "return", "print", "'Total js files: '", ",", "num_js_files", "jscs_cmd_args", "=", "[", "node_path", ",", "jscs_path", ",", "config_jscsrc", "]", "for", "(", "_", ",", "filename", ")", "in", "enumerate", "(", "files_to_lint", ")", ":", "print", "'Linting: '", ",", "filename", "proc_args", "=", "(", "jscs_cmd_args", "+", "[", "filename", "]", ")", "proc", "=", "subprocess", ".", "Popen", "(", "proc_args", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "linter_stdout", ",", "linter_stderr", ")", "=", "proc", ".", "communicate", "(", ")", "if", "linter_stderr", ":", "print", "'LINTER FAILED'", "print", "linter_stderr", "sys", ".", "exit", "(", "1", ")", "if", "linter_stdout", ":", "num_files_with_errors", "+=", "1", "stdout", ".", "put", "(", "linter_stdout", ")", "if", "num_files_with_errors", ":", "result", ".", "put", "(", "(", "'%s %s JavaScript files'", "%", "(", "_MESSAGE_TYPE_FAILED", ",", "num_files_with_errors", ")", ")", ")", "else", ":", "result", ".", "put", "(", "(", "'%s %s JavaScript files linted (%.1f secs)'", "%", "(", "_MESSAGE_TYPE_SUCCESS", ",", "num_js_files", ",", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", ")", ")", ")", "print", "'Js linting finished.'" ]
prints a list of lint errors in the given list of javascript files .
train
false
48,076
def _KNV0_loop(ker_pole, transfer_matrix, poles, B, maxiter, rtol): stop = False nb_try = 0 while ((nb_try < maxiter) and (not stop)): det_transfer_matrixb = np.abs(np.linalg.det(transfer_matrix)) for j in range(B.shape[0]): _KNV0(B, ker_pole, transfer_matrix, j, poles) det_transfer_matrix = np.max((np.sqrt(np.spacing(1)), np.abs(np.linalg.det(transfer_matrix)))) cur_rtol = np.abs(((det_transfer_matrix - det_transfer_matrixb) / det_transfer_matrix)) if ((cur_rtol < rtol) and (det_transfer_matrix > np.sqrt(np.spacing(1)))): stop = True nb_try += 1 return (stop, cur_rtol, nb_try)
[ "def", "_KNV0_loop", "(", "ker_pole", ",", "transfer_matrix", ",", "poles", ",", "B", ",", "maxiter", ",", "rtol", ")", ":", "stop", "=", "False", "nb_try", "=", "0", "while", "(", "(", "nb_try", "<", "maxiter", ")", "and", "(", "not", "stop", ")", ")", ":", "det_transfer_matrixb", "=", "np", ".", "abs", "(", "np", ".", "linalg", ".", "det", "(", "transfer_matrix", ")", ")", "for", "j", "in", "range", "(", "B", ".", "shape", "[", "0", "]", ")", ":", "_KNV0", "(", "B", ",", "ker_pole", ",", "transfer_matrix", ",", "j", ",", "poles", ")", "det_transfer_matrix", "=", "np", ".", "max", "(", "(", "np", ".", "sqrt", "(", "np", ".", "spacing", "(", "1", ")", ")", ",", "np", ".", "abs", "(", "np", ".", "linalg", ".", "det", "(", "transfer_matrix", ")", ")", ")", ")", "cur_rtol", "=", "np", ".", "abs", "(", "(", "(", "det_transfer_matrix", "-", "det_transfer_matrixb", ")", "/", "det_transfer_matrix", ")", ")", "if", "(", "(", "cur_rtol", "<", "rtol", ")", "and", "(", "det_transfer_matrix", ">", "np", ".", "sqrt", "(", "np", ".", "spacing", "(", "1", ")", ")", ")", ")", ":", "stop", "=", "True", "nb_try", "+=", "1", "return", "(", "stop", ",", "cur_rtol", ",", "nb_try", ")" ]
loop over all poles one by one and apply knv method 0 algorithm .
train
false
48,077
def flow_stats_to_list(flowstats): stats = [] for stat in flowstats: s = {} stats.append(s) for (k, v) in fields_of(stat).iteritems(): if (k == 'length'): continue if k.startswith('pad'): continue if (k == 'match'): v = match_to_dict(v) elif (k == 'actions'): v = [action_to_dict(a) for a in v] s[k] = v return stats
[ "def", "flow_stats_to_list", "(", "flowstats", ")", ":", "stats", "=", "[", "]", "for", "stat", "in", "flowstats", ":", "s", "=", "{", "}", "stats", ".", "append", "(", "s", ")", "for", "(", "k", ",", "v", ")", "in", "fields_of", "(", "stat", ")", ".", "iteritems", "(", ")", ":", "if", "(", "k", "==", "'length'", ")", ":", "continue", "if", "k", ".", "startswith", "(", "'pad'", ")", ":", "continue", "if", "(", "k", "==", "'match'", ")", ":", "v", "=", "match_to_dict", "(", "v", ")", "elif", "(", "k", "==", "'actions'", ")", ":", "v", "=", "[", "action_to_dict", "(", "a", ")", "for", "a", "in", "v", "]", "s", "[", "k", "]", "=", "v", "return", "stats" ]
takes a list of flow stats .
train
false
48,079
def permutation_test_score(estimator, X, y, groups=None, cv=None, n_permutations=100, n_jobs=1, random_state=0, verbose=0, scoring=None): (X, y, groups) = indexable(X, y, groups) cv = check_cv(cv, y, classifier=is_classifier(estimator)) scorer = check_scoring(estimator, scoring=scoring) random_state = check_random_state(random_state) score = _permutation_test_score(clone(estimator), X, y, groups, cv, scorer) permutation_scores = Parallel(n_jobs=n_jobs, verbose=verbose)((delayed(_permutation_test_score)(clone(estimator), X, _shuffle(y, groups, random_state), groups, cv, scorer) for _ in range(n_permutations))) permutation_scores = np.array(permutation_scores) pvalue = ((np.sum((permutation_scores >= score)) + 1.0) / (n_permutations + 1)) return (score, permutation_scores, pvalue)
[ "def", "permutation_test_score", "(", "estimator", ",", "X", ",", "y", ",", "groups", "=", "None", ",", "cv", "=", "None", ",", "n_permutations", "=", "100", ",", "n_jobs", "=", "1", ",", "random_state", "=", "0", ",", "verbose", "=", "0", ",", "scoring", "=", "None", ")", ":", "(", "X", ",", "y", ",", "groups", ")", "=", "indexable", "(", "X", ",", "y", ",", "groups", ")", "cv", "=", "check_cv", "(", "cv", ",", "y", ",", "classifier", "=", "is_classifier", "(", "estimator", ")", ")", "scorer", "=", "check_scoring", "(", "estimator", ",", "scoring", "=", "scoring", ")", "random_state", "=", "check_random_state", "(", "random_state", ")", "score", "=", "_permutation_test_score", "(", "clone", "(", "estimator", ")", ",", "X", ",", "y", ",", "groups", ",", "cv", ",", "scorer", ")", "permutation_scores", "=", "Parallel", "(", "n_jobs", "=", "n_jobs", ",", "verbose", "=", "verbose", ")", "(", "(", "delayed", "(", "_permutation_test_score", ")", "(", "clone", "(", "estimator", ")", ",", "X", ",", "_shuffle", "(", "y", ",", "groups", ",", "random_state", ")", ",", "groups", ",", "cv", ",", "scorer", ")", "for", "_", "in", "range", "(", "n_permutations", ")", ")", ")", "permutation_scores", "=", "np", ".", "array", "(", "permutation_scores", ")", "pvalue", "=", "(", "(", "np", ".", "sum", "(", "(", "permutation_scores", ">=", "score", ")", ")", "+", "1.0", ")", "/", "(", "n_permutations", "+", "1", ")", ")", "return", "(", "score", ",", "permutation_scores", ",", "pvalue", ")" ]
evaluate the significance of a cross-validated score with permutations read more in the :ref:user guide <cross_validation> .
train
true
48,080
def arg_of_sigmoid(Y_hat): assert hasattr(Y_hat, 'owner') owner = Y_hat.owner assert (owner is not None) op = owner.op if isinstance(op, Print): assert (len(owner.inputs) == 1) (Y_hat,) = owner.inputs owner = Y_hat.owner op = owner.op success = False if isinstance(op, T.Elemwise): if isinstance(op.scalar_op, T.nnet.sigm.ScalarSigmoid): success = True if (not success): raise TypeError(((('Expected Y_hat to be the output of a sigmoid, but it appears to be the output of ' + str(op)) + ' of type ') + str(type(op)))) (z,) = owner.inputs assert (z.ndim == 2) return z
[ "def", "arg_of_sigmoid", "(", "Y_hat", ")", ":", "assert", "hasattr", "(", "Y_hat", ",", "'owner'", ")", "owner", "=", "Y_hat", ".", "owner", "assert", "(", "owner", "is", "not", "None", ")", "op", "=", "owner", ".", "op", "if", "isinstance", "(", "op", ",", "Print", ")", ":", "assert", "(", "len", "(", "owner", ".", "inputs", ")", "==", "1", ")", "(", "Y_hat", ",", ")", "=", "owner", ".", "inputs", "owner", "=", "Y_hat", ".", "owner", "op", "=", "owner", ".", "op", "success", "=", "False", "if", "isinstance", "(", "op", ",", "T", ".", "Elemwise", ")", ":", "if", "isinstance", "(", "op", ".", "scalar_op", ",", "T", ".", "nnet", ".", "sigm", ".", "ScalarSigmoid", ")", ":", "success", "=", "True", "if", "(", "not", "success", ")", ":", "raise", "TypeError", "(", "(", "(", "(", "'Expected Y_hat to be the output of a sigmoid, but it appears to be the output of '", "+", "str", "(", "op", ")", ")", "+", "' of type '", ")", "+", "str", "(", "type", "(", "op", ")", ")", ")", ")", "(", "z", ",", ")", "=", "owner", ".", "inputs", "assert", "(", "z", ".", "ndim", "==", "2", ")", "return", "z" ]
given the output of a call to theano .
train
false
48,081
def cleaningTrack(track, minTrackLength=3): nFrames = track.size cleanTrack = np.copy(track) trackBegs = (np.nonzero(((track[:(nFrames - 1)] <= 0) & (track[1:] > 0)))[0] + 1) if (track[0] > 0): trackBegs = np.insert(trackBegs, 0, 0) trackEnds = (np.nonzero(((track[:(nFrames - 1)] > 0) & (track[1:] <= 0)))[0] + 1) if (track[(nFrames - 1)] > 0): trackEnds = np.append(trackEnds, (nFrames - 1)) trackLengths = ((1 + trackEnds) - trackBegs) for (i, j) in zip(trackBegs, trackLengths): if (j <= minTrackLength): cleanTrack[i:(i + j)] = 0 return cleanTrack
[ "def", "cleaningTrack", "(", "track", ",", "minTrackLength", "=", "3", ")", ":", "nFrames", "=", "track", ".", "size", "cleanTrack", "=", "np", ".", "copy", "(", "track", ")", "trackBegs", "=", "(", "np", ".", "nonzero", "(", "(", "(", "track", "[", ":", "(", "nFrames", "-", "1", ")", "]", "<=", "0", ")", "&", "(", "track", "[", "1", ":", "]", ">", "0", ")", ")", ")", "[", "0", "]", "+", "1", ")", "if", "(", "track", "[", "0", "]", ">", "0", ")", ":", "trackBegs", "=", "np", ".", "insert", "(", "trackBegs", ",", "0", ",", "0", ")", "trackEnds", "=", "(", "np", ".", "nonzero", "(", "(", "(", "track", "[", ":", "(", "nFrames", "-", "1", ")", "]", ">", "0", ")", "&", "(", "track", "[", "1", ":", "]", "<=", "0", ")", ")", ")", "[", "0", "]", "+", "1", ")", "if", "(", "track", "[", "(", "nFrames", "-", "1", ")", "]", ">", "0", ")", ":", "trackEnds", "=", "np", ".", "append", "(", "trackEnds", ",", "(", "nFrames", "-", "1", ")", ")", "trackLengths", "=", "(", "(", "1", "+", "trackEnds", ")", "-", "trackBegs", ")", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "trackBegs", ",", "trackLengths", ")", ":", "if", "(", "j", "<=", "minTrackLength", ")", ":", "cleanTrack", "[", "i", ":", "(", "i", "+", "j", ")", "]", "=", "0", "return", "cleanTrack" ]
delete fragments of one single track smaller than mintracklength track: array of values; mintracklength: minimum duration of tracks in number of frames returns cleantrack: array of clean values .
train
false
48,082
@asyncio.coroutine def async_scan_devices_mock(scanner): return []
[ "@", "asyncio", ".", "coroutine", "def", "async_scan_devices_mock", "(", "scanner", ")", ":", "return", "[", "]" ]
mock async_scan_devices .
train
false
48,083
def view_student_survey(user, survey_name, course=None, redirect_url=None, is_required=False, skip_redirect_url=None): redirect_url = (redirect_url if redirect_url else reverse('dashboard')) dashboard_redirect_url = reverse('dashboard') skip_redirect_url = (skip_redirect_url if skip_redirect_url else dashboard_redirect_url) survey = SurveyForm.get(survey_name, throw_if_not_found=False) if (not survey): return HttpResponseRedirect(redirect_url) existing_answers = survey.get_answers(user=user).get(user.id, {}) platform_name = configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME) context = {'existing_data_json': json.dumps(existing_answers), 'postback_url': reverse('submit_answers', args=[survey_name]), 'redirect_url': redirect_url, 'skip_redirect_url': skip_redirect_url, 'dashboard_redirect_url': dashboard_redirect_url, 'survey_form': survey.form, 'is_required': is_required, 'mail_to_link': configuration_helpers.get_value('email_from_address', settings.CONTACT_EMAIL), 'platform_name': platform_name, 'course': course} return render_to_response('survey/survey.html', context)
[ "def", "view_student_survey", "(", "user", ",", "survey_name", ",", "course", "=", "None", ",", "redirect_url", "=", "None", ",", "is_required", "=", "False", ",", "skip_redirect_url", "=", "None", ")", ":", "redirect_url", "=", "(", "redirect_url", "if", "redirect_url", "else", "reverse", "(", "'dashboard'", ")", ")", "dashboard_redirect_url", "=", "reverse", "(", "'dashboard'", ")", "skip_redirect_url", "=", "(", "skip_redirect_url", "if", "skip_redirect_url", "else", "dashboard_redirect_url", ")", "survey", "=", "SurveyForm", ".", "get", "(", "survey_name", ",", "throw_if_not_found", "=", "False", ")", "if", "(", "not", "survey", ")", ":", "return", "HttpResponseRedirect", "(", "redirect_url", ")", "existing_answers", "=", "survey", ".", "get_answers", "(", "user", "=", "user", ")", ".", "get", "(", "user", ".", "id", ",", "{", "}", ")", "platform_name", "=", "configuration_helpers", ".", "get_value", "(", "'platform_name'", ",", "settings", ".", "PLATFORM_NAME", ")", "context", "=", "{", "'existing_data_json'", ":", "json", ".", "dumps", "(", "existing_answers", ")", ",", "'postback_url'", ":", "reverse", "(", "'submit_answers'", ",", "args", "=", "[", "survey_name", "]", ")", ",", "'redirect_url'", ":", "redirect_url", ",", "'skip_redirect_url'", ":", "skip_redirect_url", ",", "'dashboard_redirect_url'", ":", "dashboard_redirect_url", ",", "'survey_form'", ":", "survey", ".", "form", ",", "'is_required'", ":", "is_required", ",", "'mail_to_link'", ":", "configuration_helpers", ".", "get_value", "(", "'email_from_address'", ",", "settings", ".", "CONTACT_EMAIL", ")", ",", "'platform_name'", ":", "platform_name", ",", "'course'", ":", "course", "}", "return", "render_to_response", "(", "'survey/survey.html'", ",", "context", ")" ]
shared utility method to render a survey form note: this method is shared between the survey and courseware djangoapps .
train
false
48,084
def response_headers(headers=None, debug=False): if debug: cherrypy.log(('Setting response headers: %s' % repr(headers)), 'TOOLS.RESPONSE_HEADERS') for (name, value) in (headers or []): cherrypy.serving.response.headers[name] = value
[ "def", "response_headers", "(", "headers", "=", "None", ",", "debug", "=", "False", ")", ":", "if", "debug", ":", "cherrypy", ".", "log", "(", "(", "'Setting response headers: %s'", "%", "repr", "(", "headers", ")", ")", ",", "'TOOLS.RESPONSE_HEADERS'", ")", "for", "(", "name", ",", "value", ")", "in", "(", "headers", "or", "[", "]", ")", ":", "cherrypy", ".", "serving", ".", "response", ".", "headers", "[", "name", "]", "=", "value" ]
set headers on the response .
train
false
48,085
def test_multi_upload(): group = worker.WalTransferGroup(FakeWalUploader()) segments = list(prepare_multi_upload_segments()) for seg in segments: group.start(seg) group.join() for seg in segments: assert success(seg)
[ "def", "test_multi_upload", "(", ")", ":", "group", "=", "worker", ".", "WalTransferGroup", "(", "FakeWalUploader", "(", ")", ")", "segments", "=", "list", "(", "prepare_multi_upload_segments", "(", ")", ")", "for", "seg", "in", "segments", ":", "group", ".", "start", "(", "seg", ")", "group", ".", "join", "(", ")", "for", "seg", "in", "segments", ":", "assert", "success", "(", "seg", ")" ]
model a case with upload concurrency .
train
false
48,086
def store_file_content(fileName, content, addExtension=True, newFile=False): if (fileName == u''): raise Exception() ext = os.path.splitext(fileName)[(-1)][1:] if ((ext == u'') and addExtension): fileName += u'.py' if (newFile and file_exists(fileName)): raise NinjaFileExistsException(fileName) try: flags = (QtCore.QIODevice.WriteOnly | QtCore.QIODevice.Truncate) f = QtCore.QFile(fileName) if settings.use_platform_specific_eol(): flags |= QtCore.QIODevice.Text if (not f.open(flags)): raise NinjaIOException(f.errorString()) stream = QtCore.QTextStream(f) encoding = get_file_encoding(content) if encoding: stream.setCodec(encoding) encoded_stream = stream.codec().fromUnicode(content) f.write(encoded_stream) f.flush() f.close() except: raise return os.path.abspath(fileName)
[ "def", "store_file_content", "(", "fileName", ",", "content", ",", "addExtension", "=", "True", ",", "newFile", "=", "False", ")", ":", "if", "(", "fileName", "==", "u''", ")", ":", "raise", "Exception", "(", ")", "ext", "=", "os", ".", "path", ".", "splitext", "(", "fileName", ")", "[", "(", "-", "1", ")", "]", "[", "1", ":", "]", "if", "(", "(", "ext", "==", "u''", ")", "and", "addExtension", ")", ":", "fileName", "+=", "u'.py'", "if", "(", "newFile", "and", "file_exists", "(", "fileName", ")", ")", ":", "raise", "NinjaFileExistsException", "(", "fileName", ")", "try", ":", "flags", "=", "(", "QtCore", ".", "QIODevice", ".", "WriteOnly", "|", "QtCore", ".", "QIODevice", ".", "Truncate", ")", "f", "=", "QtCore", ".", "QFile", "(", "fileName", ")", "if", "settings", ".", "use_platform_specific_eol", "(", ")", ":", "flags", "|=", "QtCore", ".", "QIODevice", ".", "Text", "if", "(", "not", "f", ".", "open", "(", "flags", ")", ")", ":", "raise", "NinjaIOException", "(", "f", ".", "errorString", "(", ")", ")", "stream", "=", "QtCore", ".", "QTextStream", "(", "f", ")", "encoding", "=", "get_file_encoding", "(", "content", ")", "if", "encoding", ":", "stream", ".", "setCodec", "(", "encoding", ")", "encoded_stream", "=", "stream", ".", "codec", "(", ")", ".", "fromUnicode", "(", "content", ")", "f", ".", "write", "(", "encoded_stream", ")", "f", ".", "flush", "(", ")", "f", ".", "close", "(", ")", "except", ":", "raise", "return", "os", ".", "path", ".", "abspath", "(", "fileName", ")" ]
save content on disk with the given file name .
train
false
48,087
def is_skip_dir(skip_dirs, directory): for skip_dir in skip_dirs: skip_dir_regex = re.compile('(.*/)*{}(/.*)*'.format(re.escape(skip_dir))) if (skip_dir_regex.match(directory) is not None): return True return False
[ "def", "is_skip_dir", "(", "skip_dirs", ",", "directory", ")", ":", "for", "skip_dir", "in", "skip_dirs", ":", "skip_dir_regex", "=", "re", ".", "compile", "(", "'(.*/)*{}(/.*)*'", ".", "format", "(", "re", ".", "escape", "(", "skip_dir", ")", ")", ")", "if", "(", "skip_dir_regex", ".", "match", "(", "directory", ")", "is", "not", "None", ")", ":", "return", "True", "return", "False" ]
determines whether a directory should be skipped or linted .
train
false
48,088
@task(name='geonode.tasks.deletion.delete_map', queue='cleanup', expires=300) def delete_map(object_id): try: map_obj = Map.objects.get(id=object_id) except Map.DoesNotExist: return map_obj.layer_set.all().delete() map_obj.delete()
[ "@", "task", "(", "name", "=", "'geonode.tasks.deletion.delete_map'", ",", "queue", "=", "'cleanup'", ",", "expires", "=", "300", ")", "def", "delete_map", "(", "object_id", ")", ":", "try", ":", "map_obj", "=", "Map", ".", "objects", ".", "get", "(", "id", "=", "object_id", ")", "except", "Map", ".", "DoesNotExist", ":", "return", "map_obj", ".", "layer_set", ".", "all", "(", ")", ".", "delete", "(", ")", "map_obj", ".", "delete", "(", ")" ]
deletes a map and the associated map layers .
train
false
48,089
def revoke_role_from_user(role_db, user_db): role_assignment_db = UserRoleAssignment.get(user=user_db.name, role=role_db.name) result = UserRoleAssignment.delete(role_assignment_db) return result
[ "def", "revoke_role_from_user", "(", "role_db", ",", "user_db", ")", ":", "role_assignment_db", "=", "UserRoleAssignment", ".", "get", "(", "user", "=", "user_db", ".", "name", ",", "role", "=", "role_db", ".", "name", ")", "result", "=", "UserRoleAssignment", ".", "delete", "(", "role_assignment_db", ")", "return", "result" ]
revoke role from a user .
train
false
48,090
def diff_commits(parent, a, b): dlg = FileDiffDialog(parent, a=a, b=b) dlg.show() dlg.raise_() return (dlg.exec_() == QtWidgets.QDialog.Accepted)
[ "def", "diff_commits", "(", "parent", ",", "a", ",", "b", ")", ":", "dlg", "=", "FileDiffDialog", "(", "parent", ",", "a", "=", "a", ",", "b", "=", "b", ")", "dlg", ".", "show", "(", ")", "dlg", ".", "raise_", "(", ")", "return", "(", "dlg", ".", "exec_", "(", ")", "==", "QtWidgets", ".", "QDialog", ".", "Accepted", ")" ]
show a dialog for diffing two commits .
train
false
48,091
def _is_batch_all(batch, predicate): assert (not isinstance(batch, list)) if ((batch is None) or (isinstance(batch, tuple) and (len(batch) == 0))): return True if isinstance(batch, tuple): subbatch_results = tuple((_is_batch_all(b, predicate) for b in batch)) result = all(subbatch_results) assert (result == any(subbatch_results)), 'composite batch had a mixture of numeric and symbolic subbatches. This should never happen.' return result else: return predicate(batch)
[ "def", "_is_batch_all", "(", "batch", ",", "predicate", ")", ":", "assert", "(", "not", "isinstance", "(", "batch", ",", "list", ")", ")", "if", "(", "(", "batch", "is", "None", ")", "or", "(", "isinstance", "(", "batch", ",", "tuple", ")", "and", "(", "len", "(", "batch", ")", "==", "0", ")", ")", ")", ":", "return", "True", "if", "isinstance", "(", "batch", ",", "tuple", ")", ":", "subbatch_results", "=", "tuple", "(", "(", "_is_batch_all", "(", "b", ",", "predicate", ")", "for", "b", "in", "batch", ")", ")", "result", "=", "all", "(", "subbatch_results", ")", "assert", "(", "result", "==", "any", "(", "subbatch_results", ")", ")", ",", "'composite batch had a mixture of numeric and symbolic subbatches. This should never happen.'", "return", "result", "else", ":", "return", "predicate", "(", "batch", ")" ]
implementation of is_symbolic_batch() and is_numeric_batch() .
train
false
48,092
def _add_query_parameter(url, name, value): if (value is None): return url else: parsed = list(urlparse.urlparse(url)) q = parse_qsl(parsed[4]) q.append((name, value)) parsed[4] = urllib.urlencode(q) return urlparse.urlunparse(parsed)
[ "def", "_add_query_parameter", "(", "url", ",", "name", ",", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "url", "else", ":", "parsed", "=", "list", "(", "urlparse", ".", "urlparse", "(", "url", ")", ")", "q", "=", "parse_qsl", "(", "parsed", "[", "4", "]", ")", "q", ".", "append", "(", "(", "name", ",", "value", ")", ")", "parsed", "[", "4", "]", "=", "urllib", ".", "urlencode", "(", "q", ")", "return", "urlparse", ".", "urlunparse", "(", "parsed", ")" ]
adds a query parameter to a url args: url: string .
train
false
48,093
def split_cmdline(cmdline): (path, cmd) = os.path.split(cmdline[0]) arguments = ' '.join(cmdline[1:]).replace('\n', ' ') if (LINUX and any(((x in cmdline[0]) for x in ('chrome', 'chromium')))): try: (exe, arguments) = cmdline[0].split(' ', 1) (path, cmd) = os.path.split(exe) except ValueError: arguments = None return (path, cmd, arguments)
[ "def", "split_cmdline", "(", "cmdline", ")", ":", "(", "path", ",", "cmd", ")", "=", "os", ".", "path", ".", "split", "(", "cmdline", "[", "0", "]", ")", "arguments", "=", "' '", ".", "join", "(", "cmdline", "[", "1", ":", "]", ")", ".", "replace", "(", "'\\n'", ",", "' '", ")", "if", "(", "LINUX", "and", "any", "(", "(", "(", "x", "in", "cmdline", "[", "0", "]", ")", "for", "x", "in", "(", "'chrome'", ",", "'chromium'", ")", ")", ")", ")", ":", "try", ":", "(", "exe", ",", "arguments", ")", "=", "cmdline", "[", "0", "]", ".", "split", "(", "' '", ",", "1", ")", "(", "path", ",", "cmd", ")", "=", "os", ".", "path", ".", "split", "(", "exe", ")", "except", "ValueError", ":", "arguments", "=", "None", "return", "(", "path", ",", "cmd", ",", "arguments", ")" ]
return path .
train
false
48,094
def pager_print(expr, **settings): from pydoc import pager from locale import getpreferredencoding if ('num_columns' not in settings): settings['num_columns'] = 500000 pager(pretty(expr, **settings).encode(getpreferredencoding()))
[ "def", "pager_print", "(", "expr", ",", "**", "settings", ")", ":", "from", "pydoc", "import", "pager", "from", "locale", "import", "getpreferredencoding", "if", "(", "'num_columns'", "not", "in", "settings", ")", ":", "settings", "[", "'num_columns'", "]", "=", "500000", "pager", "(", "pretty", "(", "expr", ",", "**", "settings", ")", ".", "encode", "(", "getpreferredencoding", "(", ")", ")", ")" ]
prints expr using the pager .
train
false
48,096
def cert_get_cn(cert): return cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[0].value.strip()
[ "def", "cert_get_cn", "(", "cert", ")", ":", "return", "cert", ".", "subject", ".", "get_attributes_for_oid", "(", "x509", ".", "OID_COMMON_NAME", ")", "[", "0", "]", ".", "value", ".", "strip", "(", ")" ]
attempts to get a sane common name from a given certificate .
train
false
48,097
def build_company_name(name_dict, _emptyString=u''): name = name_dict.get('name') if (not name): return _emptyString country = name_dict.get('country') if (country is not None): name += (' %s' % country) return name
[ "def", "build_company_name", "(", "name_dict", ",", "_emptyString", "=", "u''", ")", ":", "name", "=", "name_dict", ".", "get", "(", "'name'", ")", "if", "(", "not", "name", ")", ":", "return", "_emptyString", "country", "=", "name_dict", ".", "get", "(", "'country'", ")", "if", "(", "country", "is", "not", "None", ")", ":", "name", "+=", "(", "' %s'", "%", "country", ")", "return", "name" ]
given a dictionary that represents a "long" imdb company name .
train
false
48,098
def expected_bar_values_2d(dates, asset_info, colname): if (colname == 'volume'): dtype = uint32 missing = 0 else: dtype = float64 missing = float('nan') assets = asset_info.index data = full((len(dates), len(assets)), missing, dtype=dtype) for (j, asset) in enumerate(assets): start = asset_start(asset_info, asset) end = asset_end(asset_info, asset) for (i, date) in enumerate(dates): if (not (start <= date <= end)): continue data[(i, j)] = expected_bar_value(asset, date, colname) return data
[ "def", "expected_bar_values_2d", "(", "dates", ",", "asset_info", ",", "colname", ")", ":", "if", "(", "colname", "==", "'volume'", ")", ":", "dtype", "=", "uint32", "missing", "=", "0", "else", ":", "dtype", "=", "float64", "missing", "=", "float", "(", "'nan'", ")", "assets", "=", "asset_info", ".", "index", "data", "=", "full", "(", "(", "len", "(", "dates", ")", ",", "len", "(", "assets", ")", ")", ",", "missing", ",", "dtype", "=", "dtype", ")", "for", "(", "j", ",", "asset", ")", "in", "enumerate", "(", "assets", ")", ":", "start", "=", "asset_start", "(", "asset_info", ",", "asset", ")", "end", "=", "asset_end", "(", "asset_info", ",", "asset", ")", "for", "(", "i", ",", "date", ")", "in", "enumerate", "(", "dates", ")", ":", "if", "(", "not", "(", "start", "<=", "date", "<=", "end", ")", ")", ":", "continue", "data", "[", "(", "i", ",", "j", ")", "]", "=", "expected_bar_value", "(", "asset", ",", "date", ",", "colname", ")", "return", "data" ]
return an 2d array containing cls .
train
true
48,099
@csrf_exempt def xqueue_callback(request, course_id, userid, mod_id, dispatch): data = request.POST.copy() for key in ['xqueue_header', 'xqueue_body']: if (key not in data): raise Http404 header = json.loads(data['xqueue_header']) if ((not isinstance(header, dict)) or ('lms_key' not in header)): raise Http404 course_key = CourseKey.from_string(course_id) with modulestore().bulk_operations(course_key): course = modulestore().get_course(course_key, depth=0) instance = load_single_xblock(request, userid, course_id, mod_id, course=course) data.update({'queuekey': header['lms_key']}) try: instance.handle_ajax(dispatch, data) instance.save() except: log.exception('error processing ajax call') raise return HttpResponse('')
[ "@", "csrf_exempt", "def", "xqueue_callback", "(", "request", ",", "course_id", ",", "userid", ",", "mod_id", ",", "dispatch", ")", ":", "data", "=", "request", ".", "POST", ".", "copy", "(", ")", "for", "key", "in", "[", "'xqueue_header'", ",", "'xqueue_body'", "]", ":", "if", "(", "key", "not", "in", "data", ")", ":", "raise", "Http404", "header", "=", "json", ".", "loads", "(", "data", "[", "'xqueue_header'", "]", ")", "if", "(", "(", "not", "isinstance", "(", "header", ",", "dict", ")", ")", "or", "(", "'lms_key'", "not", "in", "header", ")", ")", ":", "raise", "Http404", "course_key", "=", "CourseKey", ".", "from_string", "(", "course_id", ")", "with", "modulestore", "(", ")", ".", "bulk_operations", "(", "course_key", ")", ":", "course", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ",", "depth", "=", "0", ")", "instance", "=", "load_single_xblock", "(", "request", ",", "userid", ",", "course_id", ",", "mod_id", ",", "course", "=", "course", ")", "data", ".", "update", "(", "{", "'queuekey'", ":", "header", "[", "'lms_key'", "]", "}", ")", "try", ":", "instance", ".", "handle_ajax", "(", "dispatch", ",", "data", ")", "instance", ".", "save", "(", ")", "except", ":", "log", ".", "exception", "(", "'error processing ajax call'", ")", "raise", "return", "HttpResponse", "(", "''", ")" ]
entry point for graded results from the queueing system .
train
false
48,100
def GetModuleForProgID(progid): try: iid = pywintypes.IID(progid) except pywintypes.com_error: return None return GetModuleForCLSID(iid)
[ "def", "GetModuleForProgID", "(", "progid", ")", ":", "try", ":", "iid", "=", "pywintypes", ".", "IID", "(", "progid", ")", "except", "pywintypes", ".", "com_error", ":", "return", "None", "return", "GetModuleForCLSID", "(", "iid", ")" ]
get a python module for a program id given a program id .
train
false
48,101
def tee_output(): request = cherrypy.serving.request if ('no-store' in request.headers.values('Cache-Control')): return def tee(body): 'Tee response.body into a list.' if (('no-cache' in response.headers.values('Pragma')) or ('no-store' in response.headers.values('Cache-Control'))): for chunk in body: (yield chunk) return output = [] for chunk in body: output.append(chunk) (yield chunk) body = ntob('').join(output) cherrypy._cache.put((response.status, (response.headers or {}), body, response.time), len(body)) response = cherrypy.serving.response response.body = tee(response.body)
[ "def", "tee_output", "(", ")", ":", "request", "=", "cherrypy", ".", "serving", ".", "request", "if", "(", "'no-store'", "in", "request", ".", "headers", ".", "values", "(", "'Cache-Control'", ")", ")", ":", "return", "def", "tee", "(", "body", ")", ":", "if", "(", "(", "'no-cache'", "in", "response", ".", "headers", ".", "values", "(", "'Pragma'", ")", ")", "or", "(", "'no-store'", "in", "response", ".", "headers", ".", "values", "(", "'Cache-Control'", ")", ")", ")", ":", "for", "chunk", "in", "body", ":", "(", "yield", "chunk", ")", "return", "output", "=", "[", "]", "for", "chunk", "in", "body", ":", "output", ".", "append", "(", "chunk", ")", "(", "yield", "chunk", ")", "body", "=", "ntob", "(", "''", ")", ".", "join", "(", "output", ")", "cherrypy", ".", "_cache", ".", "put", "(", "(", "response", ".", "status", ",", "(", "response", ".", "headers", "or", "{", "}", ")", ",", "body", ",", "response", ".", "time", ")", ",", "len", "(", "body", ")", ")", "response", "=", "cherrypy", ".", "serving", ".", "response", "response", ".", "body", "=", "tee", "(", "response", ".", "body", ")" ]
tee response output to cache storage .
train
false
48,103
def ensure_image_dict_v2_compliant(image): if ('is_public' in image): if ('visibility' in image): msg = _("Specifying both 'visibility' and 'is_public' is not permiitted.") raise exception.Invalid(msg) else: image['visibility'] = ('public' if image.pop('is_public') else 'shared') return image
[ "def", "ensure_image_dict_v2_compliant", "(", "image", ")", ":", "if", "(", "'is_public'", "in", "image", ")", ":", "if", "(", "'visibility'", "in", "image", ")", ":", "msg", "=", "_", "(", "\"Specifying both 'visibility' and 'is_public' is not permiitted.\"", ")", "raise", "exception", ".", "Invalid", "(", "msg", ")", "else", ":", "image", "[", "'visibility'", "]", "=", "(", "'public'", "if", "image", ".", "pop", "(", "'is_public'", ")", "else", "'shared'", ")", "return", "image" ]
accepts an image dictionary that contains a v1-style is_public member and returns the equivalent v2-style image dictionary .
train
false
48,104
def create_pipe(): (r, w) = os.pipe() if HAS_FNCTL: fcntl.fcntl(r, fcntl.F_SETFL, os.O_NONBLOCK) fcntl.fcntl(w, fcntl.F_SETFL, os.O_NONBLOCK) _set_fd_cloexec(r) _set_fd_cloexec(w) return (r, w)
[ "def", "create_pipe", "(", ")", ":", "(", "r", ",", "w", ")", "=", "os", ".", "pipe", "(", ")", "if", "HAS_FNCTL", ":", "fcntl", ".", "fcntl", "(", "r", ",", "fcntl", ".", "F_SETFL", ",", "os", ".", "O_NONBLOCK", ")", "fcntl", ".", "fcntl", "(", "w", ",", "fcntl", ".", "F_SETFL", ",", "os", ".", "O_NONBLOCK", ")", "_set_fd_cloexec", "(", "r", ")", "_set_fd_cloexec", "(", "w", ")", "return", "(", "r", ",", "w", ")" ]
create a non-blocking read/write pipe .
train
false
48,105
def network_get_all_by_host(context, host): return IMPL.network_get_all_by_host(context, host)
[ "def", "network_get_all_by_host", "(", "context", ",", "host", ")", ":", "return", "IMPL", ".", "network_get_all_by_host", "(", "context", ",", "host", ")" ]
all networks for which the given host is the network host .
train
false
48,106
def get_add_vswitch_port_group_spec(client_factory, vswitch_name, port_group_name, vlan_id): vswitch_port_group_spec = client_factory.create('ns0:HostPortGroupSpec') vswitch_port_group_spec.name = port_group_name vswitch_port_group_spec.vswitchName = vswitch_name vswitch_port_group_spec.vlanId = int(vlan_id) policy = client_factory.create('ns0:HostNetworkPolicy') nicteaming = client_factory.create('ns0:HostNicTeamingPolicy') nicteaming.notifySwitches = True policy.nicTeaming = nicteaming vswitch_port_group_spec.policy = policy return vswitch_port_group_spec
[ "def", "get_add_vswitch_port_group_spec", "(", "client_factory", ",", "vswitch_name", ",", "port_group_name", ",", "vlan_id", ")", ":", "vswitch_port_group_spec", "=", "client_factory", ".", "create", "(", "'ns0:HostPortGroupSpec'", ")", "vswitch_port_group_spec", ".", "name", "=", "port_group_name", "vswitch_port_group_spec", ".", "vswitchName", "=", "vswitch_name", "vswitch_port_group_spec", ".", "vlanId", "=", "int", "(", "vlan_id", ")", "policy", "=", "client_factory", ".", "create", "(", "'ns0:HostNetworkPolicy'", ")", "nicteaming", "=", "client_factory", ".", "create", "(", "'ns0:HostNicTeamingPolicy'", ")", "nicteaming", ".", "notifySwitches", "=", "True", "policy", ".", "nicTeaming", "=", "nicteaming", "vswitch_port_group_spec", ".", "policy", "=", "policy", "return", "vswitch_port_group_spec" ]
builds the virtual switch port group add spec .
train
false
48,107
def matrix_transform(coords, matrix): return ProjectiveTransform(matrix)(coords)
[ "def", "matrix_transform", "(", "coords", ",", "matrix", ")", ":", "return", "ProjectiveTransform", "(", "matrix", ")", "(", "coords", ")" ]
apply 2d matrix transform .
train
false
48,108
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
48,109
def ajax_request(func): @wraps(func) def wrapper(request, *args, **kwargs): response = func(request, *args, **kwargs) if isinstance(response, dict): return JsonResponse(response) else: return response return wrapper
[ "def", "ajax_request", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "response", "=", "func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "if", "isinstance", "(", "response", ",", "dict", ")", ":", "return", "JsonResponse", "(", "response", ")", "else", ":", "return", "response", "return", "wrapper" ]
if view returned serializable dict .
train
false
48,110
def JsonApiDocument(): document = {'data': None, 'jsonapi': {'version': JSONAPI_VERSION}, 'links': {}, 'meta': {}, 'included': []} return document
[ "def", "JsonApiDocument", "(", ")", ":", "document", "=", "{", "'data'", ":", "None", ",", "'jsonapi'", ":", "{", "'version'", ":", "JSONAPI_VERSION", "}", ",", "'links'", ":", "{", "}", ",", "'meta'", ":", "{", "}", ",", "'included'", ":", "[", "]", "}", "return", "document" ]
a skeleton json api document .
train
false
48,111
def polynomial_reduce(p, DE): q = Poly(0, DE.t) while (p.degree(DE.t) >= DE.d.degree(DE.t)): m = ((p.degree(DE.t) - DE.d.degree(DE.t)) + 1) q0 = Poly((DE.t ** m), DE.t).mul(Poly((p.as_poly(DE.t).LC() / (m * DE.d.LC())), DE.t)) q += q0 p = (p - derivation(q0, DE)) return (q, p)
[ "def", "polynomial_reduce", "(", "p", ",", "DE", ")", ":", "q", "=", "Poly", "(", "0", ",", "DE", ".", "t", ")", "while", "(", "p", ".", "degree", "(", "DE", ".", "t", ")", ">=", "DE", ".", "d", ".", "degree", "(", "DE", ".", "t", ")", ")", ":", "m", "=", "(", "(", "p", ".", "degree", "(", "DE", ".", "t", ")", "-", "DE", ".", "d", ".", "degree", "(", "DE", ".", "t", ")", ")", "+", "1", ")", "q0", "=", "Poly", "(", "(", "DE", ".", "t", "**", "m", ")", ",", "DE", ".", "t", ")", ".", "mul", "(", "Poly", "(", "(", "p", ".", "as_poly", "(", "DE", ".", "t", ")", ".", "LC", "(", ")", "/", "(", "m", "*", "DE", ".", "d", ".", "LC", "(", ")", ")", ")", ",", "DE", ".", "t", ")", ")", "q", "+=", "q0", "p", "=", "(", "p", "-", "derivation", "(", "q0", ",", "DE", ")", ")", "return", "(", "q", ",", "p", ")" ]
polynomial reduction .
train
false
48,112
def create_new_index(index_name=None): if (index_name is None): index_name = BaseSearchIndexer.get_index_alias() config = {'mappings': get_mappings(), 'settings': {'index': INDEX_SETTINGS}} create_index(index_name, config)
[ "def", "create_new_index", "(", "index_name", "=", "None", ")", ":", "if", "(", "index_name", "is", "None", ")", ":", "index_name", "=", "BaseSearchIndexer", ".", "get_index_alias", "(", ")", "config", "=", "{", "'mappings'", ":", "get_mappings", "(", ")", ",", "'settings'", ":", "{", "'index'", ":", "INDEX_SETTINGS", "}", "}", "create_index", "(", "index_name", ",", "config", ")" ]
create a new index for search-related documents in es .
train
false
48,113
def set_page_path_collation(apps, schema_editor): if (schema_editor.connection.vendor == u'postgresql'): schema_editor.execute(u'\n ALTER TABLE wagtailcore_page ALTER COLUMN path TYPE VARCHAR(255) COLLATE "C"\n ')
[ "def", "set_page_path_collation", "(", "apps", ",", "schema_editor", ")", ":", "if", "(", "schema_editor", ".", "connection", ".", "vendor", "==", "u'postgresql'", ")", ":", "schema_editor", ".", "execute", "(", "u'\\n ALTER TABLE wagtailcore_page ALTER COLUMN path TYPE VARCHAR(255) COLLATE \"C\"\\n '", ")" ]
treebeards path comparison logic can fail on certain locales such as sk_sk .
train
false
48,115
def add_fc_zone(initialize_connection): def decorator(self, *args, **kwargs): conn_info = initialize_connection(self, *args, **kwargs) if (not conn_info): LOG.warning(_LW("Driver didn't return connection info, can't add zone.")) return None vol_type = conn_info.get('driver_volume_type', None) if (vol_type == 'fibre_channel'): if ('initiator_target_map' in conn_info['data']): zm = create_zone_manager() if zm: LOG.debug('add_fc_zone connection info: %(conninfo)s.', {'conninfo': conn_info}) zm.add_connection(conn_info) return conn_info return decorator
[ "def", "add_fc_zone", "(", "initialize_connection", ")", ":", "def", "decorator", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "conn_info", "=", "initialize_connection", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "if", "(", "not", "conn_info", ")", ":", "LOG", ".", "warning", "(", "_LW", "(", "\"Driver didn't return connection info, can't add zone.\"", ")", ")", "return", "None", "vol_type", "=", "conn_info", ".", "get", "(", "'driver_volume_type'", ",", "None", ")", "if", "(", "vol_type", "==", "'fibre_channel'", ")", ":", "if", "(", "'initiator_target_map'", "in", "conn_info", "[", "'data'", "]", ")", ":", "zm", "=", "create_zone_manager", "(", ")", "if", "zm", ":", "LOG", ".", "debug", "(", "'add_fc_zone connection info: %(conninfo)s.'", ",", "{", "'conninfo'", ":", "conn_info", "}", ")", "zm", ".", "add_connection", "(", "conn_info", ")", "return", "conn_info", "return", "decorator" ]
decorator to add a fc zone .
train
false
48,116
def test_builtin_mro(): AreEqual(int.__mro__, (int, object))
[ "def", "test_builtin_mro", "(", ")", ":", "AreEqual", "(", "int", ".", "__mro__", ",", "(", "int", ",", "object", ")", ")" ]
int mro shouldnt include valuetype .
train
false
48,117
def split_unquoted_newlines(stmt): text = text_type(stmt) lines = SPLIT_REGEX.split(text) outputlines = [''] for line in lines: if (not line): continue elif LINE_MATCH.match(line): outputlines.append('') else: outputlines[(-1)] += line return outputlines
[ "def", "split_unquoted_newlines", "(", "stmt", ")", ":", "text", "=", "text_type", "(", "stmt", ")", "lines", "=", "SPLIT_REGEX", ".", "split", "(", "text", ")", "outputlines", "=", "[", "''", "]", "for", "line", "in", "lines", ":", "if", "(", "not", "line", ")", ":", "continue", "elif", "LINE_MATCH", ".", "match", "(", "line", ")", ":", "outputlines", ".", "append", "(", "''", ")", "else", ":", "outputlines", "[", "(", "-", "1", ")", "]", "+=", "line", "return", "outputlines" ]
split a string on all unquoted newlines .
train
true
48,118
def recover_constants(py_source, replacements): for (identifier, value) in replacements.iteritems(): if identifier.startswith('PyJsConstantRegExp'): py_source = py_source.replace(identifier, ('JsRegExp(%s)' % repr(value))) elif identifier.startswith('PyJsConstantString'): py_source = py_source.replace(identifier, ('Js(u%s)' % unify_string_literals(value))) else: py_source = py_source.replace(identifier, ('Js(%s)' % value)) return py_source
[ "def", "recover_constants", "(", "py_source", ",", "replacements", ")", ":", "for", "(", "identifier", ",", "value", ")", "in", "replacements", ".", "iteritems", "(", ")", ":", "if", "identifier", ".", "startswith", "(", "'PyJsConstantRegExp'", ")", ":", "py_source", "=", "py_source", ".", "replace", "(", "identifier", ",", "(", "'JsRegExp(%s)'", "%", "repr", "(", "value", ")", ")", ")", "elif", "identifier", ".", "startswith", "(", "'PyJsConstantString'", ")", ":", "py_source", "=", "py_source", ".", "replace", "(", "identifier", ",", "(", "'Js(u%s)'", "%", "unify_string_literals", "(", "value", ")", ")", ")", "else", ":", "py_source", "=", "py_source", ".", "replace", "(", "identifier", ",", "(", "'Js(%s)'", "%", "value", ")", ")", "return", "py_source" ]
converts identifiers representing js constants to the pyjs constants pyjsnumberconst_1_ which has the true value of 5 will be converted to pyjsnumber(5) .
train
true
48,119
def sample_weights(sizeX, sizeY, sparsity, scale, rng): sizeX = int(sizeX) sizeY = int(sizeY) sparsity = numpy.minimum(sizeY, sparsity) values = numpy.zeros((sizeX, sizeY), dtype=theano.config.floatX) for dx in xrange(sizeX): perm = rng.permutation(sizeY) new_vals = rng.uniform(low=(- scale), high=scale, size=(sparsity,)) vals_norm = numpy.sqrt((new_vals ** 2).sum()) new_vals = ((scale * new_vals) / vals_norm) values[(dx, perm[:sparsity])] = new_vals (_, v, _) = numpy.linalg.svd(values) values = ((scale * values) / v[0]) return values.astype(theano.config.floatX)
[ "def", "sample_weights", "(", "sizeX", ",", "sizeY", ",", "sparsity", ",", "scale", ",", "rng", ")", ":", "sizeX", "=", "int", "(", "sizeX", ")", "sizeY", "=", "int", "(", "sizeY", ")", "sparsity", "=", "numpy", ".", "minimum", "(", "sizeY", ",", "sparsity", ")", "values", "=", "numpy", ".", "zeros", "(", "(", "sizeX", ",", "sizeY", ")", ",", "dtype", "=", "theano", ".", "config", ".", "floatX", ")", "for", "dx", "in", "xrange", "(", "sizeX", ")", ":", "perm", "=", "rng", ".", "permutation", "(", "sizeY", ")", "new_vals", "=", "rng", ".", "uniform", "(", "low", "=", "(", "-", "scale", ")", ",", "high", "=", "scale", ",", "size", "=", "(", "sparsity", ",", ")", ")", "vals_norm", "=", "numpy", ".", "sqrt", "(", "(", "new_vals", "**", "2", ")", ".", "sum", "(", ")", ")", "new_vals", "=", "(", "(", "scale", "*", "new_vals", ")", "/", "vals_norm", ")", "values", "[", "(", "dx", ",", "perm", "[", ":", "sparsity", "]", ")", "]", "=", "new_vals", "(", "_", ",", "v", ",", "_", ")", "=", "numpy", ".", "linalg", ".", "svd", "(", "values", ")", "values", "=", "(", "(", "scale", "*", "values", ")", "/", "v", "[", "0", "]", ")", "return", "values", ".", "astype", "(", "theano", ".", "config", ".", "floatX", ")" ]
initialization that fixes the largest singular value .
train
false
48,122
def hypernyms(source): return closure(source, HYPERNYM)
[ "def", "hypernyms", "(", "source", ")", ":", "return", "closure", "(", "source", ",", "HYPERNYM", ")" ]
return source and its hypernyms .
train
false
48,123
def parse_semantic(version='Version 1.99.0-rc.1+timestamp.2011.09.19.08.23.26'): re_version = re.compile('(\\d+)\\.(\\d+)\\.(\\d+)(\\-(?P<pre>[^\\s+]*))?(\\+(?P<build>\\S*))') m = re_version.match(version.strip().split()[(-1)]) if (not m): return None (a, b, c) = (int(m.group(1)), int(m.group(2)), int(m.group(3))) pre_release = (m.group('pre') or '') build = (m.group('build') or '') if build.startswith('timestamp'): build = datetime.datetime.strptime(build.split('.', 1)[1], '%Y.%m.%d.%H.%M.%S') return (a, b, c, pre_release, build)
[ "def", "parse_semantic", "(", "version", "=", "'Version 1.99.0-rc.1+timestamp.2011.09.19.08.23.26'", ")", ":", "re_version", "=", "re", ".", "compile", "(", "'(\\\\d+)\\\\.(\\\\d+)\\\\.(\\\\d+)(\\\\-(?P<pre>[^\\\\s+]*))?(\\\\+(?P<build>\\\\S*))'", ")", "m", "=", "re_version", ".", "match", "(", "version", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "(", "-", "1", ")", "]", ")", "if", "(", "not", "m", ")", ":", "return", "None", "(", "a", ",", "b", ",", "c", ")", "=", "(", "int", "(", "m", ".", "group", "(", "1", ")", ")", ",", "int", "(", "m", ".", "group", "(", "2", ")", ")", ",", "int", "(", "m", ".", "group", "(", "3", ")", ")", ")", "pre_release", "=", "(", "m", ".", "group", "(", "'pre'", ")", "or", "''", ")", "build", "=", "(", "m", ".", "group", "(", "'build'", ")", "or", "''", ")", "if", "build", ".", "startswith", "(", "'timestamp'", ")", ":", "build", "=", "datetime", ".", "datetime", ".", "strptime", "(", "build", ".", "split", "(", "'.'", ",", "1", ")", "[", "1", "]", ",", "'%Y.%m.%d.%H.%M.%S'", ")", "return", "(", "a", ",", "b", ",", "c", ",", "pre_release", ",", "build", ")" ]
parses a version string according to URL rules args: version: the semver string returns: tuple: major .
train
false
48,124
@task def render_stale_documents(log=None): stale_docs = Document.objects.get_by_stale_rendering().distinct() stale_docs_count = stale_docs.count() if (stale_docs_count == 0): return if (log is None): log = render_stale_documents.get_logger() log.info(('Found %s stale documents' % stale_docs_count)) stale_pks = stale_docs.values_list('pk', flat=True) pre_task = acquire_render_lock.si() render_tasks = [render_document_chunk.si(pks) for pks in chunked(stale_pks, 5)] post_task = release_render_lock.si() chord_flow(pre_task, render_tasks, post_task).apply_async()
[ "@", "task", "def", "render_stale_documents", "(", "log", "=", "None", ")", ":", "stale_docs", "=", "Document", ".", "objects", ".", "get_by_stale_rendering", "(", ")", ".", "distinct", "(", ")", "stale_docs_count", "=", "stale_docs", ".", "count", "(", ")", "if", "(", "stale_docs_count", "==", "0", ")", ":", "return", "if", "(", "log", "is", "None", ")", ":", "log", "=", "render_stale_documents", ".", "get_logger", "(", ")", "log", ".", "info", "(", "(", "'Found %s stale documents'", "%", "stale_docs_count", ")", ")", "stale_pks", "=", "stale_docs", ".", "values_list", "(", "'pk'", ",", "flat", "=", "True", ")", "pre_task", "=", "acquire_render_lock", ".", "si", "(", ")", "render_tasks", "=", "[", "render_document_chunk", ".", "si", "(", "pks", ")", "for", "pks", "in", "chunked", "(", "stale_pks", ",", "5", ")", "]", "post_task", "=", "release_render_lock", ".", "si", "(", ")", "chord_flow", "(", "pre_task", ",", "render_tasks", ",", "post_task", ")", ".", "apply_async", "(", ")" ]
simple task wrapper for rendering stale documents .
train
false
48,125
def get_data_disk(vm_): return config.get_cloud_config_value('allocate_data_disk', vm_, __opts__, default=False)
[ "def", "get_data_disk", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'allocate_data_disk'", ",", "vm_", ",", "__opts__", ",", "default", "=", "False", ")" ]
return true if a data disk is requested .
train
false
48,126
def _get_rrd_server(): xs_url = urlparse.urlparse(CONF.xenserver.connection_url) return [xs_url.scheme, xs_url.netloc]
[ "def", "_get_rrd_server", "(", ")", ":", "xs_url", "=", "urlparse", ".", "urlparse", "(", "CONF", ".", "xenserver", ".", "connection_url", ")", "return", "[", "xs_url", ".", "scheme", ",", "xs_url", ".", "netloc", "]" ]
return servers scheme and address to use for retrieving rrd xmls .
train
false
48,127
def disable_nat(interface): while (not run(settings.iptables, '-t', 'nat', '-D', 'POSTROUTING', '-o', interface, '-j', 'MASQUERADE')[1]): pass
[ "def", "disable_nat", "(", "interface", ")", ":", "while", "(", "not", "run", "(", "settings", ".", "iptables", ",", "'-t'", ",", "'nat'", ",", "'-D'", ",", "'POSTROUTING'", ",", "'-o'", ",", "interface", ",", "'-j'", ",", "'MASQUERADE'", ")", "[", "1", "]", ")", ":", "pass" ]
disable nat on this interface .
train
false
48,128
def check_totp(secret, ticket): for offset in [0, 1, (-1)]: if (ticket == compute_totp(secret, offset)): return True return False
[ "def", "check_totp", "(", "secret", ",", "ticket", ")", ":", "for", "offset", "in", "[", "0", ",", "1", ",", "(", "-", "1", ")", "]", ":", "if", "(", "ticket", "==", "compute_totp", "(", "secret", ",", "offset", ")", ")", ":", "return", "True", "return", "False" ]
check a totp value received from a principal trying to authenticate against the expected value computed from the secret shared between the principal and the authenticating entity .
train
false
48,129
def build_pxe_config(deployment_id, deployment_key, deployment_iscsi_iqn, deployment_aki_path, deployment_ari_path, aki_path, ari_path): LOG.debug((_('Building PXE config for deployment %s.') % deployment_id)) pxe_options = {'deployment_id': deployment_id, 'deployment_key': deployment_key, 'deployment_iscsi_iqn': deployment_iscsi_iqn, 'deployment_aki_path': deployment_aki_path, 'deployment_ari_path': deployment_ari_path, 'aki_path': aki_path, 'ari_path': ari_path, 'pxe_append_params': CONF.baremetal.pxe_append_params} cheetah = _get_cheetah() pxe_config = str(cheetah(open(CONF.baremetal.pxe_config_template).read(), searchList=[{'pxe_options': pxe_options, 'ROOT': '${ROOT}'}])) return pxe_config
[ "def", "build_pxe_config", "(", "deployment_id", ",", "deployment_key", ",", "deployment_iscsi_iqn", ",", "deployment_aki_path", ",", "deployment_ari_path", ",", "aki_path", ",", "ari_path", ")", ":", "LOG", ".", "debug", "(", "(", "_", "(", "'Building PXE config for deployment %s.'", ")", "%", "deployment_id", ")", ")", "pxe_options", "=", "{", "'deployment_id'", ":", "deployment_id", ",", "'deployment_key'", ":", "deployment_key", ",", "'deployment_iscsi_iqn'", ":", "deployment_iscsi_iqn", ",", "'deployment_aki_path'", ":", "deployment_aki_path", ",", "'deployment_ari_path'", ":", "deployment_ari_path", ",", "'aki_path'", ":", "aki_path", ",", "'ari_path'", ":", "ari_path", ",", "'pxe_append_params'", ":", "CONF", ".", "baremetal", ".", "pxe_append_params", "}", "cheetah", "=", "_get_cheetah", "(", ")", "pxe_config", "=", "str", "(", "cheetah", "(", "open", "(", "CONF", ".", "baremetal", ".", "pxe_config_template", ")", ".", "read", "(", ")", ",", "searchList", "=", "[", "{", "'pxe_options'", ":", "pxe_options", ",", "'ROOT'", ":", "'${ROOT}'", "}", "]", ")", ")", "return", "pxe_config" ]
build the pxe config file for a node this method builds the pxe boot configuration file for a node .
train
false
48,131
@synchronized(IO_LOCK) def save_admin(data, _id, do_pickle=True): path = os.path.join(cfg.admin_dir.get_path(), _id) logging.info('Saving data for %s in %s', _id, path) try: _f = open(path, 'wb') if do_pickle: pickler = cPickle.Pickler(_f, 2) pickler.dump(data) _f.flush() _f.close() pickler.clear_memo() del pickler else: _f.write(data) _f.flush() _f.close() except: logging.error(T('Saving %s failed'), path) logging.info('Traceback: ', exc_info=True)
[ "@", "synchronized", "(", "IO_LOCK", ")", "def", "save_admin", "(", "data", ",", "_id", ",", "do_pickle", "=", "True", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "cfg", ".", "admin_dir", ".", "get_path", "(", ")", ",", "_id", ")", "logging", ".", "info", "(", "'Saving data for %s in %s'", ",", "_id", ",", "path", ")", "try", ":", "_f", "=", "open", "(", "path", ",", "'wb'", ")", "if", "do_pickle", ":", "pickler", "=", "cPickle", ".", "Pickler", "(", "_f", ",", "2", ")", "pickler", ".", "dump", "(", "data", ")", "_f", ".", "flush", "(", ")", "_f", ".", "close", "(", ")", "pickler", ".", "clear_memo", "(", ")", "del", "pickler", "else", ":", "_f", ".", "write", "(", "data", ")", "_f", ".", "flush", "(", ")", "_f", ".", "close", "(", ")", "except", ":", "logging", ".", "error", "(", "T", "(", "'Saving %s failed'", ")", ",", "path", ")", "logging", ".", "info", "(", "'Traceback: '", ",", "exc_info", "=", "True", ")" ]
save data in admin folder in specified format .
train
false
48,132
def unit_deprecation_warning(s, unit, standard_name, format_decomposed): from ..core import UnitsWarning message = u"The unit '{0}' has been deprecated in the {1} standard.".format(s, standard_name) decomposed = _try_decomposed(unit, format_decomposed) if (decomposed is not None): message += u' Suggested: {0}.'.format(decomposed) warnings.warn(message, UnitsWarning)
[ "def", "unit_deprecation_warning", "(", "s", ",", "unit", ",", "standard_name", ",", "format_decomposed", ")", ":", "from", ".", ".", "core", "import", "UnitsWarning", "message", "=", "u\"The unit '{0}' has been deprecated in the {1} standard.\"", ".", "format", "(", "s", ",", "standard_name", ")", "decomposed", "=", "_try_decomposed", "(", "unit", ",", "format_decomposed", ")", "if", "(", "decomposed", "is", "not", "None", ")", ":", "message", "+=", "u' Suggested: {0}.'", ".", "format", "(", "decomposed", ")", "warnings", ".", "warn", "(", "message", ",", "UnitsWarning", ")" ]
raises a unitswarning about a deprecated unit in a given format .
train
false
48,133
def _check_unpack_options(extensions, function, extra_args): existing_extensions = {} for (name, info) in _UNPACK_FORMATS.items(): for ext in info[0]: existing_extensions[ext] = name for extension in extensions: if (extension in existing_extensions): msg = '%s is already registered for "%s"' raise RegistryError((msg % (extension, existing_extensions[extension]))) if (not isinstance(function, collections.Callable)): raise TypeError('The registered function must be a callable')
[ "def", "_check_unpack_options", "(", "extensions", ",", "function", ",", "extra_args", ")", ":", "existing_extensions", "=", "{", "}", "for", "(", "name", ",", "info", ")", "in", "_UNPACK_FORMATS", ".", "items", "(", ")", ":", "for", "ext", "in", "info", "[", "0", "]", ":", "existing_extensions", "[", "ext", "]", "=", "name", "for", "extension", "in", "extensions", ":", "if", "(", "extension", "in", "existing_extensions", ")", ":", "msg", "=", "'%s is already registered for \"%s\"'", "raise", "RegistryError", "(", "(", "msg", "%", "(", "extension", ",", "existing_extensions", "[", "extension", "]", ")", ")", ")", "if", "(", "not", "isinstance", "(", "function", ",", "collections", ".", "Callable", ")", ")", ":", "raise", "TypeError", "(", "'The registered function must be a callable'", ")" ]
checks what gets registered as an unpacker .
train
true
48,134
@contextmanager def catch_io_errors(ifile, out, single_out, force, corrupt_files): try: (yield) except (IOError, OSError, ValueError) as error: log_error('** ERROR: {error}', error=str(error)) log_error('** Failed on {name}: ', name=ifile) if (not single_out): os.remove(out.name) if (not force): log_error('** Exiting!') sys.exit(1) else: log_error('*** Skipping error file, moving on...') corrupt_files.append(ifile)
[ "@", "contextmanager", "def", "catch_io_errors", "(", "ifile", ",", "out", ",", "single_out", ",", "force", ",", "corrupt_files", ")", ":", "try", ":", "(", "yield", ")", "except", "(", "IOError", ",", "OSError", ",", "ValueError", ")", "as", "error", ":", "log_error", "(", "'** ERROR: {error}'", ",", "error", "=", "str", "(", "error", ")", ")", "log_error", "(", "'** Failed on {name}: '", ",", "name", "=", "ifile", ")", "if", "(", "not", "single_out", ")", ":", "os", ".", "remove", "(", "out", ".", "name", ")", "if", "(", "not", "force", ")", ":", "log_error", "(", "'** Exiting!'", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "log_error", "(", "'*** Skipping error file, moving on...'", ")", "corrupt_files", ".", "append", "(", "ifile", ")" ]
context manager to do boilerplate handling of ioerrors .
train
false
48,135
def get_securitygroup(vm_): sgs = list_securitygroup() securitygroup = config.get_cloud_config_value('securitygroup', vm_, __opts__, search_global=False) if (not securitygroup): raise SaltCloudNotFound('No securitygroup ID specified for this VM.') if (securitygroup and (str(securitygroup) in sgs)): return sgs[securitygroup]['SecurityGroupId'] raise SaltCloudNotFound("The specified security group, '{0}', could not be found.".format(securitygroup))
[ "def", "get_securitygroup", "(", "vm_", ")", ":", "sgs", "=", "list_securitygroup", "(", ")", "securitygroup", "=", "config", ".", "get_cloud_config_value", "(", "'securitygroup'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")", "if", "(", "not", "securitygroup", ")", ":", "raise", "SaltCloudNotFound", "(", "'No securitygroup ID specified for this VM.'", ")", "if", "(", "securitygroup", "and", "(", "str", "(", "securitygroup", ")", "in", "sgs", ")", ")", ":", "return", "sgs", "[", "securitygroup", "]", "[", "'SecurityGroupId'", "]", "raise", "SaltCloudNotFound", "(", "\"The specified security group, '{0}', could not be found.\"", ".", "format", "(", "securitygroup", ")", ")" ]
return the security group .
train
true
48,136
def CheckVlogArguments(filename, clean_lines, linenum, error): line = clean_lines.elided[linenum] if Search('\\bVLOG\\((INFO|ERROR|WARNING|DFATAL|FATAL)\\)', line): error(filename, linenum, 'runtime/vlog', 5, 'VLOG() should be used with numeric verbosity level. Use LOG() if you want symbolic severity levels.')
[ "def", "CheckVlogArguments", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "Search", "(", "'\\\\bVLOG\\\\((INFO|ERROR|WARNING|DFATAL|FATAL)\\\\)'", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'runtime/vlog'", ",", "5", ",", "'VLOG() should be used with numeric verbosity level. Use LOG() if you want symbolic severity levels.'", ")" ]
checks that vlog() is only used for defining a logging level .
train
true
48,139
def get_revision_label(app, repository, changeset_revision, include_date=True, include_hash=True): repo = get_repo_for_repository(app, repository=repository, repo_path=None) ctx = get_changectx_for_changeset(repo, changeset_revision) if ctx: return get_revision_label_from_ctx(ctx, include_date=include_date, include_hash=include_hash) elif include_hash: return ('-1:%s' % changeset_revision) else: return '-1'
[ "def", "get_revision_label", "(", "app", ",", "repository", ",", "changeset_revision", ",", "include_date", "=", "True", ",", "include_hash", "=", "True", ")", ":", "repo", "=", "get_repo_for_repository", "(", "app", ",", "repository", "=", "repository", ",", "repo_path", "=", "None", ")", "ctx", "=", "get_changectx_for_changeset", "(", "repo", ",", "changeset_revision", ")", "if", "ctx", ":", "return", "get_revision_label_from_ctx", "(", "ctx", ",", "include_date", "=", "include_date", ",", "include_hash", "=", "include_hash", ")", "elif", "include_hash", ":", "return", "(", "'-1:%s'", "%", "changeset_revision", ")", "else", ":", "return", "'-1'" ]
return a string consisting of the human read-able changeset rev and the changeset revision string which includes the revision date if the receive include_date is true .
train
false
48,140
def add_port_fwd(zone, src, dest, proto='tcp', dstaddr='', permanent=True): if (not get_masquerade(zone)): add_masquerade(zone, permanent) cmd = '--zone={0} --add-forward-port=port={1}:proto={2}:toport={3}:toaddr={4}'.format(zone, src, proto, dest, dstaddr) if permanent: cmd += ' --permanent' return __firewall_cmd(cmd)
[ "def", "add_port_fwd", "(", "zone", ",", "src", ",", "dest", ",", "proto", "=", "'tcp'", ",", "dstaddr", "=", "''", ",", "permanent", "=", "True", ")", ":", "if", "(", "not", "get_masquerade", "(", "zone", ")", ")", ":", "add_masquerade", "(", "zone", ",", "permanent", ")", "cmd", "=", "'--zone={0} --add-forward-port=port={1}:proto={2}:toport={3}:toaddr={4}'", ".", "format", "(", "zone", ",", "src", ",", "proto", ",", "dest", ",", "dstaddr", ")", "if", "permanent", ":", "cmd", "+=", "' --permanent'", "return", "__firewall_cmd", "(", "cmd", ")" ]
add port forwarding .
train
true
48,141
def get_challenge(): global challenge challenge = ''.join((random.SystemRandom().choice((string.ascii_uppercase + string.digits)) for _ in range(256))) return '{}'.format(challenge)
[ "def", "get_challenge", "(", ")", ":", "global", "challenge", "challenge", "=", "''", ".", "join", "(", "(", "random", ".", "SystemRandom", "(", ")", ".", "choice", "(", "(", "string", ".", "ascii_uppercase", "+", "string", ".", "digits", ")", ")", "for", "_", "in", "range", "(", "256", ")", ")", ")", "return", "'{}'", ".", "format", "(", "challenge", ")" ]
return an ascii challenge to validate authentication in _check_challenge .
train
false
48,142
def accept_left_right_center(value): deprecated((0, 12, 4), "'left_center' and 'right_center' enumerations", "'center_left' or 'center_right' respectively") return {'left_center': 'center_left', 'right_center': 'center_right'}[value]
[ "def", "accept_left_right_center", "(", "value", ")", ":", "deprecated", "(", "(", "0", ",", "12", ",", "4", ")", ",", "\"'left_center' and 'right_center' enumerations\"", ",", "\"'center_left' or 'center_right' respectively\"", ")", "return", "{", "'left_center'", ":", "'center_left'", ",", "'right_center'", ":", "'center_right'", "}", "[", "value", "]" ]
accept and convert deprecated location values .
train
false
48,143
def backup_get(context, backup_id): return IMPL.backup_get(context, backup_id)
[ "def", "backup_get", "(", "context", ",", "backup_id", ")", ":", "return", "IMPL", ".", "backup_get", "(", "context", ",", "backup_id", ")" ]
get a backup or raise if it does not exist .
train
false
48,144
def update_enrollment(user_id, course_id, mode=None, is_active=None, enrollment_attributes=None, include_expired=False): log.info(u'Starting Update Enrollment process for user {user} in course {course} to mode {mode}'.format(user=user_id, course=course_id, mode=mode)) if (mode is not None): validate_course_mode(course_id, mode, is_active=is_active, include_expired=include_expired) enrollment = _data_api().update_course_enrollment(user_id, course_id, mode=mode, is_active=is_active) if (enrollment is None): msg = u'Course Enrollment not found for user {user} in course {course}'.format(user=user_id, course=course_id) log.warn(msg) raise errors.EnrollmentNotFoundError(msg) elif (enrollment_attributes is not None): set_enrollment_attributes(user_id, course_id, enrollment_attributes) log.info(u'Course Enrollment updated for user {user} in course {course} to mode {mode}'.format(user=user_id, course=course_id, mode=mode)) return enrollment
[ "def", "update_enrollment", "(", "user_id", ",", "course_id", ",", "mode", "=", "None", ",", "is_active", "=", "None", ",", "enrollment_attributes", "=", "None", ",", "include_expired", "=", "False", ")", ":", "log", ".", "info", "(", "u'Starting Update Enrollment process for user {user} in course {course} to mode {mode}'", ".", "format", "(", "user", "=", "user_id", ",", "course", "=", "course_id", ",", "mode", "=", "mode", ")", ")", "if", "(", "mode", "is", "not", "None", ")", ":", "validate_course_mode", "(", "course_id", ",", "mode", ",", "is_active", "=", "is_active", ",", "include_expired", "=", "include_expired", ")", "enrollment", "=", "_data_api", "(", ")", ".", "update_course_enrollment", "(", "user_id", ",", "course_id", ",", "mode", "=", "mode", ",", "is_active", "=", "is_active", ")", "if", "(", "enrollment", "is", "None", ")", ":", "msg", "=", "u'Course Enrollment not found for user {user} in course {course}'", ".", "format", "(", "user", "=", "user_id", ",", "course", "=", "course_id", ")", "log", ".", "warn", "(", "msg", ")", "raise", "errors", ".", "EnrollmentNotFoundError", "(", "msg", ")", "elif", "(", "enrollment_attributes", "is", "not", "None", ")", ":", "set_enrollment_attributes", "(", "user_id", ",", "course_id", ",", "enrollment_attributes", ")", "log", ".", "info", "(", "u'Course Enrollment updated for user {user} in course {course} to mode {mode}'", ".", "format", "(", "user", "=", "user_id", ",", "course", "=", "course_id", ",", "mode", "=", "mode", ")", ")", "return", "enrollment" ]
updates the course mode for the enrolled user .
train
false
48,145
def project_absent(name, profile=None, **connection_args): return tenant_absent(name, profile=profile, **connection_args)
[ "def", "project_absent", "(", "name", ",", "profile", "=", "None", ",", "**", "connection_args", ")", ":", "return", "tenant_absent", "(", "name", ",", "profile", "=", "profile", ",", "**", "connection_args", ")" ]
ensure that the keystone project is absent .
train
false
48,146
def addSymmetricYPaths(outputs, paths, y): for path in paths: addSymmetricYPath(outputs, path, y)
[ "def", "addSymmetricYPaths", "(", "outputs", ",", "paths", ",", "y", ")", ":", "for", "path", "in", "paths", ":", "addSymmetricYPath", "(", "outputs", ",", "path", ",", "y", ")" ]
add y paths outputs to outputs .
train
false
48,147
def implementsOnly(*interfaces): _implements('implementsOnly', interfaces, classImplementsOnly)
[ "def", "implementsOnly", "(", "*", "interfaces", ")", ":", "_implements", "(", "'implementsOnly'", ",", "interfaces", ",", "classImplementsOnly", ")" ]
declare the only interfaces implemented by instances of a class this function is called in a class definition .
train
false
48,148
@log_call def metadef_property_count(context, namespace_name): namespace = metadef_namespace_get(context, namespace_name) _check_namespace_visibility(context, namespace, namespace_name) count = 0 for property in DATA['metadef_properties']: if (property['namespace_id'] == namespace['id']): count = (count + 1) return count
[ "@", "log_call", "def", "metadef_property_count", "(", "context", ",", "namespace_name", ")", ":", "namespace", "=", "metadef_namespace_get", "(", "context", ",", "namespace_name", ")", "_check_namespace_visibility", "(", "context", ",", "namespace", ",", "namespace_name", ")", "count", "=", "0", "for", "property", "in", "DATA", "[", "'metadef_properties'", "]", ":", "if", "(", "property", "[", "'namespace_id'", "]", "==", "namespace", "[", "'id'", "]", ")", ":", "count", "=", "(", "count", "+", "1", ")", "return", "count" ]
get properties count in a namespace .
train
false
48,149
def vni_from_bin(buf): return type_desc.Int3.to_user(six.binary_type(buf))
[ "def", "vni_from_bin", "(", "buf", ")", ":", "return", "type_desc", ".", "Int3", ".", "to_user", "(", "six", ".", "binary_type", "(", "buf", ")", ")" ]
converts binary representation vni to integer .
train
false
48,150
def step_5a(w): if w.endswith('e'): if (R2(w).endswith('e') or (R1(w).endswith('e') and (not is_short_syllable(w, before=(-1))))): return w[:(-1)] return w
[ "def", "step_5a", "(", "w", ")", ":", "if", "w", ".", "endswith", "(", "'e'", ")", ":", "if", "(", "R2", "(", "w", ")", ".", "endswith", "(", "'e'", ")", "or", "(", "R1", "(", "w", ")", ".", "endswith", "(", "'e'", ")", "and", "(", "not", "is_short_syllable", "(", "w", ",", "before", "=", "(", "-", "1", ")", ")", ")", ")", ")", ":", "return", "w", "[", ":", "(", "-", "1", ")", "]", "return", "w" ]
step 5a strips suffix -e if preceded by multiple vowel-consonant pairs .
train
false
48,152
def _delete_entrance_exam(request, course_key): store = modulestore() course = store.get_course(course_key) if (course is None): return HttpResponse(status=400) remove_entrance_exam_milestone_reference(request, course_key) course = store.get_course(course_key) if course.entrance_exam_id: metadata = {'entrance_exam_enabled': False, 'entrance_exam_minimum_score_pct': None, 'entrance_exam_id': None} CourseMetadata.update_from_dict(metadata, course, request.user) remove_entrance_exam_graders(course_key, request.user) return HttpResponse(status=204)
[ "def", "_delete_entrance_exam", "(", "request", ",", "course_key", ")", ":", "store", "=", "modulestore", "(", ")", "course", "=", "store", ".", "get_course", "(", "course_key", ")", "if", "(", "course", "is", "None", ")", ":", "return", "HttpResponse", "(", "status", "=", "400", ")", "remove_entrance_exam_milestone_reference", "(", "request", ",", "course_key", ")", "course", "=", "store", ".", "get_course", "(", "course_key", ")", "if", "course", ".", "entrance_exam_id", ":", "metadata", "=", "{", "'entrance_exam_enabled'", ":", "False", ",", "'entrance_exam_minimum_score_pct'", ":", "None", ",", "'entrance_exam_id'", ":", "None", "}", "CourseMetadata", ".", "update_from_dict", "(", "metadata", ",", "course", ",", "request", ".", "user", ")", "remove_entrance_exam_graders", "(", "course_key", ",", "request", ".", "user", ")", "return", "HttpResponse", "(", "status", "=", "204", ")" ]
internal workflow operation to remove an entrance exam .
train
false
48,153
def trials_from_docs(docs, validate=True, **kwargs): rval = Trials(**kwargs) if validate: rval.insert_trial_docs(docs) else: rval._insert_trial_docs(docs) rval.refresh() return rval
[ "def", "trials_from_docs", "(", "docs", ",", "validate", "=", "True", ",", "**", "kwargs", ")", ":", "rval", "=", "Trials", "(", "**", "kwargs", ")", "if", "validate", ":", "rval", ".", "insert_trial_docs", "(", "docs", ")", "else", ":", "rval", ".", "_insert_trial_docs", "(", "docs", ")", "rval", ".", "refresh", "(", ")", "return", "rval" ]
construct a trials base class instance from a list of trials documents .
train
false
48,156
def bottom_up(rule, fns=basic_fns): return chain((lambda expr: sall(bottom_up(rule, fns), fns)(expr)), rule)
[ "def", "bottom_up", "(", "rule", ",", "fns", "=", "basic_fns", ")", ":", "return", "chain", "(", "(", "lambda", "expr", ":", "sall", "(", "bottom_up", "(", "rule", ",", "fns", ")", ",", "fns", ")", "(", "expr", ")", ")", ",", "rule", ")" ]
apply a rule down a tree running it on the bottom nodes first .
train
false
48,157
def _foldr(f, z, xs): return reduce((lambda x, y: f(y, x)), reversed(xs), z)
[ "def", "_foldr", "(", "f", ",", "z", ",", "xs", ")", ":", "return", "reduce", "(", "(", "lambda", "x", ",", "y", ":", "f", "(", "y", ",", "x", ")", ")", ",", "reversed", "(", "xs", ")", ",", "z", ")" ]
apply a function of two arguments cumulatively to the items of a sequence .
train
false
48,158
@task def pypi_upload(): with cd('/home/vagrant/repos/sympy'): run('twine upload dist/*.tar.gz') run('twine upload dist/*.exe')
[ "@", "task", "def", "pypi_upload", "(", ")", ":", "with", "cd", "(", "'/home/vagrant/repos/sympy'", ")", ":", "run", "(", "'twine upload dist/*.tar.gz'", ")", "run", "(", "'twine upload dist/*.exe'", ")" ]
upload files to pypi .
train
false
48,159
def _FilterIndexesByKind(key, indexes): return filter((lambda index: (index.definition().entity_type() == _GetKeyKind(key))), indexes)
[ "def", "_FilterIndexesByKind", "(", "key", ",", "indexes", ")", ":", "return", "filter", "(", "(", "lambda", "index", ":", "(", "index", ".", "definition", "(", ")", ".", "entity_type", "(", ")", "==", "_GetKeyKind", "(", "key", ")", ")", ")", ",", "indexes", ")" ]
return only the indexes with the specified kind .
train
false
48,161
def get_salt_vars():
    """Return all the Salt-usual double-under data structures for a minion.

    Builds ``__opts__``, ``__salt__``, ``__grains__`` and ``__pillar__``
    for a (typically masterless) minion, plus a ``JINJA`` helper that
    renders a template string with those structures in scope.

    WARNING: this function returns ``locals()`` — the local variable
    *names* are the public contract, so they must not be renamed.
    """
    # Minion config path can be overridden through SALT_MINION_CONFIG.
    __opts__ = salt.config.client_config(os.environ.get('SALT_MINION_CONFIG', '/etc/salt/minion'))
    # Fill in grains if the loaded config did not provide any.
    if (('grains' not in __opts__) or (not __opts__['grains'])):
        __opts__['grains'] = salt.loader.grains(__opts__)
    # Default to the masterless/local file client when unset.
    if (('file_client' not in __opts__) or (not __opts__['file_client'])):
        __opts__['file_client'] = 'local'
    # Give the minion a fallback id so downstream lookups do not fail.
    if (('id' not in __opts__) or (not __opts__['id'])):
        __opts__['id'] = 'saltsh_mid'
    __salt__ = salt.loader.minion_mods(__opts__)
    __grains__ = __opts__['grains']
    # Pillar can only be compiled locally; with a remote file client we
    # fall back to an empty pillar rather than contacting a master here.
    if (__opts__['file_client'] == 'local'):
        __pillar__ = salt.pillar.get_pillar(__opts__, __grains__, __opts__.get('id'), __opts__.get('environment')).compile_pillar()
    else:
        __pillar__ = {}
    # Convenience renderer: JINJA('{{ grains.os }}') etc.; extra keyword
    # arguments are passed straight through to Template.render().
    JINJA = (lambda x, **y: jinja2.Template(x).render(grains=__grains__, salt=__salt__, opts=__opts__, pillar=__pillar__, **y))
    return locals()
[ "def", "get_salt_vars", "(", ")", ":", "__opts__", "=", "salt", ".", "config", ".", "client_config", "(", "os", ".", "environ", ".", "get", "(", "'SALT_MINION_CONFIG'", ",", "'/etc/salt/minion'", ")", ")", "if", "(", "(", "'grains'", "not", "in", "__opts__", ")", "or", "(", "not", "__opts__", "[", "'grains'", "]", ")", ")", ":", "__opts__", "[", "'grains'", "]", "=", "salt", ".", "loader", ".", "grains", "(", "__opts__", ")", "if", "(", "(", "'file_client'", "not", "in", "__opts__", ")", "or", "(", "not", "__opts__", "[", "'file_client'", "]", ")", ")", ":", "__opts__", "[", "'file_client'", "]", "=", "'local'", "if", "(", "(", "'id'", "not", "in", "__opts__", ")", "or", "(", "not", "__opts__", "[", "'id'", "]", ")", ")", ":", "__opts__", "[", "'id'", "]", "=", "'saltsh_mid'", "__salt__", "=", "salt", ".", "loader", ".", "minion_mods", "(", "__opts__", ")", "__grains__", "=", "__opts__", "[", "'grains'", "]", "if", "(", "__opts__", "[", "'file_client'", "]", "==", "'local'", ")", ":", "__pillar__", "=", "salt", ".", "pillar", ".", "get_pillar", "(", "__opts__", ",", "__grains__", ",", "__opts__", ".", "get", "(", "'id'", ")", ",", "__opts__", ".", "get", "(", "'environment'", ")", ")", ".", "compile_pillar", "(", ")", "else", ":", "__pillar__", "=", "{", "}", "JINJA", "=", "(", "lambda", "x", ",", "**", "y", ":", "jinja2", ".", "Template", "(", "x", ")", ".", "render", "(", "grains", "=", "__grains__", ",", "salt", "=", "__salt__", ",", "opts", "=", "__opts__", ",", "pillar", "=", "__pillar__", ",", "**", "y", ")", ")", "return", "locals", "(", ")" ]
return all the salt-usual double-under data structures for a minion .
train
false
48,164
def create_group(ctxt, host='test_host@fakedrv#fakepool', name='test_group', description='this is a test group', status=fields.GroupStatus.AVAILABLE, availability_zone='fake_az', group_type_id=None, volume_type_ids=None, **kwargs):
    """Create and persist a Group object in the DB for tests.

    Any extra keyword arguments are applied to the group before it is
    created; a caller-supplied ``id`` is forced onto the row after
    creation (the DB assigns its own id otherwise).
    """
    grp = objects.Group(ctxt)
    # Ordinary fields, applied in a fixed order (dicts preserve insertion
    # order, matching the original assignment sequence).
    field_values = {
        'host': host,
        'user_id': ctxt.user_id or fake.USER_ID,
        'project_id': ctxt.project_id or fake.PROJECT_ID,
        'status': status,
        'name': name,
        'description': description,
        'availability_zone': availability_zone,
    }
    # Optional fields are only set when truthy, as before.
    if group_type_id:
        field_values['group_type_id'] = group_type_id
    if volume_type_ids:
        field_values['volume_type_ids'] = volume_type_ids
    for attr, value in field_values.items():
        setattr(grp, attr, value)
    requested_id = kwargs.pop('id', None)
    grp.update(kwargs)
    grp.create()
    # create() lets the DB pick an id; if the caller asked for a specific
    # one, rewrite the row and re-fetch the object under the new id.
    if requested_id and requested_id != grp.id:
        db.group_update(ctxt, grp.id, {'id': requested_id})
        grp = objects.Group.get_by_id(ctxt, requested_id)
    return grp
[ "def", "create_group", "(", "ctxt", ",", "host", "=", "'test_host@fakedrv#fakepool'", ",", "name", "=", "'test_group'", ",", "description", "=", "'this is a test group'", ",", "status", "=", "fields", ".", "GroupStatus", ".", "AVAILABLE", ",", "availability_zone", "=", "'fake_az'", ",", "group_type_id", "=", "None", ",", "volume_type_ids", "=", "None", ",", "**", "kwargs", ")", ":", "grp", "=", "objects", ".", "Group", "(", "ctxt", ")", "grp", ".", "host", "=", "host", "grp", ".", "user_id", "=", "(", "ctxt", ".", "user_id", "or", "fake", ".", "USER_ID", ")", "grp", ".", "project_id", "=", "(", "ctxt", ".", "project_id", "or", "fake", ".", "PROJECT_ID", ")", "grp", ".", "status", "=", "status", "grp", ".", "name", "=", "name", "grp", ".", "description", "=", "description", "grp", ".", "availability_zone", "=", "availability_zone", "if", "group_type_id", ":", "grp", ".", "group_type_id", "=", "group_type_id", "if", "volume_type_ids", ":", "grp", ".", "volume_type_ids", "=", "volume_type_ids", "new_id", "=", "kwargs", ".", "pop", "(", "'id'", ",", "None", ")", "grp", ".", "update", "(", "kwargs", ")", "grp", ".", "create", "(", ")", "if", "(", "new_id", "and", "(", "new_id", "!=", "grp", ".", "id", ")", ")", ":", "db", ".", "group_update", "(", "ctxt", ",", "grp", ".", "id", ",", "{", "'id'", ":", "new_id", "}", ")", "grp", "=", "objects", ".", "Group", ".", "get_by_id", "(", "ctxt", ",", "new_id", ")", "return", "grp" ]
create a group object in the db .
train
false
48,165
def setVerbosity():
    """Set the verbosity of sqlmap output messages from ``conf.verbose``.

    ``conf.verbose`` defaults to 1 (INFO).  Verbosity above 2 is capped
    at 2 when the ETA display is active, since per-request traffic output
    would interfere with it.
    """
    if conf.verbose is None:
        conf.verbose = 1
    conf.verbose = int(conf.verbose)

    # ETA mode caps verbosity at DEBUG regardless of the requested level.
    if conf.verbose > 2 and conf.eta:
        conf.verbose = 2
        logger.setLevel(logging.DEBUG)
        return

    # 5 and anything above all map to full traffic logging.
    if conf.verbose >= 5:
        logger.setLevel(CUSTOM_LOGGING.TRAFFIC_IN)
        return

    level_map = {
        0: logging.ERROR,
        1: logging.INFO,
        2: logging.DEBUG,
        3: CUSTOM_LOGGING.PAYLOAD,
        4: CUSTOM_LOGGING.TRAFFIC_OUT,
    }
    # Unknown (e.g. negative) values leave the logger level untouched,
    # matching the original elif ladder which had no catch-all branch.
    if conf.verbose in level_map:
        logger.setLevel(level_map[conf.verbose])
[ "def", "setVerbosity", "(", ")", ":", "if", "(", "conf", ".", "verbose", "is", "None", ")", ":", "conf", ".", "verbose", "=", "1", "conf", ".", "verbose", "=", "int", "(", "conf", ".", "verbose", ")", "if", "(", "conf", ".", "verbose", "==", "0", ")", ":", "logger", ".", "setLevel", "(", "logging", ".", "ERROR", ")", "elif", "(", "conf", ".", "verbose", "==", "1", ")", ":", "logger", ".", "setLevel", "(", "logging", ".", "INFO", ")", "elif", "(", "(", "conf", ".", "verbose", ">", "2", ")", "and", "conf", ".", "eta", ")", ":", "conf", ".", "verbose", "=", "2", "logger", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "elif", "(", "conf", ".", "verbose", "==", "2", ")", ":", "logger", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "elif", "(", "conf", ".", "verbose", "==", "3", ")", ":", "logger", ".", "setLevel", "(", "CUSTOM_LOGGING", ".", "PAYLOAD", ")", "elif", "(", "conf", ".", "verbose", "==", "4", ")", ":", "logger", ".", "setLevel", "(", "CUSTOM_LOGGING", ".", "TRAFFIC_OUT", ")", "elif", "(", "conf", ".", "verbose", ">=", "5", ")", ":", "logger", ".", "setLevel", "(", "CUSTOM_LOGGING", ".", "TRAFFIC_IN", ")" ]
this function set the verbosity of sqlmap output messages .
train
false