Columns:
  id_within_dataset      int64    1 to 55.5k
  snippet                string   length 19 to 14.2k
  nl                     string   length 6 to 352
  split_within_dataset   string   1 class
  is_duplicated          bool     2 classes
5,391
def get_sam_ids(map_data, map_header, colorby, cat, primary_state, secondary_state):
    if (colorby is None):
        sample_ids = [sam[0] for sam in map_data]
    else:
        sample_ids = get_sample_ids(map_data, map_header, {colorby: [cat]})
    primary_states = parse_metadata_state_descriptions(primary_state)
    if (colorby is not None):
        primary_states[colorby] = [cat]
    state1_samids = get_sample_ids(map_data, map_header, primary_states)
    if (secondary_state is None):
        state2_samids = set(sample_ids).difference(set(state1_samids))
    else:
        secondary_states = parse_metadata_state_descriptions(secondary_state)
        if (colorby is not None):
            secondary_states[colorby] = [cat]
        state2_samids = get_sample_ids(map_data, map_header, secondary_states)
    return (list(set(state1_samids)), list(set(state2_samids)))
returns all sample ids matching the state strings and colorby:cat (colorby: e.g. country) .
train
false
5,392
def test_wavefront():
    fname_mesh = load_data_file('orig/triceratops.obj.gz')
    fname_out = op.join(temp_dir, 'temp.obj')
    mesh1 = read_mesh(fname_mesh)
    assert_raises(IOError, read_mesh, 'foo.obj')
    assert_raises(ValueError, read_mesh, op.abspath(__file__))
    assert_raises(ValueError, write_mesh, fname_out, format='foo', *mesh1)
    write_mesh(fname_out, mesh1[0], mesh1[1], mesh1[2], mesh1[3])
    assert_raises(IOError, write_mesh, fname_out, *mesh1)
    write_mesh(fname_out, overwrite=True, *mesh1)
    mesh2 = read_mesh(fname_out)
    assert_equal(len(mesh1), len(mesh2))
    for (m1, m2) in zip(mesh1, mesh2):
        if (m1 is None):
            assert_equal(m2, None)
        else:
            assert_allclose(m1, m2, rtol=1e-05)
    assert_allclose(mesh1[2], _slow_calculate_normals(mesh1[0], mesh1[1]), rtol=1e-07, atol=1e-07)
test wavefront reader .
train
false
5,393
def buildDiscover(base_url, out_dir):
    test_data = discoverdata.readTests(discoverdata.default_test_file)

    def writeTestFile(test_name):
        template = test_data[test_name]
        data = discoverdata.fillTemplate(test_name, template, base_url, discoverdata.example_xrds)
        out_file_name = os.path.join(out_dir, test_name)
        out_file = file(out_file_name, 'w')
        out_file.write(data)
    manifest = [manifest_header]
    for (success, input_name, id_name, result_name) in discoverdata.testlist:
        if (not success):
            continue
        writeTestFile(input_name)
        input_url = urlparse.urljoin(base_url, input_name)
        id_url = urlparse.urljoin(base_url, id_name)
        result_url = urlparse.urljoin(base_url, result_name)
        manifest.append('\t'.join((input_url, id_url, result_url)))
        manifest.append('\n')
    manifest_file_name = os.path.join(out_dir, 'manifest.txt')
    manifest_file = file(manifest_file_name, 'w')
    for chunk in manifest:
        manifest_file.write(chunk)
    manifest_file.close()
convert all files in a directory to apache mod_asis files in another directory .
train
true
5,395
def reset_cache():
    global MC
    MC = None
reset memoized caches of an instance/module .
train
false
5,396
def application_uri(environ):
    url = (environ['wsgi.url_scheme'] + '://')
    from urllib.parse import quote
    if environ.get('HTTP_HOST'):
        url += environ['HTTP_HOST']
    else:
        url += environ['SERVER_NAME']
        if (environ['wsgi.url_scheme'] == 'https'):
            if (environ['SERVER_PORT'] != '443'):
                url += (':' + environ['SERVER_PORT'])
        elif (environ['SERVER_PORT'] != '80'):
            url += (':' + environ['SERVER_PORT'])
    url += quote((environ.get('SCRIPT_NAME') or '/'), encoding='latin1')
    return url
return the application's base uri .
train
false
5,397
def jquery_url():
    return get_bootstrap_setting(u'jquery_url')
return the full url to jquery file to use .
train
false
5,399
def wildcard_in_db(namespace):
    return (namespace.find('*') < namespace.find('.'))
return true if a wildcard character appears in the database name .
train
false
5,400
def snapshot_data_get_for_project(context, project_id, session=None):
    return IMPL.snapshot_data_get_for_project(context, project_id, session)
get count and gigabytes used for snapshots for specified project .
train
false
5,401
def cache1lvl(maxsize=100):
    def decorating_function(user_function):
        cache = {}
        use_count = Counter()

        @functools.wraps(user_function)
        def wrapper(key, *args, **kwargs):
            try:
                result = cache[key]
            except KeyError:
                if (len(cache) == maxsize):
                    for (k, _) in nsmallest(((maxsize // 10) or 1), use_count.iteritems(), key=itemgetter(1)):
                        del cache[k], use_count[k]
                cache[key] = user_function(key, *args, **kwargs)
                result = cache[key]
            finally:
                use_count[key] += 1
            return result

        def clear():
            cache.clear()
            use_count.clear()

        def delete(key):
            try:
                del cache[key]
                del use_count[key]
            except KeyError:
                return False
            else:
                return True
        wrapper.clear = clear
        wrapper.cache = cache
        wrapper.delete = delete
        return wrapper
    return decorating_function
modified version of URL .
train
false
5,402
def rename_branch():
    branch = choose_branch(N_(u'Rename Existing Branch'), N_(u'Select'))
    if (not branch):
        return
    new_branch = choose_branch(N_(u'Enter New Branch Name'), N_(u'Rename'))
    if (not new_branch):
        return
    cmds.do(cmds.RenameBranch, branch, new_branch)
launch the rename branch dialogs .
train
false
5,405
def list_route(methods=['get'], **kwargs):
    def decorator(func):
        func.bind_to_methods = methods
        func.detail = False
        func.permission_classes = kwargs.get('permission_classes', [])
        func.kwargs = kwargs
        return func
    return decorator
used to mark a method on a viewset that should be routed for list requests .
train
false
5,406
def get_theme_chain(theme, themes_dirs):
    themes = [get_theme_path_real(theme, themes_dirs)]
    while True:
        parent = get_parent_theme_name(themes[(-1)], themes_dirs=themes_dirs)
        if ((parent is None) or (parent in themes)):
            break
        themes.append(parent)
    return themes
create the full theme inheritance chain including paths .
train
false
5,407
def debug_http():
    import logging
    try:
        import http.client as http_client
    except ImportError:
        import httplib as http_client
    http_client.HTTPConnection.debuglevel = 1
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)
    requests_log = logging.getLogger(u'requests.packages.urllib3')
    requests_log.setLevel(logging.DEBUG)
    requests_log.propagate = True
turns on debug logging for http traffic .
train
false
5,408
def schedule_job(function, name, hours=0, minutes=0):
    job = SCHED.get_job(name)
    if job:
        if ((hours == 0) and (minutes == 0)):
            SCHED.remove_job(name)
            logger.info('Removed background task: %s', name)
        elif (job.trigger.interval != datetime.timedelta(hours=hours, minutes=minutes)):
            SCHED.reschedule_job(name, trigger=IntervalTrigger(hours=hours, minutes=minutes))
            logger.info('Re-scheduled background task: %s', name)
    elif ((hours > 0) or (minutes > 0)):
        SCHED.add_job(function, id=name, trigger=IntervalTrigger(hours=hours, minutes=minutes))
        logger.info('Scheduled background task: %s', name)
start scheduled job if starting or restarting headphones .
train
false
5,409
def SearchInCurrentBuffer(pattern):
    return GetIntValue(u"search('{0}', 'Wcnb')".format(EscapeForVim(pattern)))
returns the 1-indexed line on which the pattern matches or 0 if not found .
train
false
5,410
@pytest.fixture
def disable_bears(mocker):
    mocker.patch.object(coalib.collecting.Collectors, '_import_bears', autospec=True, return_value=[])
disable all bears that would otherwise be found with collect_bears .
train
false
5,412
def _GetChartFactory(chart_class, display_class):
    def Inner(*args, **kwargs):
        chart = chart_class(*args, **kwargs)
        chart.display = display_class(chart)
        return chart
    return Inner
create a factory method for instantiating charts with displays .
train
false
5,413
def test_smote_sample_wt_fit():
    smote = SMOTE(random_state=RND_SEED)
    assert_raises(RuntimeError, smote.sample, X, Y)
test whether an error is raised when sample is called before fitting .
train
false
5,414
def _parse_periods(pattern):
    parts = pattern.split('..', 1)
    if (len(parts) == 1):
        instant = Period.parse(parts[0])
        return (instant, instant)
    else:
        start = Period.parse(parts[0])
        end = Period.parse(parts[1])
        return (start, end)
parse a string containing two dates separated by two dots .
train
false
5,416
def catch_processing_exceptions(func):
    @wraps(func)
    def new_func(*args, **kw):
        """Executes ``func(*args, **kw)`` but catches
        :exc:`ProcessingException`s.

        """
        try:
            return func(*args, **kw)
        except ProcessingException as exception:
            kw = dict(((key, getattr(exception, key)) for key in ERROR_FIELDS))
            kw['code'] = kw.pop('code_')
            return error_response(cause=exception, **kw)
    return new_func
decorator that catches :exc:processingexceptions and subsequently returns a json-ified error response .
train
false
5,417
@register.filter
def is_sibling_of(page1, page2):
    try:
        return _is_sibling_of(page1, page2)
    except AttributeError:
        return False
determines whether a given page is a sibling of another page {% if page|is_sibling_of:feincms_page %} .
train
false
5,420
def create_temp_profile(scan_profile):
    scan_profile_file = os.path.join(tempdir, ('%s.pw3af' % uuid4()))
    file(scan_profile_file, 'w').write(scan_profile)
    return (scan_profile_file, tempdir)
writes the scan_profile to a file .
train
false
5,422
def python_like_exts():
    exts = []
    for lang in sourcecode.PYTHON_LIKE_LANGUAGES:
        exts.extend(list(sourcecode.ALL_LANGUAGES[lang]))
    return [('.' + ext) for ext in exts]
return a list of all python-like extensions .
train
true
5,423
def EnablePrivilege(privilegeStr, hToken=None):
    if (hToken == None):
        TOKEN_ADJUST_PRIVILEGES = 32
        TOKEN_QUERY = 8
        hToken = HANDLE(INVALID_HANDLE_VALUE)
        if (not hToken):
            return False
        hProcess = windll.kernel32.OpenProcess(PROCESS_QUERY_INFORMATION, False, windll.kernel32.GetCurrentProcessId())
        if (not hProcess):
            return False
        windll.advapi32.OpenProcessToken(hProcess, (TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY), byref(hToken))
        e = GetLastError()
        if (e != 0):
            return False
        windll.kernel32.CloseHandle(hProcess)
    privilege_id = LUID()
    windll.advapi32.LookupPrivilegeValueA(None, privilegeStr, byref(privilege_id))
    e = GetLastError()
    if (e != 0):
        return False
    SE_PRIVILEGE_ENABLED = 2
    laa = LUID_AND_ATTRIBUTES(privilege_id, SE_PRIVILEGE_ENABLED)
    tp = TOKEN_PRIVILEGES(1, laa)
    windll.advapi32.AdjustTokenPrivileges(hToken, False, byref(tp), sizeof(tp), None, None)
    e = GetLastError()
    if (e != 0):
        return False
    return True
enable privilege on token .
train
false
5,426
def test_fit_sample_half():
    ratio = 0.6
    ee = EasyEnsemble(ratio=ratio, random_state=RND_SEED, n_subsets=3)
    (X_resampled, y_resampled) = ee.fit_sample(X, Y)
    X_gt = np.array([[[0.85117925, 1.0185556],
                      [(-0.58539673), 0.62515052],
                      [1.35269503, 0.44812421],
                      [0.5220963, 0.11349303],
                      [(-2.10724436), 0.70263997],
                      [1.10915364, 0.05718352],
                      [0.22039505, 0.26469445],
                      [0.59091459, 0.40692742]],
                     [[0.85117925, 1.0185556],
                      [(-0.58539673), 0.62515052],
                      [(-1.23195149), 0.15427291],
                      [(-2.10724436), 0.70263997],
                      [0.5220963, 0.11349303],
                      [0.22039505, 0.26469445],
                      [1.10915364, 0.05718352],
                      [0.59091459, 0.40692742]],
                     [[0.85117925, 1.0185556],
                      [(-0.58539673), 0.62515052],
                      [(-1.23195149), 0.15427291],
                      [0.5220963, 0.11349303],
                      [1.35269503, 0.44812421],
                      [1.10915364, 0.05718352],
                      [0.59091459, 0.40692742],
                      [0.22039505, 0.26469445]]])
    y_gt = np.array([[0, 0, 1, 1, 1, 2, 2, 2],
                     [0, 0, 1, 1, 1, 2, 2, 2],
                     [0, 0, 1, 1, 1, 2, 2, 2]])
    assert_array_equal(X_resampled, X_gt)
    assert_array_equal(y_resampled, y_gt)
test fit and sample routines with ratio of 0.6 .
train
false
5,427
def generate_addons(num, owner, app_name):
    post_save.disconnect(update_search_index, sender=Addon, dispatch_uid='addons.search.index')
    featured_categories = collections.defaultdict(int)
    user = generate_user(owner)
    app = APPS[app_name]
    default_icons = [x[0] for x in icons() if x[0].startswith('icon/')]
    for (name, category) in _yield_name_and_cat(num, app=app, type=ADDON_EXTENSION):
        icon_type = random.choice(default_icons)
        addon = create_addon(name=name, icon_type=icon_type, application=app)
        generate_addon_user_and_category(addon, user, category)
        generate_addon_preview(addon)
        generate_translations(addon)
        if (featured_categories[category] < 5):
            generate_collection(addon, app)
            featured_categories[category] += 1
        generate_ratings(addon, 5)
generate num addons for the given owner and app_name .
train
false
5,428
def unset(bot, update, chat_data):
    if ('job' not in chat_data):
        update.message.reply_text('You have no active timer')
        return
    job = chat_data['job']
    job.schedule_removal()
    del chat_data['job']
    update.message.reply_text('Timer successfully unset!')
removes the job if the user changed their mind .
train
false
5,429
def parse_http_dict(header_val):
    if (not header_val):
        return {}
    ans = {}
    (sep, dquote) = ('="' if isinstance(header_val, bytes) else u'="')
    for item in parse_http_list(header_val):
        (k, v) = item.partition(sep)[::2]
        if k:
            if (v.startswith(dquote) and v.endswith(dquote)):
                v = v[1:(-1)]
            ans[k] = v
    return ans
parse an http comma separated header with items of the form a=1 .
train
false
5,430
def get_pack_group():
    return cfg.CONF.content.pack_group
return a name of the group with write permissions to pack directory .
train
false
5,433
def view_i18n_helper(c, cluster, encoding, content):
    prefix = (cluster.fs_prefix + '/test_view_i18n')
    filename = (prefix + u'/test-view-car\xe1cter-internacional')
    bytestring = content.encode(encoding)
    try:
        f = cluster.fs.open(filename, 'w')
        f.write(bytestring)
        f.close()
        response = c.get(('/filebrowser/view=%s?encoding=%s' % (filename, encoding)))
        assert_equal(response.context['view']['contents'], content)
        response = c.get(('/filebrowser/view=%s?encoding=%s&end=8&begin=1' % (filename, encoding)))
        assert_equal(response.context['view']['contents'], unicode(bytestring[0:8], encoding, errors='replace'))
    finally:
        cleanup_file(cluster, filename)
write the content in the given encoding directly into the filesystem .
train
false
5,434
def assign_private_ip_addresses(network_interface_name=None, network_interface_id=None,
                                private_ip_addresses=None, secondary_private_ip_address_count=None,
                                allow_reassignment=False, region=None, key=None, keyid=None,
                                profile=None):
    if (not salt.utils.exactly_one((network_interface_name, network_interface_id))):
        raise SaltInvocationError("Exactly one of 'network_interface_name', 'network_interface_id' must be provided")
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if network_interface_name:
        try:
            network_interface_id = get_network_interface_id(network_interface_name, region=region, key=key, keyid=keyid, profile=profile)
        except boto.exception.BotoServerError as e:
            log.error(e)
            return False
        if (not network_interface_id):
            log.error("Given network_interface_name '{0}' cannot be mapped to an network_interface_id".format(network_interface_name))
            return False
    try:
        return conn.assign_private_ip_addresses(network_interface_id=network_interface_id,
                                                private_ip_addresses=private_ip_addresses,
                                                secondary_private_ip_address_count=secondary_private_ip_address_count,
                                                allow_reassignment=allow_reassignment)
    except boto.exception.BotoServerError as e:
        log.error(e)
        return False
assigns one or more secondary private ip addresses to a network interface .
train
true
5,435
def support_enumeration_gen(g):
    try:
        N = g.N
    except:
        raise TypeError('input must be a 2-player NormalFormGame')
    if (N != 2):
        raise NotImplementedError('Implemented only for 2-player games')
    return _support_enumeration_gen(g.players[0].payoff_array, g.players[1].payoff_array)
generator version of support_enumeration .
train
false
5,436
@require_POST
@login_required
def remove_coupon(request, course_id):
    coupon_id = request.POST.get('id', None)
    if (not coupon_id):
        return JsonResponse({'message': _('coupon id is None')}, status=400)
    try:
        coupon = Coupon.objects.get(id=coupon_id)
    except ObjectDoesNotExist:
        return JsonResponse({'message': _('coupon with the coupon id ({coupon_id}) DoesNotExist').format(coupon_id=coupon_id)}, status=400)
    if (not coupon.is_active):
        return JsonResponse({'message': _('coupon with the coupon id ({coupon_id}) is already inactive').format(coupon_id=coupon_id)}, status=400)
    coupon.is_active = False
    coupon.save()
    return JsonResponse({'message': _('coupon with the coupon id ({coupon_id}) updated successfully').format(coupon_id=coupon_id)})
remove the coupon against the coupon id set the coupon is_active flag to false .
train
false
5,437
def adapt_criterion_to_null(crit, nulls):
    def visit_binary(binary):
        if (isinstance(binary.left, BindParameter) and (binary.left._identifying_key in nulls)):
            binary.left = binary.right
            binary.right = Null()
            binary.operator = operators.is_
            binary.negate = operators.isnot
        elif (isinstance(binary.right, BindParameter) and (binary.right._identifying_key in nulls)):
            binary.right = Null()
            binary.operator = operators.is_
            binary.negate = operators.isnot
    return visitors.cloned_traverse(crit, {}, {'binary': visit_binary})
given criterion containing bind params .
train
false
5,439
def _if_modified_since_passes(last_modified, if_modified_since):
    return ((not last_modified) or (last_modified > if_modified_since))
test the if-modified-since comparison as defined in section 3 .
train
false
5,440
def get_input_value(page, css_selector):
    page.wait_for_element_presence(css_selector, 'Elements matching "{}" selector are present'.format(css_selector))
    return page.q(css=css_selector).attrs('value')[0]
returns the value of the field matching the css selector .
train
false
5,441
def adjust_key_parity(key_in):
    def parity_byte(key_byte):
        parity = 1
        for i in xrange(1, 8):
            parity ^= ((key_byte >> i) & 1)
        return ((key_byte & 254) | parity)
    if (len(key_in) not in key_size):
        raise ValueError('Not a valid TDES key')
    key_out = b('').join([bchr(parity_byte(bord(x))) for x in key_in])
    if ((key_out[:8] == key_out[8:16]) or (key_out[(-16):(-8)] == key_out[(-8):])):
        raise ValueError('Triple DES key degenerates to single DES')
    return key_out
return the tdes key with parity bits correctly set .
train
false
5,444
def response_authenticate():
    response = HttpResponse(status=401)
    response['WWW-Authenticate'] = 'Basic realm="Git"'
    return response
returns 401 response with authenticate header .
train
false
5,445
def send_wsgi_response(status, headers, content, start_response, cors_handler=None):
    if cors_handler:
        cors_handler.update_headers(headers)
    content_len = (len(content) if content else 0)
    headers = [(header, value) for (header, value) in headers if (header.lower() != 'content-length')]
    headers.append(('Content-Length', ('%s' % content_len)))
    start_response(status, headers)
    return content
dump reformatted response to cgi start_response .
train
true
5,448
def _track_from_response(result, timeout):
    response = result['response']
    status = response['track']['status'].lower()
    if (status == 'pending'):
        result = _wait_for_pending_track(response['track']['id'], timeout)
        response = result['response']
        status = response['track']['status'].lower()
    if (not (status == 'complete')):
        track_id = response['track']['id']
        if (status == 'pending'):
            raise Exception(("%s: the operation didn't complete before the timeout (%d secs)" % (track_id, timeout)))
        else:
            raise Exception(('%s: there was an error analyzing the track, status: %s' % (track_id, status)))
    else:
        track_properties = response['track']
        identifier = track_properties.pop('id')
        md5 = track_properties.pop('md5', None)
        track_properties.update(track_properties.pop('audio_summary'))
        return Track(identifier, md5, track_properties)
this is the function that actually creates the track object .
train
true
5,450
def NoDefaultRoot():
    global _support_default_root
    _support_default_root = 0
    global _default_root
    _default_root = None
    del _default_root
inhibit setting of default root window .
train
false
5,452
def ensure_bytecode_path(bytecode_path):
    try:
        os.mkdir(os.path.dirname(bytecode_path))
    except OSError as error:
        if (error.errno != errno.EEXIST):
            raise
ensure that the __pycache__ directory for pep 3147 pyc file exists .
train
false
5,455
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
    if (included == None):
        included = []
    if (build_file_path in included):
        return included
    included.append(build_file_path)
    for included_build_file in aux_data[build_file_path].get('included', []):
        GetIncludedBuildFiles(included_build_file, aux_data, included)
    return included
return a list of all build files included into build_file_path .
train
false
5,456
@require_context
@require_snapshot_exists
def volume_glance_metadata_copy_to_snapshot(context, snapshot_id, volume_id):
    session = get_session()
    with session.begin():
        metadata = _volume_glance_metadata_get(context, volume_id, session=session)
        for meta in metadata:
            vol_glance_metadata = models.VolumeGlanceMetadata()
            vol_glance_metadata.snapshot_id = snapshot_id
            vol_glance_metadata.key = meta['key']
            vol_glance_metadata.value = meta['value']
            vol_glance_metadata.save(session=session)
update the glance metadata for a snapshot by copying all of the key:value pairs from the originating volume .
train
false
5,457
@utils.arg('secgroup', metavar='<secgroup>', help=_('ID or name of security group.'))
@utils.arg('ip_proto', metavar='<ip-proto>', help=_('IP protocol (icmp, tcp, udp).'))
@utils.arg('from_port', metavar='<from-port>', help=_('Port at start of range.'))
@utils.arg('to_port', metavar='<to-port>', help=_('Port at end of range.'))
@utils.arg('cidr', metavar='<cidr>', help=_('CIDR for address range.'))
@deprecated_network
def do_secgroup_delete_rule(cs, args):
    secgroup = _get_secgroup(cs, args.secgroup)
    for rule in secgroup.rules:
        if (rule['ip_protocol'] and
                (rule['ip_protocol'].upper() == args.ip_proto.upper()) and
                (rule['from_port'] == int(args.from_port)) and
                (rule['to_port'] == int(args.to_port)) and
                (rule['ip_range']['cidr'] == args.cidr)):
            _print_secgroup_rules([rule])
            return cs.security_group_rules.delete(rule['id'])
    raise exceptions.CommandError(_('Rule not found'))
delete a rule from a security group .
train
false
5,458
def parse_seq(tokens, options):
    result = []
    while (tokens.current() not in [None, ']', ')', '|']):
        atom = parse_atom(tokens, options)
        if (tokens.current() == '...'):
            atom = [OneOrMore(*atom)]
            tokens.move()
        result += atom
    return result
seq ::= ( atom [ "..." ] )* ;
train
true
5,459
def marching_cubes(volume, level=None, spacing=(1.0, 1.0, 1.0), gradient_direction='descent',
                   step_size=1, allow_degenerate=True, use_classic=False):
    try:
        nout = _expected_output_args()
    except Exception:
        nout = 0
    if (nout <= 2):
        warn(skimage_deprecation('`marching_cubes` now uses a better and faster algorithm, and returns four instead of two outputs (see docstring for details). Backwards compatibility with 0.12 and prior is available with `marching_cubes_classic`.'))
    return marching_cubes_lewiner(volume, level, spacing, gradient_direction, step_size, allow_degenerate, use_classic)
lewiner marching cubes algorithm to find surfaces in 3d volumetric data .
train
false
5,460
def decorate(test, decorator): try: tests = iter(test) except TypeError: return decorator(test) _clearSuite(test) for case in tests: test.addTest(decorate(case, decorator)) return test
[ "def", "decorate", "(", "test", ",", "decorator", ")", ":", "try", ":", "tests", "=", "iter", "(", "test", ")", "except", "TypeError", ":", "return", "decorator", "(", "test", ")", "_clearSuite", "(", "test", ")", "for", "case", "in", "tests", ":", "test", ".", "addTest", "(", "decorate", "(", "case", ",", "decorator", ")", ")", "return", "test" ]
decorate all test cases in test with decorator .
train
false
5,462
def parse_arxiv_url(url): ix = url.rfind('/') idversion = url[(ix + 1):] parts = idversion.split('v') assert (len(parts) == 2), ('error parsing url ' + url) return (parts[0], int(parts[1]))
[ "def", "parse_arxiv_url", "(", "url", ")", ":", "ix", "=", "url", ".", "rfind", "(", "'/'", ")", "idversion", "=", "url", "[", "(", "ix", "+", "1", ")", ":", "]", "parts", "=", "idversion", ".", "split", "(", "'v'", ")", "assert", "(", "len", "(", "parts", ")", "==", "2", ")", ",", "(", "'error parsing url '", "+", "url", ")", "return", "(", "parts", "[", "0", "]", ",", "int", "(", "parts", "[", "1", "]", ")", ")" ]
example input is URL ; we want to extract the raw id and the version .
train
false
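A usage sketch for the fixed parse_arxiv_url above; the input URL is only illustrative of the expected shape.

print(parse_arxiv_url('http://arxiv.org/abs/1512.08756v2'))  # -> ('1512.08756', 2)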
5,463
def ValidHeadersRewriter(response): for (key, value) in response.headers.items(): try: key.decode('ascii') value.decode('ascii') except UnicodeDecodeError: del response.headers[key]
[ "def", "ValidHeadersRewriter", "(", "response", ")", ":", "for", "(", "key", ",", "value", ")", "in", "response", ".", "headers", ".", "items", "(", ")", ":", "try", ":", "key", ".", "decode", "(", "'ascii'", ")", "value", ".", "decode", "(", "'ascii'", ")", "except", "UnicodeDecodeError", ":", "del", "response", ".", "headers", "[", "key", "]" ]
remove invalid response headers .
train
false
5,465
def sqla_listen(*args): event.listen(*args) _REGISTERED_SQLA_EVENTS.append(args)
[ "def", "sqla_listen", "(", "*", "args", ")", ":", "event", ".", "listen", "(", "*", "args", ")", "_REGISTERED_SQLA_EVENTS", ".", "append", "(", "args", ")" ]
wrapper to track subscribers for test teardowns .
train
false
5,466
def _router_default(): router = Storage(default_application='init', applications='ALL', default_controller='default', controllers='DEFAULT', default_function='index', functions=dict(), default_language=None, languages=None, root_static=['favicon.ico', 'robots.txt'], map_static=None, domains=None, exclusive_domain=False, map_hyphen=False, acfe_match='\\w+$', file_match='([-+=@$%\\w]|(?<=[-+=@$%\\w])[./])*$', args_match='([\\w@ =-]|(?<=[\\w@ -])[.])*$') return router
[ "def", "_router_default", "(", ")", ":", "router", "=", "Storage", "(", "default_application", "=", "'init'", ",", "applications", "=", "'ALL'", ",", "default_controller", "=", "'default'", ",", "controllers", "=", "'DEFAULT'", ",", "default_function", "=", "'index'", ",", "functions", "=", "dict", "(", ")", ",", "default_language", "=", "None", ",", "languages", "=", "None", ",", "root_static", "=", "[", "'favicon.ico'", ",", "'robots.txt'", "]", ",", "map_static", "=", "None", ",", "domains", "=", "None", ",", "exclusive_domain", "=", "False", ",", "map_hyphen", "=", "False", ",", "acfe_match", "=", "'\\\\w+$'", ",", "file_match", "=", "'([-+=@$%\\\\w]|(?<=[-+=@$%\\\\w])[./])*$'", ",", "args_match", "=", "'([\\\\w@ =-]|(?<=[\\\\w@ -])[.])*$'", ")", "return", "router" ]
returns new copy of default base router .
train
false
5,468
def handle_socks4_negotiation(sock, username=None): received_version = sock.recv(1) command = sock.recv(1) port = _read_exactly(sock, 2) port = ((ord(port[0:1]) << 8) + ord(port[1:2])) addr = _read_exactly(sock, 4) provided_username = _read_until(sock, '\x00')[:(-1)] if (addr == '\x00\x00\x00\x01'): addr = _read_until(sock, '\x00')[:(-1)] else: addr = socket.inet_ntoa(addr) assert (received_version == SOCKS_VERSION_SOCKS4) assert (command == '\x01') if ((username is not None) and (username != provided_username)): sock.sendall('\x00]\x00\x00\x00\x00\x00\x00') sock.close() (yield False) return succeed = (yield (addr, port)) if succeed: response = '\x00Z\xea`\x7f\x00\x00\x01' else: response = '\x00[\x00\x00\x00\x00\x00\x00' sock.sendall(response) (yield True)
[ "def", "handle_socks4_negotiation", "(", "sock", ",", "username", "=", "None", ")", ":", "received_version", "=", "sock", ".", "recv", "(", "1", ")", "command", "=", "sock", ".", "recv", "(", "1", ")", "port", "=", "_read_exactly", "(", "sock", ",", "2", ")", "port", "=", "(", "(", "ord", "(", "port", "[", "0", ":", "1", "]", ")", "<<", "8", ")", "+", "ord", "(", "port", "[", "1", ":", "2", "]", ")", ")", "addr", "=", "_read_exactly", "(", "sock", ",", "4", ")", "provided_username", "=", "_read_until", "(", "sock", ",", "'\\x00'", ")", "[", ":", "(", "-", "1", ")", "]", "if", "(", "addr", "==", "'\\x00\\x00\\x00\\x01'", ")", ":", "addr", "=", "_read_until", "(", "sock", ",", "'\\x00'", ")", "[", ":", "(", "-", "1", ")", "]", "else", ":", "addr", "=", "socket", ".", "inet_ntoa", "(", "addr", ")", "assert", "(", "received_version", "==", "SOCKS_VERSION_SOCKS4", ")", "assert", "(", "command", "==", "'\\x01'", ")", "if", "(", "(", "username", "is", "not", "None", ")", "and", "(", "username", "!=", "provided_username", ")", ")", ":", "sock", ".", "sendall", "(", "'\\x00]\\x00\\x00\\x00\\x00\\x00\\x00'", ")", "sock", ".", "close", "(", ")", "(", "yield", "False", ")", "return", "succeed", "=", "(", "yield", "(", "addr", ",", "port", ")", ")", "if", "succeed", ":", "response", "=", "'\\x00Z\\xea`\\x7f\\x00\\x00\\x01'", "else", ":", "response", "=", "'\\x00[\\x00\\x00\\x00\\x00\\x00\\x00'", "sock", ".", "sendall", "(", "response", ")", "(", "yield", "True", ")" ]
handle the socks4 handshake .
train
false
5,470
def wait_script(name, source=None, template=None, onlyif=None, unless=None, cwd=None, runas=None, shell=None, env=None, stateful=False, umask=None, use_vt=False, output_loglevel='debug', **kwargs): if (('user' in kwargs) or ('group' in kwargs)): salt.utils.warn_until('Oxygen', 'The legacy user/group arguments are deprecated. Replace them with runas. These arguments will be removed in Salt Oxygen.') if (('user' in kwargs) and (kwargs['user'] is not None) and (runas is None)): runas = kwargs.pop('user') return {'name': name, 'changes': {}, 'result': True, 'comment': ''}
[ "def", "wait_script", "(", "name", ",", "source", "=", "None", ",", "template", "=", "None", ",", "onlyif", "=", "None", ",", "unless", "=", "None", ",", "cwd", "=", "None", ",", "runas", "=", "None", ",", "shell", "=", "None", ",", "env", "=", "None", ",", "stateful", "=", "False", ",", "umask", "=", "None", ",", "use_vt", "=", "False", ",", "output_loglevel", "=", "'debug'", ",", "**", "kwargs", ")", ":", "if", "(", "(", "'user'", "in", "kwargs", ")", "or", "(", "'group'", "in", "kwargs", ")", ")", ":", "salt", ".", "utils", ".", "warn_until", "(", "'Oxygen'", ",", "'The legacy user/group arguments are deprecated. Replace them with runas. These arguments will be removed in Salt Oxygen.'", ")", "if", "(", "(", "'user'", "in", "kwargs", ")", "and", "(", "kwargs", "[", "'user'", "]", "is", "not", "None", ")", "and", "(", "runas", "is", "None", ")", ")", ":", "runas", "=", "kwargs", ".", "pop", "(", "'user'", ")", "return", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", "}" ]
download a script from a remote source and execute it only if a watch statement calls it .
train
false
5,472
def build_sponsor_list(sponsor_atags): sponsors = [] house_chief = senate_chief = None spontype = 'cosponsor' for atag in sponsor_atags: sponsor = atag.text if ('house' in atag.attrib['href'].split('/')): chamber = 'lower' elif ('senate' in atag.attrib['href'].split('/')): chamber = 'upper' else: chamber = None if ((chamber == 'lower') and (house_chief is None)): spontype = 'primary' official_spontype = 'chief' house_chief = sponsor elif ((chamber == 'upper') and (senate_chief is None)): spontype = 'primary' official_spontype = 'chief' senate_chief = sponsor else: spontype = 'cosponsor' official_spontype = 'cosponsor' sponsors.append((spontype, sponsor, chamber, official_spontype)) return sponsors
[ "def", "build_sponsor_list", "(", "sponsor_atags", ")", ":", "sponsors", "=", "[", "]", "house_chief", "=", "senate_chief", "=", "None", "spontype", "=", "'cosponsor'", "for", "atag", "in", "sponsor_atags", ":", "sponsor", "=", "atag", ".", "text", "if", "(", "'house'", "in", "atag", ".", "attrib", "[", "'href'", "]", ".", "split", "(", "'/'", ")", ")", ":", "chamber", "=", "'lower'", "elif", "(", "'senate'", "in", "atag", ".", "attrib", "[", "'href'", "]", ".", "split", "(", "'/'", ")", ")", ":", "chamber", "=", "'upper'", "else", ":", "chamber", "=", "None", "if", "(", "(", "chamber", "==", "'lower'", ")", "and", "(", "house_chief", "is", "None", ")", ")", ":", "spontype", "=", "'primary'", "official_spontype", "=", "'chief'", "house_chief", "=", "sponsor", "elif", "(", "(", "chamber", "==", "'upper'", ")", "and", "(", "senate_chief", "is", "None", ")", ")", ":", "spontype", "=", "'primary'", "official_spontype", "=", "'chief'", "senate_chief", "=", "sponsor", "else", ":", "spontype", "=", "'cosponsor'", "official_spontype", "=", "'cosponsor'", "sponsors", ".", "append", "(", "(", "spontype", ",", "sponsor", ",", "chamber", ",", "official_spontype", ")", ")", "return", "sponsors" ]
return a list of (spontype, sponsor, chamber, official_spontype) tuples .
train
false
5,473
def httpconnection_patched_send_output(self, message_body=None): self._buffer.extend(('', '')) msg = '\r\n'.join(self._buffer) del self._buffer[:] if isinstance(message_body, str): msg += message_body message_body = None self.send(msg) if (message_body is not None): self.send(message_body)
[ "def", "httpconnection_patched_send_output", "(", "self", ",", "message_body", "=", "None", ")", ":", "self", ".", "_buffer", ".", "extend", "(", "(", "''", ",", "''", ")", ")", "msg", "=", "'\\r\\n'", ".", "join", "(", "self", ".", "_buffer", ")", "del", "self", ".", "_buffer", "[", ":", "]", "if", "isinstance", "(", "message_body", ",", "str", ")", ":", "msg", "+=", "message_body", "message_body", "=", "None", "self", ".", "send", "(", "msg", ")", "if", "(", "message_body", "is", "not", "None", ")", ":", "self", ".", "send", "(", "message_body", ")" ]
send the currently buffered request and clear the buffer .
train
false
5,474
def get_default_username(distribution): return 'root'
[ "def", "get_default_username", "(", "distribution", ")", ":", "return", "'root'" ]
try to determine the current system users username to use as a default .
train
false
5,475
def reversed_upper_bounded_changelog(repo, included_upper_bounds_changeset_revision): return reversed_lower_upper_bounded_changelog(repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision)
[ "def", "reversed_upper_bounded_changelog", "(", "repo", ",", "included_upper_bounds_changeset_revision", ")", ":", "return", "reversed_lower_upper_bounded_changelog", "(", "repo", ",", "INITIAL_CHANGELOG_HASH", ",", "included_upper_bounds_changeset_revision", ")" ]
return a reversed list of changesets in the repository changelog up to and including the included_upper_bounds_changeset_revision .
train
false
5,476
def report_new_account(acc, config): reporter = zope.component.queryUtility(interfaces.IReporter) if (reporter is None): return reporter.add_message('Your account credentials have been saved in your Certbot configuration directory at {0}. You should make a secure backup of this folder now. This configuration directory will also contain certificates and private keys obtained by Certbot so making regular backups of this folder is ideal.'.format(config.config_dir), reporter.MEDIUM_PRIORITY) if acc.regr.body.emails: recovery_msg = 'If you lose your account credentials, you can recover through e-mails sent to {0}.'.format(', '.join(acc.regr.body.emails)) reporter.add_message(recovery_msg, reporter.MEDIUM_PRIORITY)
[ "def", "report_new_account", "(", "acc", ",", "config", ")", ":", "reporter", "=", "zope", ".", "component", ".", "queryUtility", "(", "interfaces", ".", "IReporter", ")", "if", "(", "reporter", "is", "None", ")", ":", "return", "reporter", ".", "add_message", "(", "'Your account credentials have been saved in your Certbot configuration directory at {0}. You should make a secure backup of this folder now. This configuration directory will also contain certificates and private keys obtained by Certbot so making regular backups of this folder is ideal.'", ".", "format", "(", "config", ".", "config_dir", ")", ",", "reporter", ".", "MEDIUM_PRIORITY", ")", "if", "acc", ".", "regr", ".", "body", ".", "emails", ":", "recovery_msg", "=", "'If you lose your account credentials, you can recover through e-mails sent to {0}.'", ".", "format", "(", "', '", ".", "join", "(", "acc", ".", "regr", ".", "body", ".", "emails", ")", ")", "reporter", ".", "add_message", "(", "recovery_msg", ",", "reporter", ".", "MEDIUM_PRIORITY", ")" ]
informs the user about their new acme account .
train
false
5,477
def email_split(text): if (not text): return [] return [addr[1] for addr in getaddresses([text]) if addr[1] if ('@' in addr[1])]
[ "def", "email_split", "(", "text", ")", ":", "if", "(", "not", "text", ")", ":", "return", "[", "]", "return", "[", "addr", "[", "1", "]", "for", "addr", "in", "getaddresses", "(", "[", "text", "]", ")", "if", "addr", "[", "1", "]", "if", "(", "'@'", "in", "addr", "[", "1", "]", ")", "]" ]
return a list of the email addresses found in text .
train
false
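email_split leans on the standard-library getaddresses; a sketch with made-up addresses, showing the import the snippet assumes and the filtering of entries without an '@':

from email.utils import getaddresses  # import assumed by the snippet
print(email_split('Alice <alice@example.com>, bob@example.com, not-an-address'))
# -> ['alice@example.com', 'bob@example.com']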
5,478
def revision_links(obj): return combine_funcs(obj, (current_revision_link, related_revisions_link))
[ "def", "revision_links", "(", "obj", ")", ":", "return", "combine_funcs", "(", "obj", ",", "(", "current_revision_link", ",", "related_revisions_link", ")", ")" ]
combine the revision nav links .
train
false
5,479
def _default_names(nvar): return [('v%d' % i) for i in range(1, (nvar + 1))]
[ "def", "_default_names", "(", "nvar", ")", ":", "return", "[", "(", "'v%d'", "%", "i", ")", "for", "i", "in", "range", "(", "1", ",", "(", "nvar", "+", "1", ")", ")", "]" ]
returns default stata names v1 , v2 , ... , vnvar .
train
false
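A one-line worked example of the naming scheme above:

print(_default_names(3))  # -> ['v1', 'v2', 'v3']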
5,480
@register.filter def sort_by(items, attr): def key_func(item): try: return getattr(item, attr) except AttributeError: try: return item[attr] except TypeError: getattr(item, attr) return sorted(items, key=key_func)
[ "@", "register", ".", "filter", "def", "sort_by", "(", "items", ",", "attr", ")", ":", "def", "key_func", "(", "item", ")", ":", "try", ":", "return", "getattr", "(", "item", ",", "attr", ")", "except", "AttributeError", ":", "try", ":", "return", "item", "[", "attr", "]", "except", "TypeError", ":", "getattr", "(", "item", ",", "attr", ")", "return", "sorted", "(", "items", ",", "key", "=", "key_func", ")" ]
general sort filter - sorts by either attribute or key .
train
true
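A sketch of the fallback behaviour with hypothetical data: getattr fails on a dict, so key_func falls through to item['rank']; in a Django template the same filter would be written items|sort_by:"rank".

items = [{'rank': 2}, {'rank': 1}]
print(sort_by(items, 'rank'))  # -> [{'rank': 1}, {'rank': 2}]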
5,481
def _message_pb_from_mapping(message): return PubsubMessage(data=_to_bytes(message['data']), attributes=message['attributes'])
[ "def", "_message_pb_from_mapping", "(", "message", ")", ":", "return", "PubsubMessage", "(", "data", "=", "_to_bytes", "(", "message", "[", "'data'", "]", ")", ",", "attributes", "=", "message", "[", "'attributes'", "]", ")" ]
helper for :meth:_publisherapi .
train
false
5,482
def remove_subs_from_store(subs_id, item, lang='en'): filename = subs_filename(subs_id, lang) Transcript.delete_asset(item.location, filename)
[ "def", "remove_subs_from_store", "(", "subs_id", ",", "item", ",", "lang", "=", "'en'", ")", ":", "filename", "=", "subs_filename", "(", "subs_id", ",", "lang", ")", "Transcript", ".", "delete_asset", "(", "item", ".", "location", ",", "filename", ")" ]
remove the subtitle file for subs_id and lang from the store .
train
false
5,484
def timecheck(files): global _mtimes for filename in files: mtime = os.stat(filename).st_mtime if (filename not in _mtimes): _mtimes[filename] = mtime elif (mtime != _mtimes[filename]): _mtimes = {} return True else: return False
[ "def", "timecheck", "(", "files", ")", ":", "global", "_mtimes", "for", "filename", "in", "files", ":", "mtime", "=", "os", ".", "stat", "(", "filename", ")", ".", "st_mtime", "if", "(", "filename", "not", "in", "_mtimes", ")", ":", "_mtimes", "[", "filename", "]", "=", "mtime", "elif", "(", "mtime", "!=", "_mtimes", "[", "filename", "]", ")", ":", "_mtimes", "=", "{", "}", "return", "True", "else", ":", "return", "False" ]
return true if any of the files have changed .
train
false
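A polling sketch, assuming timecheck and its module-level _mtimes cache are defined as above (with os imported) and using a hypothetical file name:

import time
with open('demo.txt', 'w') as f: f.write('v1')
timecheck(['demo.txt'])         # first call only records the mtime
time.sleep(1)
with open('demo.txt', 'w') as f: f.write('v2')
print(timecheck(['demo.txt']))  # -> True (mtime changed, cache reset)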
5,485
def getBit(n, bit): return int(bool(((int(n) & (1 << bit)) >> bit)))
[ "def", "getBit", "(", "n", ",", "bit", ")", ":", "return", "int", "(", "bool", "(", "(", "(", "int", "(", "n", ")", "&", "(", "1", "<<", "bit", ")", ")", ">>", "bit", ")", ")", ")" ]
name: getbit args: n , bit returns: the value of bit position bit in n .
train
false
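A worked example; bit positions are zero-based from the least significant bit:

print(getBit(0b1010, 1))  # -> 1 (bit 1 of 10 is set)
print(getBit(0b1010, 2))  # -> 0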
5,486
@skip('silverlight') def test_iteration_no_mutation_bad_hash(): import random class c(object, ): def __hash__(self): return int((random.random() * 200)) l = [c() for i in xrange(1000)] b = set(l) for x in b: pass
[ "@", "skip", "(", "'silverlight'", ")", "def", "test_iteration_no_mutation_bad_hash", "(", ")", ":", "import", "random", "class", "c", "(", "object", ",", ")", ":", "def", "__hash__", "(", "self", ")", ":", "return", "int", "(", "(", "random", ".", "random", "(", ")", "*", "200", ")", ")", "l", "=", "[", "c", "(", ")", "for", "i", "in", "xrange", "(", "1000", ")", "]", "b", "=", "set", "(", "l", ")", "for", "x", "in", "b", ":", "pass" ]
create a set w/ objects with a bad hash and enumerate through it .
train
false
5,487
def onRequestCreateAccount(accountName, password, datas): INFO_MSG(('onRequestCreateAccount() %s' % accountName)) errorno = KBEngine.SERVER_SUCCESS if (len(accountName) > 64): errorno = KBEngine.SERVER_ERR_NAME if (len(password) > 64): errorno = KBEngine.SERVER_ERR_PASSWORD return (errorno, accountName, password, datas)
[ "def", "onRequestCreateAccount", "(", "accountName", ",", "password", ",", "datas", ")", ":", "INFO_MSG", "(", "(", "'onRequestCreateAccount() %s'", "%", "accountName", ")", ")", "errorno", "=", "KBEngine", ".", "SERVER_SUCCESS", "if", "(", "len", "(", "accountName", ")", ">", "64", ")", ":", "errorno", "=", "KBEngine", ".", "SERVER_ERR_NAME", "if", "(", "len", "(", "password", ")", ">", "64", ")", ":", "errorno", "=", "KBEngine", ".", "SERVER_ERR_PASSWORD", "return", "(", "errorno", ",", "accountName", ",", "password", ",", "datas", ")" ]
kbengine callback to validate an account creation request and return an error code .
train
false
5,488
def get_values_of_matching_keys(pattern_dict, user_name): ret = [] for expr in pattern_dict: if expr_match(user_name, expr): ret.extend(pattern_dict[expr]) return ret
[ "def", "get_values_of_matching_keys", "(", "pattern_dict", ",", "user_name", ")", ":", "ret", "=", "[", "]", "for", "expr", "in", "pattern_dict", ":", "if", "expr_match", "(", "user_name", ",", "expr", ")", ":", "ret", ".", "extend", "(", "pattern_dict", "[", "expr", "]", ")", "return", "ret" ]
return the values of all keys in pattern_dict whose expression matches user_name .
train
false
5,489
def ifequal(parser, token): return do_ifequal(parser, token, False)
[ "def", "ifequal", "(", "parser", ",", "token", ")", ":", "return", "do_ifequal", "(", "parser", ",", "token", ",", "False", ")" ]
output the contents of the block if the two arguments equal each other .
train
false
5,490
def ole2datetime(oledt): val = float(oledt) if (val < 61): raise ValueError(('Value is outside of acceptable range: %s ' % val)) return (OLE_TIME_ZERO + timedelta(days=val))
[ "def", "ole2datetime", "(", "oledt", ")", ":", "val", "=", "float", "(", "oledt", ")", "if", "(", "val", "<", "61", ")", ":", "raise", "ValueError", "(", "(", "'Value is outside of acceptable range: %s '", "%", "val", ")", ")", "return", "(", "OLE_TIME_ZERO", "+", "timedelta", "(", "days", "=", "val", ")", ")" ]
function for converting excel date to normal date format .
train
false
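ole2datetime assumes a module-level OLE_TIME_ZERO; a sketch using the customary OLE/Excel epoch of 1899-12-30:

from datetime import datetime, timedelta
OLE_TIME_ZERO = datetime(1899, 12, 30)  # assumed epoch
print(ole2datetime(43831))              # -> 2020-01-01 00:00:00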
5,491
@must_have_permission(ADMIN) @must_be_valid_project def get_draft_registrations(auth, node, *args, **kwargs): count = request.args.get('count', 100) drafts = itertools.islice(node.draft_registrations_active, 0, count) serialized_drafts = [serialize_draft_registration(d, auth) for d in drafts] sorted_serialized_drafts = sorted(serialized_drafts, key=itemgetter('updated'), reverse=True) return ({'drafts': sorted_serialized_drafts}, http.OK)
[ "@", "must_have_permission", "(", "ADMIN", ")", "@", "must_be_valid_project", "def", "get_draft_registrations", "(", "auth", ",", "node", ",", "*", "args", ",", "**", "kwargs", ")", ":", "count", "=", "request", ".", "args", ".", "get", "(", "'count'", ",", "100", ")", "drafts", "=", "itertools", ".", "islice", "(", "node", ".", "draft_registrations_active", ",", "0", ",", "count", ")", "serialized_drafts", "=", "[", "serialize_draft_registration", "(", "d", ",", "auth", ")", "for", "d", "in", "drafts", "]", "sorted_serialized_drafts", "=", "sorted", "(", "serialized_drafts", ",", "key", "=", "itemgetter", "(", "'updated'", ")", ",", "reverse", "=", "True", ")", "return", "(", "{", "'drafts'", ":", "sorted_serialized_drafts", "}", ",", "http", ".", "OK", ")" ]
list draft registrations for a node :return: serialized draft registrations :rtype: dict .
train
false
5,492
def get_motd(): return rpc_utils.get_motd()
[ "def", "get_motd", "(", ")", ":", "return", "rpc_utils", ".", "get_motd", "(", ")" ]
returns the message of the day .
train
false
5,493
def create_or_update_trigger_db(trigger): assert isinstance(trigger, dict) existing_trigger_db = _get_trigger_db(trigger) if existing_trigger_db: is_update = True else: is_update = False trigger_api = TriggerAPI(**trigger) trigger_api.validate() trigger_db = TriggerAPI.to_model(trigger_api) if is_update: trigger_db.id = existing_trigger_db.id trigger_db = Trigger.add_or_update(trigger_db) extra = {'trigger_db': trigger_db} if is_update: LOG.audit(('Trigger updated. Trigger.id=%s' % trigger_db.id), extra=extra) else: LOG.audit(('Trigger created. Trigger.id=%s' % trigger_db.id), extra=extra) return trigger_db
[ "def", "create_or_update_trigger_db", "(", "trigger", ")", ":", "assert", "isinstance", "(", "trigger", ",", "dict", ")", "existing_trigger_db", "=", "_get_trigger_db", "(", "trigger", ")", "if", "existing_trigger_db", ":", "is_update", "=", "True", "else", ":", "is_update", "=", "False", "trigger_api", "=", "TriggerAPI", "(", "**", "trigger", ")", "trigger_api", ".", "validate", "(", ")", "trigger_db", "=", "TriggerAPI", ".", "to_model", "(", "trigger_api", ")", "if", "is_update", ":", "trigger_db", ".", "id", "=", "existing_trigger_db", ".", "id", "trigger_db", "=", "Trigger", ".", "add_or_update", "(", "trigger_db", ")", "extra", "=", "{", "'trigger_db'", ":", "trigger_db", "}", "if", "is_update", ":", "LOG", ".", "audit", "(", "(", "'Trigger updated. Trigger.id=%s'", "%", "trigger_db", ".", "id", ")", ",", "extra", "=", "extra", ")", "else", ":", "LOG", ".", "audit", "(", "(", "'Trigger created. Trigger.id=%s'", "%", "trigger_db", ".", "id", ")", ",", "extra", "=", "extra", ")", "return", "trigger_db" ]
create a new triggerdb model if one doesnt exist yet or update existing one .
train
false
5,495
def synchronize(*klasses): if (threadingmodule is not None): for klass in klasses: for methodName in klass.synchronized: sync = _sync(klass, klass.__dict__[methodName]) setattr(klass, methodName, sync)
[ "def", "synchronize", "(", "*", "klasses", ")", ":", "if", "(", "threadingmodule", "is", "not", "None", ")", ":", "for", "klass", "in", "klasses", ":", "for", "methodName", "in", "klass", ".", "synchronized", ":", "sync", "=", "_sync", "(", "klass", ",", "klass", ".", "__dict__", "[", "methodName", "]", ")", "setattr", "(", "klass", ",", "methodName", ",", "sync", ")" ]
make all methods listed in each class synchronized attribute synchronized .
train
false
5,496
def find_couchbase_pid(): if (not os.path.isfile(COUCHBASE_INITFILE)): return try: fd = open(COUCHBASE_INITFILE) for line in fd: if line.startswith('exec'): init_script = line.split()[1] fd.close() except IOError: utils.err(('Check permission of file (%s)' % COUCHBASE_INITFILE)) return try: fd = open(init_script) for line in fd: if line.startswith('PIDFILE'): pid_file = line.split('=')[1].rsplit()[0] fd.close() except IOError: utils.err(('Check permission of file (%s)' % init_script)) return try: fd = open(pid_file) pid = fd.read() fd.close() except IOError: utils.err('Couchbase-server is not running, since no pid file exists') return return pid.split()[0]
[ "def", "find_couchbase_pid", "(", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "COUCHBASE_INITFILE", ")", ")", ":", "return", "try", ":", "fd", "=", "open", "(", "COUCHBASE_INITFILE", ")", "for", "line", "in", "fd", ":", "if", "line", ".", "startswith", "(", "'exec'", ")", ":", "init_script", "=", "line", ".", "split", "(", ")", "[", "1", "]", "fd", ".", "close", "(", ")", "except", "IOError", ":", "utils", ".", "err", "(", "(", "'Check permission of file (%s)'", "%", "COUCHBASE_INITFILE", ")", ")", "return", "try", ":", "fd", "=", "open", "(", "init_script", ")", "for", "line", "in", "fd", ":", "if", "line", ".", "startswith", "(", "'PIDFILE'", ")", ":", "pid_file", "=", "line", ".", "split", "(", "'='", ")", "[", "1", "]", ".", "rsplit", "(", ")", "[", "0", "]", "fd", ".", "close", "(", ")", "except", "IOError", ":", "utils", ".", "err", "(", "(", "'Check permission of file (%s)'", "%", "init_script", ")", ")", "return", "try", ":", "fd", "=", "open", "(", "pid_file", ")", "pid", "=", "fd", ".", "read", "(", ")", "fd", ".", "close", "(", ")", "except", "IOError", ":", "utils", ".", "err", "(", "'Couchbase-server is not running, since no pid file exists'", ")", "return", "return", "pid", ".", "split", "(", ")", "[", "0", "]" ]
find out the pid of couchbase .
train
false
5,497
def _parse_file_key_certs(certificate_file, validate=False): while True: keycert_content = _read_until_keywords('dir-key-certification', certificate_file) block_end_prefix = PGP_BLOCK_END.split(' ', 1)[0] keycert_content += _read_until_keywords(block_end_prefix, certificate_file, True) if keycert_content: (yield stem.descriptor.networkstatus.KeyCertificate(bytes.join('', keycert_content), validate=validate)) else: break
[ "def", "_parse_file_key_certs", "(", "certificate_file", ",", "validate", "=", "False", ")", ":", "while", "True", ":", "keycert_content", "=", "_read_until_keywords", "(", "'dir-key-certification'", ",", "certificate_file", ")", "block_end_prefix", "=", "PGP_BLOCK_END", ".", "split", "(", "' '", ",", "1", ")", "[", "0", "]", "keycert_content", "+=", "_read_until_keywords", "(", "block_end_prefix", ",", "certificate_file", ",", "True", ")", "if", "keycert_content", ":", "(", "yield", "stem", ".", "descriptor", ".", "networkstatus", ".", "KeyCertificate", "(", "bytes", ".", "join", "(", "''", ",", "keycert_content", ")", ",", "validate", "=", "validate", ")", ")", "else", ":", "break" ]
parses a file containing one or more authority key certificates .
train
false
5,498
def parse_cookie(data): return ((k, v) for (k, v) in _parse_cookie(data) if _valid_cookie_name(k))
[ "def", "parse_cookie", "(", "data", ")", ":", "return", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "_parse_cookie", "(", "data", ")", "if", "_valid_cookie_name", "(", "k", ")", ")" ]
parse a cookie header value , keeping only cookies with valid names .
train
false
5,499
def extract_swift_bytes(content_type): (content_type, params) = parse_content_type(content_type) swift_bytes = None for (k, v) in params: if (k == 'swift_bytes'): swift_bytes = v else: content_type += (';%s=%s' % (k, v)) return (content_type, swift_bytes)
[ "def", "extract_swift_bytes", "(", "content_type", ")", ":", "(", "content_type", ",", "params", ")", "=", "parse_content_type", "(", "content_type", ")", "swift_bytes", "=", "None", "for", "(", "k", ",", "v", ")", "in", "params", ":", "if", "(", "k", "==", "'swift_bytes'", ")", ":", "swift_bytes", "=", "v", "else", ":", "content_type", "+=", "(", "';%s=%s'", "%", "(", "k", ",", "v", ")", ")", "return", "(", "content_type", ",", "swift_bytes", ")" ]
parse a content-type and return a tuple containing: - the content_type string minus any swift_bytes param - the swift_bytes value or none if the param was not found .
train
false
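extract_swift_bytes calls a parse_content_type helper; a deliberately minimal stand-in (swift's real helper also handles quoted parameters) to show the round trip:

def parse_content_type(value):  # simplified stand-in, not swift's own
    parts = value.split(';')
    return parts[0], [tuple(p.strip().split('=', 1)) for p in parts[1:]]

print(extract_swift_bytes('text/plain;swift_bytes=1024;charset=utf-8'))
# -> ('text/plain;charset=utf-8', '1024')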
5,500
def get_gdp_quarter(): rdint = vs.random() request = Request((vs.MACRO_URL % (vs.P_TYPE['http'], vs.DOMAINS['sina'], rdint, vs.MACRO_TYPE[0], 1, 250, rdint))) text = urlopen(request, timeout=10).read() text = (text.decode('gbk') if ct.PY3 else text) regSym = re.compile('\\,count:(.*?)\\}') datastr = regSym.findall(text) datastr = datastr[0] datastr = datastr.split('data:')[1] datastr = datastr.replace('"', '').replace('null', '0') js = json.loads(datastr) df = pd.DataFrame(js, columns=vs.GDP_QUARTER_COLS) df['quarter'] = df['quarter'].astype(object) df[(df == 0)] = np.NaN return df
[ "def", "get_gdp_quarter", "(", ")", ":", "rdint", "=", "vs", ".", "random", "(", ")", "request", "=", "Request", "(", "(", "vs", ".", "MACRO_URL", "%", "(", "vs", ".", "P_TYPE", "[", "'http'", "]", ",", "vs", ".", "DOMAINS", "[", "'sina'", "]", ",", "rdint", ",", "vs", ".", "MACRO_TYPE", "[", "0", "]", ",", "1", ",", "250", ",", "rdint", ")", ")", ")", "text", "=", "urlopen", "(", "request", ",", "timeout", "=", "10", ")", ".", "read", "(", ")", "text", "=", "(", "text", ".", "decode", "(", "'gbk'", ")", "if", "ct", ".", "PY3", "else", "text", ")", "regSym", "=", "re", ".", "compile", "(", "'\\\\,count:(.*?)\\\\}'", ")", "datastr", "=", "regSym", ".", "findall", "(", "text", ")", "datastr", "=", "datastr", "[", "0", "]", "datastr", "=", "datastr", ".", "split", "(", "'data:'", ")", "[", "1", "]", "datastr", "=", "datastr", ".", "replace", "(", "'\"'", ",", "''", ")", ".", "replace", "(", "'null'", ",", "'0'", ")", "js", "=", "json", ".", "loads", "(", "datastr", ")", "df", "=", "pd", ".", "DataFrame", "(", "js", ",", "columns", "=", "vs", ".", "GDP_QUARTER_COLS", ")", "df", "[", "'quarter'", "]", "=", "df", "[", "'quarter'", "]", ".", "astype", "(", "object", ")", "df", "[", "(", "df", "==", "0", ")", "]", "=", "np", ".", "NaN", "return", "df" ]
return dataframe quarter : quarter gdp : gross domestic product gdp_yoy : gdp year-on-year growth (%) pi : primary industry value added pi_yoy : primary industry year-on-year growth (%) si : secondary industry value added si_yoy : secondary industry year-on-year growth (%) ti : tertiary industry value added ti_yoy : tertiary industry year-on-year growth (%) .
train
false
5,501
def _EscapeVCProjCommandLineArgListItem(s): def _Replace(match): return ((((2 * match.group(1)) + '"') + match.group(2)) + '"') segments = s.split('"') for i in range(0, len(segments), 2): segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i]) s = '"'.join(segments) if ((len(segments) % 2) == 0): print >>sys.stderr, (('Warning: MSVS may misinterpret the odd number of ' + 'quotes in ') + s) return s
[ "def", "_EscapeVCProjCommandLineArgListItem", "(", "s", ")", ":", "def", "_Replace", "(", "match", ")", ":", "return", "(", "(", "(", "(", "2", "*", "match", ".", "group", "(", "1", ")", ")", "+", "'\"'", ")", "+", "match", ".", "group", "(", "2", ")", ")", "+", "'\"'", ")", "segments", "=", "s", ".", "split", "(", "'\"'", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "segments", ")", ",", "2", ")", ":", "segments", "[", "i", "]", "=", "delimiters_replacer_regex", ".", "sub", "(", "_Replace", ",", "segments", "[", "i", "]", ")", "s", "=", "'\"'", ".", "join", "(", "segments", ")", "if", "(", "(", "len", "(", "segments", ")", "%", "2", ")", "==", "0", ")", ":", "print", ">>", "sys", ".", "stderr", ",", "(", "(", "'Warning: MSVS may misinterpret the odd number of '", "+", "'quotes in '", ")", "+", "s", ")", "return", "s" ]
escapes command line arguments for msvs .
train
false
5,503
def post_download(project, filename, name=None, description=''): if (name is None): name = os.path.basename(filename) with open(filename, 'rb') as f: filedata = f.read() url = 'https://api.github.com/repos/{project}/downloads'.format(project=project) payload = json.dumps(dict(name=name, size=len(filedata), description=description)) response = requests.post(url, data=payload, headers=make_auth_header()) response.raise_for_status() reply = json.loads(response.content) s3_url = reply['s3_url'] fields = dict(key=reply['path'], acl=reply['acl'], success_action_status=201, Filename=reply['name'], AWSAccessKeyId=reply['accesskeyid'], Policy=reply['policy'], Signature=reply['signature'], file=(reply['name'], filedata)) fields['Content-Type'] = reply['mime_type'] (data, content_type) = encode_multipart_formdata(fields) s3r = requests.post(s3_url, data=data, headers={'Content-Type': content_type}) return s3r
[ "def", "post_download", "(", "project", ",", "filename", ",", "name", "=", "None", ",", "description", "=", "''", ")", ":", "if", "(", "name", "is", "None", ")", ":", "name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "filedata", "=", "f", ".", "read", "(", ")", "url", "=", "'https://api.github.com/repos/{project}/downloads'", ".", "format", "(", "project", "=", "project", ")", "payload", "=", "json", ".", "dumps", "(", "dict", "(", "name", "=", "name", ",", "size", "=", "len", "(", "filedata", ")", ",", "description", "=", "description", ")", ")", "response", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "payload", ",", "headers", "=", "make_auth_header", "(", ")", ")", "response", ".", "raise_for_status", "(", ")", "reply", "=", "json", ".", "loads", "(", "response", ".", "content", ")", "s3_url", "=", "reply", "[", "'s3_url'", "]", "fields", "=", "dict", "(", "key", "=", "reply", "[", "'path'", "]", ",", "acl", "=", "reply", "[", "'acl'", "]", ",", "success_action_status", "=", "201", ",", "Filename", "=", "reply", "[", "'name'", "]", ",", "AWSAccessKeyId", "=", "reply", "[", "'accesskeyid'", "]", ",", "Policy", "=", "reply", "[", "'policy'", "]", ",", "Signature", "=", "reply", "[", "'signature'", "]", ",", "file", "=", "(", "reply", "[", "'name'", "]", ",", "filedata", ")", ")", "fields", "[", "'Content-Type'", "]", "=", "reply", "[", "'mime_type'", "]", "(", "data", ",", "content_type", ")", "=", "encode_multipart_formdata", "(", "fields", ")", "s3r", "=", "requests", ".", "post", "(", "s3_url", ",", "data", "=", "data", ",", "headers", "=", "{", "'Content-Type'", ":", "content_type", "}", ")", "return", "s3r" ]
upload a file to the github downloads area .
train
true
5,504
def test_nested_workflow_join(tmpdir): wd = str(tmpdir) os.chdir(wd) def nested_wf(i, name=u'smallwf'): inputspec = pe.Node(IdentityInterface(fields=[u'n']), name=u'inputspec') inputspec.iterables = [(u'n', i)] pre_join = pe.Node(IncrementInterface(), name=u'pre_join') join = pe.JoinNode(IdentityInterface(fields=[u'n']), joinsource=u'inputspec', joinfield=u'n', name=u'join') wf = pe.Workflow(name=(u'wf_%d' % i[0])) wf.connect(inputspec, u'n', pre_join, u'input1') wf.connect(pre_join, u'output1', join, u'n') return wf meta_wf = pe.Workflow(name=u'meta', base_dir=u'.') for i in [[1, 3], [2, 4]]: mini_wf = nested_wf(i) meta_wf.add_nodes([mini_wf]) result = meta_wf.run() assert (len(result.nodes()) == 6), u'The number of expanded nodes is incorrect.'
[ "def", "test_nested_workflow_join", "(", "tmpdir", ")", ":", "wd", "=", "str", "(", "tmpdir", ")", "os", ".", "chdir", "(", "wd", ")", "def", "nested_wf", "(", "i", ",", "name", "=", "u'smallwf'", ")", ":", "inputspec", "=", "pe", ".", "Node", "(", "IdentityInterface", "(", "fields", "=", "[", "u'n'", "]", ")", ",", "name", "=", "u'inputspec'", ")", "inputspec", ".", "iterables", "=", "[", "(", "u'n'", ",", "i", ")", "]", "pre_join", "=", "pe", ".", "Node", "(", "IncrementInterface", "(", ")", ",", "name", "=", "u'pre_join'", ")", "join", "=", "pe", ".", "JoinNode", "(", "IdentityInterface", "(", "fields", "=", "[", "u'n'", "]", ")", ",", "joinsource", "=", "u'inputspec'", ",", "joinfield", "=", "u'n'", ",", "name", "=", "u'join'", ")", "wf", "=", "pe", ".", "Workflow", "(", "name", "=", "(", "u'wf_%d'", "%", "i", "[", "0", "]", ")", ")", "wf", ".", "connect", "(", "inputspec", ",", "u'n'", ",", "pre_join", ",", "u'input1'", ")", "wf", ".", "connect", "(", "pre_join", ",", "u'output1'", ",", "join", ",", "u'n'", ")", "return", "wf", "meta_wf", "=", "pe", ".", "Workflow", "(", "name", "=", "u'meta'", ",", "base_dir", "=", "u'.'", ")", "for", "i", "in", "[", "[", "1", ",", "3", "]", ",", "[", "2", ",", "4", "]", "]", ":", "mini_wf", "=", "nested_wf", "(", "i", ")", "meta_wf", ".", "add_nodes", "(", "[", "mini_wf", "]", ")", "result", "=", "meta_wf", ".", "run", "(", ")", "assert", "(", "len", "(", "result", ".", "nodes", "(", ")", ")", "==", "6", ")", ",", "u'The number of expanded nodes is incorrect.'" ]
test collecting join inputs within a nested workflow .
train
false
5,505
def contrast_diff_mean(nm): return (np.eye(nm) - (np.ones((nm, nm)) / nm))
[ "def", "contrast_diff_mean", "(", "nm", ")", ":", "return", "(", "np", ".", "eye", "(", "nm", ")", "-", "(", "np", ".", "ones", "(", "(", "nm", ",", "nm", ")", ")", "/", "nm", ")", ")" ]
contrast or restriction matrix for all against mean comparison . parameters : nm : int . returns : contr : ndarray .
train
false
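Assuming numpy is imported as np (as the snippet's eye and ones imply), each row of the result contrasts one parameter against the mean, so rows sum to zero:

import numpy as np
contr = contrast_diff_mean(3)   # diagonal 2/3, off-diagonal -1/3
print(contr.sum(axis=1))        # -> [0. 0. 0.]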
5,507
@pytest.mark.django_db @pytest.mark.parametrize(u'address, expected_taxes', EXPECTED_TAXES_BY_ADDRESS) def test_module(address, expected_taxes): shop = get_shop(prices_include_tax=False, currency=u'USD') product = create_product(u'PROD', shop=shop, default_price=1000) price = product.get_shop_instance(shop).default_price for ruledef in shuffled(TAX_RULE_DEFS): rule = ruledef.get_tax_rule() rule.tax.save() rule.tax = rule.tax rule.save() rule.tax_classes.add(product.tax_class) assert (TaxRule.objects.count() == len(TAX_RULE_DEFS)) with override_settings(SHUUP_TAX_MODULE=u'default_tax'): module = get_tax_module() assert isinstance(module, DefaultTaxModule) context = TaxingContext(location=address) taxed_price = module.get_taxed_price_for(context, product, price) expected_codes = set(sum([x.split() for x in expected_taxes], [])) assert (set((x.tax.code for x in taxed_price.taxes)) == expected_codes) expected_tax = Money(TAX_AMOUNTS[expected_taxes], u'USD') assert (taxed_price.taxful.amount == (price.amount + expected_tax)) TaxRule.objects.all().delete()
[ "@", "pytest", ".", "mark", ".", "django_db", "@", "pytest", ".", "mark", ".", "parametrize", "(", "u'address, expected_taxes'", ",", "EXPECTED_TAXES_BY_ADDRESS", ")", "def", "test_module", "(", "address", ",", "expected_taxes", ")", ":", "shop", "=", "get_shop", "(", "prices_include_tax", "=", "False", ",", "currency", "=", "u'USD'", ")", "product", "=", "create_product", "(", "u'PROD'", ",", "shop", "=", "shop", ",", "default_price", "=", "1000", ")", "price", "=", "product", ".", "get_shop_instance", "(", "shop", ")", ".", "default_price", "for", "ruledef", "in", "shuffled", "(", "TAX_RULE_DEFS", ")", ":", "rule", "=", "ruledef", ".", "get_tax_rule", "(", ")", "rule", ".", "tax", ".", "save", "(", ")", "rule", ".", "tax", "=", "rule", ".", "tax", "rule", ".", "save", "(", ")", "rule", ".", "tax_classes", ".", "add", "(", "product", ".", "tax_class", ")", "assert", "(", "TaxRule", ".", "objects", ".", "count", "(", ")", "==", "len", "(", "TAX_RULE_DEFS", ")", ")", "with", "override_settings", "(", "SHUUP_TAX_MODULE", "=", "u'default_tax'", ")", ":", "module", "=", "get_tax_module", "(", ")", "assert", "isinstance", "(", "module", ",", "DefaultTaxModule", ")", "context", "=", "TaxingContext", "(", "location", "=", "address", ")", "taxed_price", "=", "module", ".", "get_taxed_price_for", "(", "context", ",", "product", ",", "price", ")", "expected_codes", "=", "set", "(", "sum", "(", "[", "x", ".", "split", "(", ")", "for", "x", "in", "expected_taxes", "]", ",", "[", "]", ")", ")", "assert", "(", "set", "(", "(", "x", ".", "tax", ".", "code", "for", "x", "in", "taxed_price", ".", "taxes", ")", ")", "==", "expected_codes", ")", "expected_tax", "=", "Money", "(", "TAX_AMOUNTS", "[", "expected_taxes", "]", ",", "u'USD'", ")", "assert", "(", "taxed_price", ".", "taxful", ".", "amount", "==", "(", "price", ".", "amount", "+", "expected_tax", ")", ")", "TaxRule", ".", "objects", ".", "all", "(", ")", ".", "delete", "(", ")" ]
test that the default tax module computes the expected taxes for each address .
train
false
5,508
def p_stmts(p): if (len(p) == 3): p[0] = (p[1] + p[2]) else: p[0] = p[1]
[ "def", "p_stmts", "(", "p", ")", ":", "if", "(", "len", "(", "p", ")", "==", "3", ")", ":", "p", "[", "0", "]", "=", "(", "p", "[", "1", "]", "+", "p", "[", "2", "]", ")", "else", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
stmts : stmts stmt | stmt .
train
false
5,509
def helpModule(module): t = module.split('.') importName = ((('from ' + '.'.join(t[:(-1)])) + ' import ') + t[(-1)]) exec importName moduleName = t[(-1)] functions = [locals()[moduleName].__dict__.get(a) for a in dir(locals()[moduleName]) if isinstance(locals()[moduleName].__dict__.get(a), types.FunctionType)] for function in functions: base = function.func_doc base = base.replace('\t', ' ') doc = ''.join(base.split('\n\n')[0].strip().split('\n')) print helpers.formatLong(function.func_name, doc)
[ "def", "helpModule", "(", "module", ")", ":", "t", "=", "module", ".", "split", "(", "'.'", ")", "importName", "=", "(", "(", "(", "'from '", "+", "'.'", ".", "join", "(", "t", "[", ":", "(", "-", "1", ")", "]", ")", ")", "+", "' import '", ")", "+", "t", "[", "(", "-", "1", ")", "]", ")", "exec", "importName", "moduleName", "=", "t", "[", "(", "-", "1", ")", "]", "functions", "=", "[", "locals", "(", ")", "[", "moduleName", "]", ".", "__dict__", ".", "get", "(", "a", ")", "for", "a", "in", "dir", "(", "locals", "(", ")", "[", "moduleName", "]", ")", "if", "isinstance", "(", "locals", "(", ")", "[", "moduleName", "]", ".", "__dict__", ".", "get", "(", "a", ")", ",", "types", ".", "FunctionType", ")", "]", "for", "function", "in", "functions", ":", "base", "=", "function", ".", "func_doc", "base", "=", "base", ".", "replace", "(", "'\\t'", ",", "' '", ")", "doc", "=", "''", ".", "join", "(", "base", ".", "split", "(", "'\\n\\n'", ")", "[", "0", "]", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", ")", "print", "helpers", ".", "formatLong", "(", "function", ".", "func_name", ",", "doc", ")" ]
print the first text chunk for each established method in a module .
train
false
5,512
def avg_pool1d(input, kernel_size, stride=None, padding=0, ceil_mode=False, count_include_pad=True): if (input.dim() != 3): raise ValueError('expected 3D input (got {} dimensions)'.format(input.dim())) kernel_size = (_single(kernel_size) + (1,)) stride = (_single(stride) + (1,)) padding = (_single(padding) + (0,)) f = _functions.thnn.AvgPool2d(kernel_size, stride, padding, ceil_mode, count_include_pad) return f(input.unsqueeze(3)).squeeze(3)
[ "def", "avg_pool1d", "(", "input", ",", "kernel_size", ",", "stride", "=", "None", ",", "padding", "=", "0", ",", "ceil_mode", "=", "False", ",", "count_include_pad", "=", "True", ")", ":", "if", "(", "input", ".", "dim", "(", ")", "!=", "3", ")", ":", "raise", "ValueError", "(", "'expected 3D input (got {} dimensions)'", ".", "format", "(", "input", ".", "dim", "(", ")", ")", ")", "kernel_size", "=", "(", "_single", "(", "kernel_size", ")", "+", "(", "1", ",", ")", ")", "stride", "=", "(", "_single", "(", "stride", ")", "+", "(", "1", ",", ")", ")", "padding", "=", "(", "_single", "(", "padding", ")", "+", "(", "0", ",", ")", ")", "f", "=", "_functions", ".", "thnn", ".", "AvgPool2d", "(", "kernel_size", ",", "stride", ",", "padding", ",", "ceil_mode", ",", "count_include_pad", ")", "return", "f", "(", "input", ".", "unsqueeze", "(", "3", ")", ")", ".", "squeeze", "(", "3", ")" ]
applies a 1d average pooling over an input signal composed of several input planes .
train
false
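The snippet is an older internal routing of 1d pooling through AvgPool2d; with a reasonably recent PyTorch the same behaviour is exposed as torch.nn.functional.avg_pool1d:

import torch
import torch.nn.functional as F
x = torch.tensor([[[1., 2., 3., 4., 5., 6., 7.]]])  # (batch, channels, width)
print(F.avg_pool1d(x, kernel_size=3, stride=2))     # -> tensor([[[2., 4., 6.]]])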
5,513
def OxmlElement(nsptag_str, attrs=None, nsdecls=None): nsptag = NamespacePrefixedTag(nsptag_str) if (nsdecls is None): nsdecls = nsptag.nsmap return oxml_parser.makeelement(nsptag.clark_name, attrib=attrs, nsmap=nsdecls)
[ "def", "OxmlElement", "(", "nsptag_str", ",", "attrs", "=", "None", ",", "nsdecls", "=", "None", ")", ":", "nsptag", "=", "NamespacePrefixedTag", "(", "nsptag_str", ")", "if", "(", "nsdecls", "is", "None", ")", ":", "nsdecls", "=", "nsptag", ".", "nsmap", "return", "oxml_parser", ".", "makeelement", "(", "nsptag", ".", "clark_name", ",", "attrib", "=", "attrs", ",", "nsmap", "=", "nsdecls", ")" ]
return a loose lxml element having the tag specified by *nsptag_str* .
train
true
5,514
def _get_server_status_code(url): (host, path, params, query) = urlparse.urlparse(url)[1:5] try: conn = httplib.HTTPConnection(host) conn.request('HEAD', ((path + '?') + query)) return conn.getresponse().status except StandardError: return None
[ "def", "_get_server_status_code", "(", "url", ")", ":", "(", "host", ",", "path", ",", "params", ",", "query", ")", "=", "urlparse", ".", "urlparse", "(", "url", ")", "[", "1", ":", "5", "]", "try", ":", "conn", "=", "httplib", ".", "HTTPConnection", "(", "host", ")", "conn", ".", "request", "(", "'HEAD'", ",", "(", "(", "path", "+", "'?'", ")", "+", "query", ")", ")", "return", "conn", ".", "getresponse", "(", ")", ".", "status", "except", "StandardError", ":", "return", "None" ]
download just the header of a url and return the servers status code .
train
false
5,515
def _handleattrhelper(Class, transport, methodName): method = getattr(transport.getHandle(), ('get_%s_certificate' % (methodName,)), None) if (method is None): raise CertificateError(('non-TLS transport %r did not have %s certificate' % (transport, methodName))) cert = method() if (cert is None): raise CertificateError(('TLS transport %r did not have %s certificate' % (transport, methodName))) return Class(cert)
[ "def", "_handleattrhelper", "(", "Class", ",", "transport", ",", "methodName", ")", ":", "method", "=", "getattr", "(", "transport", ".", "getHandle", "(", ")", ",", "(", "'get_%s_certificate'", "%", "(", "methodName", ",", ")", ")", ",", "None", ")", "if", "(", "method", "is", "None", ")", ":", "raise", "CertificateError", "(", "(", "'non-TLS transport %r did not have %s certificate'", "%", "(", "transport", ",", "methodName", ")", ")", ")", "cert", "=", "method", "(", ")", "if", "(", "cert", "is", "None", ")", ":", "raise", "CertificateError", "(", "(", "'TLS transport %r did not have %s certificate'", "%", "(", "transport", ",", "methodName", ")", ")", ")", "return", "Class", "(", "cert", ")" ]
helper for l{certificate.peerfromtransport} and l{certificate.hostfromtransport} which checks for incompatible handle types .
train
false
5,516
def get_section(file_name, section, separator='='): inifile = _Ini.get_ini_file(file_name, separator=separator) ret = {} for (key, value) in six.iteritems(inifile.get(section, {})): if (key[0] != '#'): ret.update({key: value}) return ret
[ "def", "get_section", "(", "file_name", ",", "section", ",", "separator", "=", "'='", ")", ":", "inifile", "=", "_Ini", ".", "get_ini_file", "(", "file_name", ",", "separator", "=", "separator", ")", "ret", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "inifile", ".", "get", "(", "section", ",", "{", "}", ")", ")", ":", "if", "(", "key", "[", "0", "]", "!=", "'#'", ")", ":", "ret", ".", "update", "(", "{", "key", ":", "value", "}", ")", "return", "ret" ]
retrieve a section from an ini file .
train
true
5,517
def glances_logger(env_key='LOG_CFG'): _logger = logging.getLogger() config = LOGGING_CFG user_file = os.getenv(env_key, None) if (user_file and os.path.exists(user_file)): with open(user_file, 'rt') as f: config = json.load(f) logging.config.dictConfig(config) return _logger
[ "def", "glances_logger", "(", "env_key", "=", "'LOG_CFG'", ")", ":", "_logger", "=", "logging", ".", "getLogger", "(", ")", "config", "=", "LOGGING_CFG", "user_file", "=", "os", ".", "getenv", "(", "env_key", ",", "None", ")", "if", "(", "user_file", "and", "os", ".", "path", ".", "exists", "(", "user_file", ")", ")", ":", "with", "open", "(", "user_file", ",", "'rt'", ")", "as", "f", ":", "config", "=", "json", ".", "load", "(", "f", ")", "logging", ".", "config", ".", "dictConfig", "(", "config", ")", "return", "_logger" ]
build and return the logger .
train
true
5,518
def getThreeSignificantFigures(number): absoluteNumber = abs(number) if (absoluteNumber >= 10.0): return getRoundedToPlacesString(1, number) if (absoluteNumber < 1e-09): return getRoundedToPlacesString(12, number) return getRoundedToPlacesString((1 - math.floor(math.log10(absoluteNumber))), number)
[ "def", "getThreeSignificantFigures", "(", "number", ")", ":", "absoluteNumber", "=", "abs", "(", "number", ")", "if", "(", "absoluteNumber", ">=", "10.0", ")", ":", "return", "getRoundedToPlacesString", "(", "1", ",", "number", ")", "if", "(", "absoluteNumber", "<", "1e-09", ")", ":", "return", "getRoundedToPlacesString", "(", "12", ",", "number", ")", "return", "getRoundedToPlacesString", "(", "(", "1", "-", "math", ".", "floor", "(", "math", ".", "log10", "(", "absoluteNumber", ")", ")", ")", ",", "number", ")" ]
get number rounded to three significant figures as a string .
train
false
5,519
@utils.service_type('monitor') def do_extra_specs_list(cs, args): vtypes = cs.monitor_types.list() _print_type_and_extra_specs_list(vtypes)
[ "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_extra_specs_list", "(", "cs", ",", "args", ")", ":", "vtypes", "=", "cs", ".", "monitor_types", ".", "list", "(", ")", "_print_type_and_extra_specs_list", "(", "vtypes", ")" ]
print a list of current monitor types and extra specs .
train
false
5,520
@require_POST @login_required def watch_ready(request, product=None): if (request.LANGUAGE_CODE != settings.WIKI_DEFAULT_LANGUAGE): raise Http404 kwargs = {} if (product is not None): kwargs['product'] = product ReadyRevisionEvent.notify(request.user, **kwargs) statsd.incr('wiki.watches.ready') return HttpResponse()
[ "@", "require_POST", "@", "login_required", "def", "watch_ready", "(", "request", ",", "product", "=", "None", ")", ":", "if", "(", "request", ".", "LANGUAGE_CODE", "!=", "settings", ".", "WIKI_DEFAULT_LANGUAGE", ")", ":", "raise", "Http404", "kwargs", "=", "{", "}", "if", "(", "product", "is", "not", "None", ")", ":", "kwargs", "[", "'product'", "]", "=", "product", "ReadyRevisionEvent", ".", "notify", "(", "request", ".", "user", ",", "**", "kwargs", ")", "statsd", ".", "incr", "(", "'wiki.watches.ready'", ")", "return", "HttpResponse", "(", ")" ]
start watching ready-for-l10n revisions for given product .
train
false
5,521
def _transform_banded_jac(bjac): newjac = zeros(((bjac.shape[0] + 1), bjac.shape[1])) newjac[1:, ::2] = bjac[:, ::2] newjac[:(-1), 1::2] = bjac[:, 1::2] return newjac
[ "def", "_transform_banded_jac", "(", "bjac", ")", ":", "newjac", "=", "zeros", "(", "(", "(", "bjac", ".", "shape", "[", "0", "]", "+", "1", ")", ",", "bjac", ".", "shape", "[", "1", "]", ")", ")", "newjac", "[", "1", ":", ",", ":", ":", "2", "]", "=", "bjac", "[", ":", ",", ":", ":", "2", "]", "newjac", "[", ":", "(", "-", "1", ")", ",", "1", ":", ":", "2", "]", "=", "bjac", "[", ":", ",", "1", ":", ":", "2", "]", "return", "newjac" ]
convert a real matrix of the form (for example) [0 0 a b] [0 0 c d] [e f g h] [i j k l] to [0 0 0 b] [0 0 a d] [0 f c h] [e j g l] [i 0 k 0] . that is , every other column is shifted up one .
train
false
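A small numpy check of the column shift described above (assumes the snippet's zeros comes from numpy):

import numpy as np
bjac = np.array([[0., 0., 1., 2.],
                 [0., 0., 3., 4.],
                 [5., 6., 7., 8.]])
print(_transform_banded_jac(bjac))
# [[0. 0. 0. 2.]
#  [0. 0. 1. 4.]
#  [0. 6. 3. 8.]
#  [5. 0. 7. 0.]]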
5,522
def test_comment(): pattern = 'a(?#foo)bc' c = re.compile(pattern) AreEqual(c.findall('abc'), ['abc']) pattern = 'a(?#)bc' c = re.compile(pattern) AreEqual(c.findall('abc'), ['abc']) pattern = 'a(?#foo)bdc' c = re.compile(pattern) AreEqual(len(c.findall('abc')), 0)
[ "def", "test_comment", "(", ")", ":", "pattern", "=", "'a(?#foo)bc'", "c", "=", "re", ".", "compile", "(", "pattern", ")", "AreEqual", "(", "c", ".", "findall", "(", "'abc'", ")", ",", "[", "'abc'", "]", ")", "pattern", "=", "'a(?#)bc'", "c", "=", "re", ".", "compile", "(", "pattern", ")", "AreEqual", "(", "c", ".", "findall", "(", "'abc'", ")", ",", "[", "'abc'", "]", ")", "pattern", "=", "'a(?#foo)bdc'", "c", "=", "re", ".", "compile", "(", "pattern", ")", "AreEqual", "(", "len", "(", "c", ".", "findall", "(", "'abc'", ")", ")", ",", "0", ")" ]
make sure that (?#...) inline comments in regular expression patterns are ignored .
train
false
5,523
def test_imap_folders_no_flags(monkeypatch, constants): folders = [(('\\HasNoChildren',), '/', u'INBOX'), (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), ('\\HasNoChildren', '/', u'Sent'), ('\\HasNoChildren', '/', u'Sent Items'), (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), ('\\HasNoChildren', '/', u'Trash'), (('\\HasNoChildren',), '/', u'reference')] role_map = {'INBOX': 'inbox', 'Trash': 'trash', 'Drafts': 'drafts', 'Sent': 'sent', 'Sent Items': 'sent', 'Spam': 'spam', u'[Gmail]/Sent Mail': None, u'[Gmail]/Trash': 'trash', u'reference': None} client = patch_generic_client(monkeypatch, folders) raw_folders = client.folders() generic_folder_checks(raw_folders, role_map, client, 'imap')
[ "def", "test_imap_folders_no_flags", "(", "monkeypatch", ",", "constants", ")", ":", "folders", "=", "[", "(", "(", "'\\\\HasNoChildren'", ",", ")", ",", "'/'", ",", "u'INBOX'", ")", ",", "(", "(", "'\\\\Noselect'", ",", "'\\\\HasChildren'", ")", ",", "'/'", ",", "u'SKIP'", ")", ",", "(", "(", "'\\\\HasNoChildren'", ",", "'\\\\Drafts'", ")", ",", "'/'", ",", "u'Drafts'", ")", ",", "(", "'\\\\HasNoChildren'", ",", "'/'", ",", "u'Sent'", ")", ",", "(", "'\\\\HasNoChildren'", ",", "'/'", ",", "u'Sent Items'", ")", ",", "(", "(", "'\\\\HasNoChildren'", ",", "'\\\\Junk'", ")", ",", "'/'", ",", "u'Spam'", ")", ",", "(", "'\\\\HasNoChildren'", ",", "'/'", ",", "u'Trash'", ")", ",", "(", "(", "'\\\\HasNoChildren'", ",", ")", ",", "'/'", ",", "u'reference'", ")", "]", "role_map", "=", "{", "'INBOX'", ":", "'inbox'", ",", "'Trash'", ":", "'trash'", ",", "'Drafts'", ":", "'drafts'", ",", "'Sent'", ":", "'sent'", ",", "'Sent Items'", ":", "'sent'", ",", "'Spam'", ":", "'spam'", ",", "u'[Gmail]/Sent Mail'", ":", "None", ",", "u'[Gmail]/Trash'", ":", "'trash'", ",", "u'reference'", ":", "None", "}", "client", "=", "patch_generic_client", "(", "monkeypatch", ",", "folders", ")", "raw_folders", "=", "client", ".", "folders", "(", ")", "generic_folder_checks", "(", "raw_folders", ",", "role_map", ",", "client", ",", "'imap'", ")" ]
tests that system folders without flags can be labeled .
train
false