id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
20,905
def ntToPosixSlashes(filepath): return filepath.replace('\\', '/')
[ "def", "ntToPosixSlashes", "(", "filepath", ")", ":", "return", "filepath", ".", "replace", "(", "'\\\\'", ",", "'/'", ")" ]
replaces all occurances of nt slashes () in provided filepath with posix ones (/) .
train
false
20,907
def addXGroove(derivation, negatives, y): if (derivation.topBevel <= 0.0): return bottom = (derivation.height - derivation.topBevel) top = derivation.height groove = [complex(y, bottom), complex((y - derivation.topBevel), top), complex((y + derivation.topBevel), top)] triangle_mesh.addSymmetricXPath(negatives, groove, (1.0001 * derivation.topRight.real))
[ "def", "addXGroove", "(", "derivation", ",", "negatives", ",", "y", ")", ":", "if", "(", "derivation", ".", "topBevel", "<=", "0.0", ")", ":", "return", "bottom", "=", "(", "derivation", ".", "height", "-", "derivation", ".", "topBevel", ")", "top", "=", "derivation", ".", "height", "groove", "=", "[", "complex", "(", "y", ",", "bottom", ")", ",", "complex", "(", "(", "y", "-", "derivation", ".", "topBevel", ")", ",", "top", ")", ",", "complex", "(", "(", "y", "+", "derivation", ".", "topBevel", ")", ",", "top", ")", "]", "triangle_mesh", ".", "addSymmetricXPath", "(", "negatives", ",", "groove", ",", "(", "1.0001", "*", "derivation", ".", "topRight", ".", "real", ")", ")" ]
add x groove .
train
false
20,908
@pytest.fixture(scope='function') def remove_additional_dirs(request): def fin_remove_additional_dirs(): if os.path.isdir('fake-project'): utils.rmtree('fake-project') if os.path.isdir('fake-project-extra'): utils.rmtree('fake-project-extra') if os.path.isdir('fake-project-templated'): utils.rmtree('fake-project-templated') if os.path.isdir('fake-project-dict'): utils.rmtree('fake-project-dict') request.addfinalizer(fin_remove_additional_dirs)
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'function'", ")", "def", "remove_additional_dirs", "(", "request", ")", ":", "def", "fin_remove_additional_dirs", "(", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "'fake-project'", ")", ":", "utils", ".", "rmtree", "(", "'fake-project'", ")", "if", "os", ".", "path", ".", "isdir", "(", "'fake-project-extra'", ")", ":", "utils", ".", "rmtree", "(", "'fake-project-extra'", ")", "if", "os", ".", "path", ".", "isdir", "(", "'fake-project-templated'", ")", ":", "utils", ".", "rmtree", "(", "'fake-project-templated'", ")", "if", "os", ".", "path", ".", "isdir", "(", "'fake-project-dict'", ")", ":", "utils", ".", "rmtree", "(", "'fake-project-dict'", ")", "request", ".", "addfinalizer", "(", "fin_remove_additional_dirs", ")" ]
remove special directories which are created during the tests .
train
false
20,909
@public def lcm(f, g=None, *gens, **args): if hasattr(f, '__iter__'): if (g is not None): gens = ((g,) + gens) return lcm_list(f, *gens, **args) elif (g is None): raise TypeError('lcm() takes 2 arguments or a sequence of arguments') options.allowed_flags(args, ['polys']) try: ((F, G), opt) = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: (domain, (a, b)) = construct_domain(exc.exprs) try: return domain.to_sympy(domain.lcm(a, b)) except NotImplementedError: raise ComputationFailed('lcm', 2, exc) result = F.lcm(G) if (not opt.polys): return result.as_expr() else: return result
[ "@", "public", "def", "lcm", "(", "f", ",", "g", "=", "None", ",", "*", "gens", ",", "**", "args", ")", ":", "if", "hasattr", "(", "f", ",", "'__iter__'", ")", ":", "if", "(", "g", "is", "not", "None", ")", ":", "gens", "=", "(", "(", "g", ",", ")", "+", "gens", ")", "return", "lcm_list", "(", "f", ",", "*", "gens", ",", "**", "args", ")", "elif", "(", "g", "is", "None", ")", ":", "raise", "TypeError", "(", "'lcm() takes 2 arguments or a sequence of arguments'", ")", "options", ".", "allowed_flags", "(", "args", ",", "[", "'polys'", "]", ")", "try", ":", "(", "(", "F", ",", "G", ")", ",", "opt", ")", "=", "parallel_poly_from_expr", "(", "(", "f", ",", "g", ")", ",", "*", "gens", ",", "**", "args", ")", "except", "PolificationFailed", "as", "exc", ":", "(", "domain", ",", "(", "a", ",", "b", ")", ")", "=", "construct_domain", "(", "exc", ".", "exprs", ")", "try", ":", "return", "domain", ".", "to_sympy", "(", "domain", ".", "lcm", "(", "a", ",", "b", ")", ")", "except", "NotImplementedError", ":", "raise", "ComputationFailed", "(", "'lcm'", ",", "2", ",", "exc", ")", "result", "=", "F", ".", "lcm", "(", "G", ")", "if", "(", "not", "opt", ".", "polys", ")", ":", "return", "result", ".", "as_expr", "(", ")", "else", ":", "return", "result" ]
simple version of lcm .
train
false
20,910
def convert_color(in_image, mode): return in_image.convert(mode)
[ "def", "convert_color", "(", "in_image", ",", "mode", ")", ":", "return", "in_image", ".", "convert", "(", "mode", ")" ]
converts mpl color formats to bokeh color formats .
train
false
20,911
def get_translation_project_dir(language, project, file_style, make_dirs=False): if (file_style == 'gnu'): return project.get_real_path() else: return get_language_dir(project, language, file_style, make_dirs)
[ "def", "get_translation_project_dir", "(", "language", ",", "project", ",", "file_style", ",", "make_dirs", "=", "False", ")", ":", "if", "(", "file_style", "==", "'gnu'", ")", ":", "return", "project", ".", "get_real_path", "(", ")", "else", ":", "return", "get_language_dir", "(", "project", ",", "language", ",", "file_style", ",", "make_dirs", ")" ]
returns the base directory containing translations files for the project .
train
false
20,912
def is_linux_fs_type(device): disk_device = device.rstrip('0123456789') fdisk_fd = os.popen(("/sbin/fdisk -l -u '%s'" % disk_device)) fdisk_lines = fdisk_fd.readlines() fdisk_fd.close() for line in fdisk_lines: if (not line.startswith(device)): continue info_tuple = line.split() for fsinfo in info_tuple[4:6]: if (fsinfo == '83'): return True return False
[ "def", "is_linux_fs_type", "(", "device", ")", ":", "disk_device", "=", "device", ".", "rstrip", "(", "'0123456789'", ")", "fdisk_fd", "=", "os", ".", "popen", "(", "(", "\"/sbin/fdisk -l -u '%s'\"", "%", "disk_device", ")", ")", "fdisk_lines", "=", "fdisk_fd", ".", "readlines", "(", ")", "fdisk_fd", ".", "close", "(", ")", "for", "line", "in", "fdisk_lines", ":", "if", "(", "not", "line", ".", "startswith", "(", "device", ")", ")", ":", "continue", "info_tuple", "=", "line", ".", "split", "(", ")", "for", "fsinfo", "in", "info_tuple", "[", "4", ":", "6", "]", ":", "if", "(", "fsinfo", "==", "'83'", ")", ":", "return", "True", "return", "False" ]
checks if specified partition is type 83 .
train
false
20,913
def missing_version_field(data_relation, reference): value_field = data_relation['field'] version_field = app.config['VERSION'] collection = data_relation['resource'] query = {} query[value_field] = reference[value_field] query[version_field] = {'$exists': False} return app.data.find_one(collection, None, **query)
[ "def", "missing_version_field", "(", "data_relation", ",", "reference", ")", ":", "value_field", "=", "data_relation", "[", "'field'", "]", "version_field", "=", "app", ".", "config", "[", "'VERSION'", "]", "collection", "=", "data_relation", "[", "'resource'", "]", "query", "=", "{", "}", "query", "[", "value_field", "]", "=", "reference", "[", "value_field", "]", "query", "[", "version_field", "]", "=", "{", "'$exists'", ":", "False", "}", "return", "app", ".", "data", ".", "find_one", "(", "collection", ",", "None", ",", "**", "query", ")" ]
returns a document if it matches the value_field but doesnt have a _version field .
train
false
20,914
def _convert_js_arg(arg): if (arg is None): return 'undefined' elif isinstance(arg, str): return '"{}"'.format(string_escape(arg)) elif isinstance(arg, bool): return str(arg).lower() elif isinstance(arg, (int, float)): return str(arg) else: raise TypeError("Don't know how to handle {!r} of type {}!".format(arg, type(arg).__name__))
[ "def", "_convert_js_arg", "(", "arg", ")", ":", "if", "(", "arg", "is", "None", ")", ":", "return", "'undefined'", "elif", "isinstance", "(", "arg", ",", "str", ")", ":", "return", "'\"{}\"'", ".", "format", "(", "string_escape", "(", "arg", ")", ")", "elif", "isinstance", "(", "arg", ",", "bool", ")", ":", "return", "str", "(", "arg", ")", ".", "lower", "(", ")", "elif", "isinstance", "(", "arg", ",", "(", "int", ",", "float", ")", ")", ":", "return", "str", "(", "arg", ")", "else", ":", "raise", "TypeError", "(", "\"Don't know how to handle {!r} of type {}!\"", ".", "format", "(", "arg", ",", "type", "(", "arg", ")", ".", "__name__", ")", ")" ]
convert the given argument so its the equivalent in js .
train
false
20,915
def get_debug_queries(): return g.get('sqlalchemy_queries', ())
[ "def", "get_debug_queries", "(", ")", ":", "return", "g", ".", "get", "(", "'sqlalchemy_queries'", ",", "(", ")", ")" ]
in debug mode flask-sqlalchemy will log all the sql queries sent to the database .
train
false
20,916
def constantX(value): return theano.tensor.constant(np.asarray(value, dtype=theano.config.floatX))
[ "def", "constantX", "(", "value", ")", ":", "return", "theano", ".", "tensor", ".", "constant", "(", "np", ".", "asarray", "(", "value", ",", "dtype", "=", "theano", ".", "config", ".", "floatX", ")", ")" ]
returns a constant of value value with floatx dtype parameters variable : writeme returns writeme .
train
false
20,917
@njit def mergesort(arr): assert (arr.ndim == 1) if (arr.size > 2): mid = (arr.size // 2) first = mergesort(arr[:mid].copy()) second = mergesort(arr[mid:].copy()) left = right = 0 writeidx = 0 while ((left < first.size) and (right < second.size)): if (first[left] <= second[right]): arr[writeidx] = first[left] left += 1 else: arr[writeidx] = second[right] right += 1 writeidx += 1 while (left < first.size): arr[writeidx] = first[left] writeidx += 1 left += 1 while (right < second.size): arr[writeidx] = second[right] writeidx += 1 right += 1 elif (arr.size == 2): (a, b) = arr (arr[0], arr[1]) = ((a, b) if (a <= b) else (b, a)) return arr
[ "@", "njit", "def", "mergesort", "(", "arr", ")", ":", "assert", "(", "arr", ".", "ndim", "==", "1", ")", "if", "(", "arr", ".", "size", ">", "2", ")", ":", "mid", "=", "(", "arr", ".", "size", "//", "2", ")", "first", "=", "mergesort", "(", "arr", "[", ":", "mid", "]", ".", "copy", "(", ")", ")", "second", "=", "mergesort", "(", "arr", "[", "mid", ":", "]", ".", "copy", "(", ")", ")", "left", "=", "right", "=", "0", "writeidx", "=", "0", "while", "(", "(", "left", "<", "first", ".", "size", ")", "and", "(", "right", "<", "second", ".", "size", ")", ")", ":", "if", "(", "first", "[", "left", "]", "<=", "second", "[", "right", "]", ")", ":", "arr", "[", "writeidx", "]", "=", "first", "[", "left", "]", "left", "+=", "1", "else", ":", "arr", "[", "writeidx", "]", "=", "second", "[", "right", "]", "right", "+=", "1", "writeidx", "+=", "1", "while", "(", "left", "<", "first", ".", "size", ")", ":", "arr", "[", "writeidx", "]", "=", "first", "[", "left", "]", "writeidx", "+=", "1", "left", "+=", "1", "while", "(", "right", "<", "second", ".", "size", ")", ":", "arr", "[", "writeidx", "]", "=", "second", "[", "right", "]", "writeidx", "+=", "1", "right", "+=", "1", "elif", "(", "arr", ".", "size", "==", "2", ")", ":", "(", "a", ",", "b", ")", "=", "arr", "(", "arr", "[", "0", "]", ",", "arr", "[", "1", "]", ")", "=", "(", "(", "a", ",", "b", ")", "if", "(", "a", "<=", "b", ")", "else", "(", "b", ",", "a", ")", ")", "return", "arr" ]
perform mergesort on a list of numbers .
train
false
20,918
def get_keyword_phrases(): phrases = [] with open(jasperpath.data('keyword_phrases'), mode='r') as f: for line in f: phrase = line.strip() if phrase: phrases.append(phrase) return phrases
[ "def", "get_keyword_phrases", "(", ")", ":", "phrases", "=", "[", "]", "with", "open", "(", "jasperpath", ".", "data", "(", "'keyword_phrases'", ")", ",", "mode", "=", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "phrase", "=", "line", ".", "strip", "(", ")", "if", "phrase", ":", "phrases", ".", "append", "(", "phrase", ")", "return", "phrases" ]
gets the keyword phrases from the keywords file in the jasper data dir .
train
false
20,919
def delete_server(protocol=None, service_address=None, server_address=None): cmd = '{0} -d {1}'.format(__detect_os(), _build_cmd(protocol=protocol, service_address=service_address, server_address=server_address)) out = __salt__['cmd.run_all'](cmd, python_shell=False) if out['retcode']: ret = out['stderr'].strip() else: ret = True return ret
[ "def", "delete_server", "(", "protocol", "=", "None", ",", "service_address", "=", "None", ",", "server_address", "=", "None", ")", ":", "cmd", "=", "'{0} -d {1}'", ".", "format", "(", "__detect_os", "(", ")", ",", "_build_cmd", "(", "protocol", "=", "protocol", ",", "service_address", "=", "service_address", ",", "server_address", "=", "server_address", ")", ")", "out", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "if", "out", "[", "'retcode'", "]", ":", "ret", "=", "out", "[", "'stderr'", "]", ".", "strip", "(", ")", "else", ":", "ret", "=", "True", "return", "ret" ]
delete the realserver from the virtual service .
train
true
20,920
def _CopyQueryObjectToProtocolBuffer(query, params): _CopyQueryToProtocolBuffer(query.query_string, params) for refinement in query.facet_refinements: refinement._CopyToProtocolBuffer(params.add_facet_refinement()) for return_facet in query.return_facets: return_facet._CopyToProtocolBuffer(params.add_include_facet()) options = query.options if (query.options is None): options = QueryOptions() _CopyQueryOptionsObjectToProtocolBuffer(query.query_string, options, params) facet_options = query.facet_options if (facet_options is None): facet_options = FacetOptions(discovery_limit=(10 if query.enable_facet_discovery else None)) facet_options._CopyToProtocolBuffer(params)
[ "def", "_CopyQueryObjectToProtocolBuffer", "(", "query", ",", "params", ")", ":", "_CopyQueryToProtocolBuffer", "(", "query", ".", "query_string", ",", "params", ")", "for", "refinement", "in", "query", ".", "facet_refinements", ":", "refinement", ".", "_CopyToProtocolBuffer", "(", "params", ".", "add_facet_refinement", "(", ")", ")", "for", "return_facet", "in", "query", ".", "return_facets", ":", "return_facet", ".", "_CopyToProtocolBuffer", "(", "params", ".", "add_include_facet", "(", ")", ")", "options", "=", "query", ".", "options", "if", "(", "query", ".", "options", "is", "None", ")", ":", "options", "=", "QueryOptions", "(", ")", "_CopyQueryOptionsObjectToProtocolBuffer", "(", "query", ".", "query_string", ",", "options", ",", "params", ")", "facet_options", "=", "query", ".", "facet_options", "if", "(", "facet_options", "is", "None", ")", ":", "facet_options", "=", "FacetOptions", "(", "discovery_limit", "=", "(", "10", "if", "query", ".", "enable_facet_discovery", "else", "None", ")", ")", "facet_options", ".", "_CopyToProtocolBuffer", "(", "params", ")" ]
copy a query object to search_service_pb .
train
false
20,921
def p_declaration_1(t): pass
[ "def", "p_declaration_1", "(", "t", ")", ":", "pass" ]
declaration : declaration_specifiers init_declarator_list semi .
train
false
20,922
def queue_subtasks_for_query(entry, action_name, create_subtask_fcn, item_querysets, item_fields, items_per_task, total_num_items): task_id = entry.task_id total_num_subtasks = _get_number_of_subtasks(total_num_items, items_per_task) subtask_id_list = [str(uuid4()) for _ in range(total_num_subtasks)] TASK_LOG.info('Task %s: updating InstructorTask %s with subtask info for %s subtasks to process %s items.', task_id, entry.id, total_num_subtasks, total_num_items) with outer_atomic(): progress = initialize_subtask_info(entry, action_name, total_num_items, subtask_id_list) item_list_generator = _generate_items_for_subtask(item_querysets, item_fields, total_num_items, items_per_task, total_num_subtasks, entry.course_id) TASK_LOG.info('Task %s: creating %s subtasks to process %s items.', task_id, total_num_subtasks, total_num_items) num_subtasks = 0 for item_list in item_list_generator: subtask_id = subtask_id_list[num_subtasks] num_subtasks += 1 subtask_status = SubtaskStatus.create(subtask_id) new_subtask = create_subtask_fcn(item_list, subtask_status) new_subtask.apply_async() return progress
[ "def", "queue_subtasks_for_query", "(", "entry", ",", "action_name", ",", "create_subtask_fcn", ",", "item_querysets", ",", "item_fields", ",", "items_per_task", ",", "total_num_items", ")", ":", "task_id", "=", "entry", ".", "task_id", "total_num_subtasks", "=", "_get_number_of_subtasks", "(", "total_num_items", ",", "items_per_task", ")", "subtask_id_list", "=", "[", "str", "(", "uuid4", "(", ")", ")", "for", "_", "in", "range", "(", "total_num_subtasks", ")", "]", "TASK_LOG", ".", "info", "(", "'Task %s: updating InstructorTask %s with subtask info for %s subtasks to process %s items.'", ",", "task_id", ",", "entry", ".", "id", ",", "total_num_subtasks", ",", "total_num_items", ")", "with", "outer_atomic", "(", ")", ":", "progress", "=", "initialize_subtask_info", "(", "entry", ",", "action_name", ",", "total_num_items", ",", "subtask_id_list", ")", "item_list_generator", "=", "_generate_items_for_subtask", "(", "item_querysets", ",", "item_fields", ",", "total_num_items", ",", "items_per_task", ",", "total_num_subtasks", ",", "entry", ".", "course_id", ")", "TASK_LOG", ".", "info", "(", "'Task %s: creating %s subtasks to process %s items.'", ",", "task_id", ",", "total_num_subtasks", ",", "total_num_items", ")", "num_subtasks", "=", "0", "for", "item_list", "in", "item_list_generator", ":", "subtask_id", "=", "subtask_id_list", "[", "num_subtasks", "]", "num_subtasks", "+=", "1", "subtask_status", "=", "SubtaskStatus", ".", "create", "(", "subtask_id", ")", "new_subtask", "=", "create_subtask_fcn", "(", "item_list", ",", "subtask_status", ")", "new_subtask", ".", "apply_async", "(", ")", "return", "progress" ]
generates and queues subtasks to each execute a chunk of "items" generated by a queryset .
train
false
20,923
def get_org(name, profile='grafana'): if isinstance(profile, string_types): profile = __salt__['config.option'](profile) response = requests.get('{0}/api/orgs/name/{1}'.format(profile['grafana_url'], name), auth=_get_auth(profile), headers=_get_headers(profile), timeout=profile.get('grafana_timeout', 3)) if (response.status_code >= 400): response.raise_for_status() return response.json()
[ "def", "get_org", "(", "name", ",", "profile", "=", "'grafana'", ")", ":", "if", "isinstance", "(", "profile", ",", "string_types", ")", ":", "profile", "=", "__salt__", "[", "'config.option'", "]", "(", "profile", ")", "response", "=", "requests", ".", "get", "(", "'{0}/api/orgs/name/{1}'", ".", "format", "(", "profile", "[", "'grafana_url'", "]", ",", "name", ")", ",", "auth", "=", "_get_auth", "(", "profile", ")", ",", "headers", "=", "_get_headers", "(", "profile", ")", ",", "timeout", "=", "profile", ".", "get", "(", "'grafana_timeout'", ",", "3", ")", ")", "if", "(", "response", ".", "status_code", ">=", "400", ")", ":", "response", ".", "raise_for_status", "(", ")", "return", "response", ".", "json", "(", ")" ]
show a single organization .
train
true
20,924
def maybe_shorten_name(powerline, name): if powerline.args.cwd_max_dir_size: return name[:powerline.args.cwd_max_dir_size] return name
[ "def", "maybe_shorten_name", "(", "powerline", ",", "name", ")", ":", "if", "powerline", ".", "args", ".", "cwd_max_dir_size", ":", "return", "name", "[", ":", "powerline", ".", "args", ".", "cwd_max_dir_size", "]", "return", "name" ]
if the user has asked for each directory name to be shortened .
train
false
20,925
def create_colors(n_colors): colors_hue = np.arange(n_colors) colors_hue = as_floatX(colors_hue) colors_hue *= (1.0 / n_colors) colors_hsv = np.ones((n_colors, 3)) colors_hsv[:, 2] *= 0.75 colors_hsv[:, 0] = colors_hue colors_hsv = colors_hsv.reshape(((1,) + colors_hsv.shape)) colors_rgb = matplotlib.colors.hsv_to_rgb(colors_hsv) colors_rgb = colors_rgb[0] return colors_rgb
[ "def", "create_colors", "(", "n_colors", ")", ":", "colors_hue", "=", "np", ".", "arange", "(", "n_colors", ")", "colors_hue", "=", "as_floatX", "(", "colors_hue", ")", "colors_hue", "*=", "(", "1.0", "/", "n_colors", ")", "colors_hsv", "=", "np", ".", "ones", "(", "(", "n_colors", ",", "3", ")", ")", "colors_hsv", "[", ":", ",", "2", "]", "*=", "0.75", "colors_hsv", "[", ":", ",", "0", "]", "=", "colors_hue", "colors_hsv", "=", "colors_hsv", ".", "reshape", "(", "(", "(", "1", ",", ")", "+", "colors_hsv", ".", "shape", ")", ")", "colors_rgb", "=", "matplotlib", ".", "colors", ".", "hsv_to_rgb", "(", "colors_hsv", ")", "colors_rgb", "=", "colors_rgb", "[", "0", "]", "return", "colors_rgb" ]
create an array of n_colors parameters n_colors : int the number of colors to create returns colors_rgb : np .
train
false
20,926
def iteritems(d): return getattr(d, _iteritems)()
[ "def", "iteritems", "(", "d", ")", ":", "return", "getattr", "(", "d", ",", "_iteritems", ")", "(", ")" ]
replacement for sixs iteritems for python2/3 compat uses iteritems if available and otherwise uses items .
train
false
20,927
def groebner_gcd(f, g): if (f.ring != g.ring): raise ValueError('Values should be equal') domain = f.ring.domain if (not domain.has_Field): (fc, f) = f.primitive() (gc, g) = g.primitive() gcd = domain.gcd(fc, gc) H = (f * g).quo([groebner_lcm(f, g)]) if (len(H) != 1): raise ValueError('Length should be 1') h = H[0] if (not domain.has_Field): return (gcd * h) else: return h.monic()
[ "def", "groebner_gcd", "(", "f", ",", "g", ")", ":", "if", "(", "f", ".", "ring", "!=", "g", ".", "ring", ")", ":", "raise", "ValueError", "(", "'Values should be equal'", ")", "domain", "=", "f", ".", "ring", ".", "domain", "if", "(", "not", "domain", ".", "has_Field", ")", ":", "(", "fc", ",", "f", ")", "=", "f", ".", "primitive", "(", ")", "(", "gc", ",", "g", ")", "=", "g", ".", "primitive", "(", ")", "gcd", "=", "domain", ".", "gcd", "(", "fc", ",", "gc", ")", "H", "=", "(", "f", "*", "g", ")", ".", "quo", "(", "[", "groebner_lcm", "(", "f", ",", "g", ")", "]", ")", "if", "(", "len", "(", "H", ")", "!=", "1", ")", ":", "raise", "ValueError", "(", "'Length should be 1'", ")", "h", "=", "H", "[", "0", "]", "if", "(", "not", "domain", ".", "has_Field", ")", ":", "return", "(", "gcd", "*", "h", ")", "else", ":", "return", "h", ".", "monic", "(", ")" ]
computes gcd of two polynomials using groebner bases .
train
false
20,928
@_ConfigurableFilter(executable='JPEGOPTIM_EXECUTABLE') def jpegoptim(infile, executable='jpegoptim'): return runinplace('{} -p --strip-all -q %1'.format(executable), infile)
[ "@", "_ConfigurableFilter", "(", "executable", "=", "'JPEGOPTIM_EXECUTABLE'", ")", "def", "jpegoptim", "(", "infile", ",", "executable", "=", "'jpegoptim'", ")", ":", "return", "runinplace", "(", "'{} -p --strip-all -q %1'", ".", "format", "(", "executable", ")", ",", "infile", ")" ]
run jpegoptim on a file .
train
false
20,929
def _getContentType(msg): attrs = None mm = msg.getHeaders(False, 'content-type').get('content-type', None) if mm: mm = ''.join(mm.splitlines()) mimetype = mm.split(';') if mimetype: type = mimetype[0].split('/', 1) if (len(type) == 1): major = type[0] minor = None elif (len(type) == 2): (major, minor) = type else: major = minor = None attrs = dict((x.strip().lower().split('=', 1) for x in mimetype[1:])) else: major = minor = None else: major = minor = None return (major, minor, attrs)
[ "def", "_getContentType", "(", "msg", ")", ":", "attrs", "=", "None", "mm", "=", "msg", ".", "getHeaders", "(", "False", ",", "'content-type'", ")", ".", "get", "(", "'content-type'", ",", "None", ")", "if", "mm", ":", "mm", "=", "''", ".", "join", "(", "mm", ".", "splitlines", "(", ")", ")", "mimetype", "=", "mm", ".", "split", "(", "';'", ")", "if", "mimetype", ":", "type", "=", "mimetype", "[", "0", "]", ".", "split", "(", "'/'", ",", "1", ")", "if", "(", "len", "(", "type", ")", "==", "1", ")", ":", "major", "=", "type", "[", "0", "]", "minor", "=", "None", "elif", "(", "len", "(", "type", ")", "==", "2", ")", ":", "(", "major", ",", "minor", ")", "=", "type", "else", ":", "major", "=", "minor", "=", "None", "attrs", "=", "dict", "(", "(", "x", ".", "strip", "(", ")", ".", "lower", "(", ")", ".", "split", "(", "'='", ",", "1", ")", "for", "x", "in", "mimetype", "[", "1", ":", "]", ")", ")", "else", ":", "major", "=", "minor", "=", "None", "else", ":", "major", "=", "minor", "=", "None", "return", "(", "major", ",", "minor", ",", "attrs", ")" ]
return a two-tuple of the main and subtype of the given message .
train
false
20,930
@frappe.whitelist() def get_fee_structure(program, academic_term=None): fee_structure = frappe.db.get_values(u'Fee Structure', {u'program': program, u'academic_term': academic_term}, u'name', as_dict=True) return (fee_structure[0].name if fee_structure else None)
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_fee_structure", "(", "program", ",", "academic_term", "=", "None", ")", ":", "fee_structure", "=", "frappe", ".", "db", ".", "get_values", "(", "u'Fee Structure'", ",", "{", "u'program'", ":", "program", ",", "u'academic_term'", ":", "academic_term", "}", ",", "u'name'", ",", "as_dict", "=", "True", ")", "return", "(", "fee_structure", "[", "0", "]", ".", "name", "if", "fee_structure", "else", "None", ")" ]
returns fee structure .
train
false
20,931
def csv(*args, **kwargs): if (len(args) == 0): return Datasheet(**kwargs) return Datasheet.load(*args, **kwargs)
[ "def", "csv", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "len", "(", "args", ")", "==", "0", ")", ":", "return", "Datasheet", "(", "**", "kwargs", ")", "return", "Datasheet", ".", "load", "(", "*", "args", ",", "**", "kwargs", ")" ]
returns a datasheet from the given csv file path .
train
false
20,933
def make_connection(config=None, default_model=None, _api_version=datastore_rpc._DATASTORE_V3, _id_resolver=None): return datastore_rpc.Connection(adapter=ModelAdapter(default_model, id_resolver=_id_resolver), config=config, _api_version=_api_version)
[ "def", "make_connection", "(", "config", "=", "None", ",", "default_model", "=", "None", ",", "_api_version", "=", "datastore_rpc", ".", "_DATASTORE_V3", ",", "_id_resolver", "=", "None", ")", ":", "return", "datastore_rpc", ".", "Connection", "(", "adapter", "=", "ModelAdapter", "(", "default_model", ",", "id_resolver", "=", "_id_resolver", ")", ",", "config", "=", "config", ",", "_api_version", "=", "_api_version", ")" ]
create a new connection object with the right adapter .
train
true
20,935
def test_and_associativity(): assert (((A & B) & C) == (A & (B & C)))
[ "def", "test_and_associativity", "(", ")", ":", "assert", "(", "(", "(", "A", "&", "B", ")", "&", "C", ")", "==", "(", "A", "&", "(", "B", "&", "C", ")", ")", ")" ]
test for associativity of and .
train
false
20,938
def import_package(name): import zipimport try: mod = __import__(name) except ImportError: clear_zipimport_cache() mod = __import__(name) components = name.split('.') for comp in components[1:]: mod = getattr(mod, comp) return mod
[ "def", "import_package", "(", "name", ")", ":", "import", "zipimport", "try", ":", "mod", "=", "__import__", "(", "name", ")", "except", "ImportError", ":", "clear_zipimport_cache", "(", ")", "mod", "=", "__import__", "(", "name", ")", "components", "=", "name", ".", "split", "(", "'.'", ")", "for", "comp", "in", "components", "[", "1", ":", "]", ":", "mod", "=", "getattr", "(", "mod", ",", "comp", ")", "return", "mod" ]
given a package name like foo .
train
true
20,939
def httpdate(date_obj): return date_obj.strftime('%a, %d %b %Y %H:%M:%S GMT')
[ "def", "httpdate", "(", "date_obj", ")", ":", "return", "date_obj", ".", "strftime", "(", "'%a, %d %b %Y %H:%M:%S GMT'", ")" ]
formats a datetime object for use in http headers .
train
false
20,941
def ProfileListFeedFromString(xml_string): return atom.CreateClassFromXMLString(ProfileListFeed, xml_string)
[ "def", "ProfileListFeedFromString", "(", "xml_string", ")", ":", "return", "atom", ".", "CreateClassFromXMLString", "(", "ProfileListFeed", ",", "xml_string", ")" ]
converts an xml string into a profilelistfeed object .
train
false
20,942
def header_value(headers, name): name = name.lower() result = [value for (header, value) in headers if (header.lower() == name)] if result: return ','.join(result) else: return None
[ "def", "header_value", "(", "headers", ",", "name", ")", ":", "name", "=", "name", ".", "lower", "(", ")", "result", "=", "[", "value", "for", "(", "header", ",", "value", ")", "in", "headers", "if", "(", "header", ".", "lower", "(", ")", "==", "name", ")", "]", "if", "result", ":", "return", "','", ".", "join", "(", "result", ")", "else", ":", "return", "None" ]
returns the headers value .
train
true
20,945
@mock_ec2 def test_dhcp_options_associate_invalid_dhcp_id(): conn = boto.connect_vpc(u'the_key', u'the_secret') vpc = conn.create_vpc(u'10.0.0.0/16') with assert_raises(EC2ResponseError) as cm: conn.associate_dhcp_options(u'foo', vpc.id) cm.exception.code.should.equal(u'InvalidDhcpOptionID.NotFound') cm.exception.status.should.equal(400) cm.exception.request_id.should_not.be.none
[ "@", "mock_ec2", "def", "test_dhcp_options_associate_invalid_dhcp_id", "(", ")", ":", "conn", "=", "boto", ".", "connect_vpc", "(", "u'the_key'", ",", "u'the_secret'", ")", "vpc", "=", "conn", ".", "create_vpc", "(", "u'10.0.0.0/16'", ")", "with", "assert_raises", "(", "EC2ResponseError", ")", "as", "cm", ":", "conn", ".", "associate_dhcp_options", "(", "u'foo'", ",", "vpc", ".", "id", ")", "cm", ".", "exception", ".", "code", ".", "should", ".", "equal", "(", "u'InvalidDhcpOptionID.NotFound'", ")", "cm", ".", "exception", ".", "status", ".", "should", ".", "equal", "(", "400", ")", "cm", ".", "exception", ".", "request_id", ".", "should_not", ".", "be", ".", "none" ]
associate dhcp option bad dhcp options id .
train
false
20,946
@Profiler.profile def test_core_fetchall(n): with engine.connect() as conn: result = conn.execute(Customer.__table__.select().limit(n)).fetchall() for row in result: data = (row['id'], row['name'], row['description'])
[ "@", "Profiler", ".", "profile", "def", "test_core_fetchall", "(", "n", ")", ":", "with", "engine", ".", "connect", "(", ")", "as", "conn", ":", "result", "=", "conn", ".", "execute", "(", "Customer", ".", "__table__", ".", "select", "(", ")", ".", "limit", "(", "n", ")", ")", ".", "fetchall", "(", ")", "for", "row", "in", "result", ":", "data", "=", "(", "row", "[", "'id'", "]", ",", "row", "[", "'name'", "]", ",", "row", "[", "'description'", "]", ")" ]
load core result rows using fetchall .
train
false
20,947
@handle_response_format @treeio_login_required def liability_edit(request, liability_id, response_format='html'): liability = get_object_or_404(Liability, pk=liability_id) if request.POST: if ('cancel' not in request.POST): form = LiabilityForm(request.user.profile, request.POST, instance=liability) if form.is_valid(): liability = form.save(commit=False) convert(liability, 'value') return HttpResponseRedirect(reverse('finance_liability_view', args=[liability.id])) else: return HttpResponseRedirect(reverse('finance_liability_view', args=[liability.id])) else: form = LiabilityForm(request.user.profile, instance=liability) return render_to_response('finance/liability_edit', {'form': form, 'liability': liability}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "liability_edit", "(", "request", ",", "liability_id", ",", "response_format", "=", "'html'", ")", ":", "liability", "=", "get_object_or_404", "(", "Liability", ",", "pk", "=", "liability_id", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "form", "=", "LiabilityForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "liability", ")", "if", "form", ".", "is_valid", "(", ")", ":", "liability", "=", "form", ".", "save", "(", "commit", "=", "False", ")", "convert", "(", "liability", ",", "'value'", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_liability_view'", ",", "args", "=", "[", "liability", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_liability_view'", ",", "args", "=", "[", "liability", ".", "id", "]", ")", ")", "else", ":", "form", "=", "LiabilityForm", "(", "request", ".", "user", ".", "profile", ",", "instance", "=", "liability", ")", "return", "render_to_response", "(", "'finance/liability_edit'", ",", "{", "'form'", ":", "form", ",", "'liability'", ":", "liability", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
liability edit page .
train
false
20,948
def share(*args): return prefix(u'share', u'git-cola', *args)
[ "def", "share", "(", "*", "args", ")", ":", "return", "prefix", "(", "u'share'", ",", "u'git-cola'", ",", "*", "args", ")" ]
queues a share link email .
train
false
20,949
def maxnomlen(atrv): nomtp = get_nom_val(atrv) return max((len(i) for i in nomtp))
[ "def", "maxnomlen", "(", "atrv", ")", ":", "nomtp", "=", "get_nom_val", "(", "atrv", ")", "return", "max", "(", "(", "len", "(", "i", ")", "for", "i", "in", "nomtp", ")", ")" ]
given a string containing a nominal type definition .
train
false
20,950
def gauss_gen_laguerre(n, alpha, n_digits): x = Dummy('x') p = laguerre_poly(n, x, alpha=alpha, polys=True) p1 = laguerre_poly((n - 1), x, alpha=alpha, polys=True) p2 = laguerre_poly((n - 1), x, alpha=(alpha + 1), polys=True) xi = [] w = [] for r in p.real_roots(): if isinstance(r, RootOf): r = r.eval_rational((S(1) / (10 ** (n_digits + 2)))) xi.append(r.n(n_digits)) w.append((gamma((alpha + n)) / (((n * gamma(n)) * p1.subs(x, r)) * p2.subs(x, r))).n(n_digits)) return (xi, w)
[ "def", "gauss_gen_laguerre", "(", "n", ",", "alpha", ",", "n_digits", ")", ":", "x", "=", "Dummy", "(", "'x'", ")", "p", "=", "laguerre_poly", "(", "n", ",", "x", ",", "alpha", "=", "alpha", ",", "polys", "=", "True", ")", "p1", "=", "laguerre_poly", "(", "(", "n", "-", "1", ")", ",", "x", ",", "alpha", "=", "alpha", ",", "polys", "=", "True", ")", "p2", "=", "laguerre_poly", "(", "(", "n", "-", "1", ")", ",", "x", ",", "alpha", "=", "(", "alpha", "+", "1", ")", ",", "polys", "=", "True", ")", "xi", "=", "[", "]", "w", "=", "[", "]", "for", "r", "in", "p", ".", "real_roots", "(", ")", ":", "if", "isinstance", "(", "r", ",", "RootOf", ")", ":", "r", "=", "r", ".", "eval_rational", "(", "(", "S", "(", "1", ")", "/", "(", "10", "**", "(", "n_digits", "+", "2", ")", ")", ")", ")", "xi", ".", "append", "(", "r", ".", "n", "(", "n_digits", ")", ")", "w", ".", "append", "(", "(", "gamma", "(", "(", "alpha", "+", "n", ")", ")", "/", "(", "(", "(", "n", "*", "gamma", "(", "n", ")", ")", "*", "p1", ".", "subs", "(", "x", ",", "r", ")", ")", "*", "p2", ".", "subs", "(", "x", ",", "r", ")", ")", ")", ".", "n", "(", "n_digits", ")", ")", "return", "(", "xi", ",", "w", ")" ]
computes the generalized gauss-laguerre quadrature [1]_ points and weights .
train
false
20,951
def boundary(value, minvalue, maxvalue): return min(max(value, minvalue), maxvalue)
[ "def", "boundary", "(", "value", ",", "minvalue", ",", "maxvalue", ")", ":", "return", "min", "(", "max", "(", "value", ",", "minvalue", ")", ",", "maxvalue", ")" ]
limit a value between a minvalue and maxvalue .
train
false
20,954
def trim_url(link): link_text = link if (len(link_text) > TRIM_URL_LENGTH): scheme_index = (link.rfind('://') + 3) last_slash_index = link.rfind('/') text_to_replace = link[scheme_index:last_slash_index] link_text = link_text.replace(text_to_replace, '...') return link_text
[ "def", "trim_url", "(", "link", ")", ":", "link_text", "=", "link", "if", "(", "len", "(", "link_text", ")", ">", "TRIM_URL_LENGTH", ")", ":", "scheme_index", "=", "(", "link", ".", "rfind", "(", "'://'", ")", "+", "3", ")", "last_slash_index", "=", "link", ".", "rfind", "(", "'/'", ")", "text_to_replace", "=", "link", "[", "scheme_index", ":", "last_slash_index", "]", "link_text", "=", "link_text", ".", "replace", "(", "text_to_replace", ",", "'...'", ")", "return", "link_text" ]
trims link if its longer than trim_url_length chars .
train
false
20,955
def test_locate_app(test_apps): assert (locate_app('cliapp.app').name == 'testapp') assert (locate_app('cliapp.app:testapp').name == 'testapp') assert (locate_app('cliapp.multiapp:app1').name == 'app1') pytest.raises(NoAppException, locate_app, 'notanpp.py') pytest.raises(NoAppException, locate_app, 'cliapp/app') pytest.raises(RuntimeError, locate_app, 'cliapp.app:notanapp') pytest.raises(ImportError, locate_app, 'cliapp.importerrorapp')
[ "def", "test_locate_app", "(", "test_apps", ")", ":", "assert", "(", "locate_app", "(", "'cliapp.app'", ")", ".", "name", "==", "'testapp'", ")", "assert", "(", "locate_app", "(", "'cliapp.app:testapp'", ")", ".", "name", "==", "'testapp'", ")", "assert", "(", "locate_app", "(", "'cliapp.multiapp:app1'", ")", ".", "name", "==", "'app1'", ")", "pytest", ".", "raises", "(", "NoAppException", ",", "locate_app", ",", "'notanpp.py'", ")", "pytest", ".", "raises", "(", "NoAppException", ",", "locate_app", ",", "'cliapp/app'", ")", "pytest", ".", "raises", "(", "RuntimeError", ",", "locate_app", ",", "'cliapp.app:notanapp'", ")", "pytest", ".", "raises", "(", "ImportError", ",", "locate_app", ",", "'cliapp.importerrorapp'", ")" ]
test of locate_app .
train
false
20,956
def set_expire(name, date): _set_account_policy(name, 'usingHardExpirationDate=1 hardExpireDateGMT={0}'.format(date)) return (get_expire(name) == date)
[ "def", "set_expire", "(", "name", ",", "date", ")", ":", "_set_account_policy", "(", "name", ",", "'usingHardExpirationDate=1 hardExpireDateGMT={0}'", ".", "format", "(", "date", ")", ")", "return", "(", "get_expire", "(", "name", ")", "==", "date", ")" ]
sets the time at which the account expires .
train
true
20,957
def load_ndarray_label(name): assert (name in ['ule']) common_path = os.path.join(preprocess('${PYLEARN2_DATA_PATH}'), 'UTLC', 'filetensor', (name + '_')) (trname, vname, tename) = [((common_path + subset) + '.tf') for subset in ['trainl', 'validl', 'testl']] trainl = load_filetensor(trname) validl = load_filetensor(vname) testl = load_filetensor(tename) return (trainl, validl, testl)
[ "def", "load_ndarray_label", "(", "name", ")", ":", "assert", "(", "name", "in", "[", "'ule'", "]", ")", "common_path", "=", "os", ".", "path", ".", "join", "(", "preprocess", "(", "'${PYLEARN2_DATA_PATH}'", ")", ",", "'UTLC'", ",", "'filetensor'", ",", "(", "name", "+", "'_'", ")", ")", "(", "trname", ",", "vname", ",", "tename", ")", "=", "[", "(", "(", "common_path", "+", "subset", ")", "+", "'.tf'", ")", "for", "subset", "in", "[", "'trainl'", ",", "'validl'", ",", "'testl'", "]", "]", "trainl", "=", "load_filetensor", "(", "trname", ")", "validl", "=", "load_filetensor", "(", "vname", ")", "testl", "=", "load_filetensor", "(", "tename", ")", "return", "(", "trainl", ",", "validl", ",", "testl", ")" ]
load the train .
train
false
20,958
def _index_param_value(X, v, indices): if ((not _is_arraylike(v)) or (_num_samples(v) != _num_samples(X))): return v if sp.issparse(v): v = v.tocsr() return safe_indexing(v, indices)
[ "def", "_index_param_value", "(", "X", ",", "v", ",", "indices", ")", ":", "if", "(", "(", "not", "_is_arraylike", "(", "v", ")", ")", "or", "(", "_num_samples", "(", "v", ")", "!=", "_num_samples", "(", "X", ")", ")", ")", ":", "return", "v", "if", "sp", ".", "issparse", "(", "v", ")", ":", "v", "=", "v", ".", "tocsr", "(", ")", "return", "safe_indexing", "(", "v", ",", "indices", ")" ]
private helper function for parameter value indexing .
train
true
20,959
def debugHandler(widget, event, *a): if (event.type in event_types): print event.type.value_nick
[ "def", "debugHandler", "(", "widget", ",", "event", ",", "*", "a", ")", ":", "if", "(", "event", ".", "type", "in", "event_types", ")", ":", "print", "event", ".", "type", ".", "value_nick" ]
just connect it to the event event .
train
false
20,960
def StatusUpdate(msg): if (verbosity > 0): print msg
[ "def", "StatusUpdate", "(", "msg", ")", ":", "if", "(", "verbosity", ">", "0", ")", ":", "print", "msg" ]
print a status message to stderr or the given file-like object .
train
false
20,962
def runProgrammer(port, filename): programmer = Stk500v2() programmer.connect(port=port) programmer.programChip(intelHex.readHex(filename)) programmer.close()
[ "def", "runProgrammer", "(", "port", ",", "filename", ")", ":", "programmer", "=", "Stk500v2", "(", ")", "programmer", ".", "connect", "(", "port", "=", "port", ")", "programmer", ".", "programChip", "(", "intelHex", ".", "readHex", "(", "filename", ")", ")", "programmer", ".", "close", "(", ")" ]
run an stk500v2 program on serial port port and write filename into flash .
train
false
20,963
def _create_base_string(method, base, params): normalized_qs = _normalize_params(params) return _join_by_ampersand(method, base, normalized_qs)
[ "def", "_create_base_string", "(", "method", ",", "base", ",", "params", ")", ":", "normalized_qs", "=", "_normalize_params", "(", "params", ")", "return", "_join_by_ampersand", "(", "method", ",", "base", ",", "normalized_qs", ")" ]
returns base string for hmac-sha1 signature as specified in: URL#rfc .
train
true
20,964
def resource_pathname(pathname, verbose=0): try: refno = Res.FSpOpenResFile(pathname, 1) Res.CloseResFile(refno) except Res.Error as arg: if (arg[0] in ((-37), (-39))): try: refno = Res.FSOpenResourceFile(pathname, u'', 1) except Res.Error as arg: if (arg[0] != (-199)): raise else: return refno pathname = _decode(pathname, verbose=verbose) else: raise return pathname
[ "def", "resource_pathname", "(", "pathname", ",", "verbose", "=", "0", ")", ":", "try", ":", "refno", "=", "Res", ".", "FSpOpenResFile", "(", "pathname", ",", "1", ")", "Res", ".", "CloseResFile", "(", "refno", ")", "except", "Res", ".", "Error", "as", "arg", ":", "if", "(", "arg", "[", "0", "]", "in", "(", "(", "-", "37", ")", ",", "(", "-", "39", ")", ")", ")", ":", "try", ":", "refno", "=", "Res", ".", "FSOpenResourceFile", "(", "pathname", ",", "u''", ",", "1", ")", "except", "Res", ".", "Error", "as", "arg", ":", "if", "(", "arg", "[", "0", "]", "!=", "(", "-", "199", ")", ")", ":", "raise", "else", ":", "return", "refno", "pathname", "=", "_decode", "(", "pathname", ",", "verbose", "=", "verbose", ")", "else", ":", "raise", "return", "pathname" ]
return the pathname for a resource file .
train
false
20,965
def default_on_failure(request, message, **kwargs): return render('openid_failure.html', {'message': message})
[ "def", "default_on_failure", "(", "request", ",", "message", ",", "**", "kwargs", ")", ":", "return", "render", "(", "'openid_failure.html'", ",", "{", "'message'", ":", "message", "}", ")" ]
default failure action on signin .
train
false
20,966
def lstm(c_prev, x): return LSTM()(c_prev, x)
[ "def", "lstm", "(", "c_prev", ",", "x", ")", ":", "return", "LSTM", "(", ")", "(", "c_prev", ",", "x", ")" ]
long short-term memory units as an activation function .
train
false
20,967
def getTeamsNS(message): alias = message.namespaces.getAlias(ns_uri) if (alias is None): try: message.namespaces.addAlias(ns_uri, 'lp') except KeyError as why: raise TeamsNamespaceError(why[0]) return ns_uri
[ "def", "getTeamsNS", "(", "message", ")", ":", "alias", "=", "message", ".", "namespaces", ".", "getAlias", "(", "ns_uri", ")", "if", "(", "alias", "is", "None", ")", ":", "try", ":", "message", ".", "namespaces", ".", "addAlias", "(", "ns_uri", ",", "'lp'", ")", "except", "KeyError", "as", "why", ":", "raise", "TeamsNamespaceError", "(", "why", "[", "0", "]", ")", "return", "ns_uri" ]
extract the launchpad teams namespace uri from the given openid message .
train
false
20,970
def _clear_assets(location): store = contentstore() (assets, __) = store.get_all_content_for_course(location.course_key) for asset in assets: asset_location = asset['asset_key'] del_cached_content(asset_location) store.delete(asset_location)
[ "def", "_clear_assets", "(", "location", ")", ":", "store", "=", "contentstore", "(", ")", "(", "assets", ",", "__", ")", "=", "store", ".", "get_all_content_for_course", "(", "location", ".", "course_key", ")", "for", "asset", "in", "assets", ":", "asset_location", "=", "asset", "[", "'asset_key'", "]", "del_cached_content", "(", "asset_location", ")", "store", ".", "delete", "(", "asset_location", ")" ]
clear all assets for location .
train
false
20,971
def _AddPropertiesForFields(descriptor, cls): for field in descriptor.fields: _AddPropertiesForField(field, cls) if descriptor.is_extendable: cls.Extensions = property((lambda self: _ExtensionDict(self)))
[ "def", "_AddPropertiesForFields", "(", "descriptor", ",", "cls", ")", ":", "for", "field", "in", "descriptor", ".", "fields", ":", "_AddPropertiesForField", "(", "field", ",", "cls", ")", "if", "descriptor", ".", "is_extendable", ":", "cls", ".", "Extensions", "=", "property", "(", "(", "lambda", "self", ":", "_ExtensionDict", "(", "self", ")", ")", ")" ]
adds properties for all fields in this protocol message type .
train
true
20,973
def get_async_test_timeout(): try: return float(os.environ.get('ASYNC_TEST_TIMEOUT')) except (ValueError, TypeError): return 5
[ "def", "get_async_test_timeout", "(", ")", ":", "try", ":", "return", "float", "(", "os", ".", "environ", ".", "get", "(", "'ASYNC_TEST_TIMEOUT'", ")", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "5" ]
get the global timeout setting for async tests .
train
false
20,974
def lcs(str1, str2): lengths = build_lengths_matrix(str1, str2) return read_from_matrix(lengths, str1, str2)
[ "def", "lcs", "(", "str1", ",", "str2", ")", ":", "lengths", "=", "build_lengths_matrix", "(", "str1", ",", "str2", ")", "return", "read_from_matrix", "(", "lengths", ",", "str1", ",", "str2", ")" ]
xxx: needs documentation written .
train
false
20,975
def make_token_store(fpath=None): if (fpath is None): fpath = DEFAULT_TOKEN_FILE return auth_file.Storage(fpath)
[ "def", "make_token_store", "(", "fpath", "=", "None", ")", ":", "if", "(", "fpath", "is", "None", ")", ":", "fpath", "=", "DEFAULT_TOKEN_FILE", "return", "auth_file", ".", "Storage", "(", "fpath", ")" ]
create token storage from give file name .
train
false
20,976
def test_system_tuple(): print 'TODO'
[ "def", "test_system_tuple", "(", ")", ":", "print", "'TODO'" ]
URL - python tuple interchangeable? if so .
train
false
20,978
def RegisterValidator(flag_name, checker, message='Flag validation failed', flag_values=FLAGS): flag_values.AddValidator(gflags_validators.SimpleValidator(flag_name, checker, message))
[ "def", "RegisterValidator", "(", "flag_name", ",", "checker", ",", "message", "=", "'Flag validation failed'", ",", "flag_values", "=", "FLAGS", ")", ":", "flag_values", ".", "AddValidator", "(", "gflags_validators", ".", "SimpleValidator", "(", "flag_name", ",", "checker", ",", "message", ")", ")" ]
adds a constraint .
train
false
20,979
def cert_info(app_dir, tools_dir): try: print '[INFO] Reading Code Signing Certificate' cert = os.path.join(app_dir, 'META-INF/') cp_path = (tools_dir + 'CertPrint.jar') files = [f for f in os.listdir(cert) if os.path.isfile(os.path.join(cert, f))] certfile = None dat = '' if ('CERT.RSA' in files): certfile = os.path.join(cert, 'CERT.RSA') else: for file_name in files: if file_name.lower().endswith('.rsa'): certfile = os.path.join(cert, file_name) elif file_name.lower().endswith('.dsa'): certfile = os.path.join(cert, file_name) if certfile: args = [(settings.JAVA_PATH + 'java'), '-jar', cp_path, certfile] issued = 'good' dat = subprocess.check_output(args) unicode_output = unicode(dat, encoding='utf-8', errors='replace') dat = escape(unicode_output).replace('\n', '</br>') else: dat = 'No Code Signing Certificate Found!' issued = 'missing' if re.findall('Issuer: CN=Android Debug|Subject: CN=Android Debug', dat): issued = 'bad' cert_dic = {'cert_info': dat, 'issued': issued} return cert_dic except: PrintException('[ERROR] Reading Code Signing Certificate')
[ "def", "cert_info", "(", "app_dir", ",", "tools_dir", ")", ":", "try", ":", "print", "'[INFO] Reading Code Signing Certificate'", "cert", "=", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'META-INF/'", ")", "cp_path", "=", "(", "tools_dir", "+", "'CertPrint.jar'", ")", "files", "=", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "cert", ")", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "cert", ",", "f", ")", ")", "]", "certfile", "=", "None", "dat", "=", "''", "if", "(", "'CERT.RSA'", "in", "files", ")", ":", "certfile", "=", "os", ".", "path", ".", "join", "(", "cert", ",", "'CERT.RSA'", ")", "else", ":", "for", "file_name", "in", "files", ":", "if", "file_name", ".", "lower", "(", ")", ".", "endswith", "(", "'.rsa'", ")", ":", "certfile", "=", "os", ".", "path", ".", "join", "(", "cert", ",", "file_name", ")", "elif", "file_name", ".", "lower", "(", ")", ".", "endswith", "(", "'.dsa'", ")", ":", "certfile", "=", "os", ".", "path", ".", "join", "(", "cert", ",", "file_name", ")", "if", "certfile", ":", "args", "=", "[", "(", "settings", ".", "JAVA_PATH", "+", "'java'", ")", ",", "'-jar'", ",", "cp_path", ",", "certfile", "]", "issued", "=", "'good'", "dat", "=", "subprocess", ".", "check_output", "(", "args", ")", "unicode_output", "=", "unicode", "(", "dat", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'replace'", ")", "dat", "=", "escape", "(", "unicode_output", ")", ".", "replace", "(", "'\\n'", ",", "'</br>'", ")", "else", ":", "dat", "=", "'No Code Signing Certificate Found!'", "issued", "=", "'missing'", "if", "re", ".", "findall", "(", "'Issuer: CN=Android Debug|Subject: CN=Android Debug'", ",", "dat", ")", ":", "issued", "=", "'bad'", "cert_dic", "=", "{", "'cert_info'", ":", "dat", ",", "'issued'", ":", "issued", "}", "return", "cert_dic", "except", ":", "PrintException", "(", "'[ERROR] Reading Code Signing Certificate'", ")" ]
get the certificate info needed to render the dashboard section for the given student and course .
train
false
20,980
def cancel_card_hold(hold): result = braintree.Transaction.void(hold.id) assert result.is_success amount = hold.amount participant_id = hold.custom_fields[u'participant_id'] log(u'Canceled a ${:.2f} hold for {}.'.format(amount, participant_id))
[ "def", "cancel_card_hold", "(", "hold", ")", ":", "result", "=", "braintree", ".", "Transaction", ".", "void", "(", "hold", ".", "id", ")", "assert", "result", ".", "is_success", "amount", "=", "hold", ".", "amount", "participant_id", "=", "hold", ".", "custom_fields", "[", "u'participant_id'", "]", "log", "(", "u'Canceled a ${:.2f} hold for {}.'", ".", "format", "(", "amount", ",", "participant_id", ")", ")" ]
cancel the previously created hold on the participants credit card .
train
false
20,981
def get_user_lang(user=None): if (not user): user = frappe.session.user lang = frappe.cache().hget(u'lang', user) if (not lang): lang = frappe.db.get_value(u'User', user, u'language') if (not lang): lang = frappe.db.get_default(u'lang') if (not lang): lang = (frappe.local.lang or u'en') frappe.cache().hset(u'lang', user, lang) return lang
[ "def", "get_user_lang", "(", "user", "=", "None", ")", ":", "if", "(", "not", "user", ")", ":", "user", "=", "frappe", ".", "session", ".", "user", "lang", "=", "frappe", ".", "cache", "(", ")", ".", "hget", "(", "u'lang'", ",", "user", ")", "if", "(", "not", "lang", ")", ":", "lang", "=", "frappe", ".", "db", ".", "get_value", "(", "u'User'", ",", "user", ",", "u'language'", ")", "if", "(", "not", "lang", ")", ":", "lang", "=", "frappe", ".", "db", ".", "get_default", "(", "u'lang'", ")", "if", "(", "not", "lang", ")", ":", "lang", "=", "(", "frappe", ".", "local", ".", "lang", "or", "u'en'", ")", "frappe", ".", "cache", "(", ")", ".", "hset", "(", "u'lang'", ",", "user", ",", "lang", ")", "return", "lang" ]
set frappe .
train
false
20,982
def bool_option(arg): return True
[ "def", "bool_option", "(", "arg", ")", ":", "return", "True" ]
used to convert flag options to auto directives .
train
false
20,983
def get_constant(lin_op): constant = mul(lin_op, {}) const_size = (constant.shape[0] * constant.shape[1]) return np.reshape(constant, const_size, 'F')
[ "def", "get_constant", "(", "lin_op", ")", ":", "constant", "=", "mul", "(", "lin_op", ",", "{", "}", ")", "const_size", "=", "(", "constant", ".", "shape", "[", "0", "]", "*", "constant", ".", "shape", "[", "1", "]", ")", "return", "np", ".", "reshape", "(", "constant", ",", "const_size", ",", "'F'", ")" ]
returns the constant term in the expression .
train
false
20,985
def get_course_tab_list(request, course): user = request.user xmodule_tab_list = CourseTabList.iterate_displayable(course, user=user) course_tab_list = [] must_complete_ee = user_must_complete_entrance_exam(request, user, course) for tab in xmodule_tab_list: if must_complete_ee: if (tab.type != 'courseware'): continue tab.name = _('Entrance Exam') if ((tab.type == 'static_tab') and tab.course_staff_only and (not bool((user and has_access(user, 'staff', course, course.id))))): continue course_tab_list.append(tab) course_tab_list += _get_dynamic_tabs(course, user) return course_tab_list
[ "def", "get_course_tab_list", "(", "request", ",", "course", ")", ":", "user", "=", "request", ".", "user", "xmodule_tab_list", "=", "CourseTabList", ".", "iterate_displayable", "(", "course", ",", "user", "=", "user", ")", "course_tab_list", "=", "[", "]", "must_complete_ee", "=", "user_must_complete_entrance_exam", "(", "request", ",", "user", ",", "course", ")", "for", "tab", "in", "xmodule_tab_list", ":", "if", "must_complete_ee", ":", "if", "(", "tab", ".", "type", "!=", "'courseware'", ")", ":", "continue", "tab", ".", "name", "=", "_", "(", "'Entrance Exam'", ")", "if", "(", "(", "tab", ".", "type", "==", "'static_tab'", ")", "and", "tab", ".", "course_staff_only", "and", "(", "not", "bool", "(", "(", "user", "and", "has_access", "(", "user", ",", "'staff'", ",", "course", ",", "course", ".", "id", ")", ")", ")", ")", ")", ":", "continue", "course_tab_list", ".", "append", "(", "tab", ")", "course_tab_list", "+=", "_get_dynamic_tabs", "(", "course", ",", "user", ")", "return", "course_tab_list" ]
retrieves the course tab list from xmodule .
train
false
20,987
def default_user_agent(): _implementation = platform.python_implementation() if (_implementation == 'CPython'): _implementation_version = platform.python_version() elif (_implementation == 'PyPy'): _implementation_version = ('%s.%s.%s' % (sys.pypy_version_info.major, sys.pypy_version_info.minor, sys.pypy_version_info.micro)) if (sys.pypy_version_info.releaselevel != 'final'): _implementation_version = ''.join([_implementation_version, sys.pypy_version_info.releaselevel]) elif (_implementation == 'Jython'): _implementation_version = platform.python_version() elif (_implementation == 'IronPython'): _implementation_version = platform.python_version() else: _implementation_version = 'Unknown' try: p_system = platform.system() p_release = platform.release() except IOError: p_system = 'Unknown' p_release = 'Unknown' return ' '.join([('python-requests/%s' % __version__), ('%s/%s' % (_implementation, _implementation_version)), ('%s/%s' % (p_system, p_release))])
[ "def", "default_user_agent", "(", ")", ":", "_implementation", "=", "platform", ".", "python_implementation", "(", ")", "if", "(", "_implementation", "==", "'CPython'", ")", ":", "_implementation_version", "=", "platform", ".", "python_version", "(", ")", "elif", "(", "_implementation", "==", "'PyPy'", ")", ":", "_implementation_version", "=", "(", "'%s.%s.%s'", "%", "(", "sys", ".", "pypy_version_info", ".", "major", ",", "sys", ".", "pypy_version_info", ".", "minor", ",", "sys", ".", "pypy_version_info", ".", "micro", ")", ")", "if", "(", "sys", ".", "pypy_version_info", ".", "releaselevel", "!=", "'final'", ")", ":", "_implementation_version", "=", "''", ".", "join", "(", "[", "_implementation_version", ",", "sys", ".", "pypy_version_info", ".", "releaselevel", "]", ")", "elif", "(", "_implementation", "==", "'Jython'", ")", ":", "_implementation_version", "=", "platform", ".", "python_version", "(", ")", "elif", "(", "_implementation", "==", "'IronPython'", ")", ":", "_implementation_version", "=", "platform", ".", "python_version", "(", ")", "else", ":", "_implementation_version", "=", "'Unknown'", "try", ":", "p_system", "=", "platform", ".", "system", "(", ")", "p_release", "=", "platform", ".", "release", "(", ")", "except", "IOError", ":", "p_system", "=", "'Unknown'", "p_release", "=", "'Unknown'", "return", "' '", ".", "join", "(", "[", "(", "'python-requests/%s'", "%", "__version__", ")", ",", "(", "'%s/%s'", "%", "(", "_implementation", ",", "_implementation_version", ")", ")", ",", "(", "'%s/%s'", "%", "(", "p_system", ",", "p_release", ")", ")", "]", ")" ]
return a string representing the default user agent .
train
true
20,988
def differentiate_finite(expr, *symbols, **kwargs): points = kwargs.pop('points', 1) x0 = kwargs.pop('x0', None) wrt = kwargs.pop('wrt', None) evaluate = kwargs.pop('evaluate', True) if (kwargs != {}): raise ValueError(('Unknown kwargs: %s' % kwargs)) Dexpr = expr.diff(evaluate=evaluate, *symbols) return Dexpr.replace((lambda arg: arg.is_Derivative), (lambda arg: arg.as_finite_difference(points=points, x0=x0, wrt=wrt)))
[ "def", "differentiate_finite", "(", "expr", ",", "*", "symbols", ",", "**", "kwargs", ")", ":", "points", "=", "kwargs", ".", "pop", "(", "'points'", ",", "1", ")", "x0", "=", "kwargs", ".", "pop", "(", "'x0'", ",", "None", ")", "wrt", "=", "kwargs", ".", "pop", "(", "'wrt'", ",", "None", ")", "evaluate", "=", "kwargs", ".", "pop", "(", "'evaluate'", ",", "True", ")", "if", "(", "kwargs", "!=", "{", "}", ")", ":", "raise", "ValueError", "(", "(", "'Unknown kwargs: %s'", "%", "kwargs", ")", ")", "Dexpr", "=", "expr", ".", "diff", "(", "evaluate", "=", "evaluate", ",", "*", "symbols", ")", "return", "Dexpr", ".", "replace", "(", "(", "lambda", "arg", ":", "arg", ".", "is_Derivative", ")", ",", "(", "lambda", "arg", ":", "arg", ".", "as_finite_difference", "(", "points", "=", "points", ",", "x0", "=", "x0", ",", "wrt", "=", "wrt", ")", ")", ")" ]
differentiate expr and replace derivatives with finite differences .
train
false
20,990
def libvlc_media_list_add_media(p_ml, p_md): f = (_Cfunctions.get('libvlc_media_list_add_media', None) or _Cfunction('libvlc_media_list_add_media', ((1,), (1,)), None, ctypes.c_int, MediaList, Media)) return f(p_ml, p_md)
[ "def", "libvlc_media_list_add_media", "(", "p_ml", ",", "p_md", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_add_media'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_add_media'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaList", ",", "Media", ")", ")", "return", "f", "(", "p_ml", ",", "p_md", ")" ]
add media instance to media list the l{libvlc_media_list_lock} should be held upon entering this function .
train
true
20,991
def test_get_readable_fileobj_cleans_up_temporary_files(tmpdir, monkeypatch): local_filename = get_pkg_data_filename(os.path.join(u'data', u'local.dat')) url = (u'file://' + urllib.request.pathname2url(local_filename)) monkeypatch.setattr(tempfile, u'tempdir', str(tmpdir)) with get_readable_fileobj(url) as fileobj: pass tempdir_listing = tmpdir.listdir() assert (len(tempdir_listing) == 0)
[ "def", "test_get_readable_fileobj_cleans_up_temporary_files", "(", "tmpdir", ",", "monkeypatch", ")", ":", "local_filename", "=", "get_pkg_data_filename", "(", "os", ".", "path", ".", "join", "(", "u'data'", ",", "u'local.dat'", ")", ")", "url", "=", "(", "u'file://'", "+", "urllib", ".", "request", ".", "pathname2url", "(", "local_filename", ")", ")", "monkeypatch", ".", "setattr", "(", "tempfile", ",", "u'tempdir'", ",", "str", "(", "tmpdir", ")", ")", "with", "get_readable_fileobj", "(", "url", ")", "as", "fileobj", ":", "pass", "tempdir_listing", "=", "tmpdir", ".", "listdir", "(", ")", "assert", "(", "len", "(", "tempdir_listing", ")", "==", "0", ")" ]
checks that get_readable_fileobj leaves no temporary files behind .
train
false
20,992
def _parse_attrs(header): attributes = {} attrs = '' if ('; ' in header): (header, attrs) = header.split('; ', 1) m = True while m: m = ATTRIBUTES_RE.match(attrs) if m: attrs = attrs[len(m.group(0)):] attributes[m.group(1)] = m.group(2).strip('"') return (header, attributes)
[ "def", "_parse_attrs", "(", "header", ")", ":", "attributes", "=", "{", "}", "attrs", "=", "''", "if", "(", "'; '", "in", "header", ")", ":", "(", "header", ",", "attrs", ")", "=", "header", ".", "split", "(", "'; '", ",", "1", ")", "m", "=", "True", "while", "m", ":", "m", "=", "ATTRIBUTES_RE", ".", "match", "(", "attrs", ")", "if", "m", ":", "attrs", "=", "attrs", "[", "len", "(", "m", ".", "group", "(", "0", ")", ")", ":", "]", "attributes", "[", "m", ".", "group", "(", "1", ")", "]", "=", "m", ".", "group", "(", "2", ")", ".", "strip", "(", "'\"'", ")", "return", "(", "header", ",", "attributes", ")" ]
given the value of a header like: content-disposition: form-data; name="somefile"; filename="test .
train
false
20,993
def describe_token(token): if (token.type == 'name'): return token.value return _describe_token_type(token.type)
[ "def", "describe_token", "(", "token", ")", ":", "if", "(", "token", ".", "type", "==", "'name'", ")", ":", "return", "token", ".", "value", "return", "_describe_token_type", "(", "token", ".", "type", ")" ]
returns a description of the token .
train
false
20,994
def to_array(img): return (numpy.array(img.getdata()) / 255.0)
[ "def", "to_array", "(", "img", ")", ":", "return", "(", "numpy", ".", "array", "(", "img", ".", "getdata", "(", ")", ")", "/", "255.0", ")" ]
convert pil .
train
false
20,996
def state_to_operators(state, **options): if (not (isinstance(state, StateBase) or issubclass(state, StateBase))): raise NotImplementedError('Argument is not a state!') if (state in state_mapping): state_inst = _make_default(state) try: ret = _get_ops(state_inst, _make_set(state_mapping[state]), **options) except (NotImplementedError, TypeError): ret = state_mapping[state] elif (type(state) in state_mapping): ret = _get_ops(state, _make_set(state_mapping[type(state)]), **options) elif (isinstance(state, BraBase) and (state.dual_class() in state_mapping)): ret = _get_ops(state, _make_set(state_mapping[state.dual_class()])) elif (issubclass(state, BraBase) and (state.dual_class() in state_mapping)): state_inst = _make_default(state) try: ret = _get_ops(state_inst, _make_set(state_mapping[state.dual_class()])) except (NotImplementedError, TypeError): ret = state_mapping[state.dual_class()] else: ret = None return _make_set(ret)
[ "def", "state_to_operators", "(", "state", ",", "**", "options", ")", ":", "if", "(", "not", "(", "isinstance", "(", "state", ",", "StateBase", ")", "or", "issubclass", "(", "state", ",", "StateBase", ")", ")", ")", ":", "raise", "NotImplementedError", "(", "'Argument is not a state!'", ")", "if", "(", "state", "in", "state_mapping", ")", ":", "state_inst", "=", "_make_default", "(", "state", ")", "try", ":", "ret", "=", "_get_ops", "(", "state_inst", ",", "_make_set", "(", "state_mapping", "[", "state", "]", ")", ",", "**", "options", ")", "except", "(", "NotImplementedError", ",", "TypeError", ")", ":", "ret", "=", "state_mapping", "[", "state", "]", "elif", "(", "type", "(", "state", ")", "in", "state_mapping", ")", ":", "ret", "=", "_get_ops", "(", "state", ",", "_make_set", "(", "state_mapping", "[", "type", "(", "state", ")", "]", ")", ",", "**", "options", ")", "elif", "(", "isinstance", "(", "state", ",", "BraBase", ")", "and", "(", "state", ".", "dual_class", "(", ")", "in", "state_mapping", ")", ")", ":", "ret", "=", "_get_ops", "(", "state", ",", "_make_set", "(", "state_mapping", "[", "state", ".", "dual_class", "(", ")", "]", ")", ")", "elif", "(", "issubclass", "(", "state", ",", "BraBase", ")", "and", "(", "state", ".", "dual_class", "(", ")", "in", "state_mapping", ")", ")", ":", "state_inst", "=", "_make_default", "(", "state", ")", "try", ":", "ret", "=", "_get_ops", "(", "state_inst", ",", "_make_set", "(", "state_mapping", "[", "state", ".", "dual_class", "(", ")", "]", ")", ")", "except", "(", "NotImplementedError", ",", "TypeError", ")", ":", "ret", "=", "state_mapping", "[", "state", ".", "dual_class", "(", ")", "]", "else", ":", "ret", "=", "None", "return", "_make_set", "(", "ret", ")" ]
returns the operator or set of operators corresponding to the given eigenstate a global function for mapping state classes to their associated operators or sets of operators .
train
false
20,997
def correlate(in1, in2, mode='full', method='auto'): in1 = asarray(in1) in2 = asarray(in2) if (in1.ndim == in2.ndim == 0): return (in1 * in2) elif (in1.ndim != in2.ndim): raise ValueError('in1 and in2 should have the same dimensionality') try: val = _modedict[mode] except KeyError: raise ValueError("Acceptable mode flags are 'valid', 'same', or 'full'.") if (method in ('fft', 'auto')): return convolve(in1, _reverse_and_conj(in2), mode, method) if _np_conv_ok(in1, in2, mode): return np.correlate(in1, in2, mode) swapped_inputs = (((mode == 'full') and (in2.size > in1.size)) or _inputs_swap_needed(mode, in1.shape, in2.shape)) if swapped_inputs: (in1, in2) = (in2, in1) if (mode == 'valid'): ps = [((i - j) + 1) for (i, j) in zip(in1.shape, in2.shape)] out = np.empty(ps, in1.dtype) z = sigtools._correlateND(in1, in2, out, val) else: ps = [((i + j) - 1) for (i, j) in zip(in1.shape, in2.shape)] in1zpadded = np.zeros(ps, in1.dtype) sc = [slice(0, i) for i in in1.shape] in1zpadded[sc] = in1.copy() if (mode == 'full'): out = np.empty(ps, in1.dtype) elif (mode == 'same'): out = np.empty(in1.shape, in1.dtype) z = sigtools._correlateND(in1zpadded, in2, out, val) if swapped_inputs: z = _reverse_and_conj(z) return z
[ "def", "correlate", "(", "in1", ",", "in2", ",", "mode", "=", "'full'", ",", "method", "=", "'auto'", ")", ":", "in1", "=", "asarray", "(", "in1", ")", "in2", "=", "asarray", "(", "in2", ")", "if", "(", "in1", ".", "ndim", "==", "in2", ".", "ndim", "==", "0", ")", ":", "return", "(", "in1", "*", "in2", ")", "elif", "(", "in1", ".", "ndim", "!=", "in2", ".", "ndim", ")", ":", "raise", "ValueError", "(", "'in1 and in2 should have the same dimensionality'", ")", "try", ":", "val", "=", "_modedict", "[", "mode", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Acceptable mode flags are 'valid', 'same', or 'full'.\"", ")", "if", "(", "method", "in", "(", "'fft'", ",", "'auto'", ")", ")", ":", "return", "convolve", "(", "in1", ",", "_reverse_and_conj", "(", "in2", ")", ",", "mode", ",", "method", ")", "if", "_np_conv_ok", "(", "in1", ",", "in2", ",", "mode", ")", ":", "return", "np", ".", "correlate", "(", "in1", ",", "in2", ",", "mode", ")", "swapped_inputs", "=", "(", "(", "(", "mode", "==", "'full'", ")", "and", "(", "in2", ".", "size", ">", "in1", ".", "size", ")", ")", "or", "_inputs_swap_needed", "(", "mode", ",", "in1", ".", "shape", ",", "in2", ".", "shape", ")", ")", "if", "swapped_inputs", ":", "(", "in1", ",", "in2", ")", "=", "(", "in2", ",", "in1", ")", "if", "(", "mode", "==", "'valid'", ")", ":", "ps", "=", "[", "(", "(", "i", "-", "j", ")", "+", "1", ")", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "in1", ".", "shape", ",", "in2", ".", "shape", ")", "]", "out", "=", "np", ".", "empty", "(", "ps", ",", "in1", ".", "dtype", ")", "z", "=", "sigtools", ".", "_correlateND", "(", "in1", ",", "in2", ",", "out", ",", "val", ")", "else", ":", "ps", "=", "[", "(", "(", "i", "+", "j", ")", "-", "1", ")", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "in1", ".", "shape", ",", "in2", ".", "shape", ")", "]", "in1zpadded", "=", "np", ".", "zeros", "(", "ps", ",", "in1", ".", "dtype", ")", "sc", "=", "[", "slice", "(", "0", ",", "i", ")", "for", "i", "in", "in1", ".", "shape", "]", "in1zpadded", "[", "sc", "]", "=", "in1", ".", "copy", "(", ")", "if", "(", "mode", "==", "'full'", ")", ":", "out", "=", "np", ".", "empty", "(", "ps", ",", "in1", ".", "dtype", ")", "elif", "(", "mode", "==", "'same'", ")", ":", "out", "=", "np", ".", "empty", "(", "in1", ".", "shape", ",", "in1", ".", "dtype", ")", "z", "=", "sigtools", ".", "_correlateND", "(", "in1zpadded", ",", "in2", ",", "out", ",", "val", ")", "if", "swapped_inputs", ":", "z", "=", "_reverse_and_conj", "(", "z", ")", "return", "z" ]
correlate vectors using method .
train
false
20,999
def test_message_recording(client): register_and_login(client, 'foo', 'default') add_message(client, 'test message 1') add_message(client, '<test message 2>') rv = client.get('/') assert ('test message 1' in rv.data) assert ('&lt;test message 2&gt;' in rv.data)
[ "def", "test_message_recording", "(", "client", ")", ":", "register_and_login", "(", "client", ",", "'foo'", ",", "'default'", ")", "add_message", "(", "client", ",", "'test message 1'", ")", "add_message", "(", "client", ",", "'<test message 2>'", ")", "rv", "=", "client", ".", "get", "(", "'/'", ")", "assert", "(", "'test message 1'", "in", "rv", ".", "data", ")", "assert", "(", "'&lt;test message 2&gt;'", "in", "rv", ".", "data", ")" ]
check if adding messages works .
train
false
21,001
def get_marvel_characters(number_of_characters, marvel_public_key, marvel_private_key): timestamp = str(int(time.time())) hash_value = hashlib.md5(((timestamp + marvel_private_key) + marvel_public_key)).hexdigest() characters = [] for _num in xrange(number_of_characters): offset = random.randrange(1, 1478) limit = '1' url = ((((((((('http://gateway.marvel.com:80/v1/public/characters?limit=' + limit) + '&offset=') + str(offset)) + '&apikey=') + marvel_public_key) + '&ts=') + timestamp) + '&hash=') + hash_value) headers = {'content-type': 'application/json'} request = requests.get(url, headers=headers) data = json.loads(request.content) if (data.get('code') == 'InvalidCredentials'): raise RuntimeError('Your Marvel API keys do not work!') character = data['data']['results'][0]['name'].strip().replace(' ', '_') characters.append(character) return characters
[ "def", "get_marvel_characters", "(", "number_of_characters", ",", "marvel_public_key", ",", "marvel_private_key", ")", ":", "timestamp", "=", "str", "(", "int", "(", "time", ".", "time", "(", ")", ")", ")", "hash_value", "=", "hashlib", ".", "md5", "(", "(", "(", "timestamp", "+", "marvel_private_key", ")", "+", "marvel_public_key", ")", ")", ".", "hexdigest", "(", ")", "characters", "=", "[", "]", "for", "_num", "in", "xrange", "(", "number_of_characters", ")", ":", "offset", "=", "random", ".", "randrange", "(", "1", ",", "1478", ")", "limit", "=", "'1'", "url", "=", "(", "(", "(", "(", "(", "(", "(", "(", "(", "'http://gateway.marvel.com:80/v1/public/characters?limit='", "+", "limit", ")", "+", "'&offset='", ")", "+", "str", "(", "offset", ")", ")", "+", "'&apikey='", ")", "+", "marvel_public_key", ")", "+", "'&ts='", ")", "+", "timestamp", ")", "+", "'&hash='", ")", "+", "hash_value", ")", "headers", "=", "{", "'content-type'", ":", "'application/json'", "}", "request", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ")", "data", "=", "json", ".", "loads", "(", "request", ".", "content", ")", "if", "(", "data", ".", "get", "(", "'code'", ")", "==", "'InvalidCredentials'", ")", ":", "raise", "RuntimeError", "(", "'Your Marvel API keys do not work!'", ")", "character", "=", "data", "[", "'data'", "]", "[", "'results'", "]", "[", "0", "]", "[", "'name'", "]", ".", "strip", "(", ")", ".", "replace", "(", "' '", ",", "'_'", ")", "characters", ".", "append", "(", "character", ")", "return", "characters" ]
makes an api call to the marvel comics developer api to get a list of character names .
train
false
21,002
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
get an sqlite3 connection .
train
false
21,003
def _e_step(psamples, W_list, b_list, n_steps=100, eps=1e-05): depth = len(psamples) def mf_iteration(*psamples): new_psamples = [p for p in psamples] for i in xrange(1, depth, 2): new_psamples[i] = hi_given(psamples, i, W_list, b_list) for i in xrange(2, depth, 2): new_psamples[i] = hi_given(psamples, i, W_list, b_list) score = 0.0 for i in xrange(1, depth): score = T.maximum(T.mean(abs((new_psamples[i] - psamples[i]))), score) return (new_psamples, theano.scan_module.until((score < eps))) (new_psamples, updates) = scan(mf_iteration, outputs_info=psamples, n_steps=n_steps) return [x[(-1)] for x in new_psamples]
[ "def", "_e_step", "(", "psamples", ",", "W_list", ",", "b_list", ",", "n_steps", "=", "100", ",", "eps", "=", "1e-05", ")", ":", "depth", "=", "len", "(", "psamples", ")", "def", "mf_iteration", "(", "*", "psamples", ")", ":", "new_psamples", "=", "[", "p", "for", "p", "in", "psamples", "]", "for", "i", "in", "xrange", "(", "1", ",", "depth", ",", "2", ")", ":", "new_psamples", "[", "i", "]", "=", "hi_given", "(", "psamples", ",", "i", ",", "W_list", ",", "b_list", ")", "for", "i", "in", "xrange", "(", "2", ",", "depth", ",", "2", ")", ":", "new_psamples", "[", "i", "]", "=", "hi_given", "(", "psamples", ",", "i", ",", "W_list", ",", "b_list", ")", "score", "=", "0.0", "for", "i", "in", "xrange", "(", "1", ",", "depth", ")", ":", "score", "=", "T", ".", "maximum", "(", "T", ".", "mean", "(", "abs", "(", "(", "new_psamples", "[", "i", "]", "-", "psamples", "[", "i", "]", ")", ")", ")", ",", "score", ")", "return", "(", "new_psamples", ",", "theano", ".", "scan_module", ".", "until", "(", "(", "score", "<", "eps", ")", ")", ")", "(", "new_psamples", ",", "updates", ")", "=", "scan", "(", "mf_iteration", ",", "outputs_info", "=", "psamples", ",", "n_steps", "=", "n_steps", ")", "return", "[", "x", "[", "(", "-", "1", ")", "]", "for", "x", "in", "new_psamples", "]" ]
performs n_steps of mean-field inference parameters psamples : array-like object of theano shared variables state of each layer of the dbm .
train
false
21,005
def plugin_sync(): ret = __salt__['cmd.run']('puppet plugin download') if (not ret): return '' return ret
[ "def", "plugin_sync", "(", ")", ":", "ret", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'puppet plugin download'", ")", "if", "(", "not", "ret", ")", ":", "return", "''", "return", "ret" ]
runs a plugin synch between the puppet master and agent cli example: .
train
false
21,006
def UploadShapefile(service, project_id, shapefile_prefix): suffixes = ['shp', 'dbf', 'prj', 'shx'] files = [] for suffix in suffixes: files.append({'filename': ('%s.%s' % (shapefile_prefix, suffix))}) metadata = {'projectId': project_id, 'name': shapefile_prefix, 'description': 'polygons that were uploaded by a script', 'files': files, 'draftAccessList': 'Map Editors', 'tags': [shapefile_prefix, 'auto_upload', 'kittens']} logging.info('Uploading metadata for %s', shapefile_prefix) response = service.tables().upload(body=metadata).execute() table_id = response['id'] for suffix in suffixes: shapefile = ('%s.%s' % (shapefile_prefix, suffix)) media_body = MediaFileUpload(shapefile, mimetype='application/octet-stream') logging.info('uploading %s', shapefile) response = service.tables().files().insert(id=table_id, filename=shapefile, media_body=media_body).execute() CheckAssetStatus(service, 'tables', table_id) return table_id
[ "def", "UploadShapefile", "(", "service", ",", "project_id", ",", "shapefile_prefix", ")", ":", "suffixes", "=", "[", "'shp'", ",", "'dbf'", ",", "'prj'", ",", "'shx'", "]", "files", "=", "[", "]", "for", "suffix", "in", "suffixes", ":", "files", ".", "append", "(", "{", "'filename'", ":", "(", "'%s.%s'", "%", "(", "shapefile_prefix", ",", "suffix", ")", ")", "}", ")", "metadata", "=", "{", "'projectId'", ":", "project_id", ",", "'name'", ":", "shapefile_prefix", ",", "'description'", ":", "'polygons that were uploaded by a script'", ",", "'files'", ":", "files", ",", "'draftAccessList'", ":", "'Map Editors'", ",", "'tags'", ":", "[", "shapefile_prefix", ",", "'auto_upload'", ",", "'kittens'", "]", "}", "logging", ".", "info", "(", "'Uploading metadata for %s'", ",", "shapefile_prefix", ")", "response", "=", "service", ".", "tables", "(", ")", ".", "upload", "(", "body", "=", "metadata", ")", ".", "execute", "(", ")", "table_id", "=", "response", "[", "'id'", "]", "for", "suffix", "in", "suffixes", ":", "shapefile", "=", "(", "'%s.%s'", "%", "(", "shapefile_prefix", ",", "suffix", ")", ")", "media_body", "=", "MediaFileUpload", "(", "shapefile", ",", "mimetype", "=", "'application/octet-stream'", ")", "logging", ".", "info", "(", "'uploading %s'", ",", "shapefile", ")", "response", "=", "service", ".", "tables", "(", ")", ".", "files", "(", ")", ".", "insert", "(", "id", "=", "table_id", ",", "filename", "=", "shapefile", ",", "media_body", "=", "media_body", ")", ".", "execute", "(", ")", "CheckAssetStatus", "(", "service", ",", "'tables'", ",", "table_id", ")", "return", "table_id" ]
upload a shapefile to a given project .
train
false
21,007
def infer_callfunc(self, context=None): callcontext = context.clone() callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs) callcontext.boundnode = None for callee in self.func.infer(context): if (callee is YES): (yield callee) continue try: if hasattr(callee, 'infer_call_result'): for infered in callee.infer_call_result(self, callcontext): (yield infered) except InferenceError: continue
[ "def", "infer_callfunc", "(", "self", ",", "context", "=", "None", ")", ":", "callcontext", "=", "context", ".", "clone", "(", ")", "callcontext", ".", "callcontext", "=", "CallContext", "(", "self", ".", "args", ",", "self", ".", "starargs", ",", "self", ".", "kwargs", ")", "callcontext", ".", "boundnode", "=", "None", "for", "callee", "in", "self", ".", "func", ".", "infer", "(", "context", ")", ":", "if", "(", "callee", "is", "YES", ")", ":", "(", "yield", "callee", ")", "continue", "try", ":", "if", "hasattr", "(", "callee", ",", "'infer_call_result'", ")", ":", "for", "infered", "in", "callee", ".", "infer_call_result", "(", "self", ",", "callcontext", ")", ":", "(", "yield", "infered", ")", "except", "InferenceError", ":", "continue" ]
infer a callfunc node by trying to guess what the function returns .
train
false
21,008
@contextmanager def in_tempdir(): pwd = os.getcwd() d = mkdtemp() os.chdir(d) (yield d) os.chdir(pwd) rmtree(d)
[ "@", "contextmanager", "def", "in_tempdir", "(", ")", ":", "pwd", "=", "os", ".", "getcwd", "(", ")", "d", "=", "mkdtemp", "(", ")", "os", ".", "chdir", "(", "d", ")", "(", "yield", "d", ")", "os", ".", "chdir", "(", "pwd", ")", "rmtree", "(", "d", ")" ]
a decorator moving the enclosed function inside the tempfile .
train
false
21,009
def to_html_string(text): if text: try: text = unicodify(text) except UnicodeDecodeError as e: return ('Error decoding string: %s' % str(e)) text = text_type(markupsafe.escape(text)) text = text.replace('\n', '<br/>') text = text.replace(' ', '&nbsp;&nbsp;&nbsp;&nbsp;') text = text.replace(' ', '&nbsp;') return text
[ "def", "to_html_string", "(", "text", ")", ":", "if", "text", ":", "try", ":", "text", "=", "unicodify", "(", "text", ")", "except", "UnicodeDecodeError", "as", "e", ":", "return", "(", "'Error decoding string: %s'", "%", "str", "(", "e", ")", ")", "text", "=", "text_type", "(", "markupsafe", ".", "escape", "(", "text", ")", ")", "text", "=", "text", ".", "replace", "(", "'\\n'", ",", "'<br/>'", ")", "text", "=", "text", ".", "replace", "(", "' '", ",", "'&nbsp;&nbsp;&nbsp;&nbsp;'", ")", "text", "=", "text", ".", "replace", "(", "' '", ",", "'&nbsp;'", ")", "return", "text" ]
translates the characters in text to an html string .
train
false
21,012
def get_nonphylogenetic_metric(name): for metric in nonphylogenetic_metrics: if (metric.__name__.lower() == name.lower()): return metric raise AttributeError
[ "def", "get_nonphylogenetic_metric", "(", "name", ")", ":", "for", "metric", "in", "nonphylogenetic_metrics", ":", "if", "(", "metric", ".", "__name__", ".", "lower", "(", ")", "==", "name", ".", "lower", "(", ")", ")", ":", "return", "metric", "raise", "AttributeError" ]
gets metric by name from distance_transform .
train
false
21,014
def cgsnapshot_get_all_by_project(context, project_id, filters=None): return IMPL.cgsnapshot_get_all_by_project(context, project_id, filters)
[ "def", "cgsnapshot_get_all_by_project", "(", "context", ",", "project_id", ",", "filters", "=", "None", ")", ":", "return", "IMPL", ".", "cgsnapshot_get_all_by_project", "(", "context", ",", "project_id", ",", "filters", ")" ]
get all cgsnapshots belonging to a project .
train
false
21,015
def find_languages(string, context=None): allowed_languages = context.get('allowed_languages') common_words = (COMMON_WORDS_STRICT if allowed_languages else COMMON_WORDS) matches = [] for word_match in iter_words(string): word = word_match.value (start, end) = word_match.span lang_word = word.lower() key = 'language' for prefix in subtitle_prefixes: if lang_word.startswith(prefix): lang_word = lang_word[len(prefix):] key = 'subtitle_language' for suffix in subtitle_suffixes: if lang_word.endswith(suffix): lang_word = lang_word[:(len(lang_word) - len(suffix))] key = 'subtitle_language' for prefix in lang_prefixes: if lang_word.startswith(prefix): lang_word = lang_word[len(prefix):] if ((lang_word not in common_words) and (word.lower() not in common_words)): try: lang = babelfish.Language.fromguessit(lang_word) match = (start, end, {'name': key, 'value': lang}) if allowed_languages: if ((lang.name.lower() in allowed_languages) or (lang.alpha2.lower() in allowed_languages) or (lang.alpha3.lower() in allowed_languages)): matches.append(match) elif ((lang == 'mul') or hasattr(lang, 'alpha2')): matches.append(match) except babelfish.Error: pass return matches
[ "def", "find_languages", "(", "string", ",", "context", "=", "None", ")", ":", "allowed_languages", "=", "context", ".", "get", "(", "'allowed_languages'", ")", "common_words", "=", "(", "COMMON_WORDS_STRICT", "if", "allowed_languages", "else", "COMMON_WORDS", ")", "matches", "=", "[", "]", "for", "word_match", "in", "iter_words", "(", "string", ")", ":", "word", "=", "word_match", ".", "value", "(", "start", ",", "end", ")", "=", "word_match", ".", "span", "lang_word", "=", "word", ".", "lower", "(", ")", "key", "=", "'language'", "for", "prefix", "in", "subtitle_prefixes", ":", "if", "lang_word", ".", "startswith", "(", "prefix", ")", ":", "lang_word", "=", "lang_word", "[", "len", "(", "prefix", ")", ":", "]", "key", "=", "'subtitle_language'", "for", "suffix", "in", "subtitle_suffixes", ":", "if", "lang_word", ".", "endswith", "(", "suffix", ")", ":", "lang_word", "=", "lang_word", "[", ":", "(", "len", "(", "lang_word", ")", "-", "len", "(", "suffix", ")", ")", "]", "key", "=", "'subtitle_language'", "for", "prefix", "in", "lang_prefixes", ":", "if", "lang_word", ".", "startswith", "(", "prefix", ")", ":", "lang_word", "=", "lang_word", "[", "len", "(", "prefix", ")", ":", "]", "if", "(", "(", "lang_word", "not", "in", "common_words", ")", "and", "(", "word", ".", "lower", "(", ")", "not", "in", "common_words", ")", ")", ":", "try", ":", "lang", "=", "babelfish", ".", "Language", ".", "fromguessit", "(", "lang_word", ")", "match", "=", "(", "start", ",", "end", ",", "{", "'name'", ":", "key", ",", "'value'", ":", "lang", "}", ")", "if", "allowed_languages", ":", "if", "(", "(", "lang", ".", "name", ".", "lower", "(", ")", "in", "allowed_languages", ")", "or", "(", "lang", ".", "alpha2", ".", "lower", "(", ")", "in", "allowed_languages", ")", "or", "(", "lang", ".", "alpha3", ".", "lower", "(", ")", "in", "allowed_languages", ")", ")", ":", "matches", ".", "append", "(", "match", ")", "elif", "(", "(", "lang", "==", "'mul'", ")", "or", "hasattr", "(", "lang", ",", "'alpha2'", ")", ")", ":", "matches", ".", "append", "(", "match", ")", "except", "babelfish", ".", "Error", ":", "pass", "return", "matches" ]
generate supported languages list from the .
train
false
21,016
def collapsingPumpPolicy(queue, target): bytes = [] while queue: chunk = queue.get() if (chunk is None): break bytes.append(chunk) if bytes: target.dataReceived(''.join(bytes))
[ "def", "collapsingPumpPolicy", "(", "queue", ",", "target", ")", ":", "bytes", "=", "[", "]", "while", "queue", ":", "chunk", "=", "queue", ".", "get", "(", ")", "if", "(", "chunk", "is", "None", ")", ":", "break", "bytes", ".", "append", "(", "chunk", ")", "if", "bytes", ":", "target", ".", "dataReceived", "(", "''", ".", "join", "(", "bytes", ")", ")" ]
l{collapsingpumppolicy} is a policy which collapses all outstanding chunks into a single string and delivers it to the target .
train
false
21,017
def save_logo(logo_url, event_id): upload_path = UPLOAD_PATHS['event']['logo'].format(event_id=event_id) return save_event_image(logo_url, upload_path)
[ "def", "save_logo", "(", "logo_url", ",", "event_id", ")", ":", "upload_path", "=", "UPLOAD_PATHS", "[", "'event'", "]", "[", "'logo'", "]", ".", "format", "(", "event_id", "=", "event_id", ")", "return", "save_event_image", "(", "logo_url", ",", "upload_path", ")" ]
save the logo .
train
false
21,018
def isInKnownHosts(host, pubKey, options): keyType = common.getNS(pubKey)[0] retVal = 0 if ((not options['known-hosts']) and (not os.path.exists(os.path.expanduser('~/.ssh/')))): print 'Creating ~/.ssh directory...' os.mkdir(os.path.expanduser('~/.ssh')) kh_file = (options['known-hosts'] or '~/.ssh/known_hosts') try: known_hosts = open(os.path.expanduser(kh_file)) except IOError: return 0 for line in known_hosts.xreadlines(): split = line.split() if (len(split) < 3): continue (hosts, hostKeyType, encodedKey) = split[:3] if (host not in hosts.split(',')): continue if (hostKeyType != keyType): continue try: decodedKey = base64.decodestring(encodedKey) except: continue if (decodedKey == pubKey): return 1 else: retVal = 2 return retVal
[ "def", "isInKnownHosts", "(", "host", ",", "pubKey", ",", "options", ")", ":", "keyType", "=", "common", ".", "getNS", "(", "pubKey", ")", "[", "0", "]", "retVal", "=", "0", "if", "(", "(", "not", "options", "[", "'known-hosts'", "]", ")", "and", "(", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "expanduser", "(", "'~/.ssh/'", ")", ")", ")", ")", ":", "print", "'Creating ~/.ssh directory...'", "os", ".", "mkdir", "(", "os", ".", "path", ".", "expanduser", "(", "'~/.ssh'", ")", ")", "kh_file", "=", "(", "options", "[", "'known-hosts'", "]", "or", "'~/.ssh/known_hosts'", ")", "try", ":", "known_hosts", "=", "open", "(", "os", ".", "path", ".", "expanduser", "(", "kh_file", ")", ")", "except", "IOError", ":", "return", "0", "for", "line", "in", "known_hosts", ".", "xreadlines", "(", ")", ":", "split", "=", "line", ".", "split", "(", ")", "if", "(", "len", "(", "split", ")", "<", "3", ")", ":", "continue", "(", "hosts", ",", "hostKeyType", ",", "encodedKey", ")", "=", "split", "[", ":", "3", "]", "if", "(", "host", "not", "in", "hosts", ".", "split", "(", "','", ")", ")", ":", "continue", "if", "(", "hostKeyType", "!=", "keyType", ")", ":", "continue", "try", ":", "decodedKey", "=", "base64", ".", "decodestring", "(", "encodedKey", ")", "except", ":", "continue", "if", "(", "decodedKey", "==", "pubKey", ")", ":", "return", "1", "else", ":", "retVal", "=", "2", "return", "retVal" ]
checks to see if host is in the known_hosts file for the user .
train
false
21,020
def vo_reraise(exc, config=None, pos=None, additional=u''): if (config is None): config = {} message = _format_message(str(exc), exc.__class__.__name__, config, pos) if (message.split()[0] == str(exc).split()[0]): message = str(exc) if len(additional): message += (u' ' + additional) exc.args = (message,) raise exc
[ "def", "vo_reraise", "(", "exc", ",", "config", "=", "None", ",", "pos", "=", "None", ",", "additional", "=", "u''", ")", ":", "if", "(", "config", "is", "None", ")", ":", "config", "=", "{", "}", "message", "=", "_format_message", "(", "str", "(", "exc", ")", ",", "exc", ".", "__class__", ".", "__name__", ",", "config", ",", "pos", ")", "if", "(", "message", ".", "split", "(", ")", "[", "0", "]", "==", "str", "(", "exc", ")", ".", "split", "(", ")", "[", "0", "]", ")", ":", "message", "=", "str", "(", "exc", ")", "if", "len", "(", "additional", ")", ":", "message", "+=", "(", "u' '", "+", "additional", ")", "exc", ".", "args", "=", "(", "message", ",", ")", "raise", "exc" ]
raise an exception .
train
false
21,021
@csrf_exempt @require_POST def password_reset(request): limiter = BadRequestRateLimiter() if limiter.is_rate_limit_exceeded(request): AUDIT_LOG.warning('Rate limit exceeded in password_reset') return HttpResponseForbidden() form = PasswordResetFormNoActive(request.POST) if form.is_valid(): form.save(use_https=request.is_secure(), from_email=configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL), request=request, domain_override=request.get_host()) tracker.emit(SETTING_CHANGE_INITIATED, {'setting': 'password', 'old': None, 'new': None, 'user_id': request.user.id}) destroy_oauth_tokens(request.user) else: AUDIT_LOG.info('Bad password_reset user passed in.') limiter.tick_bad_request_counter(request) return JsonResponse({'success': True, 'value': render_to_string('registration/password_reset_done.html', {})})
[ "@", "csrf_exempt", "@", "require_POST", "def", "password_reset", "(", "request", ")", ":", "limiter", "=", "BadRequestRateLimiter", "(", ")", "if", "limiter", ".", "is_rate_limit_exceeded", "(", "request", ")", ":", "AUDIT_LOG", ".", "warning", "(", "'Rate limit exceeded in password_reset'", ")", "return", "HttpResponseForbidden", "(", ")", "form", "=", "PasswordResetFormNoActive", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "form", ".", "save", "(", "use_https", "=", "request", ".", "is_secure", "(", ")", ",", "from_email", "=", "configuration_helpers", ".", "get_value", "(", "'email_from_address'", ",", "settings", ".", "DEFAULT_FROM_EMAIL", ")", ",", "request", "=", "request", ",", "domain_override", "=", "request", ".", "get_host", "(", ")", ")", "tracker", ".", "emit", "(", "SETTING_CHANGE_INITIATED", ",", "{", "'setting'", ":", "'password'", ",", "'old'", ":", "None", ",", "'new'", ":", "None", ",", "'user_id'", ":", "request", ".", "user", ".", "id", "}", ")", "destroy_oauth_tokens", "(", "request", ".", "user", ")", "else", ":", "AUDIT_LOG", ".", "info", "(", "'Bad password_reset user passed in.'", ")", "limiter", ".", "tick_bad_request_counter", "(", "request", ")", "return", "JsonResponse", "(", "{", "'success'", ":", "True", ",", "'value'", ":", "render_to_string", "(", "'registration/password_reset_done.html'", ",", "{", "}", ")", "}", ")" ]
password_reset sends the email with the new password .
train
false
21,023
def change_SUBTITLES_FINDER_FREQUENCY(subtitles_finder_frequency): if ((subtitles_finder_frequency == '') or (subtitles_finder_frequency is None)): subtitles_finder_frequency = 1 sickbeard.SUBTITLES_FINDER_FREQUENCY = try_int(subtitles_finder_frequency, 1)
[ "def", "change_SUBTITLES_FINDER_FREQUENCY", "(", "subtitles_finder_frequency", ")", ":", "if", "(", "(", "subtitles_finder_frequency", "==", "''", ")", "or", "(", "subtitles_finder_frequency", "is", "None", ")", ")", ":", "subtitles_finder_frequency", "=", "1", "sickbeard", ".", "SUBTITLES_FINDER_FREQUENCY", "=", "try_int", "(", "subtitles_finder_frequency", ",", "1", ")" ]
change frequency of subtitle thread .
train
false
21,025
def _check_subject(class_subject, input_subject, raise_error=True): if (input_subject is not None): if (not isinstance(input_subject, string_types)): raise ValueError('subject input must be a string') else: return input_subject elif (class_subject is not None): if (not isinstance(class_subject, string_types)): raise ValueError('Neither subject input nor class subject attribute was a string') else: return class_subject else: if (raise_error is True): raise ValueError('Neither subject input nor class subject attribute was a string') return None
[ "def", "_check_subject", "(", "class_subject", ",", "input_subject", ",", "raise_error", "=", "True", ")", ":", "if", "(", "input_subject", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "input_subject", ",", "string_types", ")", ")", ":", "raise", "ValueError", "(", "'subject input must be a string'", ")", "else", ":", "return", "input_subject", "elif", "(", "class_subject", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "class_subject", ",", "string_types", ")", ")", ":", "raise", "ValueError", "(", "'Neither subject input nor class subject attribute was a string'", ")", "else", ":", "return", "class_subject", "else", ":", "if", "(", "raise_error", "is", "True", ")", ":", "raise", "ValueError", "(", "'Neither subject input nor class subject attribute was a string'", ")", "return", "None" ]
get subject name from class .
train
false
21,027
def p_selection_statement_2(t): pass
[ "def", "p_selection_statement_2", "(", "t", ")", ":", "pass" ]
selection_statement : if lparen expression rparen statement else statement .
train
false
21,028
def data_of(s): return s.container.storage[0]
[ "def", "data_of", "(", "s", ")", ":", "return", "s", ".", "container", ".", "storage", "[", "0", "]" ]
return the raw value of a shared variable .
train
false
21,030
def saveRepository(repository): for setting in repository.preferences: setting.setToDisplay() writeSettingsPrintMessage(repository) for saveListener in repository.saveListenerTable.values(): saveListener()
[ "def", "saveRepository", "(", "repository", ")", ":", "for", "setting", "in", "repository", ".", "preferences", ":", "setting", ".", "setToDisplay", "(", ")", "writeSettingsPrintMessage", "(", "repository", ")", "for", "saveListener", "in", "repository", ".", "saveListenerTable", ".", "values", "(", ")", ":", "saveListener", "(", ")" ]
set the entities to the dialog then write them .
train
false
21,033
@pytest.mark.parametrize('text, expected', [('foo|bar', 'fo|bar'), ('foobar|', 'fooba|'), ('|foobar', '|foobar'), ('f<oo>bar', 'f|bar')]) def test_rl_backward_delete_char(text, expected, lineedit, bridge): lineedit.set_aug_text(text) bridge.rl_backward_delete_char() assert (lineedit.aug_text() == expected)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'text, expected'", ",", "[", "(", "'foo|bar'", ",", "'fo|bar'", ")", ",", "(", "'foobar|'", ",", "'fooba|'", ")", ",", "(", "'|foobar'", ",", "'|foobar'", ")", ",", "(", "'f<oo>bar'", ",", "'f|bar'", ")", "]", ")", "def", "test_rl_backward_delete_char", "(", "text", ",", "expected", ",", "lineedit", ",", "bridge", ")", ":", "lineedit", ".", "set_aug_text", "(", "text", ")", "bridge", ".", "rl_backward_delete_char", "(", ")", "assert", "(", "lineedit", ".", "aug_text", "(", ")", "==", "expected", ")" ]
test rl_backward_delete_char .
train
false
21,034
@require_POST @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def list_email_content(request, course_id): course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) task_type = 'bulk_course_email' emails = lms.djangoapps.instructor_task.api.get_instructor_task_history(course_id, task_type=task_type) response_payload = {'emails': map(extract_email_features, emails)} return JsonResponse(response_payload)
[ "@", "require_POST", "@", "ensure_csrf_cookie", "@", "cache_control", "(", "no_cache", "=", "True", ",", "no_store", "=", "True", ",", "must_revalidate", "=", "True", ")", "@", "require_level", "(", "'staff'", ")", "def", "list_email_content", "(", "request", ",", "course_id", ")", ":", "course_id", "=", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", "task_type", "=", "'bulk_course_email'", "emails", "=", "lms", ".", "djangoapps", ".", "instructor_task", ".", "api", ".", "get_instructor_task_history", "(", "course_id", ",", "task_type", "=", "task_type", ")", "response_payload", "=", "{", "'emails'", ":", "map", "(", "extract_email_features", ",", "emails", ")", "}", "return", "JsonResponse", "(", "response_payload", ")" ]
list the content of bulk emails sent .
train
false
21,036
def read_docstrings(lang): modname = ('turtle_docstringdict_%(language)s' % {'language': lang.lower()}) module = __import__(modname) docsdict = module.docsdict for key in docsdict: try: eval(key).__doc__ = docsdict[key] except Exception: print ('Bad docstring-entry: %s' % key)
[ "def", "read_docstrings", "(", "lang", ")", ":", "modname", "=", "(", "'turtle_docstringdict_%(language)s'", "%", "{", "'language'", ":", "lang", ".", "lower", "(", ")", "}", ")", "module", "=", "__import__", "(", "modname", ")", "docsdict", "=", "module", ".", "docsdict", "for", "key", "in", "docsdict", ":", "try", ":", "eval", "(", "key", ")", ".", "__doc__", "=", "docsdict", "[", "key", "]", "except", "Exception", ":", "print", "(", "'Bad docstring-entry: %s'", "%", "key", ")" ]
read in docstrings from lang-specific docstring dictionary .
train
false
21,038
def autodelegate(prefix=''): def internal(self, arg): func = (prefix + arg) if hasattr(self, func): return getattr(self, func)() else: return notfound() return internal
[ "def", "autodelegate", "(", "prefix", "=", "''", ")", ":", "def", "internal", "(", "self", ",", "arg", ")", ":", "func", "=", "(", "prefix", "+", "arg", ")", "if", "hasattr", "(", "self", ",", "func", ")", ":", "return", "getattr", "(", "self", ",", "func", ")", "(", ")", "else", ":", "return", "notfound", "(", ")", "return", "internal" ]
returns a method that takes one argument and calls the method named prefix+arg .
train
false