id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
38,456
def _isclose(a, b, rel_tol=1e-09, abs_tol=0.0): if (a == b): return True if ((rel_tol < 0.0) or (abs_tol < 0.0)): raise ValueError('error tolerances must be non-negative') if (isinf(abs(a)) or isinf(abs(b))): return False diff = fabs((b - a)) return (((diff <= fabs((rel_tol * b))) or (diff <= fabs((rel_tol * a)))) or (diff <= abs_tol))
[ "def", "_isclose", "(", "a", ",", "b", ",", "rel_tol", "=", "1e-09", ",", "abs_tol", "=", "0.0", ")", ":", "if", "(", "a", "==", "b", ")", ":", "return", "True", "if", "(", "(", "rel_tol", "<", "0.0", ")", "or", "(", "abs_tol", "<", "0.0", ")", ")", ":", "raise", "ValueError", "(", "'error tolerances must be non-negative'", ")", "if", "(", "isinf", "(", "abs", "(", "a", ")", ")", "or", "isinf", "(", "abs", "(", "b", ")", ")", ")", ":", "return", "False", "diff", "=", "fabs", "(", "(", "b", "-", "a", ")", ")", "return", "(", "(", "(", "diff", "<=", "fabs", "(", "(", "rel_tol", "*", "b", ")", ")", ")", "or", "(", "diff", "<=", "fabs", "(", "(", "rel_tol", "*", "a", ")", ")", ")", ")", "or", "(", "diff", "<=", "abs_tol", ")", ")" ]
measures whether two floats are "close" to each other .
train
false
38,457
def _tag(short_name, local):
    """Get XML Clark notation {uri}local, resolving *short_name* through NAMESPACES."""
    uri = NAMESPACES[short_name]
    return '{%s}%s' % (uri, local)
[ "def", "_tag", "(", "short_name", ",", "local", ")", ":", "return", "(", "'{%s}%s'", "%", "(", "NAMESPACES", "[", "short_name", "]", ",", "local", ")", ")" ]
get xml clark notation {uri}local .
train
false
38,460
def convert_bed_coords_to_gff(interval):
    """Convert an interval object's coordinates from BED (0-based) to GFF (1-based), in place.

    Handles GenomicInterval objects, GFFFeature objects (recursing into
    their sub-intervals), and raw [start, end, ...] lists.
    """
    if isinstance(interval, GenomicInterval):
        interval.start += 1
        # A GFFFeature aggregates sub-intervals; shift each of those too.
        if isinstance(interval, GFFFeature):
            for child in interval.intervals:
                convert_bed_coords_to_gff(child)
    elif isinstance(interval, list):
        interval[0] += 1
    return interval
[ "def", "convert_bed_coords_to_gff", "(", "interval", ")", ":", "if", "isinstance", "(", "interval", ",", "GenomicInterval", ")", ":", "interval", ".", "start", "+=", "1", "if", "isinstance", "(", "interval", ",", "GFFFeature", ")", ":", "for", "subinterval", "in", "interval", ".", "intervals", ":", "convert_bed_coords_to_gff", "(", "subinterval", ")", "elif", "isinstance", "(", "interval", ",", "list", ")", ":", "interval", "[", "0", "]", "+=", "1", "return", "interval" ]
converts an interval objects coordinates from bed format to gff format .
train
false
38,461
def RawMoment(xs, k):
    """Compute the kth raw moment of xs: mean of x**k over the sequence."""
    total = 0
    for value in xs:
        total += value ** k
    return total / len(xs)
[ "def", "RawMoment", "(", "xs", ",", "k", ")", ":", "return", "(", "sum", "(", "(", "(", "x", "**", "k", ")", "for", "x", "in", "xs", ")", ")", "/", "len", "(", "xs", ")", ")" ]
computes the kth raw moment of xs .
train
false
38,462
def browse_other():
    """Prompt for a branch and inspect content at that point in time."""
    branch = choose_ref(N_(u'Browse Commits...'), N_(u'Browse'))
    if not branch:
        # User cancelled the ref chooser; nothing to browse.
        return
    BrowseDialog.browse(branch)
[ "def", "browse_other", "(", ")", ":", "branch", "=", "choose_ref", "(", "N_", "(", "u'Browse Commits...'", ")", ",", "N_", "(", "u'Browse'", ")", ")", "if", "(", "not", "branch", ")", ":", "return", "BrowseDialog", ".", "browse", "(", "branch", ")" ]
prompt for a branch and inspect content at that point in time .
train
false
38,463
@allow_cross_site_request
def manifest(request, uuid):
    """Returns the "mini" manifest for a langpack.

    :param request: incoming HTTP request.
    :param uuid: string UUID identifying the LangPack.
    :returns: HttpResponse with the minifest contents, served through
        django's conditional-response machinery (Last-Modified / ETag).
    :raises Http404: if the uuid is malformed, the langpack does not exist,
        or it is inactive and the user lacks the 'LangPacks' permission.
    """
    try:
        uuid_hex = UUID(uuid).hex
    except ValueError:
        # Malformed UUID in the URL -> not found rather than a server error.
        raise Http404
    langpack = get_object_or_404(LangPack, pk=uuid_hex)
    if (langpack.active or action_allowed(request, 'LangPacks', '%')):
        (manifest_contents, langpack_etag) = langpack.get_minifest_contents()

        # Wrap the actual response in @condition so conditional GETs
        # (If-Modified-Since / If-None-Match) can short-circuit.
        @condition(last_modified_func=(lambda request: langpack.modified), etag_func=(lambda request: langpack_etag))
        def _inner_view(request):
            return HttpResponse(manifest_contents, content_type=MANIFEST_CONTENT_TYPE)
        return _inner_view(request)
    # Inactive langpack and no reviewer permission: hide its existence.
    raise Http404
[ "@", "allow_cross_site_request", "def", "manifest", "(", "request", ",", "uuid", ")", ":", "try", ":", "uuid_hex", "=", "UUID", "(", "uuid", ")", ".", "hex", "except", "ValueError", ":", "raise", "Http404", "langpack", "=", "get_object_or_404", "(", "LangPack", ",", "pk", "=", "uuid_hex", ")", "if", "(", "langpack", ".", "active", "or", "action_allowed", "(", "request", ",", "'LangPacks'", ",", "'%'", ")", ")", ":", "(", "manifest_contents", ",", "langpack_etag", ")", "=", "langpack", ".", "get_minifest_contents", "(", ")", "@", "condition", "(", "last_modified_func", "=", "(", "lambda", "request", ":", "langpack", ".", "modified", ")", ",", "etag_func", "=", "(", "lambda", "request", ":", "langpack_etag", ")", ")", "def", "_inner_view", "(", "request", ")", ":", "return", "HttpResponse", "(", "manifest_contents", ",", "content_type", "=", "MANIFEST_CONTENT_TYPE", ")", "return", "_inner_view", "(", "request", ")", "raise", "Http404" ]
returns the "mini" manifest for a langpack .
train
false
38,464
def test_eip150_opcode_gascost():
    """Check that base gas costs plus their supplemental adjustments equal the EIP-150 values."""
    expectations = (
        ('EXTCODESIZE', opcodes.EXTCODELOAD_SUPPLEMENTAL_GAS, 700),
        ('EXTCODECOPY', opcodes.EXTCODELOAD_SUPPLEMENTAL_GAS, 700),
        ('BALANCE', opcodes.BALANCE_SUPPLEMENTAL_GAS, 400),
        ('SLOAD', opcodes.SLOAD_SUPPLEMENTAL_GAS, 200),
        ('CALL', opcodes.CALL_SUPPLEMENTAL_GAS, 700),
        ('DELEGATECALL', opcodes.CALL_SUPPLEMENTAL_GAS, 700),
        ('CALLCODE', opcodes.CALL_SUPPLEMENTAL_GAS, 700),
        ('SUICIDE', opcodes.SUICIDE_SUPPLEMENTAL_GAS, 5000),
    )
    for mnemonic, supplement, expected in expectations:
        assert (opcode_gas[mnemonic] + supplement) == expected
[ "def", "test_eip150_opcode_gascost", "(", ")", ":", "assert", "(", "(", "opcode_gas", "[", "'EXTCODESIZE'", "]", "+", "opcodes", ".", "EXTCODELOAD_SUPPLEMENTAL_GAS", ")", "==", "700", ")", "assert", "(", "(", "opcode_gas", "[", "'EXTCODECOPY'", "]", "+", "opcodes", ".", "EXTCODELOAD_SUPPLEMENTAL_GAS", ")", "==", "700", ")", "assert", "(", "(", "opcode_gas", "[", "'BALANCE'", "]", "+", "opcodes", ".", "BALANCE_SUPPLEMENTAL_GAS", ")", "==", "400", ")", "assert", "(", "(", "opcode_gas", "[", "'SLOAD'", "]", "+", "opcodes", ".", "SLOAD_SUPPLEMENTAL_GAS", ")", "==", "200", ")", "assert", "(", "(", "opcode_gas", "[", "'CALL'", "]", "+", "opcodes", ".", "CALL_SUPPLEMENTAL_GAS", ")", "==", "700", ")", "assert", "(", "(", "opcode_gas", "[", "'DELEGATECALL'", "]", "+", "opcodes", ".", "CALL_SUPPLEMENTAL_GAS", ")", "==", "700", ")", "assert", "(", "(", "opcode_gas", "[", "'CALLCODE'", "]", "+", "opcodes", ".", "CALL_SUPPLEMENTAL_GAS", ")", "==", "700", ")", "assert", "(", "(", "opcode_gas", "[", "'SUICIDE'", "]", "+", "opcodes", ".", "SUICIDE_SUPPLEMENTAL_GAS", ")", "==", "5000", ")" ]
ensure opcode gas costs plus their supplemental adjustments match the values specified in eip-150 .
train
false
38,465
def _custom_view(filename):
    """See if there is a custom view for a page and install it on the response.

    Searches each configured template (in reverse order, so later entries
    win) for <template>/views/<filename>.html and, if found, sets
    response.view to the opened file.

    :param filename: view name without the .html extension.
    :raises HTTP: 404 if a matching view exists but cannot be opened.
    """
    templates = settings.get_template()
    if (templates != 'default'):
        folder = request.folder
        template_location = settings.get_template_location()
        # Accept a single template name as well as a list/tuple of them.
        if (not isinstance(templates, (tuple, list))):
            templates = (templates,)
        for template in templates[::(-1)]:
            view = os.path.join(folder, template_location, 'templates', template, 'views', ('%s.html' % filename))
            if os.path.exists(view):
                try:
                    # response.view accepts an open file object.
                    response.view = open(view, 'rb')
                except IOError:
                    from gluon.http import HTTP
                    raise HTTP('404', ('Unable to open Custom View: %s' % view))
                else:
                    # First (highest-priority) match wins.
                    break
[ "def", "_custom_view", "(", "filename", ")", ":", "templates", "=", "settings", ".", "get_template", "(", ")", "if", "(", "templates", "!=", "'default'", ")", ":", "folder", "=", "request", ".", "folder", "template_location", "=", "settings", ".", "get_template_location", "(", ")", "if", "(", "not", "isinstance", "(", "templates", ",", "(", "tuple", ",", "list", ")", ")", ")", ":", "templates", "=", "(", "templates", ",", ")", "for", "template", "in", "templates", "[", ":", ":", "(", "-", "1", ")", "]", ":", "view", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "template_location", ",", "'templates'", ",", "template", ",", "'views'", ",", "(", "'%s.html'", "%", "filename", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "view", ")", ":", "try", ":", "response", ".", "view", "=", "open", "(", "view", ",", "'rb'", ")", "except", "IOError", ":", "from", "gluon", ".", "http", "import", "HTTP", "raise", "HTTP", "(", "'404'", ",", "(", "'Unable to open Custom View: %s'", "%", "view", ")", ")", "else", ":", "break" ]
see if there is a custom view for a page and use it .
train
false
38,466
@pytest.fixture()
def admin_client(admin_user):
    """A Django test client logged in as an admin user."""
    from django.test.client import Client
    logged_in = Client()
    logged_in.login(username=admin_user.email, password=u'password')
    return logged_in
[ "@", "pytest", ".", "fixture", "(", ")", "def", "admin_client", "(", "admin_user", ")", ":", "from", "django", ".", "test", ".", "client", "import", "Client", "client", "=", "Client", "(", ")", "client", ".", "login", "(", "username", "=", "admin_user", ".", "email", ",", "password", "=", "u'password'", ")", "return", "client" ]
a django test client logged in as an admin user .
train
false
38,468
def bool_attrib(element, attr):
    """Cast an XML attribute that should be a boolean to its Python equivalent.

    Missing or empty attributes and the literal strings 'false', 'f' and
    '0' map to False; any other value maps to True.
    """
    value = element.get(attr)
    if not value:
        return False
    return value not in ('false', 'f', '0')
[ "def", "bool_attrib", "(", "element", ",", "attr", ")", ":", "value", "=", "element", ".", "get", "(", "attr", ")", "if", "(", "(", "not", "value", ")", "or", "(", "value", "in", "(", "'false'", ",", "'f'", ",", "'0'", ")", ")", ")", ":", "return", "False", "return", "True" ]
cast an xml attribute that should be a boolean to its python equivalent .
train
false
38,470
def lsof(name):
    """Retrieve the lsof information for the given process name.

    :returns: a two-element list: [sanitized name, raw 'lsof -c' output].
    """
    sanitize_name = str(name)
    command = 'lsof -c ' + sanitize_name
    lsof_infos = __salt__['cmd.run'](command)
    return [sanitize_name, lsof_infos]
[ "def", "lsof", "(", "name", ")", ":", "sanitize_name", "=", "str", "(", "name", ")", "lsof_infos", "=", "__salt__", "[", "'cmd.run'", "]", "(", "(", "'lsof -c '", "+", "sanitize_name", ")", ")", "ret", "=", "[", "]", "ret", ".", "extend", "(", "[", "sanitize_name", ",", "lsof_infos", "]", ")", "return", "ret" ]
retrieve the lsof informations of the given process name .
train
false
38,471
def backward_substitution(upper_triangle, variable, constant, K):
    """Perform backward substitution on an upper-triangular linear system.

    Solves upper_triangle * x = constant, writing the solution into (and
    returning) *variable*, an n x 1 column given as a list of one-element
    rows.

    :param upper_triangle: n x n upper-triangular coefficient matrix (read only).
    :param variable: n x 1 column filled with the solution (mutated in place).
    :param constant: n x 1 right-hand-side column.
    :param K: coefficient domain; K.zero is used as the additive identity.
    :returns: variable, holding the solution.
    """
    # The original deep-copied upper_triangle and kept an unused `result`
    # list; the matrix is only read here, so both were dead weight.
    nrow = len(upper_triangle)
    for i in reversed(range(nrow)):
        # Accumulate the contribution of the already-solved unknowns.
        acc = K.zero
        for j in reversed(range(i + 1, nrow)):
            acc += upper_triangle[i][j] * variable[j][0]
        variable[i][0] = (constant[i][0] - acc) / upper_triangle[i][i]
    return variable
[ "def", "backward_substitution", "(", "upper_triangle", ",", "variable", ",", "constant", ",", "K", ")", ":", "copy_upper_triangle", "=", "copy", ".", "deepcopy", "(", "upper_triangle", ")", "nrow", "=", "len", "(", "copy_upper_triangle", ")", "result", "=", "[", "]", "for", "i", "in", "reversed", "(", "range", "(", "nrow", ")", ")", ":", "a", "=", "K", ".", "zero", "for", "j", "in", "reversed", "(", "range", "(", "(", "i", "+", "1", ")", ",", "nrow", ")", ")", ":", "a", "+=", "(", "copy_upper_triangle", "[", "i", "]", "[", "j", "]", "*", "variable", "[", "j", "]", "[", "0", "]", ")", "variable", "[", "i", "]", "[", "0", "]", "=", "(", "(", "constant", "[", "i", "]", "[", "0", "]", "-", "a", ")", "/", "copy_upper_triangle", "[", "i", "]", "[", "i", "]", ")", "return", "variable" ]
performs backward substitution given an upper triangular matrix .
train
false
38,472
def check_fields(context, fields):
    """Check that datastore field definitions have valid types and names.

    :raises ValidationError: on the first field with an invalid type, or
        with a valid/absent type but an invalid name.
    """
    for field in fields:
        field_type = field.get('type')
        if field_type and (not _is_valid_pg_type(context, field_type)):
            message = u'"{0}" is not a valid field type'.format(field_type)
            raise ValidationError({'fields': [message]})
        elif not _is_valid_field_name(field['id']):
            message = u'"{0}" is not a valid field name'.format(field['id'])
            raise ValidationError({'fields': [message]})
[ "def", "check_fields", "(", "context", ",", "fields", ")", ":", "for", "field", "in", "fields", ":", "if", "(", "field", ".", "get", "(", "'type'", ")", "and", "(", "not", "_is_valid_pg_type", "(", "context", ",", "field", "[", "'type'", "]", ")", ")", ")", ":", "raise", "ValidationError", "(", "{", "'fields'", ":", "[", "u'\"{0}\" is not a valid field type'", ".", "format", "(", "field", "[", "'type'", "]", ")", "]", "}", ")", "elif", "(", "not", "_is_valid_field_name", "(", "field", "[", "'id'", "]", ")", ")", ":", "raise", "ValidationError", "(", "{", "'fields'", ":", "[", "u'\"{0}\" is not a valid field name'", ".", "format", "(", "field", "[", "'id'", "]", ")", "]", "}", ")" ]
check if field types are valid .
train
false
38,473
def assert_discovered_state(case, deployer, expected_discovered_datasets, persistent_state=PersistentState()):
    """Assert that the datasets discovered by *deployer* match the expected ones.

    :param case: the TestCase used for resolution and assertion.
    :param deployer: the BlockDeviceDeployer under test.
    :param expected_discovered_datasets: iterable of datasets expected in
        the discovered local state.
    :param persistent_state: passed through to discover_state.
        NOTE(review): this default is evaluated once at definition time and
        shared across calls -- fine only if PersistentState is immutable;
        confirm.
    """
    # Start from a blank previous NodeState for this deployer's node.
    previous_state = NodeState(uuid=deployer.node_uuid, hostname=deployer.hostname, applications=None, manifestations=None, paths=None, devices=None)
    discovering = deployer.discover_state(DeploymentState(nodes={previous_state}), persistent_state=persistent_state)
    # successResultOf resolves the Deferred returned by discover_state.
    local_state = case.successResultOf(discovering)
    case.assertEqual(local_state, BlockDeviceDeployerLocalState(hostname=deployer.hostname, node_uuid=deployer.node_uuid, datasets=dataset_map_from_iterable(expected_discovered_datasets)))
[ "def", "assert_discovered_state", "(", "case", ",", "deployer", ",", "expected_discovered_datasets", ",", "persistent_state", "=", "PersistentState", "(", ")", ")", ":", "previous_state", "=", "NodeState", "(", "uuid", "=", "deployer", ".", "node_uuid", ",", "hostname", "=", "deployer", ".", "hostname", ",", "applications", "=", "None", ",", "manifestations", "=", "None", ",", "paths", "=", "None", ",", "devices", "=", "None", ")", "discovering", "=", "deployer", ".", "discover_state", "(", "DeploymentState", "(", "nodes", "=", "{", "previous_state", "}", ")", ",", "persistent_state", "=", "persistent_state", ")", "local_state", "=", "case", ".", "successResultOf", "(", "discovering", ")", "case", ".", "assertEqual", "(", "local_state", ",", "BlockDeviceDeployerLocalState", "(", "hostname", "=", "deployer", ".", "hostname", ",", "node_uuid", "=", "deployer", ".", "node_uuid", ",", "datasets", "=", "dataset_map_from_iterable", "(", "expected_discovered_datasets", ")", ")", ")" ]
assert that the datasets on the state object returned by the deployer match the expected datasets .
train
false
38,474
def app_view_factory(qs):
    """Build a view callable: app_view with *qs* bound as a keyword argument.

    The (lazy) queryset is passed through unevaluated so it is only
    evaluated at request time, not at server start-up.
    """
    return functools.partial(app_view, qs=qs)
[ "def", "app_view_factory", "(", "qs", ")", ":", "return", "functools", ".", "partial", "(", "app_view", ",", "qs", "=", "qs", ")" ]
don't evaluate qs or the locale will get stuck on whatever the server starts with .
train
false
38,475
def min_score(scores):
    """Return the (name, score) pairs that hold the minimum score.

    Relies on sorted_score() ordering entries best-first, so the last
    entry carries the minimum score.
    """
    ranked = sorted_score(scores)
    lowest = ranked[len(ranked) - 1][1]
    return [(entry[0], entry[1]) for entry in ranked if entry[1] == lowest]
[ "def", "min_score", "(", "scores", ")", ":", "lst", "=", "sorted_score", "(", "scores", ")", "min_score", "=", "lst", "[", "(", "len", "(", "lst", ")", "-", "1", ")", "]", "[", "1", "]", "return", "[", "(", "i", "[", "0", "]", ",", "i", "[", "1", "]", ")", "for", "i", "in", "lst", "if", "(", "i", "[", "1", "]", "==", "min_score", ")", "]" ]
the min score and the person's name .
train
false
38,476
def memoized_with_request(request_func, request_index=0):
    """Decorator for caching functions which receive a request argument.

    Memoized functions with a request argument are memoized only during
    the rendering of a single view, because the request argument is a new
    request instance on each view.  This wrapper replaces the positional
    request argument (at *request_index*) with request_func(request)
    before memoizing, so the cache key is derived from that stable value
    rather than the per-view request object.

    :param request_func: callable mapping a request to the value to cache on.
    :param request_index: position of the request in the wrapped call's args.
    """
    def wrapper(func):
        memoized_func = memoized(func)

        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            # Swap the raw request for its derived, cache-friendly value.
            args = list(args)
            request = args.pop(request_index)
            args.insert(request_index, request_func(request))
            return memoized_func(*args, **kwargs)
        return wrapped
    return wrapper
[ "def", "memoized_with_request", "(", "request_func", ",", "request_index", "=", "0", ")", ":", "def", "wrapper", "(", "func", ")", ":", "memoized_func", "=", "memoized", "(", "func", ")", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "args", "=", "list", "(", "args", ")", "request", "=", "args", ".", "pop", "(", "request_index", ")", "args", ".", "insert", "(", "request_index", ",", "request_func", "(", "request", ")", ")", "return", "memoized_func", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped", "return", "wrapper" ]
decorator for caching functions which receive a request argument memoized functions with a request argument are memoized only during the rendering of a single view because the request argument is a new request instance on each view .
train
false
38,477
def image_clone(call=None, kwargs=None):
    """Clones an existing image.

    Designed to be invoked as a salt-cloud function (-f/--function).

    kwargs:
        name       -- name for the new, cloned image (required).
        image_id   -- id of the image to clone (takes precedence).
        image_name -- name of the image to clone (used to look up the id).

    :returns: dict describing the clone action and the XML-RPC result.
    :raises SaltCloudSystemExit: on wrong invocation or missing arguments.
    """
    if (call != 'function'):
        raise SaltCloudSystemExit('The image_clone function must be called with -f or --function.')
    if (kwargs is None):
        kwargs = {}
    name = kwargs.get('name', None)
    image_id = kwargs.get('image_id', None)
    image_name = kwargs.get('image_name', None)
    if (name is None):
        raise SaltCloudSystemExit("The image_clone function requires a 'name' to be provided.")
    if image_id:
        if image_name:
            log.warning("Both the 'image_id' and 'image_name' arguments were provided. 'image_id' will take precedence.")
    elif image_name:
        # Resolve the id from the human-readable image name.
        image_id = get_image_id(kwargs={'name': image_name})
    else:
        raise SaltCloudSystemExit("The image_clone function requires either an 'image_id' or an 'image_name' to be provided.")
    (server, user, password) = _get_xml_rpc()
    auth = ':'.join([user, password])
    response = server.one.image.clone(auth, int(image_id), name)
    # Response layout used below: [success flag, new image id, error code].
    data = {'action': 'image.clone', 'cloned': response[0], 'cloned_image_id': response[1], 'cloned_image_name': name, 'error_code': response[2]}
    return data
[ "def", "image_clone", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The image_clone function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "name", "=", "kwargs", ".", "get", "(", "'name'", ",", "None", ")", "image_id", "=", "kwargs", ".", "get", "(", "'image_id'", ",", "None", ")", "image_name", "=", "kwargs", ".", "get", "(", "'image_name'", ",", "None", ")", "if", "(", "name", "is", "None", ")", ":", "raise", "SaltCloudSystemExit", "(", "\"The image_clone function requires a 'name' to be provided.\"", ")", "if", "image_id", ":", "if", "image_name", ":", "log", ".", "warning", "(", "\"Both the 'image_id' and 'image_name' arguments were provided. 'image_id' will take precedence.\"", ")", "elif", "image_name", ":", "image_id", "=", "get_image_id", "(", "kwargs", "=", "{", "'name'", ":", "image_name", "}", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "\"The image_clone function requires either an 'image_id' or an 'image_name' to be provided.\"", ")", "(", "server", ",", "user", ",", "password", ")", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "response", "=", "server", ".", "one", ".", "image", ".", "clone", "(", "auth", ",", "int", "(", "image_id", ")", ",", "name", ")", "data", "=", "{", "'action'", ":", "'image.clone'", ",", "'cloned'", ":", "response", "[", "0", "]", ",", "'cloned_image_id'", ":", "response", "[", "1", "]", ",", "'cloned_image_name'", ":", "name", ",", "'error_code'", ":", "response", "[", "2", "]", "}", "return", "data" ]
clones an existing image .
train
true
38,480
def accepts_kwarg(func, kwarg):
    """Determine whether the callable *func* has a signature that accepts the keyword argument *kwarg*."""
    if sys.version_info >= (3, 3):
        # Probe the signature: bind_partial raises TypeError on rejection.
        try:
            inspect.signature(func).bind_partial(**{kwarg: None})
        except TypeError:
            return False
        return True
    # Legacy path for interpreters without inspect.signature.
    argspec = inspect.getargspec(func)
    return (kwarg in argspec.args) or (argspec.keywords is not None)
[ "def", "accepts_kwarg", "(", "func", ",", "kwarg", ")", ":", "if", "(", "sys", ".", "version_info", ">=", "(", "3", ",", "3", ")", ")", ":", "signature", "=", "inspect", ".", "signature", "(", "func", ")", "try", ":", "signature", ".", "bind_partial", "(", "**", "{", "kwarg", ":", "None", "}", ")", "return", "True", "except", "TypeError", ":", "return", "False", "else", ":", "argspec", "=", "inspect", ".", "getargspec", "(", "func", ")", "return", "(", "(", "kwarg", "in", "argspec", ".", "args", ")", "or", "(", "argspec", ".", "keywords", "is", "not", "None", ")", ")" ]
determine whether the callable func has a signature that accepts the keyword argument kwarg .
train
false
38,484
def with_backing_file(method):
    """A decorator to perform a lock-read-call-write-unlock cycle around *method*.

    The wrapped method re-reads state from the backing file before the call
    and writes it back afterwards (even on exception), all under the
    backing lock.
    """
    import functools

    # functools.wraps supersedes the original manual __name__/__doc__ copy:
    # it also carries __module__, __qualname__, __dict__ and __wrapped__.
    @functools.wraps(method)
    @with_backing_lock
    def wrapped_method(self, *args, **dargs):
        self._read_from_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            # Persist state even if the method raised.
            self._write_to_backing_file()

    return wrapped_method
[ "def", "with_backing_file", "(", "method", ")", ":", "@", "with_backing_lock", "def", "wrapped_method", "(", "self", ",", "*", "args", ",", "**", "dargs", ")", ":", "self", ".", "_read_from_backing_file", "(", ")", "try", ":", "return", "method", "(", "self", ",", "*", "args", ",", "**", "dargs", ")", "finally", ":", "self", ".", "_write_to_backing_file", "(", ")", "wrapped_method", ".", "__name__", "=", "method", ".", "__name__", "wrapped_method", ".", "__doc__", "=", "method", ".", "__doc__", "return", "wrapped_method" ]
a decorator to perform a lock-read-*-write-unlock cycle .
train
false
38,485
def is_visible(df, doc):
    """True if the field *df* should be shown when printing *doc*.

    Layout-only fields, fields listed in doc.hide_in_print_layout, fields
    above the user's permission level, and print-hidden fields are all
    invisible.
    """
    if df.fieldtype in (u'Section Break', u'Column Break', u'Button'):
        return False
    if hasattr(doc, u'hide_in_print_layout') and (df.fieldname in doc.hide_in_print_layout):
        return False
    if (df.permlevel > 0) and (not doc.has_permlevel_access_to(df.fieldname, df)):
        return False
    return not doc.is_print_hide(df.fieldname, df)
[ "def", "is_visible", "(", "df", ",", "doc", ")", ":", "if", "(", "df", ".", "fieldtype", "in", "(", "u'Section Break'", ",", "u'Column Break'", ",", "u'Button'", ")", ")", ":", "return", "False", "if", "hasattr", "(", "doc", ",", "u'hide_in_print_layout'", ")", ":", "if", "(", "df", ".", "fieldname", "in", "doc", ".", "hide_in_print_layout", ")", ":", "return", "False", "if", "(", "(", "df", ".", "permlevel", ">", "0", ")", "and", "(", "not", "doc", ".", "has_permlevel_access_to", "(", "df", ".", "fieldname", ",", "df", ")", ")", ")", ":", "return", "False", "return", "(", "not", "doc", ".", "is_print_hide", "(", "df", ".", "fieldname", ",", "df", ")", ")" ]
true if the field df is visible to regular users .
train
false
38,486
def getNewRepository():
    """Get the repository constructor result: a fresh ExportRepository instance."""
    return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
38,488
def filename_arg(path, optname):
    """argparse type validator for filename arguments.

    :param path: candidate path supplied on the command line.
    :param optname: option name, used in the error message.
    :returns: path unchanged when it is not a directory.
    :raises UsageError: when path refers to a directory.
    """
    if not os.path.isdir(path):
        return path
    raise UsageError('{0} must be a filename, given: {1}'.format(optname, path))
[ "def", "filename_arg", "(", "path", ",", "optname", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "raise", "UsageError", "(", "'{0} must be a filename, given: {1}'", ".", "format", "(", "optname", ",", "path", ")", ")", "return", "path" ]
argparse type validator for filename arguments .
train
false
38,489
def linked(prefix, ignore_channels=False):
    """Return the set of canonical names of linked packages in *prefix*.

    :param prefix: environment prefix to inspect.
    :param ignore_channels: forwarded to linked_data().
    """
    data = linked_data(prefix, ignore_channels=ignore_channels)
    return set(data)
[ "def", "linked", "(", "prefix", ",", "ignore_channels", "=", "False", ")", ":", "return", "set", "(", "linked_data", "(", "prefix", ",", "ignore_channels", "=", "ignore_channels", ")", ".", "keys", "(", ")", ")" ]
return the set of canonical names of linked packages in prefix .
train
false
38,491
def new_reporter(source_path, settings):
    """Return a new Reporter object configured from *settings*.

    :param source_path: path to the document source, used in messages.
    :param settings: settings object providing report_level, halt_level,
        warning_stream, debug and error-encoding options.
    """
    reporter = Reporter(source_path, settings.report_level, settings.halt_level, stream=settings.warning_stream, debug=settings.debug, encoding=settings.error_encoding, error_handler=settings.error_encoding_error_handler)
    return reporter
[ "def", "new_reporter", "(", "source_path", ",", "settings", ")", ":", "reporter", "=", "Reporter", "(", "source_path", ",", "settings", ".", "report_level", ",", "settings", ".", "halt_level", ",", "stream", "=", "settings", ".", "warning_stream", ",", "debug", "=", "settings", ".", "debug", ",", "encoding", "=", "settings", ".", "error_encoding", ",", "error_handler", "=", "settings", ".", "error_encoding_error_handler", ")", "return", "reporter" ]
return a new reporter object .
train
false
38,492
def setup_github():
    """Instantiate the global GitHub connection (sickbeard.gh).

    Tries an authenticated connection first when credentials are
    configured; on failure (or when no credentials exist) falls back to an
    anonymous connection.  Leaves sickbeard.gh as None, logging a warning,
    if both attempts fail.
    """
    try:
        if (sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD):
            sickbeard.gh = Github(login_or_token=sickbeard.GIT_USERNAME, password=sickbeard.GIT_PASSWORD, user_agent=u'SickRage')
    except Exception as error:
        sickbeard.gh = None
        sickbeard.logger.log(u'Unable to setup GitHub properly with your github login. Please check your credentials. Error: {0}'.format(error), sickbeard.logger.WARNING)
    # Anonymous fallback when authentication failed or was not attempted.
    if (not sickbeard.gh):
        try:
            sickbeard.gh = Github(user_agent=u'SickRage')
        except Exception as error:
            sickbeard.gh = None
            sickbeard.logger.log(u'Unable to setup GitHub properly. GitHub will not be available. Error: {0}'.format(error), sickbeard.logger.WARNING)
[ "def", "setup_github", "(", ")", ":", "try", ":", "if", "(", "sickbeard", ".", "GIT_USERNAME", "and", "sickbeard", ".", "GIT_PASSWORD", ")", ":", "sickbeard", ".", "gh", "=", "Github", "(", "login_or_token", "=", "sickbeard", ".", "GIT_USERNAME", ",", "password", "=", "sickbeard", ".", "GIT_PASSWORD", ",", "user_agent", "=", "u'SickRage'", ")", "except", "Exception", "as", "error", ":", "sickbeard", ".", "gh", "=", "None", "sickbeard", ".", "logger", ".", "log", "(", "u'Unable to setup GitHub properly with your github login. Please check your credentials. Error: {0}'", ".", "format", "(", "error", ")", ",", "sickbeard", ".", "logger", ".", "WARNING", ")", "if", "(", "not", "sickbeard", ".", "gh", ")", ":", "try", ":", "sickbeard", ".", "gh", "=", "Github", "(", "user_agent", "=", "u'SickRage'", ")", "except", "Exception", "as", "error", ":", "sickbeard", ".", "gh", "=", "None", "sickbeard", ".", "logger", ".", "log", "(", "u'Unable to setup GitHub properly. GitHub will not be available. Error: {0}'", ".", "format", "(", "error", ")", ",", "sickbeard", ".", "logger", ".", "WARNING", ")" ]
instantiate the global github connection .
train
false
38,493
def pil_image(source, exif_orientation=True, **options):
    """Try to open the source file directly using PIL.

    :param source: file-like object with the image data (falsy -> None).
    :param exif_orientation: when True, re-orient the image according to
        its EXIF orientation tag.
    :returns: a PIL Image, or None when source is falsy.
    """
    if not source:
        return
    # Buffer the whole stream: PIL needs a seekable file object.
    source = BytesIO(source.read())
    image = Image.open(source)
    # Force-decode now, tolerating truncated image data.  The original
    # called image.load() a second time *after* this try/except, which
    # re-raised the IOError the handler was meant to swallow.
    try:
        image.load()
    except IOError:
        pass
    if exif_orientation:
        image = utils.exif_orientation(image)
    return image
[ "def", "pil_image", "(", "source", ",", "exif_orientation", "=", "True", ",", "**", "options", ")", ":", "if", "(", "not", "source", ")", ":", "return", "source", "=", "BytesIO", "(", "source", ".", "read", "(", ")", ")", "image", "=", "Image", ".", "open", "(", "source", ")", "try", ":", "image", ".", "load", "(", ")", "except", "IOError", ":", "pass", "image", ".", "load", "(", ")", "if", "exif_orientation", ":", "image", "=", "utils", ".", "exif_orientation", "(", "image", ")", "return", "image" ]
try to open the source file directly using pil .
train
true
38,494
def libvlc_media_add_option_flag(p_md, psz_options, i_flags):
    """Add an option to the media with configurable flags.

    ctypes binding: reuses the wrapper cached in _Cfunctions when present,
    otherwise builds one via _Cfunction for the native
    libvlc_media_add_option_flag entry point.

    :param p_md: the Media instance.
    :param psz_options: the options string to add (c_char_p).
    :param i_flags: flags controlling how the option applies (c_uint).
    """
    f = (_Cfunctions.get('libvlc_media_add_option_flag', None) or _Cfunction('libvlc_media_add_option_flag', ((1,), (1,), (1,)), None, None, Media, ctypes.c_char_p, ctypes.c_uint))
    return f(p_md, psz_options, i_flags)
[ "def", "libvlc_media_add_option_flag", "(", "p_md", ",", "psz_options", ",", "i_flags", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_add_option_flag'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_add_option_flag'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "Media", ",", "ctypes", ".", "c_char_p", ",", "ctypes", ".", "c_uint", ")", ")", "return", "f", "(", "p_md", ",", "psz_options", ",", "i_flags", ")" ]
add an option to the media with configurable flags .
train
true
38,495
def test_scenario_show_tags_in_its_representation():
    """Scenario#represented should show its tags."""
    scenario = Scenario.from_string(
        SCENARIO1,
        original_string=SCENARIO1.strip(),
        tags=['slow', 'firefox', 'chrome'],
    )
    expected = u'  @slow @firefox @chrome\n  Scenario: Adding some students to my university database'
    expect(scenario.represented()).to.equal(expected)
[ "def", "test_scenario_show_tags_in_its_representation", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "SCENARIO1", ",", "original_string", "=", "SCENARIO1", ".", "strip", "(", ")", ",", "tags", "=", "[", "'slow'", ",", "'firefox'", ",", "'chrome'", "]", ")", "expect", "(", "scenario", ".", "represented", "(", ")", ")", ".", "to", ".", "equal", "(", "u' @slow @firefox @chrome\\n Scenario: Adding some students to my university database'", ")" ]
scenario#represented should show its tags .
train
false
38,496
def print_tree(task, indent='', last=True):
    """Return a string representation of the task dependency tree.

    Each node is rendered as [TaskName-params (COMPLETE|PENDING)] with
    box-drawing connectors, recursing into task.requires().

    :param task: the root task.
    :param indent: accumulated indentation for the current depth.
    :param last: whether this task is the last child of its parent.
    """
    with warnings.catch_warnings():
        # complete() may warn for output-less tasks; silence just that one.
        warnings.filterwarnings(action='ignore', message='Task .* without outputs has no custom complete\\(\\) method')
        is_task_complete = task.complete()
    is_complete = (((bcolors.OKGREEN + 'COMPLETE') if is_task_complete else (bcolors.OKBLUE + 'PENDING')) + bcolors.ENDC)
    name = task.__class__.__name__
    params = task.to_str_params(only_significant=True)
    result = ('\n' + indent)
    if last:
        # UTF-8 byte sequence for the box-drawing corner piece.
        result += '\xe2\x94\x94\xe2\x94\x80--'
        indent += ' '
    else:
        result += '|--'
        indent += '| '
    result += '[{0}-{1} ({2})]'.format(name, params, is_complete)
    children = flatten(task.requires())
    for (index, child) in enumerate(children):
        result += print_tree(child, indent, ((index + 1) == len(children)))
    return result
[ "def", "print_tree", "(", "task", ",", "indent", "=", "''", ",", "last", "=", "True", ")", ":", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "action", "=", "'ignore'", ",", "message", "=", "'Task .* without outputs has no custom complete\\\\(\\\\) method'", ")", "is_task_complete", "=", "task", ".", "complete", "(", ")", "is_complete", "=", "(", "(", "(", "bcolors", ".", "OKGREEN", "+", "'COMPLETE'", ")", "if", "is_task_complete", "else", "(", "bcolors", ".", "OKBLUE", "+", "'PENDING'", ")", ")", "+", "bcolors", ".", "ENDC", ")", "name", "=", "task", ".", "__class__", ".", "__name__", "params", "=", "task", ".", "to_str_params", "(", "only_significant", "=", "True", ")", "result", "=", "(", "'\\n'", "+", "indent", ")", "if", "last", ":", "result", "+=", "'\\xe2\\x94\\x94\\xe2\\x94\\x80--'", "indent", "+=", "' '", "else", ":", "result", "+=", "'|--'", "indent", "+=", "'| '", "result", "+=", "'[{0}-{1} ({2})]'", ".", "format", "(", "name", ",", "params", ",", "is_complete", ")", "children", "=", "flatten", "(", "task", ".", "requires", "(", ")", ")", "for", "(", "index", ",", "child", ")", "in", "enumerate", "(", "children", ")", ":", "result", "+=", "print_tree", "(", "child", ",", "indent", ",", "(", "(", "index", "+", "1", ")", "==", "len", "(", "children", ")", ")", ")", "return", "result" ]
return a string representation of the tasks .
train
true
38,499
@coroutine
def component2(reactor, session):
    """A second component: call a remote procedure and publish the result.

    Calls com.example.add2 with (2, 3), then publishes the formatted
    result to the com.example.on-hello topic.
    """
    result = (yield session.call(u'com.example.add2', 2, 3))
    session.publish(u'com.example.on-hello', u'result={}'.format(result))
[ "@", "coroutine", "def", "component2", "(", "reactor", ",", "session", ")", ":", "result", "=", "(", "yield", "session", ".", "call", "(", "u'com.example.add2'", ",", "2", ",", "3", ")", ")", "session", ".", "publish", "(", "u'com.example.on-hello'", ",", "u'result={}'", ".", "format", "(", "result", ")", ")" ]
a second component .
train
false
38,500
def _get_hostnames_in_bucket(hist_data, bucket): return [hostname for (hostname, pass_rate) in hist_data if (bucket[0] <= pass_rate < bucket[1])]
[ "def", "_get_hostnames_in_bucket", "(", "hist_data", ",", "bucket", ")", ":", "return", "[", "hostname", "for", "(", "hostname", ",", "pass_rate", ")", "in", "hist_data", "if", "(", "bucket", "[", "0", "]", "<=", "pass_rate", "<", "bucket", "[", "1", "]", ")", "]" ]
get all the hostnames that constitute a particular bucket in the histogram .
train
false
38,502
@verbose def cross_talk_function(inverse_operator, forward, labels, method='dSPM', lambda2=(1 / 9.0), signed=False, mode='mean', n_svd_comp=1, verbose=None): forward = convert_forward_solution(forward, force_fixed=True, surf_ori=True) out = _get_matrix_from_inverse_operator(inverse_operator, forward, labels=labels, method=method, lambda2=lambda2, mode=mode, n_svd_comp=n_svd_comp) (invmat, label_singvals) = out leadfield = _pick_leadfield(forward['sol']['data'], forward, inverse_operator['info']['ch_names']) ctfs = np.dot(invmat, leadfield) ctfs = np.vstack((ctfs, ctfs.sum(axis=0))) if (not signed): ctfs = np.abs(ctfs, out=ctfs) vertno = [ss['vertno'] for ss in inverse_operator['src']] stc_ctf = SourceEstimate(ctfs.T, vertno, tmin=0.0, tstep=1.0) stc_ctf.subject = _subject_from_inverse(inverse_operator) return stc_ctf
[ "@", "verbose", "def", "cross_talk_function", "(", "inverse_operator", ",", "forward", ",", "labels", ",", "method", "=", "'dSPM'", ",", "lambda2", "=", "(", "1", "/", "9.0", ")", ",", "signed", "=", "False", ",", "mode", "=", "'mean'", ",", "n_svd_comp", "=", "1", ",", "verbose", "=", "None", ")", ":", "forward", "=", "convert_forward_solution", "(", "forward", ",", "force_fixed", "=", "True", ",", "surf_ori", "=", "True", ")", "out", "=", "_get_matrix_from_inverse_operator", "(", "inverse_operator", ",", "forward", ",", "labels", "=", "labels", ",", "method", "=", "method", ",", "lambda2", "=", "lambda2", ",", "mode", "=", "mode", ",", "n_svd_comp", "=", "n_svd_comp", ")", "(", "invmat", ",", "label_singvals", ")", "=", "out", "leadfield", "=", "_pick_leadfield", "(", "forward", "[", "'sol'", "]", "[", "'data'", "]", ",", "forward", ",", "inverse_operator", "[", "'info'", "]", "[", "'ch_names'", "]", ")", "ctfs", "=", "np", ".", "dot", "(", "invmat", ",", "leadfield", ")", "ctfs", "=", "np", ".", "vstack", "(", "(", "ctfs", ",", "ctfs", ".", "sum", "(", "axis", "=", "0", ")", ")", ")", "if", "(", "not", "signed", ")", ":", "ctfs", "=", "np", ".", "abs", "(", "ctfs", ",", "out", "=", "ctfs", ")", "vertno", "=", "[", "ss", "[", "'vertno'", "]", "for", "ss", "in", "inverse_operator", "[", "'src'", "]", "]", "stc_ctf", "=", "SourceEstimate", "(", "ctfs", ".", "T", ",", "vertno", ",", "tmin", "=", "0.0", ",", "tstep", "=", "1.0", ")", "stc_ctf", ".", "subject", "=", "_subject_from_inverse", "(", "inverse_operator", ")", "return", "stc_ctf" ]
compute cross-talk functions for linear estimators .
train
false
38,503
def find_increasing_sequences(worder): items = iter(worder) (a, b) = (None, next(items, None)) result = [b] while (b is not None): (a, b) = (b, next(items, None)) if ((b is not None) and ((a + 1) == b)): result.append(b) else: if (len(result) > 1): (yield tuple(result)) result = [b]
[ "def", "find_increasing_sequences", "(", "worder", ")", ":", "items", "=", "iter", "(", "worder", ")", "(", "a", ",", "b", ")", "=", "(", "None", ",", "next", "(", "items", ",", "None", ")", ")", "result", "=", "[", "b", "]", "while", "(", "b", "is", "not", "None", ")", ":", "(", "a", ",", "b", ")", "=", "(", "b", ",", "next", "(", "items", ",", "None", ")", ")", "if", "(", "(", "b", "is", "not", "None", ")", "and", "(", "(", "a", "+", "1", ")", "==", "b", ")", ")", ":", "result", ".", "append", "(", "b", ")", "else", ":", "if", "(", "len", "(", "result", ")", ">", "1", ")", ":", "(", "yield", "tuple", "(", "result", ")", ")", "result", "=", "[", "b", "]" ]
given the *worder* list .
train
false
38,505
def cnv_dateTime(attribute, arg, element): return str(arg)
[ "def", "cnv_dateTime", "(", "attribute", ",", "arg", ",", "element", ")", ":", "return", "str", "(", "arg", ")" ]
a dateordatetime value is either an [xmlschema-2] date value or an [xmlschema-2] datetime value .
train
false
38,506
def Rademacher(name): return rv(name, RademacherDistribution)
[ "def", "Rademacher", "(", "name", ")", ":", "return", "rv", "(", "name", ",", "RademacherDistribution", ")" ]
create a finite random variable representing a rademacher distribution .
train
false
38,508
def xywh_to_xyxy(boxes): return np.hstack((boxes[:, 0:2], ((boxes[:, 0:2] + boxes[:, 2:4]) - 1)))
[ "def", "xywh_to_xyxy", "(", "boxes", ")", ":", "return", "np", ".", "hstack", "(", "(", "boxes", "[", ":", ",", "0", ":", "2", "]", ",", "(", "(", "boxes", "[", ":", ",", "0", ":", "2", "]", "+", "boxes", "[", ":", ",", "2", ":", "4", "]", ")", "-", "1", ")", ")", ")" ]
convert [x y w h] box format to [x1 y1 x2 y2] format .
train
false
38,510
def places_autocomplete(client, input_text, offset=None, location=None, radius=None, language=None, type=None, components=None): return _autocomplete(client, '', input_text, offset=offset, location=location, radius=radius, language=language, type=type, components=components)
[ "def", "places_autocomplete", "(", "client", ",", "input_text", ",", "offset", "=", "None", ",", "location", "=", "None", ",", "radius", "=", "None", ",", "language", "=", "None", ",", "type", "=", "None", ",", "components", "=", "None", ")", ":", "return", "_autocomplete", "(", "client", ",", "''", ",", "input_text", ",", "offset", "=", "offset", ",", "location", "=", "location", ",", "radius", "=", "radius", ",", "language", "=", "language", ",", "type", "=", "type", ",", "components", "=", "components", ")" ]
returns place predictions given a textual search string and optional geographic bounds .
train
false
38,511
def _mytype(f, x): if (x not in f.free_symbols): return () elif f.is_Function: return (type(f),) else: types = [_mytype(a, x) for a in f.args] res = [] for t in types: res += list(t) res.sort() return tuple(res)
[ "def", "_mytype", "(", "f", ",", "x", ")", ":", "if", "(", "x", "not", "in", "f", ".", "free_symbols", ")", ":", "return", "(", ")", "elif", "f", ".", "is_Function", ":", "return", "(", "type", "(", "f", ")", ",", ")", "else", ":", "types", "=", "[", "_mytype", "(", "a", ",", "x", ")", "for", "a", "in", "f", ".", "args", "]", "res", "=", "[", "]", "for", "t", "in", "types", ":", "res", "+=", "list", "(", "t", ")", "res", ".", "sort", "(", ")", "return", "tuple", "(", "res", ")" ]
create a hashable entity describing the type of f .
train
false
38,512
def system_call(cmd, error_msg): proc = Popen(cmd, shell=True, universal_newlines=True, stdout=PIPE, stderr=PIPE) (stdout, stderr) = proc.communicate() return_value = proc.returncode success = (return_value == 0) if (not success): status(('Unable to %s:' % error_msg)) status(' stdout:') for line in stdout.split('\n'): status((' ' + line)) status(' stderr:') for line in stderr.split('\n'): status((' ' + line)) return success
[ "def", "system_call", "(", "cmd", ",", "error_msg", ")", ":", "proc", "=", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "universal_newlines", "=", "True", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ")", "(", "stdout", ",", "stderr", ")", "=", "proc", ".", "communicate", "(", ")", "return_value", "=", "proc", ".", "returncode", "success", "=", "(", "return_value", "==", "0", ")", "if", "(", "not", "success", ")", ":", "status", "(", "(", "'Unable to %s:'", "%", "error_msg", ")", ")", "status", "(", "' stdout:'", ")", "for", "line", "in", "stdout", ".", "split", "(", "'\\n'", ")", ":", "status", "(", "(", "' '", "+", "line", ")", ")", "status", "(", "' stderr:'", ")", "for", "line", "in", "stderr", ".", "split", "(", "'\\n'", ")", ":", "status", "(", "(", "' '", "+", "line", ")", ")", "return", "success" ]
call cmd and return whether it was successful or not .
train
false
38,515
def wrapper(func, *args, **kwds): res = None try: stdscr = curses.initscr() curses.noecho() curses.cbreak() stdscr.keypad(1) try: curses.start_color() except: pass return func(stdscr, *args, **kwds) finally: stdscr.keypad(0) curses.echo() curses.nocbreak() curses.endwin()
[ "def", "wrapper", "(", "func", ",", "*", "args", ",", "**", "kwds", ")", ":", "res", "=", "None", "try", ":", "stdscr", "=", "curses", ".", "initscr", "(", ")", "curses", ".", "noecho", "(", ")", "curses", ".", "cbreak", "(", ")", "stdscr", ".", "keypad", "(", "1", ")", "try", ":", "curses", ".", "start_color", "(", ")", "except", ":", "pass", "return", "func", "(", "stdscr", ",", "*", "args", ",", "**", "kwds", ")", "finally", ":", "stdscr", ".", "keypad", "(", "0", ")", "curses", ".", "echo", "(", ")", "curses", ".", "nocbreak", "(", ")", "curses", ".", "endwin", "(", ")" ]
install rvm wrapper scripts ruby_string ruby/gemset to install wrappers for wrapper_prefix what to prepend to the name of the generated wrapper binaries runas the user under which to run rvm .
train
false
38,516
def unregister_class_loader(loader): try: CLASS_LOADERS.remove(loader) except KeyError: raise LookupError('loader not found')
[ "def", "unregister_class_loader", "(", "loader", ")", ":", "try", ":", "CLASS_LOADERS", ".", "remove", "(", "loader", ")", "except", "KeyError", ":", "raise", "LookupError", "(", "'loader not found'", ")" ]
unregisters a class loader .
train
false
38,517
@register.tag def lorem(parser, token): bits = list(token.split_contents()) tagname = bits[0] common = (bits[(-1)] != 'random') if (not common): bits.pop() if (bits[(-1)] in ('w', 'p', 'b')): method = bits.pop() else: method = 'b' if (len(bits) > 1): count = bits.pop() else: count = '1' count = parser.compile_filter(count) if (len(bits) != 1): raise template.TemplateSyntaxError(('Incorrect format for %r tag' % tagname)) return LoremNode(count, method, common)
[ "@", "register", ".", "tag", "def", "lorem", "(", "parser", ",", "token", ")", ":", "bits", "=", "list", "(", "token", ".", "split_contents", "(", ")", ")", "tagname", "=", "bits", "[", "0", "]", "common", "=", "(", "bits", "[", "(", "-", "1", ")", "]", "!=", "'random'", ")", "if", "(", "not", "common", ")", ":", "bits", ".", "pop", "(", ")", "if", "(", "bits", "[", "(", "-", "1", ")", "]", "in", "(", "'w'", ",", "'p'", ",", "'b'", ")", ")", ":", "method", "=", "bits", ".", "pop", "(", ")", "else", ":", "method", "=", "'b'", "if", "(", "len", "(", "bits", ")", ">", "1", ")", ":", "count", "=", "bits", ".", "pop", "(", ")", "else", ":", "count", "=", "'1'", "count", "=", "parser", ".", "compile_filter", "(", "count", ")", "if", "(", "len", "(", "bits", ")", "!=", "1", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "'Incorrect format for %r tag'", "%", "tagname", ")", ")", "return", "LoremNode", "(", "count", ",", "method", ",", "common", ")" ]
creates random latin text useful for providing test data in templates .
train
false
38,518
def timezone(zone): if (zone.upper() == 'UTC'): return utc try: zone = zone.encode('US-ASCII') except UnicodeEncodeError: raise UnknownTimeZoneError(zone) zone = _unmunge_zone(zone) if (zone not in _tzinfo_cache): if (zone in all_timezones_set): _tzinfo_cache[zone] = build_tzinfo(zone, open_resource(zone)) else: raise UnknownTimeZoneError(zone) return _tzinfo_cache[zone]
[ "def", "timezone", "(", "zone", ")", ":", "if", "(", "zone", ".", "upper", "(", ")", "==", "'UTC'", ")", ":", "return", "utc", "try", ":", "zone", "=", "zone", ".", "encode", "(", "'US-ASCII'", ")", "except", "UnicodeEncodeError", ":", "raise", "UnknownTimeZoneError", "(", "zone", ")", "zone", "=", "_unmunge_zone", "(", "zone", ")", "if", "(", "zone", "not", "in", "_tzinfo_cache", ")", ":", "if", "(", "zone", "in", "all_timezones_set", ")", ":", "_tzinfo_cache", "[", "zone", "]", "=", "build_tzinfo", "(", "zone", ",", "open_resource", "(", "zone", ")", ")", "else", ":", "raise", "UnknownTimeZoneError", "(", "zone", ")", "return", "_tzinfo_cache", "[", "zone", "]" ]
return a datetime .
train
true
38,519
def is_same_graph_with_merge(var1, var2, givens=None): if (givens is None): givens = {} copied = copy.deepcopy([var1, var2, givens]) vars = copied[0:2] givens = copied[2] inputs = theano.gof.graph.inputs(vars) fgraph = theano.gof.fg.FunctionGraph(inputs, vars, clone=False) for (to_replace, replace_by) in iteritems(givens): fgraph.replace(to_replace, replace_by) MergeOptimizer().optimize(fgraph) vars_replaced = [givens.get(v, v) for v in vars] (o1, o2) = [v.owner for v in vars_replaced] if ((o1 is None) and (o2 is None)): return (vars_replaced[0] == vars_replaced[1]) else: return (o1 is o2)
[ "def", "is_same_graph_with_merge", "(", "var1", ",", "var2", ",", "givens", "=", "None", ")", ":", "if", "(", "givens", "is", "None", ")", ":", "givens", "=", "{", "}", "copied", "=", "copy", ".", "deepcopy", "(", "[", "var1", ",", "var2", ",", "givens", "]", ")", "vars", "=", "copied", "[", "0", ":", "2", "]", "givens", "=", "copied", "[", "2", "]", "inputs", "=", "theano", ".", "gof", ".", "graph", ".", "inputs", "(", "vars", ")", "fgraph", "=", "theano", ".", "gof", ".", "fg", ".", "FunctionGraph", "(", "inputs", ",", "vars", ",", "clone", "=", "False", ")", "for", "(", "to_replace", ",", "replace_by", ")", "in", "iteritems", "(", "givens", ")", ":", "fgraph", ".", "replace", "(", "to_replace", ",", "replace_by", ")", "MergeOptimizer", "(", ")", ".", "optimize", "(", "fgraph", ")", "vars_replaced", "=", "[", "givens", ".", "get", "(", "v", ",", "v", ")", "for", "v", "in", "vars", "]", "(", "o1", ",", "o2", ")", "=", "[", "v", ".", "owner", "for", "v", "in", "vars_replaced", "]", "if", "(", "(", "o1", "is", "None", ")", "and", "(", "o2", "is", "None", ")", ")", ":", "return", "(", "vars_replaced", "[", "0", "]", "==", "vars_replaced", "[", "1", "]", ")", "else", ":", "return", "(", "o1", "is", "o2", ")" ]
merge-based implementation of theano .
train
false
38,522
def select_hash(cache, selector): items = cache.id_map[ascii_lower(selector.id)] if (len(items) > 0): for elem in cache.iterparsedselector(selector.selector): if (elem in items): (yield elem)
[ "def", "select_hash", "(", "cache", ",", "selector", ")", ":", "items", "=", "cache", ".", "id_map", "[", "ascii_lower", "(", "selector", ".", "id", ")", "]", "if", "(", "len", "(", "items", ")", ">", "0", ")", ":", "for", "elem", "in", "cache", ".", "iterparsedselector", "(", "selector", ".", "selector", ")", ":", "if", "(", "elem", "in", "items", ")", ":", "(", "yield", "elem", ")" ]
an id selector .
train
false
38,523
def get_user_id_from_email(email): class _FakeUser(ndb.Model, ): _use_memcache = False _use_cache = False user = ndb.UserProperty(required=True) try: fake_user = users.User(email) except users.UserNotFoundError: logging.error(('The email address %s does not correspond to a valid user_id' % email)) return None key = _FakeUser(id=email, user=fake_user).put() obj = _FakeUser.get_by_id(key.id()) user_id = obj.user.user_id() if user_id: return unicode(user_id) else: return None
[ "def", "get_user_id_from_email", "(", "email", ")", ":", "class", "_FakeUser", "(", "ndb", ".", "Model", ",", ")", ":", "_use_memcache", "=", "False", "_use_cache", "=", "False", "user", "=", "ndb", ".", "UserProperty", "(", "required", "=", "True", ")", "try", ":", "fake_user", "=", "users", ".", "User", "(", "email", ")", "except", "users", ".", "UserNotFoundError", ":", "logging", ".", "error", "(", "(", "'The email address %s does not correspond to a valid user_id'", "%", "email", ")", ")", "return", "None", "key", "=", "_FakeUser", "(", "id", "=", "email", ",", "user", "=", "fake_user", ")", ".", "put", "(", ")", "obj", "=", "_FakeUser", ".", "get_by_id", "(", "key", ".", "id", "(", ")", ")", "user_id", "=", "obj", ".", "user", ".", "user_id", "(", ")", "if", "user_id", ":", "return", "unicode", "(", "user_id", ")", "else", ":", "return", "None" ]
given an email address .
train
false
38,524
def prepare_instrumentation(factory): if (factory in __canned_instrumentation): factory = __canned_instrumentation[factory] cls = type(factory()) if (cls in __canned_instrumentation): factory = __converting_factory(cls, factory) cls = factory() if __instrumentation_mutex.acquire(): try: if (getattr(cls, '_sa_instrumented', None) != id(cls)): _instrument_class(cls) finally: __instrumentation_mutex.release() return factory
[ "def", "prepare_instrumentation", "(", "factory", ")", ":", "if", "(", "factory", "in", "__canned_instrumentation", ")", ":", "factory", "=", "__canned_instrumentation", "[", "factory", "]", "cls", "=", "type", "(", "factory", "(", ")", ")", "if", "(", "cls", "in", "__canned_instrumentation", ")", ":", "factory", "=", "__converting_factory", "(", "cls", ",", "factory", ")", "cls", "=", "factory", "(", ")", "if", "__instrumentation_mutex", ".", "acquire", "(", ")", ":", "try", ":", "if", "(", "getattr", "(", "cls", ",", "'_sa_instrumented'", ",", "None", ")", "!=", "id", "(", "cls", ")", ")", ":", "_instrument_class", "(", "cls", ")", "finally", ":", "__instrumentation_mutex", ".", "release", "(", ")", "return", "factory" ]
prepare a callable for future use as a collection class factory .
train
false
38,525
def generateSimpleCoincMatrix(nCoinc=10, length=500, activity=50): assert ((nCoinc * activity) <= length), "can't generate non-overlapping coincidences" coincMatrix = SM32(0, length) coinc = numpy.zeros(length, dtype='int32') for i in xrange(nCoinc): coinc[:] = 0 coinc[(i * activity):((i + 1) * activity)] = 1 coincMatrix.addRow(coinc) return coincMatrix
[ "def", "generateSimpleCoincMatrix", "(", "nCoinc", "=", "10", ",", "length", "=", "500", ",", "activity", "=", "50", ")", ":", "assert", "(", "(", "nCoinc", "*", "activity", ")", "<=", "length", ")", ",", "\"can't generate non-overlapping coincidences\"", "coincMatrix", "=", "SM32", "(", "0", ",", "length", ")", "coinc", "=", "numpy", ".", "zeros", "(", "length", ",", "dtype", "=", "'int32'", ")", "for", "i", "in", "xrange", "(", "nCoinc", ")", ":", "coinc", "[", ":", "]", "=", "0", "coinc", "[", "(", "i", "*", "activity", ")", ":", "(", "(", "i", "+", "1", ")", "*", "activity", ")", "]", "=", "1", "coincMatrix", ".", "addRow", "(", "coinc", ")", "return", "coincMatrix" ]
generate a non overlapping coincidence matrix .
train
true
38,526
def _filterRecord(filterList, record): for (fieldIdx, fp, params) in filterList: x = dict() x['value'] = record[fieldIdx] x['acceptValues'] = params['acceptValues'] x['min'] = params['min'] x['max'] = params['max'] if (not fp(x)): return False return True
[ "def", "_filterRecord", "(", "filterList", ",", "record", ")", ":", "for", "(", "fieldIdx", ",", "fp", ",", "params", ")", "in", "filterList", ":", "x", "=", "dict", "(", ")", "x", "[", "'value'", "]", "=", "record", "[", "fieldIdx", "]", "x", "[", "'acceptValues'", "]", "=", "params", "[", "'acceptValues'", "]", "x", "[", "'min'", "]", "=", "params", "[", "'min'", "]", "x", "[", "'max'", "]", "=", "params", "[", "'max'", "]", "if", "(", "not", "fp", "(", "x", ")", ")", ":", "return", "False", "return", "True" ]
takes a record and returns true if record meets filter criteria .
train
true
38,527
def sockfam_to_enum(num): if (enum is None): return num else: try: return socket.AddressFamily(num) except (ValueError, AttributeError): return num
[ "def", "sockfam_to_enum", "(", "num", ")", ":", "if", "(", "enum", "is", "None", ")", ":", "return", "num", "else", ":", "try", ":", "return", "socket", ".", "AddressFamily", "(", "num", ")", "except", "(", "ValueError", ",", "AttributeError", ")", ":", "return", "num" ]
convert a numeric socket family value to an intenum member .
train
false
38,529
def test_needs_eeg_average_ref_proj(): raw = read_raw_fif(raw_fname) assert_true(_needs_eeg_average_ref_proj(raw.info)) raw.set_eeg_reference() assert_true((not _needs_eeg_average_ref_proj(raw.info))) raw = read_raw_fif(raw_fname, preload=True) eeg = [raw.ch_names[c] for c in pick_types(raw.info, meg=False, eeg=True)] raw.drop_channels(eeg) assert_true((not _needs_eeg_average_ref_proj(raw.info))) raw = read_raw_fif(raw_fname) raw.info['custom_ref_applied'] = True assert_true((not _needs_eeg_average_ref_proj(raw.info)))
[ "def", "test_needs_eeg_average_ref_proj", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", "assert_true", "(", "_needs_eeg_average_ref_proj", "(", "raw", ".", "info", ")", ")", "raw", ".", "set_eeg_reference", "(", ")", "assert_true", "(", "(", "not", "_needs_eeg_average_ref_proj", "(", "raw", ".", "info", ")", ")", ")", "raw", "=", "read_raw_fif", "(", "raw_fname", ",", "preload", "=", "True", ")", "eeg", "=", "[", "raw", ".", "ch_names", "[", "c", "]", "for", "c", "in", "pick_types", "(", "raw", ".", "info", ",", "meg", "=", "False", ",", "eeg", "=", "True", ")", "]", "raw", ".", "drop_channels", "(", "eeg", ")", "assert_true", "(", "(", "not", "_needs_eeg_average_ref_proj", "(", "raw", ".", "info", ")", ")", ")", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", "raw", ".", "info", "[", "'custom_ref_applied'", "]", "=", "True", "assert_true", "(", "(", "not", "_needs_eeg_average_ref_proj", "(", "raw", ".", "info", ")", ")", ")" ]
test checking whether a recording needs an eeg average reference .
train
false
38,530
def bracket_parse(s): raise NameError(u'Use Tree.read(s, remove_empty_top_bracketing=True) instead.')
[ "def", "bracket_parse", "(", "s", ")", ":", "raise", "NameError", "(", "u'Use Tree.read(s, remove_empty_top_bracketing=True) instead.'", ")" ]
use tree .
train
false
38,531
def reload_config(): _env_reloader.update()
[ "def", "reload_config", "(", ")", ":", "_env_reloader", ".", "update", "(", ")" ]
reload config .
train
false
38,532
def _setDBMS(): if (not conf.dbms): return debugMsg = 'forcing back-end DBMS to user defined value' logger.debug(debugMsg) conf.dbms = conf.dbms.lower() regex = re.search(('%s ([\\d\\.]+)' % ('(%s)' % '|'.join([alias for alias in SUPPORTED_DBMS]))), conf.dbms, re.I) if regex: conf.dbms = regex.group(1) Backend.setVersion(regex.group(2)) if (conf.dbms not in SUPPORTED_DBMS): errMsg = 'you provided an unsupported back-end database management ' errMsg += ('system. Supported DBMSes are as follows: %s. ' % ', '.join(sorted((_ for _ in DBMS_DICT)))) errMsg += 'If you do not know the back-end DBMS, do not provide ' errMsg += 'it and sqlmap will fingerprint it for you.' raise SqlmapUnsupportedDBMSException(errMsg) for (dbms, aliases) in DBMS_ALIASES: if (conf.dbms in aliases): conf.dbms = dbms break
[ "def", "_setDBMS", "(", ")", ":", "if", "(", "not", "conf", ".", "dbms", ")", ":", "return", "debugMsg", "=", "'forcing back-end DBMS to user defined value'", "logger", ".", "debug", "(", "debugMsg", ")", "conf", ".", "dbms", "=", "conf", ".", "dbms", ".", "lower", "(", ")", "regex", "=", "re", ".", "search", "(", "(", "'%s ([\\\\d\\\\.]+)'", "%", "(", "'(%s)'", "%", "'|'", ".", "join", "(", "[", "alias", "for", "alias", "in", "SUPPORTED_DBMS", "]", ")", ")", ")", ",", "conf", ".", "dbms", ",", "re", ".", "I", ")", "if", "regex", ":", "conf", ".", "dbms", "=", "regex", ".", "group", "(", "1", ")", "Backend", ".", "setVersion", "(", "regex", ".", "group", "(", "2", ")", ")", "if", "(", "conf", ".", "dbms", "not", "in", "SUPPORTED_DBMS", ")", ":", "errMsg", "=", "'you provided an unsupported back-end database management '", "errMsg", "+=", "(", "'system. Supported DBMSes are as follows: %s. '", "%", "', '", ".", "join", "(", "sorted", "(", "(", "_", "for", "_", "in", "DBMS_DICT", ")", ")", ")", ")", "errMsg", "+=", "'If you do not know the back-end DBMS, do not provide '", "errMsg", "+=", "'it and sqlmap will fingerprint it for you.'", "raise", "SqlmapUnsupportedDBMSException", "(", "errMsg", ")", "for", "(", "dbms", ",", "aliases", ")", "in", "DBMS_ALIASES", ":", "if", "(", "conf", ".", "dbms", "in", "aliases", ")", ":", "conf", ".", "dbms", "=", "dbms", "break" ]
force the back-end dbms option .
train
false
38,533
def _numpy_matrix_to_zero(e): if (not np): raise ImportError test = np.zeros_like(e) if np.allclose(e, test): return 0.0 else: return e
[ "def", "_numpy_matrix_to_zero", "(", "e", ")", ":", "if", "(", "not", "np", ")", ":", "raise", "ImportError", "test", "=", "np", ".", "zeros_like", "(", "e", ")", "if", "np", ".", "allclose", "(", "e", ",", "test", ")", ":", "return", "0.0", "else", ":", "return", "e" ]
convert a numpy zero matrix to the zero scalar .
train
false
38,535
def get_obj_spec(client_factory, obj, select_set=None): obj_spec = client_factory.create('ns0:ObjectSpec') obj_spec.obj = obj obj_spec.skip = False if (select_set is not None): obj_spec.selectSet = select_set return obj_spec
[ "def", "get_obj_spec", "(", "client_factory", ",", "obj", ",", "select_set", "=", "None", ")", ":", "obj_spec", "=", "client_factory", ".", "create", "(", "'ns0:ObjectSpec'", ")", "obj_spec", ".", "obj", "=", "obj", "obj_spec", ".", "skip", "=", "False", "if", "(", "select_set", "is", "not", "None", ")", ":", "obj_spec", ".", "selectSet", "=", "select_set", "return", "obj_spec" ]
builds the object spec object .
train
false
38,536
def target_release(version): if (not is_pre_release(version)): raise NotAPreRelease(version) parsed_version = parse_version(version) return parsed_version.release
[ "def", "target_release", "(", "version", ")", ":", "if", "(", "not", "is_pre_release", "(", "version", ")", ")", ":", "raise", "NotAPreRelease", "(", "version", ")", "parsed_version", "=", "parse_version", "(", "version", ")", "return", "parsed_version", ".", "release" ]
return the target final release for a pre-release .
train
false
38,537
def _document_form_initial(document): return {'title': document.title, 'slug': document.slug, 'category': document.category, 'is_localizable': document.is_localizable, 'is_archived': document.is_archived, 'topics': Topic.objects.filter(document=document).values_list('id', flat=True), 'products': Product.objects.filter(document=document).values_list('id', flat=True), 'related_documents': Document.objects.filter(related_documents=document).values_list('id', flat=True), 'allow_discussion': document.allow_discussion, 'needs_change': document.needs_change, 'needs_change_comment': document.needs_change_comment}
[ "def", "_document_form_initial", "(", "document", ")", ":", "return", "{", "'title'", ":", "document", ".", "title", ",", "'slug'", ":", "document", ".", "slug", ",", "'category'", ":", "document", ".", "category", ",", "'is_localizable'", ":", "document", ".", "is_localizable", ",", "'is_archived'", ":", "document", ".", "is_archived", ",", "'topics'", ":", "Topic", ".", "objects", ".", "filter", "(", "document", "=", "document", ")", ".", "values_list", "(", "'id'", ",", "flat", "=", "True", ")", ",", "'products'", ":", "Product", ".", "objects", ".", "filter", "(", "document", "=", "document", ")", ".", "values_list", "(", "'id'", ",", "flat", "=", "True", ")", ",", "'related_documents'", ":", "Document", ".", "objects", ".", "filter", "(", "related_documents", "=", "document", ")", ".", "values_list", "(", "'id'", ",", "flat", "=", "True", ")", ",", "'allow_discussion'", ":", "document", ".", "allow_discussion", ",", "'needs_change'", ":", "document", ".", "needs_change", ",", "'needs_change_comment'", ":", "document", ".", "needs_change_comment", "}" ]
return a dict with the document data pertinent for the form .
train
false
38,538
def force_list(value, min=None, max=None): if (not isinstance(value, (list, tuple))): value = [value] return is_list(value, min, max)
[ "def", "force_list", "(", "value", ",", "min", "=", "None", ",", "max", "=", "None", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ")", ")", ":", "value", "=", "[", "value", "]", "return", "is_list", "(", "value", ",", "min", ",", "max", ")" ]
check that a value is a list .
train
true
38,539
def get_model_matrix(array_type=c_float, glGetMethod=glGetFloatv): m = (array_type * 16)() glGetMethod(GL_MODELVIEW_MATRIX, m) return m
[ "def", "get_model_matrix", "(", "array_type", "=", "c_float", ",", "glGetMethod", "=", "glGetFloatv", ")", ":", "m", "=", "(", "array_type", "*", "16", ")", "(", ")", "glGetMethod", "(", "GL_MODELVIEW_MATRIX", ",", "m", ")", "return", "m" ]
returns the current modelview matrix .
train
false
38,540
def getGridHorizontalFrame(gridPosition): gridHorizontal = settings.GridHorizontal(0, 0) gridHorizontal.master = settings.Tkinter.Frame(gridPosition.master, borderwidth=1, padx=3, relief='raised') gridHorizontal.master.grid(row=gridPosition.row, column=gridPosition.column, sticky=settings.Tkinter.E) return gridHorizontal
[ "def", "getGridHorizontalFrame", "(", "gridPosition", ")", ":", "gridHorizontal", "=", "settings", ".", "GridHorizontal", "(", "0", ",", "0", ")", "gridHorizontal", ".", "master", "=", "settings", ".", "Tkinter", ".", "Frame", "(", "gridPosition", ".", "master", ",", "borderwidth", "=", "1", ",", "padx", "=", "3", ",", "relief", "=", "'raised'", ")", "gridHorizontal", ".", "master", ".", "grid", "(", "row", "=", "gridPosition", ".", "row", ",", "column", "=", "gridPosition", ".", "column", ",", "sticky", "=", "settings", ".", "Tkinter", ".", "E", ")", "return", "gridHorizontal" ]
get the grid horizontal object with a frame from the grid position .
train
false
38,541
def read_csr(csr): csr = _get_request_obj(csr) ret = {'Version': (csr.get_version() + 1), 'Subject': _parse_subject(csr.get_subject()), 'Subject Hash': _dec2hex(csr.get_subject().as_hash()), 'Public Key Hash': hashlib.sha1(csr.get_pubkey().get_modulus()).hexdigest()} ret['X509v3 Extensions'] = _get_csr_extensions(csr) return ret
[ "def", "read_csr", "(", "csr", ")", ":", "csr", "=", "_get_request_obj", "(", "csr", ")", "ret", "=", "{", "'Version'", ":", "(", "csr", ".", "get_version", "(", ")", "+", "1", ")", ",", "'Subject'", ":", "_parse_subject", "(", "csr", ".", "get_subject", "(", ")", ")", ",", "'Subject Hash'", ":", "_dec2hex", "(", "csr", ".", "get_subject", "(", ")", ".", "as_hash", "(", ")", ")", ",", "'Public Key Hash'", ":", "hashlib", ".", "sha1", "(", "csr", ".", "get_pubkey", "(", ")", ".", "get_modulus", "(", ")", ")", ".", "hexdigest", "(", ")", "}", "ret", "[", "'X509v3 Extensions'", "]", "=", "_get_csr_extensions", "(", "csr", ")", "return", "ret" ]
returns a dict containing details of a certificate request .
train
true
38,542
@register.inclusion_tag('inclusion.html') def inclusion_only_unlimited_args(*args): return {'result': ('inclusion_only_unlimited_args - Expected result: %s' % ', '.join([unicode(arg) for arg in args]))}
[ "@", "register", ".", "inclusion_tag", "(", "'inclusion.html'", ")", "def", "inclusion_only_unlimited_args", "(", "*", "args", ")", ":", "return", "{", "'result'", ":", "(", "'inclusion_only_unlimited_args - Expected result: %s'", "%", "', '", ".", "join", "(", "[", "unicode", "(", "arg", ")", "for", "arg", "in", "args", "]", ")", ")", "}" ]
expected inclusion_only_unlimited_args __doc__ .
train
false
38,543
def generate_array(initializer, shape, xp): dtype = numpy.float32 if (hasattr(initializer, 'dtype') and (initializer.dtype is not None)): dtype = initializer.dtype array = xp.empty(shape, dtype=dtype) initializer(array) return array
[ "def", "generate_array", "(", "initializer", ",", "shape", ",", "xp", ")", ":", "dtype", "=", "numpy", ".", "float32", "if", "(", "hasattr", "(", "initializer", ",", "'dtype'", ")", "and", "(", "initializer", ".", "dtype", "is", "not", "None", ")", ")", ":", "dtype", "=", "initializer", ".", "dtype", "array", "=", "xp", ".", "empty", "(", "shape", ",", "dtype", "=", "dtype", ")", "initializer", "(", "array", ")", "return", "array" ]
return initialized array .
train
false
38,545
def test_repository_url_should_clone(mocker, template_url, user_config_data): mock_clone = mocker.patch('cookiecutter.repository.clone', return_value='tests/fake-repo-tmpl', autospec=True) project_dir = repository.determine_repo_dir(template_url, abbreviations={}, clone_to_dir=user_config_data['cookiecutters_dir'], checkout=None, no_input=True) mock_clone.assert_called_once_with(repo_url=template_url, checkout=None, clone_to_dir=user_config_data['cookiecutters_dir'], no_input=True) assert os.path.isdir(project_dir) assert ('tests/fake-repo-tmpl' == project_dir)
[ "def", "test_repository_url_should_clone", "(", "mocker", ",", "template_url", ",", "user_config_data", ")", ":", "mock_clone", "=", "mocker", ".", "patch", "(", "'cookiecutter.repository.clone'", ",", "return_value", "=", "'tests/fake-repo-tmpl'", ",", "autospec", "=", "True", ")", "project_dir", "=", "repository", ".", "determine_repo_dir", "(", "template_url", ",", "abbreviations", "=", "{", "}", ",", "clone_to_dir", "=", "user_config_data", "[", "'cookiecutters_dir'", "]", ",", "checkout", "=", "None", ",", "no_input", "=", "True", ")", "mock_clone", ".", "assert_called_once_with", "(", "repo_url", "=", "template_url", ",", "checkout", "=", "None", ",", "clone_to_dir", "=", "user_config_data", "[", "'cookiecutters_dir'", "]", ",", "no_input", "=", "True", ")", "assert", "os", ".", "path", ".", "isdir", "(", "project_dir", ")", "assert", "(", "'tests/fake-repo-tmpl'", "==", "project_dir", ")" ]
clone() should be called with correct args when determine_repo_dir() is passed a repository template url .
train
false
38,546
def getLoopInsideContainingLoop(containingLoop, loops): for loop in loops: if (loop != containingLoop): if isPathInsideLoop(containingLoop, loop): return loop return None
[ "def", "getLoopInsideContainingLoop", "(", "containingLoop", ",", "loops", ")", ":", "for", "loop", "in", "loops", ":", "if", "(", "loop", "!=", "containingLoop", ")", ":", "if", "isPathInsideLoop", "(", "containingLoop", ",", "loop", ")", ":", "return", "loop", "return", "None" ]
get a loop that is inside the containing loop .
train
false
38,548
def reshapelist(shape, seq): if (len(shape) == 1): return list(seq) else: n = int((len(seq) / shape[0])) return [reshapelist(shape[1:], part) for part in partition(n, seq)]
[ "def", "reshapelist", "(", "shape", ",", "seq", ")", ":", "if", "(", "len", "(", "shape", ")", "==", "1", ")", ":", "return", "list", "(", "seq", ")", "else", ":", "n", "=", "int", "(", "(", "len", "(", "seq", ")", "/", "shape", "[", "0", "]", ")", ")", "return", "[", "reshapelist", "(", "shape", "[", "1", ":", "]", ",", "part", ")", "for", "part", "in", "partition", "(", "n", ",", "seq", ")", "]" ]
reshape iterator to nested shape .
train
false
38,549
def dctDecode(stream, parameters): decodedStream = '' return ((-1), 'DctDecode not supported yet')
[ "def", "dctDecode", "(", "stream", ",", "parameters", ")", ":", "decodedStream", "=", "''", "return", "(", "(", "-", "1", ")", ",", "'DctDecode not supported yet'", ")" ]
method to decode streams using a dct technique based on the jpeg standard .
train
false
38,550
def disease(): return s3_rest_controller(rheader=s3db.disease_rheader)
[ "def", "disease", "(", ")", ":", "return", "s3_rest_controller", "(", "rheader", "=", "s3db", ".", "disease_rheader", ")" ]
disease information controller .
train
false
38,551
def TearDownStubs(): pass
[ "def", "TearDownStubs", "(", ")", ":", "pass" ]
clean up any stubs that need cleanup .
train
false
38,554
def copymod(dct, without=None, **kwargs): if (without is None): without = [] rval = copy(dct) for a in without: if (a in rval): del rval[a] for (kw, val) in iteritems(kwargs): rval[kw] = val return rval
[ "def", "copymod", "(", "dct", ",", "without", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "without", "is", "None", ")", ":", "without", "=", "[", "]", "rval", "=", "copy", "(", "dct", ")", "for", "a", "in", "without", ":", "if", "(", "a", "in", "rval", ")", ":", "del", "rval", "[", "a", "]", "for", "(", "kw", ",", "val", ")", "in", "iteritems", "(", "kwargs", ")", ":", "rval", "[", "kw", "]", "=", "val", "return", "rval" ]
return dct but with the keys named by args removed .
train
false
38,555
def ascii_art(text): fi = figlet_format(text, font='doom') print '\n'.join([next(dg['cyc'])(i) for i in fi.split('\n')])
[ "def", "ascii_art", "(", "text", ")", ":", "fi", "=", "figlet_format", "(", "text", ",", "font", "=", "'doom'", ")", "print", "'\\n'", ".", "join", "(", "[", "next", "(", "dg", "[", "'cyc'", "]", ")", "(", "i", ")", "for", "i", "in", "fi", ".", "split", "(", "'\\n'", ")", "]", ")" ]
draw the ascii art .
train
false
38,556
def _determine_scheduled_actions(scheduled_actions, scheduled_actions_from_pillar): tmp = copy.deepcopy(__salt__['config.option'](scheduled_actions_from_pillar, {})) if scheduled_actions: tmp = dictupdate.update(tmp, scheduled_actions) return tmp
[ "def", "_determine_scheduled_actions", "(", "scheduled_actions", ",", "scheduled_actions_from_pillar", ")", ":", "tmp", "=", "copy", ".", "deepcopy", "(", "__salt__", "[", "'config.option'", "]", "(", "scheduled_actions_from_pillar", ",", "{", "}", ")", ")", "if", "scheduled_actions", ":", "tmp", "=", "dictupdate", ".", "update", "(", "tmp", ",", "scheduled_actions", ")", "return", "tmp" ]
helper method for present .
train
true
38,558
def select_and_appoint_device(song, device_to_select, ignore_unmapped_macros=True): appointed_device = device_to_select if ignore_unmapped_macros: appointed_device = device_to_appoint(device_to_select) song.view.select_device(device_to_select, False) song.appointed_device = appointed_device
[ "def", "select_and_appoint_device", "(", "song", ",", "device_to_select", ",", "ignore_unmapped_macros", "=", "True", ")", ":", "appointed_device", "=", "device_to_select", "if", "ignore_unmapped_macros", ":", "appointed_device", "=", "device_to_appoint", "(", "device_to_select", ")", "song", ".", "view", ".", "select_device", "(", "device_to_select", ",", "False", ")", "song", ".", "appointed_device", "=", "appointed_device" ]
convenience function for selecting a device for a control surface to control .
train
false
38,559
@register.tag('localtime') def localtime_tag(parser, token): bits = token.split_contents() if (len(bits) == 1): use_tz = True elif ((len(bits) > 2) or (bits[1] not in ('on', 'off'))): raise TemplateSyntaxError(("%r argument should be 'on' or 'off'" % bits[0])) else: use_tz = (bits[1] == 'on') nodelist = parser.parse(('endlocaltime',)) parser.delete_first_token() return LocalTimeNode(nodelist, use_tz)
[ "@", "register", ".", "tag", "(", "'localtime'", ")", "def", "localtime_tag", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "==", "1", ")", ":", "use_tz", "=", "True", "elif", "(", "(", "len", "(", "bits", ")", ">", "2", ")", "or", "(", "bits", "[", "1", "]", "not", "in", "(", "'on'", ",", "'off'", ")", ")", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "\"%r argument should be 'on' or 'off'\"", "%", "bits", "[", "0", "]", ")", ")", "else", ":", "use_tz", "=", "(", "bits", "[", "1", "]", "==", "'on'", ")", "nodelist", "=", "parser", ".", "parse", "(", "(", "'endlocaltime'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "return", "LocalTimeNode", "(", "nodelist", ",", "use_tz", ")" ]
forces or prevents conversion of datetime objects to local time .
train
false
38,560
def boxplot(iterable, **kwargs): kwargs.pop('p', None) kwargs.pop('sort', None) s = sorted(iterable) Q1 = quantile(s, p=0.25, sort=False, **kwargs) Q2 = quantile(s, p=0.5, sort=False, **kwargs) Q3 = quantile(s, p=0.75, sort=False, **kwargs) return (float(min(s)), Q1, Q2, Q3, float(max(s)))
[ "def", "boxplot", "(", "iterable", ",", "**", "kwargs", ")", ":", "kwargs", ".", "pop", "(", "'p'", ",", "None", ")", "kwargs", ".", "pop", "(", "'sort'", ",", "None", ")", "s", "=", "sorted", "(", "iterable", ")", "Q1", "=", "quantile", "(", "s", ",", "p", "=", "0.25", ",", "sort", "=", "False", ",", "**", "kwargs", ")", "Q2", "=", "quantile", "(", "s", ",", "p", "=", "0.5", ",", "sort", "=", "False", ",", "**", "kwargs", ")", "Q3", "=", "quantile", "(", "s", ",", "p", "=", "0.75", ",", "sort", "=", "False", ",", "**", "kwargs", ")", "return", "(", "float", "(", "min", "(", "s", ")", ")", ",", "Q1", ",", "Q2", ",", "Q3", ",", "float", "(", "max", "(", "s", ")", ")", ")" ]
returns a tuple (min .
train
false
38,562
def _checkSabResponse(jdata): if (u'error' in jdata): logger.log(jdata[u'error'], logger.ERROR) return (False, jdata[u'error']) else: return (True, jdata)
[ "def", "_checkSabResponse", "(", "jdata", ")", ":", "if", "(", "u'error'", "in", "jdata", ")", ":", "logger", ".", "log", "(", "jdata", "[", "u'error'", "]", ",", "logger", ".", "ERROR", ")", "return", "(", "False", ",", "jdata", "[", "u'error'", "]", ")", "else", ":", "return", "(", "True", ",", "jdata", ")" ]
check response from sab .
train
false
38,565
@profiler.trace def flavor_extra_set(request, flavor_id, metadata): flavor = novaclient(request).flavors.get(flavor_id) if (not metadata): return None return flavor.set_keys(metadata)
[ "@", "profiler", ".", "trace", "def", "flavor_extra_set", "(", "request", ",", "flavor_id", ",", "metadata", ")", ":", "flavor", "=", "novaclient", "(", "request", ")", ".", "flavors", ".", "get", "(", "flavor_id", ")", "if", "(", "not", "metadata", ")", ":", "return", "None", "return", "flavor", ".", "set_keys", "(", "metadata", ")" ]
set the flavor extra spec keys .
train
false
38,566
def cfg_fast_edges_check(arch, binary_path, edges): path = os.path.join(test_location, arch, binary_path) proj = angr.Project(path, load_options={'auto_load_libs': False}) cfg = proj.analyses.CFGFast() for (src, dst) in edges: src_node = cfg.get_any_node(src) dst_node = cfg.get_any_node(dst) nose.tools.assert_in(dst_node, src_node.successors)
[ "def", "cfg_fast_edges_check", "(", "arch", ",", "binary_path", ",", "edges", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "test_location", ",", "arch", ",", "binary_path", ")", "proj", "=", "angr", ".", "Project", "(", "path", ",", "load_options", "=", "{", "'auto_load_libs'", ":", "False", "}", ")", "cfg", "=", "proj", ".", "analyses", ".", "CFGFast", "(", ")", "for", "(", "src", ",", "dst", ")", "in", "edges", ":", "src_node", "=", "cfg", ".", "get_any_node", "(", "src", ")", "dst_node", "=", "cfg", ".", "get_any_node", "(", "dst", ")", "nose", ".", "tools", ".", "assert_in", "(", "dst_node", ",", "src_node", ".", "successors", ")" ]
generate a fast cfg on the given binary .
train
false
38,568
def get_cmd(some_file, notebook_options=''): if some_file.endswith('.py'): command = 'python' elif some_file.endswith('.ipynb'): command = ('ipython notebook %s' % notebook_options) return command
[ "def", "get_cmd", "(", "some_file", ",", "notebook_options", "=", "''", ")", ":", "if", "some_file", ".", "endswith", "(", "'.py'", ")", ":", "command", "=", "'python'", "elif", "some_file", ".", "endswith", "(", "'.ipynb'", ")", ":", "command", "=", "(", "'ipython notebook %s'", "%", "notebook_options", ")", "return", "command" ]
determines how to open a file depending on whether it is a .
train
false
38,571
def p_expression_1(t): pass
[ "def", "p_expression_1", "(", "t", ")", ":", "pass" ]
expression : assignment_expression .
train
false
38,572
def _init_mutable_colormap(): greys = color_palette('Greys', 256) cmap = LinearSegmentedColormap.from_list('interactive', greys) cmap._init() cmap._set_extremes() return cmap
[ "def", "_init_mutable_colormap", "(", ")", ":", "greys", "=", "color_palette", "(", "'Greys'", ",", "256", ")", "cmap", "=", "LinearSegmentedColormap", ".", "from_list", "(", "'interactive'", ",", "greys", ")", "cmap", ".", "_init", "(", ")", "cmap", ".", "_set_extremes", "(", ")", "return", "cmap" ]
create a matplotlib colormap that will be updated by the widgets .
train
false
38,573
def save_pem(contents, pem_marker): (pem_start, pem_end) = _markers(pem_marker) b64 = base64.encodestring(contents).replace(b('\n'), b('')) pem_lines = [pem_start] for block_start in range(0, len(b64), 64): block = b64[block_start:(block_start + 64)] pem_lines.append(block) pem_lines.append(pem_end) pem_lines.append(b('')) return b('\n').join(pem_lines)
[ "def", "save_pem", "(", "contents", ",", "pem_marker", ")", ":", "(", "pem_start", ",", "pem_end", ")", "=", "_markers", "(", "pem_marker", ")", "b64", "=", "base64", ".", "encodestring", "(", "contents", ")", ".", "replace", "(", "b", "(", "'\\n'", ")", ",", "b", "(", "''", ")", ")", "pem_lines", "=", "[", "pem_start", "]", "for", "block_start", "in", "range", "(", "0", ",", "len", "(", "b64", ")", ",", "64", ")", ":", "block", "=", "b64", "[", "block_start", ":", "(", "block_start", "+", "64", ")", "]", "pem_lines", ".", "append", "(", "block", ")", "pem_lines", ".", "append", "(", "pem_end", ")", "pem_lines", ".", "append", "(", "b", "(", "''", ")", ")", "return", "b", "(", "'\\n'", ")", ".", "join", "(", "pem_lines", ")" ]
saves a pem file .
train
false
38,576
def DeadlockWrap(function, *_args, **_kwargs): sleeptime = _deadlock_MinSleepTime max_retries = _kwargs.get('max_retries', (-1)) if ('max_retries' in _kwargs): del _kwargs['max_retries'] while True: try: return function(*_args, **_kwargs) except db.DBLockDeadlockError: if _deadlock_VerboseFile: _deadlock_VerboseFile.write(('dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)) _sleep(sleeptime) sleeptime *= 2 if (sleeptime > _deadlock_MaxSleepTime): sleeptime = _deadlock_MaxSleepTime max_retries -= 1 if (max_retries == (-1)): raise
[ "def", "DeadlockWrap", "(", "function", ",", "*", "_args", ",", "**", "_kwargs", ")", ":", "sleeptime", "=", "_deadlock_MinSleepTime", "max_retries", "=", "_kwargs", ".", "get", "(", "'max_retries'", ",", "(", "-", "1", ")", ")", "if", "(", "'max_retries'", "in", "_kwargs", ")", ":", "del", "_kwargs", "[", "'max_retries'", "]", "while", "True", ":", "try", ":", "return", "function", "(", "*", "_args", ",", "**", "_kwargs", ")", "except", "db", ".", "DBLockDeadlockError", ":", "if", "_deadlock_VerboseFile", ":", "_deadlock_VerboseFile", ".", "write", "(", "(", "'dbutils.DeadlockWrap: sleeping %1.3f\\n'", "%", "sleeptime", ")", ")", "_sleep", "(", "sleeptime", ")", "sleeptime", "*=", "2", "if", "(", "sleeptime", ">", "_deadlock_MaxSleepTime", ")", ":", "sleeptime", "=", "_deadlock_MaxSleepTime", "max_retries", "-=", "1", "if", "(", "max_retries", "==", "(", "-", "1", ")", ")", ":", "raise" ]
deadlockwrap - automatically retries function in case of a database deadlock .
train
false
38,578
@contextlib.contextmanager def frequent_thread_switches(): interval = None if (not sys.platform.startswith('java')): if hasattr(sys, 'getswitchinterval'): interval = sys.getswitchinterval() sys.setswitchinterval(1e-06) else: interval = sys.getcheckinterval() sys.setcheckinterval(1) try: (yield) finally: if (not sys.platform.startswith('java')): if hasattr(sys, 'setswitchinterval'): sys.setswitchinterval(interval) else: sys.setcheckinterval(interval)
[ "@", "contextlib", ".", "contextmanager", "def", "frequent_thread_switches", "(", ")", ":", "interval", "=", "None", "if", "(", "not", "sys", ".", "platform", ".", "startswith", "(", "'java'", ")", ")", ":", "if", "hasattr", "(", "sys", ",", "'getswitchinterval'", ")", ":", "interval", "=", "sys", ".", "getswitchinterval", "(", ")", "sys", ".", "setswitchinterval", "(", "1e-06", ")", "else", ":", "interval", "=", "sys", ".", "getcheckinterval", "(", ")", "sys", ".", "setcheckinterval", "(", "1", ")", "try", ":", "(", "yield", ")", "finally", ":", "if", "(", "not", "sys", ".", "platform", ".", "startswith", "(", "'java'", ")", ")", ":", "if", "hasattr", "(", "sys", ",", "'setswitchinterval'", ")", ":", "sys", ".", "setswitchinterval", "(", "interval", ")", "else", ":", "sys", ".", "setcheckinterval", "(", "interval", ")" ]
make concurrency bugs more likely to manifest .
train
false
38,580
def cumulative_distribution(distribution): cdf = [0.0] psum = float(sum(distribution)) for i in range(0, len(distribution)): cdf.append((cdf[i] + (distribution[i] / psum))) return cdf
[ "def", "cumulative_distribution", "(", "distribution", ")", ":", "cdf", "=", "[", "0.0", "]", "psum", "=", "float", "(", "sum", "(", "distribution", ")", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "distribution", ")", ")", ":", "cdf", ".", "append", "(", "(", "cdf", "[", "i", "]", "+", "(", "distribution", "[", "i", "]", "/", "psum", ")", ")", ")", "return", "cdf" ]
return cumulative distribution function for the given image .
train
false
38,581
def GeneratePasswordHash(password): if (len(password) < 8): raise InvalidRequestError(_PASSWORD_TOO_SHORT) salt = base64.b64encode(os.urandom(16)) pwd_hash = HashPassword(password, salt) return (pwd_hash, salt)
[ "def", "GeneratePasswordHash", "(", "password", ")", ":", "if", "(", "len", "(", "password", ")", "<", "8", ")", ":", "raise", "InvalidRequestError", "(", "_PASSWORD_TOO_SHORT", ")", "salt", "=", "base64", ".", "b64encode", "(", "os", ".", "urandom", "(", "16", ")", ")", "pwd_hash", "=", "HashPassword", "(", "password", ",", "salt", ")", "return", "(", "pwd_hash", ",", "salt", ")" ]
generates a password hash from the given password str .
train
false
38,582
def normalize_DESeq2(input_path, out_path, DESeq_negatives_to_zero): tmp_bt = load_table(input_path) with tempfile.NamedTemporaryFile(dir=get_qiime_temp_dir(), prefix='QIIME-normalize-table-temp-table-', suffix='.biom') as temp_fh: temp_fh.write(tmp_bt.to_json('forR')) temp_fh.flush() run_DESeq2(temp_fh.name, out_path, DESeq_negatives_to_zero)
[ "def", "normalize_DESeq2", "(", "input_path", ",", "out_path", ",", "DESeq_negatives_to_zero", ")", ":", "tmp_bt", "=", "load_table", "(", "input_path", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "dir", "=", "get_qiime_temp_dir", "(", ")", ",", "prefix", "=", "'QIIME-normalize-table-temp-table-'", ",", "suffix", "=", "'.biom'", ")", "as", "temp_fh", ":", "temp_fh", ".", "write", "(", "tmp_bt", ".", "to_json", "(", "'forR'", ")", ")", "temp_fh", ".", "flush", "(", ")", "run_DESeq2", "(", "temp_fh", ".", "name", ",", "out_path", ",", "DESeq_negatives_to_zero", ")" ]
performs deseq2vs normalization on a single raw abundance otu matrix .
train
false
38,583
def static_slotname(instance): return instance.code
[ "def", "static_slotname", "(", "instance", ")", ":", "return", "instance", ".", "code" ]
returns a string to be used as the slot for the static placeholder field .
train
false
38,584
def csch(arg): return (1 / numpy.sinh(arg))
[ "def", "csch", "(", "arg", ")", ":", "return", "(", "1", "/", "numpy", ".", "sinh", "(", "arg", ")", ")" ]
hyperbolic cosecant .
train
false
38,585
@pytest.mark.network def test_git_with_tag_name_and_update(script, tmpdir): result = script.pip('install', '-e', ('%s#egg=pip-test-package' % local_checkout('git+http://github.com/pypa/pip-test-package.git', tmpdir.join('cache'))), expect_error=True) result.assert_installed('pip-test-package', with_files=['.git']) result = script.pip('install', '--global-option=--version', '-e', ('%s@0.1.2#egg=pip-test-package' % local_checkout('git+http://github.com/pypa/pip-test-package.git', tmpdir.join('cache'))), expect_error=True) assert ('0.1.2' in result.stdout)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_git_with_tag_name_and_update", "(", "script", ",", "tmpdir", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-e'", ",", "(", "'%s#egg=pip-test-package'", "%", "local_checkout", "(", "'git+http://github.com/pypa/pip-test-package.git'", ",", "tmpdir", ".", "join", "(", "'cache'", ")", ")", ")", ",", "expect_error", "=", "True", ")", "result", ".", "assert_installed", "(", "'pip-test-package'", ",", "with_files", "=", "[", "'.git'", "]", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'--global-option=--version'", ",", "'-e'", ",", "(", "'%s@0.1.2#egg=pip-test-package'", "%", "local_checkout", "(", "'git+http://github.com/pypa/pip-test-package.git'", ",", "tmpdir", ".", "join", "(", "'cache'", ")", ")", ")", ",", "expect_error", "=", "True", ")", "assert", "(", "'0.1.2'", "in", "result", ".", "stdout", ")" ]
test cloning a git repository and updating to a different version .
train
false
38,586
def delete_snapshots(name, *names, **kwargs): deleted = dict() for snap in _get_domain(name).listAllSnapshots(): if ((snap.getName() in names) or (not names)): deleted[snap.getName()] = _parse_snapshot_description(snap) snap.delete() return {'available': list_snapshots(name), 'deleted': deleted}
[ "def", "delete_snapshots", "(", "name", ",", "*", "names", ",", "**", "kwargs", ")", ":", "deleted", "=", "dict", "(", ")", "for", "snap", "in", "_get_domain", "(", "name", ")", ".", "listAllSnapshots", "(", ")", ":", "if", "(", "(", "snap", ".", "getName", "(", ")", "in", "names", ")", "or", "(", "not", "names", ")", ")", ":", "deleted", "[", "snap", ".", "getName", "(", ")", "]", "=", "_parse_snapshot_description", "(", "snap", ")", "snap", ".", "delete", "(", ")", "return", "{", "'available'", ":", "list_snapshots", "(", "name", ")", ",", "'deleted'", ":", "deleted", "}" ]
delete one or more snapshots of the given vm .
train
false
38,587
def _read_array(f, typecode, array_desc): if (typecode in [1, 3, 4, 5, 6, 9, 13, 14, 15]): if (typecode == 1): nbytes = _read_int32(f) if (nbytes != array_desc['nbytes']): warnings.warn('Not able to verify number of bytes from header') array = np.fromstring(f.read(array_desc['nbytes']), dtype=DTYPE_DICT[typecode]) elif (typecode in [2, 12]): array = np.fromstring(f.read((array_desc['nbytes'] * 2)), dtype=DTYPE_DICT[typecode])[1::2] else: array = [] for i in range(array_desc['nelements']): dtype = typecode data = _read_data(f, dtype) array.append(data) array = np.array(array, dtype=np.object_) if (array_desc['ndims'] > 1): dims = array_desc['dims'][:int(array_desc['ndims'])] dims.reverse() array = array.reshape(dims) _align_32(f) return array
[ "def", "_read_array", "(", "f", ",", "typecode", ",", "array_desc", ")", ":", "if", "(", "typecode", "in", "[", "1", ",", "3", ",", "4", ",", "5", ",", "6", ",", "9", ",", "13", ",", "14", ",", "15", "]", ")", ":", "if", "(", "typecode", "==", "1", ")", ":", "nbytes", "=", "_read_int32", "(", "f", ")", "if", "(", "nbytes", "!=", "array_desc", "[", "'nbytes'", "]", ")", ":", "warnings", ".", "warn", "(", "'Not able to verify number of bytes from header'", ")", "array", "=", "np", ".", "fromstring", "(", "f", ".", "read", "(", "array_desc", "[", "'nbytes'", "]", ")", ",", "dtype", "=", "DTYPE_DICT", "[", "typecode", "]", ")", "elif", "(", "typecode", "in", "[", "2", ",", "12", "]", ")", ":", "array", "=", "np", ".", "fromstring", "(", "f", ".", "read", "(", "(", "array_desc", "[", "'nbytes'", "]", "*", "2", ")", ")", ",", "dtype", "=", "DTYPE_DICT", "[", "typecode", "]", ")", "[", "1", ":", ":", "2", "]", "else", ":", "array", "=", "[", "]", "for", "i", "in", "range", "(", "array_desc", "[", "'nelements'", "]", ")", ":", "dtype", "=", "typecode", "data", "=", "_read_data", "(", "f", ",", "dtype", ")", "array", ".", "append", "(", "data", ")", "array", "=", "np", ".", "array", "(", "array", ",", "dtype", "=", "np", ".", "object_", ")", "if", "(", "array_desc", "[", "'ndims'", "]", ">", "1", ")", ":", "dims", "=", "array_desc", "[", "'dims'", "]", "[", ":", "int", "(", "array_desc", "[", "'ndims'", "]", ")", "]", "dims", ".", "reverse", "(", ")", "array", "=", "array", ".", "reshape", "(", "dims", ")", "_align_32", "(", "f", ")", "return", "array" ]
read an array of type typecode .
train
false
38,588
def mock_capa_module(): capa_module = Mock() capa_module.location.to_deprecated_string.return_value = 'i4x://Foo/bar/mock/abc' return capa_module
[ "def", "mock_capa_module", "(", ")", ":", "capa_module", "=", "Mock", "(", ")", "capa_module", ".", "location", ".", "to_deprecated_string", ".", "return_value", "=", "'i4x://Foo/bar/mock/abc'", "return", "capa_module" ]
capa response types needs just two things from the capa_module: location and track_function .
train
false
38,589
def get_official_languages(territory, regional=False, de_facto=False): territory = str(territory).upper() allowed_stati = set(('official',)) if regional: allowed_stati.add('official_regional') if de_facto: allowed_stati.add('de_facto_official') languages = get_global('territory_languages').get(territory, {}) pairs = [(info['population_percent'], language) for (language, info) in languages.items() if (info.get('official_status') in allowed_stati)] pairs.sort(reverse=True) return tuple((lang for (_, lang) in pairs))
[ "def", "get_official_languages", "(", "territory", ",", "regional", "=", "False", ",", "de_facto", "=", "False", ")", ":", "territory", "=", "str", "(", "territory", ")", ".", "upper", "(", ")", "allowed_stati", "=", "set", "(", "(", "'official'", ",", ")", ")", "if", "regional", ":", "allowed_stati", ".", "add", "(", "'official_regional'", ")", "if", "de_facto", ":", "allowed_stati", ".", "add", "(", "'de_facto_official'", ")", "languages", "=", "get_global", "(", "'territory_languages'", ")", ".", "get", "(", "territory", ",", "{", "}", ")", "pairs", "=", "[", "(", "info", "[", "'population_percent'", "]", ",", "language", ")", "for", "(", "language", ",", "info", ")", "in", "languages", ".", "items", "(", ")", "if", "(", "info", ".", "get", "(", "'official_status'", ")", "in", "allowed_stati", ")", "]", "pairs", ".", "sort", "(", "reverse", "=", "True", ")", "return", "tuple", "(", "(", "lang", "for", "(", "_", ",", "lang", ")", "in", "pairs", ")", ")" ]
get the official language(s) for the given territory .
train
false
38,591
def memorized_timedelta(seconds): try: return _timedelta_cache[seconds] except KeyError: delta = timedelta(seconds=seconds) _timedelta_cache[seconds] = delta return delta
[ "def", "memorized_timedelta", "(", "seconds", ")", ":", "try", ":", "return", "_timedelta_cache", "[", "seconds", "]", "except", "KeyError", ":", "delta", "=", "timedelta", "(", "seconds", "=", "seconds", ")", "_timedelta_cache", "[", "seconds", "]", "=", "delta", "return", "delta" ]
create only one instance of each distinct timedelta .
train
true
38,592
def OpenDocumentText(): doc = OpenDocument('application/vnd.oasis.opendocument.text') doc.text = Text() doc.body.addElement(doc.text) return doc
[ "def", "OpenDocumentText", "(", ")", ":", "doc", "=", "OpenDocument", "(", "'application/vnd.oasis.opendocument.text'", ")", "doc", ".", "text", "=", "Text", "(", ")", "doc", ".", "body", ".", "addElement", "(", "doc", ".", "text", ")", "return", "doc" ]
creates a text document .
train
false
38,594
def notify(conf, context, topic, msg, envelope): topic = topic.replace('.', '-') cast(conf, context, topic, msg, envelope=envelope)
[ "def", "notify", "(", "conf", ",", "context", ",", "topic", ",", "msg", ",", "envelope", ")", ":", "topic", "=", "topic", ".", "replace", "(", "'.'", ",", "'-'", ")", "cast", "(", "conf", ",", "context", ",", "topic", ",", "msg", ",", "envelope", "=", "envelope", ")" ]
sends a notification via rpc .
train
false
38,595
def get_template_context_processors(): context_processors = _builtin_context_processors context_processors += tuple(settings.DEFAULT_TEMPLATE_ENGINE['OPTIONS']['context_processors']) return tuple((import_string(path) for path in context_processors))
[ "def", "get_template_context_processors", "(", ")", ":", "context_processors", "=", "_builtin_context_processors", "context_processors", "+=", "tuple", "(", "settings", ".", "DEFAULT_TEMPLATE_ENGINE", "[", "'OPTIONS'", "]", "[", "'context_processors'", "]", ")", "return", "tuple", "(", "(", "import_string", "(", "path", ")", "for", "path", "in", "context_processors", ")", ")" ]
returns the context processors defined in settings .
train
false