Dataset columns:
  id_within_dataset     int64   (values 1 to 55.5k)
  snippet               string  (length 19 to 14.2k)
  tokens                list    (length 6 to 1.63k)
  nl                    string  (length 6 to 352)
  split_within_dataset  string  (1 value)
  is_duplicated         bool    (2 classes)
52,538
def update_app(id, config):
    if ('id' not in config):
        config['id'] = id
    config.pop('version', None)
    config.pop('fetch', None)
    data = json.dumps(config)
    try:
        response = salt.utils.http.query(
            '{0}/v2/apps/{1}?force=true'.format(_base_url(), id),
            method='PUT',
            decode_type='json',
            decode=True,
            data=data,
            header_dict={'Content-Type': 'application/json', 'Accept': 'application/json'})
        log.debug('update response: %s', response)
        return response['dict']
    except Exception as ex:
        log.error('unable to update marathon app: %s', get_error_message(ex))
        return {'exception': {'message': get_error_message(ex)}}
[ "def", "update_app", "(", "id", ",", "config", ")", ":", "if", "(", "'id'", "not", "in", "config", ")", ":", "config", "[", "'id'", "]", "=", "id", "config", ".", "pop", "(", "'version'", ",", "None", ")", "config", ".", "pop", "(", "'fetch'", ",", "None", ")", "data", "=", "json", ".", "dumps", "(", "config", ")", "try", ":", "response", "=", "salt", ".", "utils", ".", "http", ".", "query", "(", "'{0}/v2/apps/{1}?force=true'", ".", "format", "(", "_base_url", "(", ")", ",", "id", ")", ",", "method", "=", "'PUT'", ",", "decode_type", "=", "'json'", ",", "decode", "=", "True", ",", "data", "=", "data", ",", "header_dict", "=", "{", "'Content-Type'", ":", "'application/json'", ",", "'Accept'", ":", "'application/json'", "}", ")", "log", ".", "debug", "(", "'update response: %s'", ",", "response", ")", "return", "response", "[", "'dict'", "]", "except", "Exception", "as", "ex", ":", "log", ".", "error", "(", "'unable to update marathon app: %s'", ",", "get_error_message", "(", "ex", ")", ")", "return", "{", "'exception'", ":", "{", "'message'", ":", "get_error_message", "(", "ex", ")", "}", "}" ]
update the specified app with the given configuration .
train
true
52,539
def is_prerequisite_courses_enabled():
    return (settings.FEATURES.get('ENABLE_PREREQUISITE_COURSES') and
            settings.FEATURES.get('MILESTONES_APP'))
[ "def", "is_prerequisite_courses_enabled", "(", ")", ":", "return", "(", "settings", ".", "FEATURES", ".", "get", "(", "'ENABLE_PREREQUISITE_COURSES'", ")", "and", "settings", ".", "FEATURES", ".", "get", "(", "'MILESTONES_APP'", ")", ")" ]
returns boolean indicating prerequisite courses enabled system wide or not .
train
false
52,540
def list2string(obj):
    return '.'.join([str(x) for x in obj])
[ "def", "list2string", "(", "obj", ")", ":", "return", "'.'", ".", "join", "(", "[", "str", "(", "x", ")", "for", "x", "in", "obj", "]", ")" ]
convert list to string .
train
false
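A quick sanity check of list2string above, assuming any iterable of stringable items (toy values, not from the source):

list2string([1, 2, 3])   # -> '1.2.3'
list2string(['a', 'b'])  # -> 'a.b'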
52,542
def signal_to_noise_oir_ccd(t, source_eps, sky_eps, dark_eps, rd, npix, gain=1.0):
    signal = ((t * source_eps) * gain)
    noise = np.sqrt(((t * ((source_eps * gain) + (npix * ((sky_eps * gain) + dark_eps)))) +
                     (npix * (rd ** 2))))
    return (signal / noise)
[ "def", "signal_to_noise_oir_ccd", "(", "t", ",", "source_eps", ",", "sky_eps", ",", "dark_eps", ",", "rd", ",", "npix", ",", "gain", "=", "1.0", ")", ":", "signal", "=", "(", "(", "t", "*", "source_eps", ")", "*", "gain", ")", "noise", "=", "np", ".", "sqrt", "(", "(", "(", "t", "*", "(", "(", "source_eps", "*", "gain", ")", "+", "(", "npix", "*", "(", "(", "sky_eps", "*", "gain", ")", "+", "dark_eps", ")", ")", ")", ")", "+", "(", "npix", "*", "(", "rd", "**", "2", ")", ")", ")", ")", "return", "(", "signal", "/", "noise", ")" ]
computes the signal to noise ratio for source being observed in the optical/ir using a ccd .
train
false
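signal_to_noise_oir_ccd above implements the standard CCD equation: signal = t * source_eps * gain and noise**2 = t * (source_eps * gain + npix * (sky_eps * gain + dark_eps)) + npix * rd**2. A minimal worked example with made-up values (rates in electrons per second; not from the source):

import numpy as np

t, source_eps, sky_eps, dark_eps = 1000.0, 5.0, 1.0, 0.02  # hypothetical exposure and rates
rd, npix, gain = 3.0, 25, 1.0                              # hypothetical read noise, pixel count
signal = t * source_eps * gain                             # 5000 electrons
noise = np.sqrt(t * (source_eps * gain + npix * (sky_eps * gain + dark_eps)) + npix * rd ** 2)
print(signal / noise)                                      # ~28.5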
52,543
def get_python_version():
    return sys.version[:3]
[ "def", "get_python_version", "(", ")", ":", "return", "sys", ".", "version", "[", ":", "3", "]" ]
return a string containing the major and minor python version .
train
false
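get_python_version above slices the version banner, which breaks on two-digit minor versions: sys.version[:3] is '3.1' on Python 3.10. A sketch of a safer variant (an editor's suggestion, not part of the snippet's library):

import sys

def get_python_version_safe():
    # Build 'major.minor' from version_info rather than slicing the banner string.
    return '{}.{}'.format(*sys.version_info[:2])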
52,544
def extend_api(route='', api=None, base_url=''):
    def decorator(extend_with):
        apply_to_api = (hug.API(api) if api else hug.api.from_object(extend_with))
        for extended_api in extend_with():
            apply_to_api.extend(extended_api, route, base_url)
        return extend_with
    return decorator
[ "def", "extend_api", "(", "route", "=", "''", ",", "api", "=", "None", ",", "base_url", "=", "''", ")", ":", "def", "decorator", "(", "extend_with", ")", ":", "apply_to_api", "=", "(", "hug", ".", "API", "(", "api", ")", "if", "api", "else", "hug", ".", "api", ".", "from_object", "(", "extend_with", ")", ")", "for", "extended_api", "in", "extend_with", "(", ")", ":", "apply_to_api", ".", "extend", "(", "extended_api", ",", "route", ",", "base_url", ")", "return", "extend_with", "return", "decorator" ]
extends the current api .
train
true
52,545
def test_parse_command():
    arg_str = 'selector click'
    expected_output = {'selector': 'selector', 'action': 'click', 'options': []}
    actual_output = screenshot._parse_command(arg_str)
    assert_equal(expected_output, actual_output)
    arg_str = 'selector submit'
    expected_output = {'selector': 'selector', 'action': 'submit', 'options': []}
    actual_output = screenshot._parse_command(arg_str)
    assert_equal(expected_output, actual_output)
    arg_str = 'selector send_keys'
    expected_output = {'selector': 'selector', 'action': 'send_keys', 'options': []}
    actual_output = screenshot._parse_command(arg_str)
    assert_equal(expected_output, actual_output)
    arg_str = 'selector send_keys keystroke1 keystroke2'
    expected_output = {'selector': 'selector', 'action': 'send_keys', 'options': ['keystroke1', 'keystroke2']}
    actual_output = screenshot._parse_command(arg_str)
    assert_equal(expected_output, actual_output)
[ "def", "test_parse_command", "(", ")", ":", "arg_str", "=", "'selector click'", "expected_output", "=", "{", "'selector'", ":", "'selector'", ",", "'action'", ":", "'click'", ",", "'options'", ":", "[", "]", "}", "actual_output", "=", "screenshot", ".", "_parse_command", "(", "arg_str", ")", "assert_equal", "(", "expected_output", ",", "actual_output", ")", "arg_str", "=", "'selector submit'", "expected_output", "=", "{", "'selector'", ":", "'selector'", ",", "'action'", ":", "'submit'", ",", "'options'", ":", "[", "]", "}", "actual_output", "=", "screenshot", ".", "_parse_command", "(", "arg_str", ")", "assert_equal", "(", "expected_output", ",", "actual_output", ")", "arg_str", "=", "'selector send_keys'", "expected_output", "=", "{", "'selector'", ":", "'selector'", ",", "'action'", ":", "'send_keys'", ",", "'options'", ":", "[", "]", "}", "actual_output", "=", "screenshot", ".", "_parse_command", "(", "arg_str", ")", "assert_equal", "(", "expected_output", ",", "actual_output", ")", "arg_str", "=", "'selector send_keys keystroke1 keystroke2'", "expected_output", "=", "{", "'selector'", ":", "'selector'", ",", "'action'", ":", "'send_keys'", ",", "'options'", ":", "[", "'keystroke1'", ",", "'keystroke2'", "]", "}", "actual_output", "=", "screenshot", ".", "_parse_command", "(", "arg_str", ")", "assert_equal", "(", "expected_output", ",", "actual_output", ")" ]
test screenshot .
train
false
52,546
def _tabulate(results, metrics, formats):
    column_width = max((max((len(k) for k in formats)) + 1), 8)
    first_width = max((len(k) for k in metrics))
    head_fmt = ('{:<{fw}s}' + ('{:>{cw}s}' * len(formats)))
    row_fmt = ('{:<{fw}s}' + ('{:>{cw}.3f}' * len(formats)))
    print(head_fmt.format('Metric', cw=column_width, fw=first_width, *formats))
    for (metric, row) in zip(metrics, results[:, :, (-1), (-1), (-1)]):
        print(row_fmt.format(metric, cw=column_width, fw=first_width, *row))
[ "def", "_tabulate", "(", "results", ",", "metrics", ",", "formats", ")", ":", "column_width", "=", "max", "(", "(", "max", "(", "(", "len", "(", "k", ")", "for", "k", "in", "formats", ")", ")", "+", "1", ")", ",", "8", ")", "first_width", "=", "max", "(", "(", "len", "(", "k", ")", "for", "k", "in", "metrics", ")", ")", "head_fmt", "=", "(", "'{:<{fw}s}'", "+", "(", "'{:>{cw}s}'", "*", "len", "(", "formats", ")", ")", ")", "row_fmt", "=", "(", "'{:<{fw}s}'", "+", "(", "'{:>{cw}.3f}'", "*", "len", "(", "formats", ")", ")", ")", "print", "(", "head_fmt", ".", "format", "(", "'Metric'", ",", "cw", "=", "column_width", ",", "fw", "=", "first_width", ",", "*", "formats", ")", ")", "for", "(", "metric", ",", "row", ")", "in", "zip", "(", "metrics", ",", "results", "[", ":", ",", ":", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", "]", ")", ":", "print", "(", "row_fmt", ".", "format", "(", "metric", ",", "cw", "=", "column_width", ",", "fw", "=", "first_width", ",", "*", "row", ")", ")" ]
prints results by metric and format uses the last value of other fields .
train
false
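_tabulate above relies on nested format specs, where the widths themselves ({cw}, {fw}) are supplied at format time. A minimal standalone demo of the same trick (toy values, not from the source):

head_fmt = '{:<{fw}s}' + '{:>{cw}s}' * 2
row_fmt = '{:<{fw}s}' + '{:>{cw}.3f}' * 2
print(head_fmt.format('Metric', 'csv', 'json', cw=8, fw=6))  # Metric     csv    json
print(row_fmt.format('time', 0.123, 4.5, cw=8, fw=6))        # time     0.123   4.500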
52,548
def new_uuid():
    return uuid.uuid4().hex
[ "def", "new_uuid", "(", ")", ":", "return", "uuid", ".", "uuid4", "(", ")", ".", "hex" ]
return a string uuid .
train
false
52,549
@cacheit
def _has_simple_delta(expr, index):
    if expr.has(KroneckerDelta):
        if _is_simple_delta(expr, index):
            return True
        if (expr.is_Add or expr.is_Mul):
            for arg in expr.args:
                if _has_simple_delta(arg, index):
                    return True
    return False
[ "@", "cacheit", "def", "_has_simple_delta", "(", "expr", ",", "index", ")", ":", "if", "expr", ".", "has", "(", "KroneckerDelta", ")", ":", "if", "_is_simple_delta", "(", "expr", ",", "index", ")", ":", "return", "True", "if", "(", "expr", ".", "is_Add", "or", "expr", ".", "is_Mul", ")", ":", "for", "arg", "in", "expr", ".", "args", ":", "if", "_has_simple_delta", "(", "arg", ",", "index", ")", ":", "return", "True", "return", "False" ]
returns true if expr is an expression that contains a kroneckerdelta that is simple in the index index .
train
false
52,550
def task_enable_docker(distribution):
    docker_tls_options = '--tlsverify --tlscacert=/etc/flocker/cluster.crt --tlscert=/etc/flocker/node.crt --tlskey=/etc/flocker/node.key -H=0.0.0.0:2376'
    unixsock_opt = '-H unix:///var/run/docker.sock'
    if is_systemd_distribution(distribution):
        conf_path = '/etc/systemd/system/docker.service.d/01-TimeoutStartSec.conf'
        return sequence([
            run('mkdir -p /etc/systemd/system/docker.service.d'),
            put(path=conf_path,
                content=dedent(' [Service]\n TimeoutStartSec=10min\n ')),
            put(path='/etc/systemd/system/docker.service.d/02-TLS.conf',
                content=dedent(' [Service]\n ExecStart=\n ExecStart=/usr/bin/dockerd {} {}\n '.format(unixsock_opt, docker_tls_options))),
            run_from_args(['systemctl', 'enable', 'docker.service'])])
    elif is_ubuntu(distribution):
        return sequence([put(path='/etc/default/docker',
                             content='DOCKER_OPTS="{} {}"'.format(unixsock_opt, docker_tls_options))])
    else:
        raise DistributionNotSupported(distribution=distribution)
[ "def", "task_enable_docker", "(", "distribution", ")", ":", "docker_tls_options", "=", "'--tlsverify --tlscacert=/etc/flocker/cluster.crt --tlscert=/etc/flocker/node.crt --tlskey=/etc/flocker/node.key -H=0.0.0.0:2376'", "unixsock_opt", "=", "'-H unix:///var/run/docker.sock'", "if", "is_systemd_distribution", "(", "distribution", ")", ":", "conf_path", "=", "'/etc/systemd/system/docker.service.d/01-TimeoutStartSec.conf'", "return", "sequence", "(", "[", "run", "(", "'mkdir -p /etc/systemd/system/docker.service.d'", ")", ",", "put", "(", "path", "=", "conf_path", ",", "content", "=", "dedent", "(", "' [Service]\\n TimeoutStartSec=10min\\n '", ")", ")", ",", "put", "(", "path", "=", "'/etc/systemd/system/docker.service.d/02-TLS.conf'", ",", "content", "=", "dedent", "(", "' [Service]\\n ExecStart=\\n ExecStart=/usr/bin/dockerd {} {}\\n '", ".", "format", "(", "unixsock_opt", ",", "docker_tls_options", ")", ")", ")", ",", "run_from_args", "(", "[", "'systemctl'", ",", "'enable'", ",", "'docker.service'", "]", ")", "]", ")", "elif", "is_ubuntu", "(", "distribution", ")", ":", "return", "sequence", "(", "[", "put", "(", "path", "=", "'/etc/default/docker'", ",", "content", "=", "'DOCKER_OPTS=\"{} {}\"'", ".", "format", "(", "unixsock_opt", ",", "docker_tls_options", ")", ")", "]", ")", "else", ":", "raise", "DistributionNotSupported", "(", "distribution", "=", "distribution", ")" ]
configure docker .
train
false
52,551
def force_tcg(force=True):
    global forceTCG
    forceTCG = force
[ "def", "force_tcg", "(", "force", "=", "True", ")", ":", "global", "forceTCG", "forceTCG", "=", "force" ]
prevent libguestfs trying to use kvm acceleration . it is a good idea to call this if it is known that kvm is not desired .
train
false
52,552
def WaitForFlow(flow_urn, token=None, timeout=DEFAULT_TIMEOUT, max_sleep_time=1, min_sleep_time=0.2, dampening_multiplier=0.9):
    start_time = time.time()
    sleep_time = max_sleep_time
    while True:
        with aff4.FACTORY.Open(flow_urn, token=token, aff4_type=flow.GRRFlow) as flow_obj:
            if ((time.time() - start_time) > timeout):
                logging.warn('Timed out after waiting %ss for %s!', timeout, flow_obj)
                raise IOError('Timed out trying to access client! Is it connected?')
            if (not flow_obj.GetRunner().IsRunning()):
                break
        sleep_time = max((sleep_time * dampening_multiplier), min_sleep_time)
        time.sleep(sleep_time)
        logging.debug('Waiting for %s, sleeping for %.3fs', flow_obj, sleep_time)
[ "def", "WaitForFlow", "(", "flow_urn", ",", "token", "=", "None", ",", "timeout", "=", "DEFAULT_TIMEOUT", ",", "max_sleep_time", "=", "1", ",", "min_sleep_time", "=", "0.2", ",", "dampening_multiplier", "=", "0.9", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "sleep_time", "=", "max_sleep_time", "while", "True", ":", "with", "aff4", ".", "FACTORY", ".", "Open", "(", "flow_urn", ",", "token", "=", "token", ",", "aff4_type", "=", "flow", ".", "GRRFlow", ")", "as", "flow_obj", ":", "if", "(", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", ">", "timeout", ")", ":", "logging", ".", "warn", "(", "'Timed out after waiting %ss for %s!'", ",", "timeout", ",", "flow_obj", ")", "raise", "IOError", "(", "'Timed out trying to access client! Is it connected?'", ")", "if", "(", "not", "flow_obj", ".", "GetRunner", "(", ")", ".", "IsRunning", "(", ")", ")", ":", "break", "sleep_time", "=", "max", "(", "(", "sleep_time", "*", "dampening_multiplier", ")", ",", "min_sleep_time", ")", "time", ".", "sleep", "(", "sleep_time", ")", "logging", ".", "debug", "(", "'Waiting for %s, sleeping for %.3fs'", ",", "flow_obj", ",", "sleep_time", ")" ]
waits for a flow to finish .
train
true
52,553
def clean_slashes(path):
    return (path.strip(u'/') if settings.APPEND_SLASH else path.lstrip(u'/'))
[ "def", "clean_slashes", "(", "path", ")", ":", "return", "(", "path", ".", "strip", "(", "u'/'", ")", "if", "settings", ".", "APPEND_SLASH", "else", "path", ".", "lstrip", "(", "u'/'", ")", ")" ]
canonicalize path by removing leading slashes and conditionally removing trailing slashes .
train
false
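The two branches of clean_slashes above differ only in the trailing slash; a sketch assuming a Django-style settings object:

# With settings.APPEND_SLASH = True:  strip both ends.
clean_slashes(u'/api/v1/')  # -> u'api/v1'
# With settings.APPEND_SLASH = False: strip the left end only.
clean_slashes(u'/api/v1/')  # -> u'api/v1/'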
52,554
def c_examines(client):
    cmds = [('examine %s' % obj) for obj in client.objs]
    if (not cmds):
        cmds = [('examine %s' % exi) for exi in client.exits]
    if (not cmds):
        cmds = 'examine me'
    return cmds
[ "def", "c_examines", "(", "client", ")", ":", "cmds", "=", "[", "(", "'examine %s'", "%", "obj", ")", "for", "obj", "in", "client", ".", "objs", "]", "if", "(", "not", "cmds", ")", ":", "cmds", "=", "[", "(", "'examine %s'", "%", "exi", ")", "for", "exi", "in", "client", ".", "exits", "]", "if", "(", "not", "cmds", ")", ":", "cmds", "=", "'examine me'", "return", "cmds" ]
examines various objects .
train
false
52,555
def setStateNormalDisabled(active, widget):
    if active:
        widget.config(state=settings.Tkinter.NORMAL)
    else:
        widget.config(state=settings.Tkinter.DISABLED)
[ "def", "setStateNormalDisabled", "(", "active", ",", "widget", ")", ":", "if", "active", ":", "widget", ".", "config", "(", "state", "=", "settings", ".", "Tkinter", ".", "NORMAL", ")", "else", ":", "widget", ".", "config", "(", "state", "=", "settings", ".", "Tkinter", ".", "DISABLED", ")" ]
set the state of the widget to normal if active and disabled if inactive .
train
false
52,556
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
def do_interface_list(cs, args):
    server = _find_server(cs, args.server)
    res = server.interface_list()
    if isinstance(res, list):
        _print_interfaces(res)
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "def", "do_interface_list", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "args", ".", "server", ")", "res", "=", "server", ".", "interface_list", "(", ")", "if", "isinstance", "(", "res", ",", "list", ")", ":", "_print_interfaces", "(", "res", ")" ]
list interfaces attached to a server .
train
false
52,558
def get_metadata_revisions(repository, repo, sort_revisions=True, reverse=False, downloadable=True):
    if downloadable:
        metadata_revisions = repository.downloadable_revisions
    else:
        metadata_revisions = repository.metadata_revisions
    changeset_tups = []
    for repository_metadata in metadata_revisions:
        ctx = hg_util.get_changectx_for_changeset(repo, repository_metadata.changeset_revision)
        if ctx:
            rev = ('%04d' % ctx.rev())
        else:
            rev = (-1)
        changeset_tups.append((rev, repository_metadata.changeset_revision))
    if sort_revisions:
        changeset_tups.sort(key=itemgetter(0), reverse=reverse)
    return changeset_tups
[ "def", "get_metadata_revisions", "(", "repository", ",", "repo", ",", "sort_revisions", "=", "True", ",", "reverse", "=", "False", ",", "downloadable", "=", "True", ")", ":", "if", "downloadable", ":", "metadata_revisions", "=", "repository", ".", "downloadable_revisions", "else", ":", "metadata_revisions", "=", "repository", ".", "metadata_revisions", "changeset_tups", "=", "[", "]", "for", "repository_metadata", "in", "metadata_revisions", ":", "ctx", "=", "hg_util", ".", "get_changectx_for_changeset", "(", "repo", ",", "repository_metadata", ".", "changeset_revision", ")", "if", "ctx", ":", "rev", "=", "(", "'%04d'", "%", "ctx", ".", "rev", "(", ")", ")", "else", ":", "rev", "=", "(", "-", "1", ")", "changeset_tups", ".", "append", "(", "(", "rev", ",", "repository_metadata", ".", "changeset_revision", ")", ")", "if", "sort_revisions", ":", "changeset_tups", ".", "sort", "(", "key", "=", "itemgetter", "(", "0", ")", ",", "reverse", "=", "reverse", ")", "return", "changeset_tups" ]
return a list of changesets for the provided repository .
train
false
52,559
def list_available():
    cmd = 'Import-Module ServerManager; Get-WindowsFeature -ErrorAction SilentlyContinue -WarningAction SilentlyContinue'
    return __salt__['cmd.shell'](cmd, shell='powershell')
[ "def", "list_available", "(", ")", ":", "cmd", "=", "'Import-Module ServerManager; Get-WindowsFeature -ErrorAction SilentlyContinue -WarningAction SilentlyContinue'", "return", "__salt__", "[", "'cmd.shell'", "]", "(", "cmd", ",", "shell", "=", "'powershell'", ")" ]
list available features to install :return: a list of available features :rtype: list cli example: .
train
false
52,560
def _list_fonts():
    (stdout_, stderr) = run_subprocess(['fc-list', ':scalable=true', 'family'])
    vals = [v.split(',')[0] for v in stdout_.strip().splitlines(False)]
    return vals
[ "def", "_list_fonts", "(", ")", ":", "(", "stdout_", ",", "stderr", ")", "=", "run_subprocess", "(", "[", "'fc-list'", ",", "':scalable=true'", ",", "'family'", "]", ")", "vals", "=", "[", "v", ".", "split", "(", "','", ")", "[", "0", "]", "for", "v", "in", "stdout_", ".", "strip", "(", ")", ".", "splitlines", "(", "False", ")", "]", "return", "vals" ]
list system fonts .
train
true
52,563
def resolve_user_restricted_access(document, resource):
    resource_def = app.config['DOMAIN'][resource]
    auth = resource_def['authentication']
    auth_field = resource_def['auth_field']
    if (auth and auth_field):
        request_auth_value = auth.get_request_auth_value()
        if request_auth_value:
            document[auth_field] = request_auth_value
[ "def", "resolve_user_restricted_access", "(", "document", ",", "resource", ")", ":", "resource_def", "=", "app", ".", "config", "[", "'DOMAIN'", "]", "[", "resource", "]", "auth", "=", "resource_def", "[", "'authentication'", "]", "auth_field", "=", "resource_def", "[", "'auth_field'", "]", "if", "(", "auth", "and", "auth_field", ")", ":", "request_auth_value", "=", "auth", ".", "get_request_auth_value", "(", ")", "if", "request_auth_value", ":", "document", "[", "auth_field", "]", "=", "request_auth_value" ]
adds user restricted access medadata to the document if applicable .
train
false
52,564
@np.deprecate(message='scipy.special.sph_yn is deprecated in scipy 0.18.0. Use scipy.special.spherical_yn instead. Note that the new function has a different signature.')
def sph_yn(n, z):
    if (not (isscalar(n) and isscalar(z))):
        raise ValueError('arguments must be scalars.')
    if ((n != floor(n)) or (n < 0)):
        raise ValueError('n must be a non-negative integer.')
    if (n < 1):
        n1 = 1
    else:
        n1 = n
    if (iscomplex(z) or less(z, 0)):
        (nm, jn, jnp, yn, ynp) = specfun.csphjy(n1, z)
    else:
        (nm, yn, ynp) = specfun.sphy(n1, z)
    return (yn[:(n + 1)], ynp[:(n + 1)])
[ "@", "np", ".", "deprecate", "(", "message", "=", "'scipy.special.sph_yn is deprecated in scipy 0.18.0. Use scipy.special.spherical_yn instead. Note that the new function has a different signature.'", ")", "def", "sph_yn", "(", "n", ",", "z", ")", ":", "if", "(", "not", "(", "isscalar", "(", "n", ")", "and", "isscalar", "(", "z", ")", ")", ")", ":", "raise", "ValueError", "(", "'arguments must be scalars.'", ")", "if", "(", "(", "n", "!=", "floor", "(", "n", ")", ")", "or", "(", "n", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "'n must be a non-negative integer.'", ")", "if", "(", "n", "<", "1", ")", ":", "n1", "=", "1", "else", ":", "n1", "=", "n", "if", "(", "iscomplex", "(", "z", ")", "or", "less", "(", "z", ",", "0", ")", ")", ":", "(", "nm", ",", "jn", ",", "jnp", ",", "yn", ",", "ynp", ")", "=", "specfun", ".", "csphjy", "(", "n1", ",", "z", ")", "else", ":", "(", "nm", ",", "yn", ",", "ynp", ")", "=", "specfun", ".", "sphy", "(", "n1", ",", "z", ")", "return", "(", "yn", "[", ":", "(", "n", "+", "1", ")", "]", ",", "ynp", "[", ":", "(", "n", "+", "1", ")", "]", ")" ]
compute spherical bessel function yn(z) and derivative .
train
false
52,565
def huge_node(node_prototype):
    image = DockerImage.from_string(u'postgresql')
    applications = {a.name: a
                    for a in [Application(name=u'postgres-{}'.format(i), image=image)
                              for i in range(_MANY_CONTAINERS)]}
    return node_prototype.set(applications=applications)
[ "def", "huge_node", "(", "node_prototype", ")", ":", "image", "=", "DockerImage", ".", "from_string", "(", "u'postgresql'", ")", "applications", "=", "{", "a", ".", "name", ":", "a", "for", "a", "in", "[", "Application", "(", "name", "=", "u'postgres-{}'", ".", "format", "(", "i", ")", ",", "image", "=", "image", ")", "for", "i", "in", "range", "(", "_MANY_CONTAINERS", ")", "]", "}", "return", "node_prototype", ".", "set", "(", "applications", "=", "applications", ")" ]
return a node with many applications .
train
false
52,566
def prompt_user_password():
    password = None
    if (hasattr(sys.stdin, 'isatty') and sys.stdin.isatty()):
        try:
            password = getpass.getpass('Password: ')
        except EOFError:
            pass
    return password
[ "def", "prompt_user_password", "(", ")", ":", "password", "=", "None", "if", "(", "hasattr", "(", "sys", ".", "stdin", ",", "'isatty'", ")", "and", "sys", ".", "stdin", ".", "isatty", "(", ")", ")", ":", "try", ":", "password", "=", "getpass", ".", "getpass", "(", "'Password: '", ")", "except", "EOFError", ":", "pass", "return", "password" ]
prompt user for a password .
train
false
52,567
@register.inclusion_tag('utilities/templatetags/utilization_graph.html')
def utilization_graph(utilization, warning_threshold=75, danger_threshold=90):
    return {'utilization': utilization, 'warning_threshold': warning_threshold, 'danger_threshold': danger_threshold}
[ "@", "register", ".", "inclusion_tag", "(", "'utilities/templatetags/utilization_graph.html'", ")", "def", "utilization_graph", "(", "utilization", ",", "warning_threshold", "=", "75", ",", "danger_threshold", "=", "90", ")", ":", "return", "{", "'utilization'", ":", "utilization", ",", "'warning_threshold'", ":", "warning_threshold", ",", "'danger_threshold'", ":", "danger_threshold", "}" ]
display a horizontal bar graph indicating a percentage of utilization .
train
false
52,569
def _fastq_illumina_convert_fastq_sanger(in_handle, out_handle, alphabet=None):
    mapping = ''.join((([chr(0) for ascii in range(0, 64)] +
                        [chr((33 + q)) for q in range(0, (62 + 1))]) +
                       [chr(0) for ascii in range(127, 256)]))
    assert (len(mapping) == 256)
    return _fastq_generic(in_handle, out_handle, mapping)
[ "def", "_fastq_illumina_convert_fastq_sanger", "(", "in_handle", ",", "out_handle", ",", "alphabet", "=", "None", ")", ":", "mapping", "=", "''", ".", "join", "(", "(", "(", "[", "chr", "(", "0", ")", "for", "ascii", "in", "range", "(", "0", ",", "64", ")", "]", "+", "[", "chr", "(", "(", "33", "+", "q", ")", ")", "for", "q", "in", "range", "(", "0", ",", "(", "62", "+", "1", ")", ")", "]", ")", "+", "[", "chr", "(", "0", ")", "for", "ascii", "in", "range", "(", "127", ",", "256", ")", "]", ")", ")", "assert", "(", "len", "(", "mapping", ")", "==", "256", ")", "return", "_fastq_generic", "(", "in_handle", ",", "out_handle", ",", "mapping", ")" ]
fast illumina 1 .
train
false
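The 256-character mapping built above is a translation table: Illumina 1.3+ stores quality q as chr(64 + q) and Sanger as chr(33 + q), so codes 64 through 126 are shifted down by 31 while everything outside that range becomes chr(0), marking an invalid input byte. A small check of that reading (not from the source):

mapping = ''.join([chr(0)] * 64 + [chr(33 + q) for q in range(63)] + [chr(0)] * 129)
assert len(mapping) == 256
assert mapping[ord('@')] == '!'  # Illumina q=0  ('@', 64)  -> Sanger q=0  ('!', 33)
assert mapping[ord('h')] == 'I'  # Illumina q=40 ('h', 104) -> Sanger q=40 ('I', 73)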
52,570
def get_sparse_matrix(M, N, frac=0.1):
    data = (np.zeros((M, N)) * 0.0)
    for i in range(int(((M * N) * frac))):
        x = np.random.randint(0, (M - 1))
        y = np.random.randint(0, (N - 1))
        data[(x, y)] = np.random.rand()
    return data
[ "def", "get_sparse_matrix", "(", "M", ",", "N", ",", "frac", "=", "0.1", ")", ":", "data", "=", "(", "np", ".", "zeros", "(", "(", "M", ",", "N", ")", ")", "*", "0.0", ")", "for", "i", "in", "range", "(", "int", "(", "(", "(", "M", "*", "N", ")", "*", "frac", ")", ")", ")", ":", "x", "=", "np", ".", "random", ".", "randint", "(", "0", ",", "(", "M", "-", "1", ")", ")", "y", "=", "np", ".", "random", ".", "randint", "(", "0", ",", "(", "N", "-", "1", ")", ")", "data", "[", "(", "x", ",", "y", ")", "]", "=", "np", ".", "random", ".", "rand", "(", ")", "return", "data" ]
return a *m* x *n* sparse matrix with *frac* elements randomly filled .
train
false
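Two quirks of get_sparse_matrix above: np.random.randint(0, M - 1) excludes M - 1, so the last row and column are never filled, and repeated (x, y) draws collide, so the realized fill fraction can fall below frac. A quick check, assuming the function is in scope:

import numpy as np

m = get_sparse_matrix(10, 10, frac=0.5)
print(np.count_nonzero(m) / m.size)  # <= 0.5 because of collisions
print(m[-1].sum(), m[:, -1].sum())   # both 0.0: last row/column never hit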
52,571
def check_start_date(user, days_early_for_beta, start, course_key):
    start_dates_disabled = settings.FEATURES['DISABLE_START_DATES']
    if (start_dates_disabled and (not is_masquerading_as_student(user, course_key))):
        return ACCESS_GRANTED
    else:
        now = datetime.now(UTC())
        if ((start is None) or in_preview_mode()):
            return ACCESS_GRANTED
        effective_start = adjust_start_date(user, days_early_for_beta, start, course_key)
        if (now > effective_start):
            return ACCESS_GRANTED
        return StartDateError(start)
[ "def", "check_start_date", "(", "user", ",", "days_early_for_beta", ",", "start", ",", "course_key", ")", ":", "start_dates_disabled", "=", "settings", ".", "FEATURES", "[", "'DISABLE_START_DATES'", "]", "if", "(", "start_dates_disabled", "and", "(", "not", "is_masquerading_as_student", "(", "user", ",", "course_key", ")", ")", ")", ":", "return", "ACCESS_GRANTED", "else", ":", "now", "=", "datetime", ".", "now", "(", "UTC", "(", ")", ")", "if", "(", "(", "start", "is", "None", ")", "or", "in_preview_mode", "(", ")", ")", ":", "return", "ACCESS_GRANTED", "effective_start", "=", "adjust_start_date", "(", "user", ",", "days_early_for_beta", ",", "start", ",", "course_key", ")", "if", "(", "now", ">", "effective_start", ")", ":", "return", "ACCESS_GRANTED", "return", "StartDateError", "(", "start", ")" ]
verifies whether the given user is allowed access given the start date and the beta offset for the given course .
train
false
52,572
@contextmanager
def temp_pipeline_engine(calendar, sids, random_seed, symbols=None):
    equity_info = make_simple_equity_info(sids=sids, start_date=calendar[0], end_date=calendar[(-1)], symbols=symbols)
    loader = make_seeded_random_loader(random_seed, calendar, sids)

    def get_loader(column):
        return loader

    with tmp_asset_finder(equities=equity_info) as finder:
        (yield SimplePipelineEngine(get_loader, calendar, finder))
[ "@", "contextmanager", "def", "temp_pipeline_engine", "(", "calendar", ",", "sids", ",", "random_seed", ",", "symbols", "=", "None", ")", ":", "equity_info", "=", "make_simple_equity_info", "(", "sids", "=", "sids", ",", "start_date", "=", "calendar", "[", "0", "]", ",", "end_date", "=", "calendar", "[", "(", "-", "1", ")", "]", ",", "symbols", "=", "symbols", ")", "loader", "=", "make_seeded_random_loader", "(", "random_seed", ",", "calendar", ",", "sids", ")", "def", "get_loader", "(", "column", ")", ":", "return", "loader", "with", "tmp_asset_finder", "(", "equities", "=", "equity_info", ")", "as", "finder", ":", "(", "yield", "SimplePipelineEngine", "(", "get_loader", ",", "calendar", ",", "finder", ")", ")" ]
a contextmanager that yields a simplepipelineengine holding a reference to an assetfinder generated via tmp_asset_finder .
train
false
52,573
def getPassword(prompt='Password: ', confirm=0, forceTTY=0, confirmPrompt='Confirm password: ', mismatchMessage="Passwords don't match."):
    isaTTY = (hasattr(sys.stdin, 'isatty') and sys.stdin.isatty())
    old = None
    try:
        if (not isaTTY):
            if forceTTY:
                try:
                    old = (sys.stdin, sys.stdout)
                    sys.stdin = sys.stdout = open('/dev/tty', 'r+')
                except:
                    raise RuntimeError('Cannot obtain a TTY')
            else:
                password = sys.stdin.readline()
                if (password[(-1)] == '\n'):
                    password = password[:(-1)]
                return password
        while 1:
            try1 = _getpass(prompt)
            if (not confirm):
                return try1
            try2 = _getpass(confirmPrompt)
            if (try1 == try2):
                return try1
            else:
                sys.stderr.write((mismatchMessage + '\n'))
    finally:
        if old:
            sys.stdin.close()
            (sys.stdin, sys.stdout) = old
[ "def", "getPassword", "(", "prompt", "=", "'Password: '", ",", "confirm", "=", "0", ",", "forceTTY", "=", "0", ",", "confirmPrompt", "=", "'Confirm password: '", ",", "mismatchMessage", "=", "\"Passwords don't match.\"", ")", ":", "isaTTY", "=", "(", "hasattr", "(", "sys", ".", "stdin", ",", "'isatty'", ")", "and", "sys", ".", "stdin", ".", "isatty", "(", ")", ")", "old", "=", "None", "try", ":", "if", "(", "not", "isaTTY", ")", ":", "if", "forceTTY", ":", "try", ":", "old", "=", "(", "sys", ".", "stdin", ",", "sys", ".", "stdout", ")", "sys", ".", "stdin", "=", "sys", ".", "stdout", "=", "open", "(", "'/dev/tty'", ",", "'r+'", ")", "except", ":", "raise", "RuntimeError", "(", "'Cannot obtain a TTY'", ")", "else", ":", "password", "=", "sys", ".", "stdin", ".", "readline", "(", ")", "if", "(", "password", "[", "(", "-", "1", ")", "]", "==", "'\\n'", ")", ":", "password", "=", "password", "[", ":", "(", "-", "1", ")", "]", "return", "password", "while", "1", ":", "try1", "=", "_getpass", "(", "prompt", ")", "if", "(", "not", "confirm", ")", ":", "return", "try1", "try2", "=", "_getpass", "(", "confirmPrompt", ")", "if", "(", "try1", "==", "try2", ")", ":", "return", "try1", "else", ":", "sys", ".", "stderr", ".", "write", "(", "(", "mismatchMessage", "+", "'\\n'", ")", ")", "finally", ":", "if", "old", ":", "sys", ".", "stdin", ".", "close", "(", ")", "(", "sys", ".", "stdin", ",", "sys", ".", "stdout", ")", "=", "old" ]
obtain a password by prompting or from stdin .
train
false
52,574
def get_configured_hdfs_client():
    config = hdfs()
    custom = config.client
    conf_usinf_snakebite = ['snakebite_with_hadoopcli_fallback', 'snakebite']
    if (six.PY3 and (custom in conf_usinf_snakebite)):
        warnings.warn('snakebite client not compatible with python3 at the momentfalling back on hadoopcli', stacklevel=2)
        return 'hadoopcli'
    return custom
[ "def", "get_configured_hdfs_client", "(", ")", ":", "config", "=", "hdfs", "(", ")", "custom", "=", "config", ".", "client", "conf_usinf_snakebite", "=", "[", "'snakebite_with_hadoopcli_fallback'", ",", "'snakebite'", "]", "if", "(", "six", ".", "PY3", "and", "(", "custom", "in", "conf_usinf_snakebite", ")", ")", ":", "warnings", ".", "warn", "(", "'snakebite client not compatible with python3 at the momentfalling back on hadoopcli'", ",", "stacklevel", "=", "2", ")", "return", "'hadoopcli'", "return", "custom" ]
this is a helper that fetches the configuration value for client in the [hdfs] section .
train
true
52,576
@keras_test
def test_temporal_classification():
    ((X_train, y_train), (X_test, y_test)) = get_test_data(nb_train=500, nb_test=500, input_shape=(3, 5), classification=True, nb_class=2)
    y_train = to_categorical(y_train)
    y_test = to_categorical(y_test)
    model = Sequential()
    model.add(GRU(y_train.shape[(-1)], input_shape=(X_train.shape[1], X_train.shape[2]), activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adagrad', metrics=['accuracy'])
    history = model.fit(X_train, y_train, nb_epoch=20, batch_size=32, validation_data=(X_test, y_test), verbose=0)
    assert (history.history['val_acc'][(-1)] >= 0.8)
[ "@", "keras_test", "def", "test_temporal_classification", "(", ")", ":", "(", "(", "X_train", ",", "y_train", ")", ",", "(", "X_test", ",", "y_test", ")", ")", "=", "get_test_data", "(", "nb_train", "=", "500", ",", "nb_test", "=", "500", ",", "input_shape", "=", "(", "3", ",", "5", ")", ",", "classification", "=", "True", ",", "nb_class", "=", "2", ")", "y_train", "=", "to_categorical", "(", "y_train", ")", "y_test", "=", "to_categorical", "(", "y_test", ")", "model", "=", "Sequential", "(", ")", "model", ".", "add", "(", "GRU", "(", "y_train", ".", "shape", "[", "(", "-", "1", ")", "]", ",", "input_shape", "=", "(", "X_train", ".", "shape", "[", "1", "]", ",", "X_train", ".", "shape", "[", "2", "]", ")", ",", "activation", "=", "'softmax'", ")", ")", "model", ".", "compile", "(", "loss", "=", "'categorical_crossentropy'", ",", "optimizer", "=", "'adagrad'", ",", "metrics", "=", "[", "'accuracy'", "]", ")", "history", "=", "model", ".", "fit", "(", "X_train", ",", "y_train", ",", "nb_epoch", "=", "20", ",", "batch_size", "=", "32", ",", "validation_data", "=", "(", "X_test", ",", "y_test", ")", ",", "verbose", "=", "0", ")", "assert", "(", "history", ".", "history", "[", "'val_acc'", "]", "[", "(", "-", "1", ")", "]", ">=", "0.8", ")" ]
classify temporal sequences of float numbers of length 3 into 2 classes using single layer of gru units and softmax applied to the last activations of the units .
train
false
52,577
def _get_system_username():
    import getpass
    username = None
    try:
        username = getpass.getuser()
    except KeyError:
        pass
    except ImportError:
        if win32:
            import win32api
            import win32security
            import win32profile
            username = win32api.GetUserName()
    return username
[ "def", "_get_system_username", "(", ")", ":", "import", "getpass", "username", "=", "None", "try", ":", "username", "=", "getpass", ".", "getuser", "(", ")", "except", "KeyError", ":", "pass", "except", "ImportError", ":", "if", "win32", ":", "import", "win32api", "import", "win32security", "import", "win32profile", "username", "=", "win32api", ".", "GetUserName", "(", ")", "return", "username" ]
obtain name of current system user .
train
true
52,578
def test_disposable():
    x = DisposableTest()
    with x:
        pass
    AreEqual(x.Called, True)
    Assert(hasattr(x, '__enter__'))
    Assert(hasattr(x, '__exit__'))
    x = DisposableTest()
    x.__enter__()
    try:
        pass
    finally:
        AreEqual(x.__exit__(None, None, None), None)
    AreEqual(x.Called, True)
    Assert(('__enter__' in dir(x)))
    Assert(('__exit__' in dir(x)))
    Assert(('__enter__' in dir(DisposableTest)))
    Assert(('__exit__' in dir(DisposableTest)))
[ "def", "test_disposable", "(", ")", ":", "x", "=", "DisposableTest", "(", ")", "with", "x", ":", "pass", "AreEqual", "(", "x", ".", "Called", ",", "True", ")", "Assert", "(", "hasattr", "(", "x", ",", "'__enter__'", ")", ")", "Assert", "(", "hasattr", "(", "x", ",", "'__exit__'", ")", ")", "x", "=", "DisposableTest", "(", ")", "x", ".", "__enter__", "(", ")", "try", ":", "pass", "finally", ":", "AreEqual", "(", "x", ".", "__exit__", "(", "None", ",", "None", ",", "None", ")", ",", "None", ")", "AreEqual", "(", "x", ".", "Called", ",", "True", ")", "Assert", "(", "(", "'__enter__'", "in", "dir", "(", "x", ")", ")", ")", "Assert", "(", "(", "'__exit__'", "in", "dir", "(", "x", ")", ")", ")", "Assert", "(", "(", "'__enter__'", "in", "dir", "(", "DisposableTest", ")", ")", ")", "Assert", "(", "(", "'__exit__'", "in", "dir", "(", "DisposableTest", ")", ")", ")" ]
classes implementing idisposable should automatically support the with statement .
train
false
52,579
def cleanup_mode(mode):
    if stat.S_ISLNK(mode):
        return stat.S_IFLNK
    elif stat.S_ISDIR(mode):
        return stat.S_IFDIR
    elif S_ISGITLINK(mode):
        return S_IFGITLINK
    ret = (stat.S_IFREG | 420)
    ret |= (mode & 73)
    return ret
[ "def", "cleanup_mode", "(", "mode", ")", ":", "if", "stat", ".", "S_ISLNK", "(", "mode", ")", ":", "return", "stat", ".", "S_IFLNK", "elif", "stat", ".", "S_ISDIR", "(", "mode", ")", ":", "return", "stat", ".", "S_IFDIR", "elif", "S_ISGITLINK", "(", "mode", ")", ":", "return", "S_IFGITLINK", "ret", "=", "(", "stat", ".", "S_IFREG", "|", "420", ")", "ret", "|=", "(", "mode", "&", "73", ")", "return", "ret" ]
cleanup a mode value .
train
false
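The decimal constants in cleanup_mode above are octal masks in disguise: 420 == 0o644 (rw-r--r--) and 73 == 0o111 (the three execute bits), so a regular file is normalized to mode 644 plus whichever execute bits it already had. A quick check, assuming the gitlink helpers (S_ISGITLINK, S_IFGITLINK) are in scope:

import stat

assert 420 == 0o644 and 73 == 0o111
print(oct(cleanup_mode(0o100755)))  # 0o100755: execute bits preserved
print(oct(cleanup_mode(0o100600)))  # 0o100644: group/other read re-added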
52,583
def load_environment(config_path, environment):
    env_plugins = dir_find(config_path, 'env.d', raise_if_missing=False)
    if (env_plugins is not False):
        _extra_config(user_defined_config=environment, base_dir=env_plugins)
    logger.debug('Loaded environment from {}'.format(config_path))
    return environment
[ "def", "load_environment", "(", "config_path", ",", "environment", ")", ":", "env_plugins", "=", "dir_find", "(", "config_path", ",", "'env.d'", ",", "raise_if_missing", "=", "False", ")", "if", "(", "env_plugins", "is", "not", "False", ")", ":", "_extra_config", "(", "user_defined_config", "=", "environment", ",", "base_dir", "=", "env_plugins", ")", "logger", ".", "debug", "(", "'Loaded environment from {}'", ".", "format", "(", "config_path", ")", ")", "return", "environment" ]
create an environment dictionary from config files .
train
false
52,586
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
    if (_ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files):
        if debug:
            print 'gyp file modified', build_file
        return True
    if (len(data[build_file]['included_files']) <= 1):
        return False
    for include_file in data[build_file]['included_files'][1:]:
        rel_include_file = _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
        if (_ToLocalPath(toplevel_dir, rel_include_file) in files):
            if debug:
                print 'included gyp file modified, gyp_file=', build_file, 'included file=', rel_include_file
            return True
    return False
[ "def", "_WasBuildFileModified", "(", "build_file", ",", "data", ",", "files", ",", "toplevel_dir", ")", ":", "if", "(", "_ToLocalPath", "(", "toplevel_dir", ",", "_ToGypPath", "(", "build_file", ")", ")", "in", "files", ")", ":", "if", "debug", ":", "print", "'gyp file modified'", ",", "build_file", "return", "True", "if", "(", "len", "(", "data", "[", "build_file", "]", "[", "'included_files'", "]", ")", "<=", "1", ")", ":", "return", "False", "for", "include_file", "in", "data", "[", "build_file", "]", "[", "'included_files'", "]", "[", "1", ":", "]", ":", "rel_include_file", "=", "_ToGypPath", "(", "gyp", ".", "common", ".", "UnrelativePath", "(", "include_file", ",", "build_file", ")", ")", "if", "(", "_ToLocalPath", "(", "toplevel_dir", ",", "rel_include_file", ")", "in", "files", ")", ":", "if", "debug", ":", "print", "'included gyp file modified, gyp_file='", ",", "build_file", ",", "'included file='", ",", "rel_include_file", "return", "True", "return", "False" ]
returns true if the build file |build_file| is either in |files| or one of the files included by |build_file| is in |files| .
train
false
52,587
def namedChildren(parent, nodeName):
    return [n for n in parent.childNodes if (getattr(n, 'tagName', '') == nodeName)]
[ "def", "namedChildren", "(", "parent", ",", "nodeName", ")", ":", "return", "[", "n", "for", "n", "in", "parent", ".", "childNodes", "if", "(", "getattr", "(", "n", ",", "'tagName'", ",", "''", ")", "==", "nodeName", ")", "]" ]
namedchildren -> children of parent that have tagname == nodename .
train
false
52,588
@pytest.mark.skipif('not HAS_YAML')
def test_regression_5604():
    t = Table()
    t.meta = {'foo': (5 * u.km), 'foo2': u.s}
    t['bar'] = ([7] * u.km)
    out = StringIO()
    t.write(out, format='ascii.ecsv')
    assert ('!astropy.units.Unit' in out.getvalue())
    assert ('!astropy.units.Quantity' in out.getvalue())
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "'not HAS_YAML'", ")", "def", "test_regression_5604", "(", ")", ":", "t", "=", "Table", "(", ")", "t", ".", "meta", "=", "{", "'foo'", ":", "(", "5", "*", "u", ".", "km", ")", ",", "'foo2'", ":", "u", ".", "s", "}", "t", "[", "'bar'", "]", "=", "(", "[", "7", "]", "*", "u", ".", "km", ")", "out", "=", "StringIO", "(", ")", "t", ".", "write", "(", "out", ",", "format", "=", "'ascii.ecsv'", ")", "assert", "(", "'!astropy.units.Unit'", "in", "out", ".", "getvalue", "(", ")", ")", "assert", "(", "'!astropy.units.Quantity'", "in", "out", ".", "getvalue", "(", ")", ")" ]
see URL for more .
train
false
52,591
def ensure_Image():
    global Image
    if (Image is None):
        raise RuntimeError("You are trying to use PIL-dependent functionality but don't have PIL installed.")
[ "def", "ensure_Image", "(", ")", ":", "global", "Image", "if", "(", "Image", "is", "None", ")", ":", "raise", "RuntimeError", "(", "\"You are trying to use PIL-dependent functionality but don't have PIL installed.\"", ")" ]
makes sure image has been imported from pil .
train
false
52,593
def demo_high_accuracy_rules():
    postag(num_sents=3000, min_acc=0.96, min_score=10)
[ "def", "demo_high_accuracy_rules", "(", ")", ":", "postag", "(", "num_sents", "=", "3000", ",", "min_acc", "=", "0.96", ",", "min_score", "=", "10", ")" ]
discard rules with low accuracy .
train
false
52,595
def cleanup_tempdir(the_dir):
    if os.path.exists(the_dir):
        shutil.rmtree(the_dir)
[ "def", "cleanup_tempdir", "(", "the_dir", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "the_dir", ")", ":", "shutil", ".", "rmtree", "(", "the_dir", ")" ]
called on process exit to remove a temp directory .
train
false
52,596
def build_paged_url(request):
    base = request.build_absolute_uri(request.path)
    items = [(k, v) for k in request.GET if (k != 'page') for v in request.GET.getlist(k) if v]
    qsa = urlencode(items)
    return (u'%s?%s' % (base, qsa))
[ "def", "build_paged_url", "(", "request", ")", ":", "base", "=", "request", ".", "build_absolute_uri", "(", "request", ".", "path", ")", "items", "=", "[", "(", "k", ",", "v", ")", "for", "k", "in", "request", ".", "GET", "if", "(", "k", "!=", "'page'", ")", "for", "v", "in", "request", ".", "GET", ".", "getlist", "(", "k", ")", "if", "v", "]", "qsa", "=", "urlencode", "(", "items", ")", "return", "(", "u'%s?%s'", "%", "(", "base", ",", "qsa", ")", ")" ]
build the url for the paginator .
train
false
52,597
def llite_fs(directory):
    for fs in os.listdir(directory):
        (fs_name, _, fs_id) = fs.partition('-')
        (yield fs_name)
[ "def", "llite_fs", "(", "directory", ")", ":", "for", "fs", "in", "os", ".", "listdir", "(", "directory", ")", ":", "(", "fs_name", ",", "_", ",", "fs_id", ")", "=", "fs", ".", "partition", "(", "'-'", ")", "(", "yield", "fs_name", ")" ]
return fs names based on folder names in llite directory .
train
false
52,598
def _expand_authorized_keys_path(path, user, home):
    converted_path = ''
    had_escape = False
    for char in path:
        if had_escape:
            had_escape = False
            if (char == '%'):
                converted_path += '%'
            elif (char == 'u'):
                converted_path += user
            elif (char == 'h'):
                converted_path += home
            else:
                error = 'AuthorizedKeysFile path: unknown token character "%{0}"'.format(char)
                raise CommandExecutionError(error)
            continue
        elif (char == '%'):
            had_escape = True
        else:
            converted_path += char
    if had_escape:
        error = "AuthorizedKeysFile path: Last character can't be escape character"
        raise CommandExecutionError(error)
    return converted_path
[ "def", "_expand_authorized_keys_path", "(", "path", ",", "user", ",", "home", ")", ":", "converted_path", "=", "''", "had_escape", "=", "False", "for", "char", "in", "path", ":", "if", "had_escape", ":", "had_escape", "=", "False", "if", "(", "char", "==", "'%'", ")", ":", "converted_path", "+=", "'%'", "elif", "(", "char", "==", "'u'", ")", ":", "converted_path", "+=", "user", "elif", "(", "char", "==", "'h'", ")", ":", "converted_path", "+=", "home", "else", ":", "error", "=", "'AuthorizedKeysFile path: unknown token character \"%{0}\"'", ".", "format", "(", "char", ")", "raise", "CommandExecutionError", "(", "error", ")", "continue", "elif", "(", "char", "==", "'%'", ")", ":", "had_escape", "=", "True", "else", ":", "converted_path", "+=", "char", "if", "had_escape", ":", "error", "=", "\"AuthorizedKeysFile path: Last character can't be escape character\"", "raise", "CommandExecutionError", "(", "error", ")", "return", "converted_path" ]
expand the authorizedkeysfile expression .
train
true
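_expand_authorized_keys_path above mirrors sshd's AuthorizedKeysFile tokens: %% is a literal percent, %u expands to the user, %h to the home directory, and any other escape raises. For example, assuming the function is in scope:

_expand_authorized_keys_path('%h/.ssh/authorized_keys', 'alice', '/home/alice')
# -> '/home/alice/.ssh/authorized_keys'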
52,599
def endswith_cr(line):
    return line.endswith(('\r' if isinstance(line, str) else '\r'))
[ "def", "endswith_cr", "(", "line", ")", ":", "return", "line", ".", "endswith", "(", "(", "'\\r'", "if", "isinstance", "(", "line", ",", "str", ")", "else", "'\\r'", ")", ")" ]
return true if line ends with .
train
false
52,602
def test_SAMPClient():
    proxy = SAMPHubProxy()
    SAMPClient(proxy)
[ "def", "test_SAMPClient", "(", ")", ":", "proxy", "=", "SAMPHubProxy", "(", ")", "SAMPClient", "(", "proxy", ")" ]
test that sampclient can be instantiated .
train
false
52,603
def find_fork_top_parent(pid):
    while True:
        ppid = find_parent_pid(pid)
        if (ppid and (ppid != pid) and pid_invoked_with_cmdline(ppid, get_cmdline_from_pid(pid))):
            pid = ppid
        else:
            return pid
[ "def", "find_fork_top_parent", "(", "pid", ")", ":", "while", "True", ":", "ppid", "=", "find_parent_pid", "(", "pid", ")", "if", "(", "ppid", "and", "(", "ppid", "!=", "pid", ")", "and", "pid_invoked_with_cmdline", "(", "ppid", ",", "get_cmdline_from_pid", "(", "pid", ")", ")", ")", ":", "pid", "=", "ppid", "else", ":", "return", "pid" ]
retrieve the pid of the top parent of the given pid through a fork .
train
false
52,604
def unmount_partition(device):
    p = partition(job=None, device=device)
    p.unmount(record=False)
[ "def", "unmount_partition", "(", "device", ")", ":", "p", "=", "partition", "(", "job", "=", "None", ",", "device", "=", "device", ")", "p", ".", "unmount", "(", "record", "=", "False", ")" ]
unmount a mounted partition .
train
false
52,605
def _getOneModelInfo(nupicModelID):
    return _iterModels([nupicModelID]).next()
[ "def", "_getOneModelInfo", "(", "nupicModelID", ")", ":", "return", "_iterModels", "(", "[", "nupicModelID", "]", ")", ".", "next", "(", ")" ]
a convenience function that retrieves information about a single model . see also: _itermodels() parameters: nupicmodelid: nupic modelid retval: _nupicmodelinfo instance for the given nupicmodelid .
train
false
52,609
def print_summary(junit_results, runner_name='ROSUNIT'):
    buff = StringIO()
    buff.write(((('[%s]' % runner_name) + ('-' * 71)) + '\n\n'))
    for tc_result in junit_results.test_case_results:
        buff.write(tc_result.description)
    buff.write('\nSUMMARY\n')
    if ((junit_results.num_errors + junit_results.num_failures) == 0):
        buff.write('\x1b[32m * RESULT: SUCCESS\x1b[0m\n')
    else:
        buff.write('\x1b[1;31m * RESULT: FAIL\x1b[0m\n')
    buff.write((' * TESTS: %s\n' % junit_results.num_tests))
    num_errors = junit_results.num_errors
    if num_errors:
        buff.write(('\x1b[1;31m * ERRORS: %s\x1b[0m\n' % num_errors))
    else:
        buff.write(' * ERRORS: 0\n')
    num_failures = junit_results.num_failures
    if num_failures:
        buff.write(('\x1b[1;31m * FAILURES: %s\x1b[0m\n' % num_failures))
    else:
        buff.write(' * FAILURES: 0\n')
    print(buff.getvalue())
[ "def", "print_summary", "(", "junit_results", ",", "runner_name", "=", "'ROSUNIT'", ")", ":", "buff", "=", "StringIO", "(", ")", "buff", ".", "write", "(", "(", "(", "(", "'[%s]'", "%", "runner_name", ")", "+", "(", "'-'", "*", "71", ")", ")", "+", "'\\n\\n'", ")", ")", "for", "tc_result", "in", "junit_results", ".", "test_case_results", ":", "buff", ".", "write", "(", "tc_result", ".", "description", ")", "buff", ".", "write", "(", "'\\nSUMMARY\\n'", ")", "if", "(", "(", "junit_results", ".", "num_errors", "+", "junit_results", ".", "num_failures", ")", "==", "0", ")", ":", "buff", ".", "write", "(", "'\\x1b[32m * RESULT: SUCCESS\\x1b[0m\\n'", ")", "else", ":", "buff", ".", "write", "(", "'\\x1b[1;31m * RESULT: FAIL\\x1b[0m\\n'", ")", "buff", ".", "write", "(", "(", "' * TESTS: %s\\n'", "%", "junit_results", ".", "num_tests", ")", ")", "num_errors", "=", "junit_results", ".", "num_errors", "if", "num_errors", ":", "buff", ".", "write", "(", "(", "'\\x1b[1;31m * ERRORS: %s\\x1b[0m\\n'", "%", "num_errors", ")", ")", "else", ":", "buff", ".", "write", "(", "' * ERRORS: 0\\n'", ")", "num_failures", "=", "junit_results", ".", "num_failures", "if", "num_failures", ":", "buff", ".", "write", "(", "(", "'\\x1b[1;31m * FAILURES: %s\\x1b[0m\\n'", "%", "num_failures", ")", ")", "else", ":", "buff", ".", "write", "(", "' * FAILURES: 0\\n'", ")", "print", "(", "buff", ".", "getvalue", "(", ")", ")" ]
print summary of junitxml results to stdout .
train
false
52,610
def kruskal(*args):
    output = argstoarray(*args)
    ranks = ma.masked_equal(rankdata(output, use_missing=False), 0)
    sumrk = ranks.sum((-1))
    ngrp = ranks.count((-1))
    ntot = ranks.count()
    H = (((12.0 / (ntot * (ntot + 1))) * ((sumrk ** 2) / ngrp).sum()) - (3 * (ntot + 1)))
    ties = count_tied_groups(ranks)
    T = (1.0 - (np.sum(((v * ((k ** 3) - k)) for (k, v) in iteritems(ties))) / float(((ntot ** 3) - ntot))))
    if (T == 0):
        raise ValueError('All numbers are identical in kruskal')
    H /= T
    df = (len(output) - 1)
    prob = distributions.chi2.sf(H, df)
    return KruskalResult(H, prob)
[ "def", "kruskal", "(", "*", "args", ")", ":", "output", "=", "argstoarray", "(", "*", "args", ")", "ranks", "=", "ma", ".", "masked_equal", "(", "rankdata", "(", "output", ",", "use_missing", "=", "False", ")", ",", "0", ")", "sumrk", "=", "ranks", ".", "sum", "(", "(", "-", "1", ")", ")", "ngrp", "=", "ranks", ".", "count", "(", "(", "-", "1", ")", ")", "ntot", "=", "ranks", ".", "count", "(", ")", "H", "=", "(", "(", "(", "12.0", "/", "(", "ntot", "*", "(", "ntot", "+", "1", ")", ")", ")", "*", "(", "(", "sumrk", "**", "2", ")", "/", "ngrp", ")", ".", "sum", "(", ")", ")", "-", "(", "3", "*", "(", "ntot", "+", "1", ")", ")", ")", "ties", "=", "count_tied_groups", "(", "ranks", ")", "T", "=", "(", "1.0", "-", "(", "np", ".", "sum", "(", "(", "(", "v", "*", "(", "(", "k", "**", "3", ")", "-", "k", ")", ")", "for", "(", "k", ",", "v", ")", "in", "iteritems", "(", "ties", ")", ")", ")", "/", "float", "(", "(", "(", "ntot", "**", "3", ")", "-", "ntot", ")", ")", ")", ")", "if", "(", "T", "==", "0", ")", ":", "raise", "ValueError", "(", "'All numbers are identical in kruskal'", ")", "H", "/=", "T", "df", "=", "(", "len", "(", "output", ")", "-", "1", ")", "prob", "=", "distributions", ".", "chi2", ".", "sf", "(", "H", ",", "df", ")", "return", "KruskalResult", "(", "H", ",", "prob", ")" ]
compute the kruskal-wallis h-test for independent samples .
train
false
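In symbols, kruskal above computes the tie-corrected Kruskal-Wallis statistic, with R_j the rank sum of group j, n_j its size, N the total observation count, and t_k the size of the k-th tied group:

H = \frac{12}{N(N+1)} \sum_j \frac{R_j^2}{n_j} - 3(N+1),
\qquad
H_\text{corrected} = H \Big/ \left(1 - \frac{\sum_k (t_k^3 - t_k)}{N^3 - N}\right)

and the p-value is the chi-squared survival function of the corrected H with (number of groups - 1) degrees of freedom.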
52,612
def _maybe_download_cifar10(path):
    if (not os.path.exists(path)):
        os.makedirs(path)
    filepath = os.path.join(path, CIFAR10_FILE)
    if (not os.path.exists(filepath)):
        print('Downloading CIFAR10 dataset to {}'.format(filepath))
        url = os.path.join(CIFAR10_URL, CIFAR10_FILE)
        (filepath, _) = urllib.request.urlretrieve(url, filepath)
        statinfo = os.stat(filepath)
        print('Successfully downloaded {} bytes'.format(statinfo.st_size))
        tarfile.open(filepath, 'r:gz').extractall(path)
[ "def", "_maybe_download_cifar10", "(", "path", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "os", ".", "makedirs", "(", "path", ")", "filepath", "=", "os", ".", "path", ".", "join", "(", "path", ",", "CIFAR10_FILE", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "filepath", ")", ")", ":", "print", "(", "'Downloading CIFAR10 dataset to {}'", ".", "format", "(", "filepath", ")", ")", "url", "=", "os", ".", "path", ".", "join", "(", "CIFAR10_URL", ",", "CIFAR10_FILE", ")", "(", "filepath", ",", "_", ")", "=", "urllib", ".", "request", ".", "urlretrieve", "(", "url", ",", "filepath", ")", "statinfo", "=", "os", ".", "stat", "(", "filepath", ")", "print", "(", "'Successfully downloaded {} bytes'", ".", "format", "(", "statinfo", ".", "st_size", ")", ")", "tarfile", ".", "open", "(", "filepath", ",", "'r:gz'", ")", ".", "extractall", "(", "path", ")" ]
download and extract the tarball from alexs website .
train
false
52,614
def bpf_select(fds_list, timeout=None):
    bpf_scks_buffered = list()
    select_fds = list()
    for tmp_fd in fds_list:
        if isBPFSocket(tmp_fd):
            if tmp_fd.buffered_frames():
                bpf_scks_buffered.append(tmp_fd)
                continue
        select_fds.append(tmp_fd)
    if len(select_fds):
        if (timeout is None):
            timeout = 0.05
        (ready_list, _, _) = select(select_fds, [], [], timeout)
        return (bpf_scks_buffered + ready_list)
    else:
        return bpf_scks_buffered
[ "def", "bpf_select", "(", "fds_list", ",", "timeout", "=", "None", ")", ":", "bpf_scks_buffered", "=", "list", "(", ")", "select_fds", "=", "list", "(", ")", "for", "tmp_fd", "in", "fds_list", ":", "if", "isBPFSocket", "(", "tmp_fd", ")", ":", "if", "tmp_fd", ".", "buffered_frames", "(", ")", ":", "bpf_scks_buffered", ".", "append", "(", "tmp_fd", ")", "continue", "select_fds", ".", "append", "(", "tmp_fd", ")", "if", "len", "(", "select_fds", ")", ":", "if", "(", "timeout", "is", "None", ")", ":", "timeout", "=", "0.05", "(", "ready_list", ",", "_", ",", "_", ")", "=", "select", "(", "select_fds", ",", "[", "]", ",", "[", "]", ",", "timeout", ")", "return", "(", "bpf_scks_buffered", "+", "ready_list", ")", "else", ":", "return", "bpf_scks_buffered" ]
a call to recv() can return several frames .
train
true
52,616
def souptest_sniff_node(node):
    if (node.tag is lxml.etree.Comment):
        if (node.text.strip() not in {'SC_ON', 'SC_OFF'}):
            raise SoupUnexpectedCommentError(node)
    elif isinstance(node.tag, basestring):
        tag_name = node.tag
        if (tag_name not in allowed_tags):
            raise SoupUnsupportedTagError(tag_name)
        for (attr, val) in node.items():
            if (attr not in allowed_tags[tag_name]):
                raise SoupUnsupportedAttrError(attr)
            if ((tag_name == 'a') and (attr == 'href')):
                lv = val.lower()
                if (not lv.startswith(valid_link_schemes)):
                    raise SoupUnsupportedSchemeError(val)
                parsed_url = urlparse.urlparse(lv)
                if (parsed_url.hostname and (len(parsed_url.hostname) > 255)):
                    raise SoupDetectedCrasherError(parsed_url.hostname)
                if ('%00' in urllib.unquote(parsed_url.path)):
                    raise SoupDetectedCrasherError(lv)
    else:
        raise SoupUnsupportedNodeError(node)
[ "def", "souptest_sniff_node", "(", "node", ")", ":", "if", "(", "node", ".", "tag", "is", "lxml", ".", "etree", ".", "Comment", ")", ":", "if", "(", "node", ".", "text", ".", "strip", "(", ")", "not", "in", "{", "'SC_ON'", ",", "'SC_OFF'", "}", ")", ":", "raise", "SoupUnexpectedCommentError", "(", "node", ")", "elif", "isinstance", "(", "node", ".", "tag", ",", "basestring", ")", ":", "tag_name", "=", "node", ".", "tag", "if", "(", "tag_name", "not", "in", "allowed_tags", ")", ":", "raise", "SoupUnsupportedTagError", "(", "tag_name", ")", "for", "(", "attr", ",", "val", ")", "in", "node", ".", "items", "(", ")", ":", "if", "(", "attr", "not", "in", "allowed_tags", "[", "tag_name", "]", ")", ":", "raise", "SoupUnsupportedAttrError", "(", "attr", ")", "if", "(", "(", "tag_name", "==", "'a'", ")", "and", "(", "attr", "==", "'href'", ")", ")", ":", "lv", "=", "val", ".", "lower", "(", ")", "if", "(", "not", "lv", ".", "startswith", "(", "valid_link_schemes", ")", ")", ":", "raise", "SoupUnsupportedSchemeError", "(", "val", ")", "parsed_url", "=", "urlparse", ".", "urlparse", "(", "lv", ")", "if", "(", "parsed_url", ".", "hostname", "and", "(", "len", "(", "parsed_url", ".", "hostname", ")", ">", "255", ")", ")", ":", "raise", "SoupDetectedCrasherError", "(", "parsed_url", ".", "hostname", ")", "if", "(", "'%00'", "in", "urllib", ".", "unquote", "(", "parsed_url", ".", "path", ")", ")", ":", "raise", "SoupDetectedCrasherError", "(", "lv", ")", "else", ":", "raise", "SoupUnsupportedNodeError", "(", "node", ")" ]
check that a node from an (x)html document passes the sniff test .
train
false
52,617
def datetime_ceil(dateval):
    if (dateval.microsecond > 0):
        return (dateval + timedelta(seconds=1, microseconds=(- dateval.microsecond)))
    return dateval
[ "def", "datetime_ceil", "(", "dateval", ")", ":", "if", "(", "dateval", ".", "microsecond", ">", "0", ")", ":", "return", "(", "dateval", "+", "timedelta", "(", "seconds", "=", "1", ",", "microseconds", "=", "(", "-", "dateval", ".", "microsecond", ")", ")", ")", "return", "dateval" ]
rounds the given datetime object upwards .
train
false
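datetime_ceil above rounds any sub-second remainder up to the next whole second by adding one second and cancelling the microseconds. For example, assuming the function is in scope:

from datetime import datetime

print(datetime_ceil(datetime(2016, 1, 1, 12, 0, 0, 500000)))  # 2016-01-01 12:00:01
print(datetime_ceil(datetime(2016, 1, 1, 12, 0, 0)))          # unchanged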
52,618
def set_database(function):
    def wrapper(*args, **kwargs):
        language = kwargs.get('language', 'en')
        path = kwargs.pop('database_path', None)
        if (not path):
            path = CONTENT_DATABASE_PATH.format(channel=kwargs.get('channel', CHANNEL), language=language)
        db = SqliteDatabase(path, pragmas=settings.CONTENT_DB_SQLITE_PRAGMAS)
        kwargs['db'] = db
        db.connect()
        with Using(db, [Item, AssessmentItem]):
            try:
                output = function(*args, **kwargs)
            except DoesNotExist:
                output = None
            except OperationalError:
                logging.error('No content database file found')
                raise
        db.close()
        return output
    return wrapper
[ "def", "set_database", "(", "function", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "language", "=", "kwargs", ".", "get", "(", "'language'", ",", "'en'", ")", "path", "=", "kwargs", ".", "pop", "(", "'database_path'", ",", "None", ")", "if", "(", "not", "path", ")", ":", "path", "=", "CONTENT_DATABASE_PATH", ".", "format", "(", "channel", "=", "kwargs", ".", "get", "(", "'channel'", ",", "CHANNEL", ")", ",", "language", "=", "language", ")", "db", "=", "SqliteDatabase", "(", "path", ",", "pragmas", "=", "settings", ".", "CONTENT_DB_SQLITE_PRAGMAS", ")", "kwargs", "[", "'db'", "]", "=", "db", "db", ".", "connect", "(", ")", "with", "Using", "(", "db", ",", "[", "Item", ",", "AssessmentItem", "]", ")", ":", "try", ":", "output", "=", "function", "(", "*", "args", ",", "**", "kwargs", ")", "except", "DoesNotExist", ":", "output", "=", "None", "except", "OperationalError", ":", "logging", ".", "error", "(", "'No content database file found'", ")", "raise", "db", ".", "close", "(", ")", "return", "output", "return", "wrapper" ]
sets the appropriate database for the ensuing model interactions .
train
false
52,619
def getNumTestPatterns(short=0): if (short == 0): LOGGER.info('Running short tests') numPatterns = numpy.random.randint(300, 600) numClasses = numpy.random.randint(50, 150) elif (short == 1): LOGGER.info('\nRunning medium tests') numPatterns = numpy.random.randint(500, 1500) numClasses = numpy.random.randint(50, 150) else: LOGGER.info('\nRunning long tests') numPatterns = numpy.random.randint(500, 3000) numClasses = numpy.random.randint(30, 1000) LOGGER.info('number of patterns is %s', numPatterns) LOGGER.info('number of classes is %s', numClasses) return (numPatterns, numClasses)
[ "def", "getNumTestPatterns", "(", "short", "=", "0", ")", ":", "if", "(", "short", "==", "0", ")", ":", "LOGGER", ".", "info", "(", "'Running short tests'", ")", "numPatterns", "=", "numpy", ".", "random", ".", "randint", "(", "300", ",", "600", ")", "numClasses", "=", "numpy", ".", "random", ".", "randint", "(", "50", ",", "150", ")", "elif", "(", "short", "==", "1", ")", ":", "LOGGER", ".", "info", "(", "'\\nRunning medium tests'", ")", "numPatterns", "=", "numpy", ".", "random", ".", "randint", "(", "500", ",", "1500", ")", "numClasses", "=", "numpy", ".", "random", ".", "randint", "(", "50", ",", "150", ")", "else", ":", "LOGGER", ".", "info", "(", "'\\nRunning long tests'", ")", "numPatterns", "=", "numpy", ".", "random", ".", "randint", "(", "500", ",", "3000", ")", "numClasses", "=", "numpy", ".", "random", ".", "randint", "(", "30", ",", "1000", ")", "LOGGER", ".", "info", "(", "'number of patterns is %s'", ",", "numPatterns", ")", "LOGGER", ".", "info", "(", "'number of classes is %s'", ",", "numClasses", ")", "return", "(", "numPatterns", ",", "numClasses", ")" ]
return the number of patterns and classes the test should use .
train
false
52,620
def _get_date_time_mgr(host_reference): return host_reference.configManager.dateTimeSystem
[ "def", "_get_date_time_mgr", "(", "host_reference", ")", ":", "return", "host_reference", ".", "configManager", ".", "dateTimeSystem" ]
helper function that returns a datetimemanager object .
train
false
52,621
def linear_momentum(frame, *body): if (not isinstance(frame, ReferenceFrame)): raise TypeError('Please specify a valid ReferenceFrame') else: linear_momentum_sys = Vector(0) for e in body: if isinstance(e, (RigidBody, Particle)): linear_momentum_sys += e.linear_momentum(frame) else: raise TypeError('*body must have only Particle or RigidBody') return linear_momentum_sys
[ "def", "linear_momentum", "(", "frame", ",", "*", "body", ")", ":", "if", "(", "not", "isinstance", "(", "frame", ",", "ReferenceFrame", ")", ")", ":", "raise", "TypeError", "(", "'Please specify a valid ReferenceFrame'", ")", "else", ":", "linear_momentum_sys", "=", "Vector", "(", "0", ")", "for", "e", "in", "body", ":", "if", "isinstance", "(", "e", ",", "(", "RigidBody", ",", "Particle", ")", ")", ":", "linear_momentum_sys", "+=", "e", ".", "linear_momentum", "(", "frame", ")", "else", ":", "raise", "TypeError", "(", "'*body must have only Particle or RigidBody'", ")", "return", "linear_momentum_sys" ]
linear momentum of the system .
train
false
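a sketch of a typical call, based on the public sympy.physics.mechanics api; the frame, point and particle names are illustrative:

from sympy.physics.mechanics import ReferenceFrame, Point, Particle, linear_momentum

N = ReferenceFrame('N')        # inertial frame
P = Point('P')
P.set_vel(N, 10 * N.x)         # point moving at 10 units/s along N.x
pa = Particle('pa', P, 1)      # particle of mass 1 located at P

print(linear_momentum(N, pa))  # expected output: 10*N.x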
52,622
def get_template_setting(template_key, default=None): templates_var = getattr(settings, u'TEMPLATES', None) if ((templates_var is not None) and (template_key in templates_var[0])): return templates_var[0][template_key] if (template_key == u'DIRS'): pre18_template_key = (u'TEMPLATES_%s' % template_key) value = getattr(settings, pre18_template_key, default) return value return default
[ "def", "get_template_setting", "(", "template_key", ",", "default", "=", "None", ")", ":", "templates_var", "=", "getattr", "(", "settings", ",", "u'TEMPLATES'", ",", "None", ")", "if", "(", "(", "templates_var", "is", "not", "None", ")", "and", "(", "template_key", "in", "templates_var", "[", "0", "]", ")", ")", ":", "return", "templates_var", "[", "0", "]", "[", "template_key", "]", "if", "(", "template_key", "==", "u'DIRS'", ")", ":", "pre18_template_key", "=", "(", "u'TEMPLATES_%s'", "%", "template_key", ")", "value", "=", "getattr", "(", "settings", ",", "pre18_template_key", ",", "default", ")", "return", "value", "return", "default" ]
read template settings pre and post django 1.8 .
train
false
52,623
def read_metadata(fd): metadata = '' key = 0 try: while True: metadata += getxattr(fd, ('%s%s' % (METADATA_KEY, (key or '')))) key += 1 except IOError: pass return pickle.loads(metadata)
[ "def", "read_metadata", "(", "fd", ")", ":", "metadata", "=", "''", "key", "=", "0", "try", ":", "while", "True", ":", "metadata", "+=", "getxattr", "(", "fd", ",", "(", "'%s%s'", "%", "(", "METADATA_KEY", ",", "(", "key", "or", "''", ")", ")", ")", ")", "key", "+=", "1", "except", "IOError", ":", "pass", "return", "pickle", ".", "loads", "(", "metadata", ")" ]
helper function to read the pickled metadata from an object file .
train
false
52,625
def write_combined_fasta(fasta_name_to_sample_id, fasta_files, output_dir='.', counter=0): combined_file_out = open(join((output_dir + '/'), 'combined_seqs.fna'), 'w') for curr_fasta in fasta_files: for (label, seq) in parse_fasta(open(curr_fasta, 'U')): combined_file_out.write(('>%s_%d %s\n' % (fasta_name_to_sample_id[basename(curr_fasta)], counter, label))) combined_file_out.write(('%s\n' % seq)) counter += 1
[ "def", "write_combined_fasta", "(", "fasta_name_to_sample_id", ",", "fasta_files", ",", "output_dir", "=", "'.'", ",", "counter", "=", "0", ")", ":", "combined_file_out", "=", "open", "(", "join", "(", "(", "output_dir", "+", "'/'", ")", ",", "'combined_seqs.fna'", ")", ",", "'w'", ")", "for", "curr_fasta", "in", "fasta_files", ":", "for", "(", "label", ",", "seq", ")", "in", "parse_fasta", "(", "open", "(", "curr_fasta", ",", "'U'", ")", ")", ":", "combined_file_out", ".", "write", "(", "(", "'>%s_%d %s\\n'", "%", "(", "fasta_name_to_sample_id", "[", "basename", "(", "curr_fasta", ")", "]", ",", "counter", ",", "label", ")", ")", ")", "combined_file_out", ".", "write", "(", "(", "'%s\\n'", "%", "seq", ")", ")", "counter", "+=", "1" ]
writes combined fasta file from the given per-sample fasta files, relabeling each sequence with its sample id and a running counter .
train
false
52,626
def _encode_url(url): data = bytes(QUrl.toPercentEncoding(url.toString(), ':/#?&+=@%*')) return data.decode('ascii')
[ "def", "_encode_url", "(", "url", ")", ":", "data", "=", "bytes", "(", "QUrl", ".", "toPercentEncoding", "(", "url", ".", "toString", "(", ")", ",", "':/#?&+=@%*'", ")", ")", "return", "data", ".", "decode", "(", "'ascii'", ")" ]
encode a qurl suitable to pass to qwebhistory .
train
false
52,628
def CFUNCTYPE(restype, *argtypes, **kw): flags = _FUNCFLAG_CDECL if kw.pop('use_errno', False): flags |= _FUNCFLAG_USE_ERRNO if kw.pop('use_last_error', False): flags |= _FUNCFLAG_USE_LASTERROR if kw: raise ValueError(('unexpected keyword argument(s) %s' % kw.keys())) try: return _c_functype_cache[(restype, argtypes, flags)] except KeyError: class CFunctionType(_CFuncPtr, ): _argtypes_ = argtypes _restype_ = restype _flags_ = flags _c_functype_cache[(restype, argtypes, flags)] = CFunctionType return CFunctionType
[ "def", "CFUNCTYPE", "(", "restype", ",", "*", "argtypes", ",", "**", "kw", ")", ":", "flags", "=", "_FUNCFLAG_CDECL", "if", "kw", ".", "pop", "(", "'use_errno'", ",", "False", ")", ":", "flags", "|=", "_FUNCFLAG_USE_ERRNO", "if", "kw", ".", "pop", "(", "'use_last_error'", ",", "False", ")", ":", "flags", "|=", "_FUNCFLAG_USE_LASTERROR", "if", "kw", ":", "raise", "ValueError", "(", "(", "'unexpected keyword argument(s) %s'", "%", "kw", ".", "keys", "(", ")", ")", ")", "try", ":", "return", "_c_functype_cache", "[", "(", "restype", ",", "argtypes", ",", "flags", ")", "]", "except", "KeyError", ":", "class", "CFunctionType", "(", "_CFuncPtr", ",", ")", ":", "_argtypes_", "=", "argtypes", "_restype_", "=", "restype", "_flags_", "=", "flags", "_c_functype_cache", "[", "(", "restype", ",", "argtypes", ",", "flags", ")", "]", "=", "CFunctionType", "return", "CFunctionType" ]
cfunctype -> function prototype .
train
false
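a short sketch of the standard ctypes pattern this factory supports; the comparison callback below is purely illustrative:

from ctypes import CFUNCTYPE, POINTER, c_int

# prototype for int (*)(const int*, const int*), e.g. a C qsort comparator
CMPFUNC = CFUNCTYPE(c_int, POINTER(c_int), POINTER(c_int))

def py_cmp(a, b):
    # dereference both int pointers and compare their values
    return a[0] - b[0]

cmp_callback = CMPFUNC(py_cmp)  # wrap the python function as a C callable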
52,629
def in2out(*local_opts, **kwargs): name = (kwargs and kwargs.pop('name', None)) if (len(local_opts) > 1): local_opts = LocalOptGroup(*local_opts) else: (local_opts,) = local_opts if (not name): name = local_opts.__name__ ret = TopoOptimizer(local_opts, order='in_to_out', failure_callback=TopoOptimizer.warn_inplace, **kwargs) if name: ret.__name__ = name return ret
[ "def", "in2out", "(", "*", "local_opts", ",", "**", "kwargs", ")", ":", "name", "=", "(", "kwargs", "and", "kwargs", ".", "pop", "(", "'name'", ",", "None", ")", ")", "if", "(", "len", "(", "local_opts", ")", ">", "1", ")", ":", "local_opts", "=", "LocalOptGroup", "(", "*", "local_opts", ")", "else", ":", "(", "local_opts", ",", ")", "=", "local_opts", "if", "(", "not", "name", ")", ":", "name", "=", "local_opts", ".", "__name__", "ret", "=", "TopoOptimizer", "(", "local_opts", ",", "order", "=", "'in_to_out'", ",", "failure_callback", "=", "TopoOptimizer", ".", "warn_inplace", ",", "**", "kwargs", ")", "if", "name", ":", "ret", ".", "__name__", "=", "name", "return", "ret" ]
uses the topooptimizer from the input nodes to output nodes of the graph .
train
false
52,630
@content_type('application/json') def json_camelcase(content, **kwargs): return json(_camelcase(content), **kwargs)
[ "@", "content_type", "(", "'application/json'", ")", "def", "json_camelcase", "(", "content", ",", "**", "kwargs", ")", ":", "return", "json", "(", "_camelcase", "(", "content", ")", ",", "**", "kwargs", ")" ]
json with all keys camelcased .
train
false
52,631
def broker_tops(days=5, retry_count=3, pause=0.001): if (ct._check_lhb_input(days) is True): ct._write_head() df = _broker_tops(days, pageNo=1, retry_count=retry_count, pause=pause) return df
[ "def", "broker_tops", "(", "days", "=", "5", ",", "retry_count", "=", "3", ",", "pause", "=", "0.001", ")", ":", "if", "(", "ct", ".", "_check_lhb_input", "(", "days", ")", "is", "True", ")", ":", "ct", ".", "_write_head", "(", ")", "df", "=", "_broker_tops", "(", "days", ",", "pageNo", "=", "1", ",", "retry_count", "=", "retry_count", ",", "pause", "=", "pause", ")", "return", "df" ]
parameters days : int, number of days; counts how many times a broker appeared on the top list over the past n days; default is 5, other valid values are 10, 30 and 60. retry_count : int .
train
false
52,632
def model_mapper_factory(mapper_class, field_map): model_mapper = mapper_class(field_map) model_mapper.register_field_mapper('ForeignKey', RelationMapper(model_mapper)) model_mapper.register_field_mapper('OneToOneField', RelationMapper(model_mapper)) model_mapper.register_field_mapper('DecimalField', DecimalMapper(primitive.Decimal)) return model_mapper
[ "def", "model_mapper_factory", "(", "mapper_class", ",", "field_map", ")", ":", "model_mapper", "=", "mapper_class", "(", "field_map", ")", "model_mapper", ".", "register_field_mapper", "(", "'ForeignKey'", ",", "RelationMapper", "(", "model_mapper", ")", ")", "model_mapper", ".", "register_field_mapper", "(", "'OneToOneField'", ",", "RelationMapper", "(", "model_mapper", ")", ")", "model_mapper", ".", "register_field_mapper", "(", "'DecimalField'", ",", "DecimalMapper", "(", "primitive", ".", "Decimal", ")", ")", "return", "model_mapper" ]
factory for model mappers .
train
false
52,633
def lean_projection(expr): fields = expr.fields return _lean(expr, fields=fields)[0]
[ "def", "lean_projection", "(", "expr", ")", ":", "fields", "=", "expr", ".", "fields", "return", "_lean", "(", "expr", ",", "fields", "=", "fields", ")", "[", "0", "]" ]
insert projections to keep dataset as thin as possible .
train
false
52,634
def test_rgb_to_hsl_part_16(): assert (rgb_to_hsl(0, 51, 51) == (180, 100, 10)) assert (rgb_to_hsl(0, 102, 102) == (180, 100, 20)) assert (rgb_to_hsl(0, 153, 153) == (180, 100, 30)) assert (rgb_to_hsl(0, 204, 204) == (180, 100, 40)) assert (rgb_to_hsl(0, 255, 255) == (180, 100, 50)) assert (rgb_to_hsl(51, 255, 255) == (180, 100, 60)) assert (rgb_to_hsl(102, 255, 255) == (180, 100, 70)) assert (rgb_to_hsl(153, 255, 255) == (180, 100, 80)) assert (rgb_to_hsl(204, 255, 255) == (180, 100, 90))
[ "def", "test_rgb_to_hsl_part_16", "(", ")", ":", "assert", "(", "rgb_to_hsl", "(", "0", ",", "51", ",", "51", ")", "==", "(", "180", ",", "100", ",", "10", ")", ")", "assert", "(", "rgb_to_hsl", "(", "0", ",", "102", ",", "102", ")", "==", "(", "180", ",", "100", ",", "20", ")", ")", "assert", "(", "rgb_to_hsl", "(", "0", ",", "153", ",", "153", ")", "==", "(", "180", ",", "100", ",", "30", ")", ")", "assert", "(", "rgb_to_hsl", "(", "0", ",", "204", ",", "204", ")", "==", "(", "180", ",", "100", ",", "40", ")", ")", "assert", "(", "rgb_to_hsl", "(", "0", ",", "255", ",", "255", ")", "==", "(", "180", ",", "100", ",", "50", ")", ")", "assert", "(", "rgb_to_hsl", "(", "51", ",", "255", ",", "255", ")", "==", "(", "180", ",", "100", ",", "60", ")", ")", "assert", "(", "rgb_to_hsl", "(", "102", ",", "255", ",", "255", ")", "==", "(", "180", ",", "100", ",", "70", ")", ")", "assert", "(", "rgb_to_hsl", "(", "153", ",", "255", ",", "255", ")", "==", "(", "180", ",", "100", ",", "80", ")", ")", "assert", "(", "rgb_to_hsl", "(", "204", ",", "255", ",", "255", ")", "==", "(", "180", ",", "100", ",", "90", ")", ")" ]
test rgb to hsl color function .
train
false
52,635
def join_trigger(registry, xml_parent, data): jointrigger = XML.SubElement(xml_parent, 'join.JoinTrigger') joinProjectsText = ','.join(data.get('projects', [''])) XML.SubElement(jointrigger, 'joinProjects').text = joinProjectsText publishers = XML.SubElement(jointrigger, 'joinPublishers') for pub in data.get('publishers', []): for edited_node in create_publishers(registry, pub): publishers.append(edited_node) unstable = str(data.get('even-if-unstable', 'false')).lower() XML.SubElement(jointrigger, 'evenIfDownstreamUnstable').text = unstable
[ "def", "join_trigger", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "jointrigger", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'join.JoinTrigger'", ")", "joinProjectsText", "=", "','", ".", "join", "(", "data", ".", "get", "(", "'projects'", ",", "[", "''", "]", ")", ")", "XML", ".", "SubElement", "(", "jointrigger", ",", "'joinProjects'", ")", ".", "text", "=", "joinProjectsText", "publishers", "=", "XML", ".", "SubElement", "(", "jointrigger", ",", "'joinPublishers'", ")", "for", "pub", "in", "data", ".", "get", "(", "'publishers'", ",", "[", "]", ")", ":", "for", "edited_node", "in", "create_publishers", "(", "registry", ",", "pub", ")", ":", "publishers", ".", "append", "(", "edited_node", ")", "unstable", "=", "str", "(", "data", ".", "get", "(", "'even-if-unstable'", ",", "'false'", ")", ")", ".", "lower", "(", ")", "XML", ".", "SubElement", "(", "jointrigger", ",", "'evenIfDownstreamUnstable'", ")", ".", "text", "=", "unstable" ]
yaml: join-trigger; trigger a job after all the immediate downstream jobs have completed .
train
false
52,636
def quota_class_create(context, class_name, resource, limit): return IMPL.quota_class_create(context, class_name, resource, limit)
[ "def", "quota_class_create", "(", "context", ",", "class_name", ",", "resource", ",", "limit", ")", ":", "return", "IMPL", ".", "quota_class_create", "(", "context", ",", "class_name", ",", "resource", ",", "limit", ")" ]
create a quota class for the given name and resource .
train
false
52,637
def generate_configuration(directory): conf = osp.join(get_module_source_path('spyder.utils.help'), 'conf.py') layout = osp.join(osp.join(CONFDIR_PATH, 'templates'), 'layout.html') os.makedirs(osp.join(directory, 'templates')) os.makedirs(osp.join(directory, 'static')) shutil.copy(conf, directory) shutil.copy(layout, osp.join(directory, 'templates')) open(osp.join(directory, '__init__.py'), 'w').write('') open(osp.join(directory, 'static', 'empty'), 'w').write('')
[ "def", "generate_configuration", "(", "directory", ")", ":", "conf", "=", "osp", ".", "join", "(", "get_module_source_path", "(", "'spyder.utils.help'", ")", ",", "'conf.py'", ")", "layout", "=", "osp", ".", "join", "(", "osp", ".", "join", "(", "CONFDIR_PATH", ",", "'templates'", ")", ",", "'layout.html'", ")", "os", ".", "makedirs", "(", "osp", ".", "join", "(", "directory", ",", "'templates'", ")", ")", "os", ".", "makedirs", "(", "osp", ".", "join", "(", "directory", ",", "'static'", ")", ")", "shutil", ".", "copy", "(", "conf", ",", "directory", ")", "shutil", ".", "copy", "(", "layout", ",", "osp", ".", "join", "(", "directory", ",", "'templates'", ")", ")", "open", "(", "osp", ".", "join", "(", "directory", ",", "'__init__.py'", ")", ",", "'w'", ")", ".", "write", "(", "''", ")", "open", "(", "osp", ".", "join", "(", "directory", ",", "'static'", ",", "'empty'", ")", ",", "'w'", ")", ".", "write", "(", "''", ")" ]
generates a sphinx configuration in directory .
train
true
52,638
def DNSServiceReconfirmRecord(flags=0, interfaceIndex=kDNSServiceInterfaceIndexAny, fullname=_NO_DEFAULT, rrtype=_NO_DEFAULT, rrclass=kDNSServiceClass_IN, rdata=_NO_DEFAULT): _NO_DEFAULT.check(fullname) _NO_DEFAULT.check(rrtype) _NO_DEFAULT.check(rdata) (rdlen, rdata) = _string_to_length_and_void_p(rdata) _global_lock.acquire() try: _DNSServiceReconfirmRecord(flags, interfaceIndex, fullname, rrtype, rrclass, rdlen, rdata) finally: _global_lock.release()
[ "def", "DNSServiceReconfirmRecord", "(", "flags", "=", "0", ",", "interfaceIndex", "=", "kDNSServiceInterfaceIndexAny", ",", "fullname", "=", "_NO_DEFAULT", ",", "rrtype", "=", "_NO_DEFAULT", ",", "rrclass", "=", "kDNSServiceClass_IN", ",", "rdata", "=", "_NO_DEFAULT", ")", ":", "_NO_DEFAULT", ".", "check", "(", "fullname", ")", "_NO_DEFAULT", ".", "check", "(", "rrtype", ")", "_NO_DEFAULT", ".", "check", "(", "rdata", ")", "(", "rdlen", ",", "rdata", ")", "=", "_string_to_length_and_void_p", "(", "rdata", ")", "_global_lock", ".", "acquire", "(", ")", "try", ":", "_DNSServiceReconfirmRecord", "(", "flags", ",", "interfaceIndex", ",", "fullname", ",", "rrtype", ",", "rrclass", ",", "rdlen", ",", "rdata", ")", "finally", ":", "_global_lock", ".", "release", "(", ")" ]
instruct the daemon to verify the validity of a resource record that appears to be out of date .
train
false
52,639
def _encode_regex(name, value, dummy0, dummy1): flags = value.flags if (flags == 0): return ((('\x0b' + name) + _make_c_string_check(value.pattern)) + '\x00') elif (flags == re.UNICODE): return ((('\x0b' + name) + _make_c_string_check(value.pattern)) + 'u\x00') else: sflags = '' if (flags & re.IGNORECASE): sflags += 'i' if (flags & re.LOCALE): sflags += 'l' if (flags & re.MULTILINE): sflags += 'm' if (flags & re.DOTALL): sflags += 's' if (flags & re.UNICODE): sflags += 'u' if (flags & re.VERBOSE): sflags += 'x' sflags += '\x00' return ((('\x0b' + name) + _make_c_string_check(value.pattern)) + sflags)
[ "def", "_encode_regex", "(", "name", ",", "value", ",", "dummy0", ",", "dummy1", ")", ":", "flags", "=", "value", ".", "flags", "if", "(", "flags", "==", "0", ")", ":", "return", "(", "(", "(", "'\\x0b'", "+", "name", ")", "+", "_make_c_string_check", "(", "value", ".", "pattern", ")", ")", "+", "'\\x00'", ")", "elif", "(", "flags", "==", "re", ".", "UNICODE", ")", ":", "return", "(", "(", "(", "'\\x0b'", "+", "name", ")", "+", "_make_c_string_check", "(", "value", ".", "pattern", ")", ")", "+", "'u\\x00'", ")", "else", ":", "sflags", "=", "''", "if", "(", "flags", "&", "re", ".", "IGNORECASE", ")", ":", "sflags", "+=", "'i'", "if", "(", "flags", "&", "re", ".", "LOCALE", ")", ":", "sflags", "+=", "'l'", "if", "(", "flags", "&", "re", ".", "MULTILINE", ")", ":", "sflags", "+=", "'m'", "if", "(", "flags", "&", "re", ".", "DOTALL", ")", ":", "sflags", "+=", "'s'", "if", "(", "flags", "&", "re", ".", "UNICODE", ")", ":", "sflags", "+=", "'u'", "if", "(", "flags", "&", "re", ".", "VERBOSE", ")", ":", "sflags", "+=", "'x'", "sflags", "+=", "'\\x00'", "return", "(", "(", "(", "'\\x0b'", "+", "name", ")", "+", "_make_c_string_check", "(", "value", ".", "pattern", ")", ")", "+", "sflags", ")" ]
encode a python regex or bson regex object .
train
false
52,640
def register_logging_handler(callback): special_logging_handlers.append(callback)
[ "def", "register_logging_handler", "(", "callback", ")", ":", "special_logging_handlers", ".", "append", "(", "callback", ")" ]
registers a callback for log handling .
train
false
52,641
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
52,643
@disable_for_loaddata def ping_directories_handler(sender, **kwargs): entry = kwargs['instance'] if (entry.is_visible and settings.SAVE_PING_DIRECTORIES): for directory in settings.PING_DIRECTORIES: DirectoryPinger(directory, [entry])
[ "@", "disable_for_loaddata", "def", "ping_directories_handler", "(", "sender", ",", "**", "kwargs", ")", ":", "entry", "=", "kwargs", "[", "'instance'", "]", "if", "(", "entry", ".", "is_visible", "and", "settings", ".", "SAVE_PING_DIRECTORIES", ")", ":", "for", "directory", "in", "settings", ".", "PING_DIRECTORIES", ":", "DirectoryPinger", "(", "directory", ",", "[", "entry", "]", ")" ]
ping directories when an entry is saved .
train
true
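the handler above is presumably wired to a model-save signal; a minimal sketch of such a connection, where the app module and Entry model are hypothetical names not confirmed by the snippet:

from django.db.models.signals import post_save
from myblog.models import Entry  # hypothetical app and model

post_save.connect(ping_directories_handler, sender=Entry,
                  dispatch_uid='ping_directories_handler')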
52,644
def list_subscribe(t): (owner, slug) = get_slug() try: t.lists.subscribers.create(slug=slug, owner_screen_name=owner) printNicely(green('Done.')) except: debug_option() printNicely(light_magenta("I'm sorry you can not subscribe to this list."))
[ "def", "list_subscribe", "(", "t", ")", ":", "(", "owner", ",", "slug", ")", "=", "get_slug", "(", ")", "try", ":", "t", ".", "lists", ".", "subscribers", ".", "create", "(", "slug", "=", "slug", ",", "owner_screen_name", "=", "owner", ")", "printNicely", "(", "green", "(", "'Done.'", ")", ")", "except", ":", "debug_option", "(", ")", "printNicely", "(", "light_magenta", "(", "\"I'm sorry you can not subscribe to this list.\"", ")", ")" ]
subscribe to a list .
train
false
52,645
@require_authorized_admin @facility_from_request @render_to('coachreports/coach.html') def coach_reports(request, facility=None, zone_id=None): zone = get_object_or_None(Zone, pk=zone_id) if ((not zone) and settings.CENTRAL_SERVER): raise Http404('Zone not found.') if facility: facility_id = facility.id else: facility_id = None return {'facility_id': facility_id, 'zone_id': (zone.id if zone else None)}
[ "@", "require_authorized_admin", "@", "facility_from_request", "@", "render_to", "(", "'coachreports/coach.html'", ")", "def", "coach_reports", "(", "request", ",", "facility", "=", "None", ",", "zone_id", "=", "None", ")", ":", "zone", "=", "get_object_or_None", "(", "Zone", ",", "pk", "=", "zone_id", ")", "if", "(", "(", "not", "zone", ")", "and", "settings", ".", "CENTRAL_SERVER", ")", ":", "raise", "Http404", "(", "'Zone not found.'", ")", "if", "facility", ":", "facility_id", "=", "facility", ".", "id", "else", ":", "facility_id", "=", "None", "return", "{", "'facility_id'", ":", "facility_id", ",", "'zone_id'", ":", "(", "zone", ".", "id", "if", "zone", "else", "None", ")", "}" ]
landing page needs plotting context in order to generate the navbar .
train
false
52,646
def ensure_timestamp(func, argname, arg): try: return pd.Timestamp(arg) except ValueError as e: raise TypeError("{func}() couldn't convert argument {argname}={arg!r} to a pandas Timestamp.\nOriginal error was: {t}: {e}".format(func=_qualified_name(func), argname=argname, arg=arg, t=_qualified_name(type(e)), e=e))
[ "def", "ensure_timestamp", "(", "func", ",", "argname", ",", "arg", ")", ":", "try", ":", "return", "pd", ".", "Timestamp", "(", "arg", ")", "except", "ValueError", "as", "e", ":", "raise", "TypeError", "(", "\"{func}() couldn't convert argument {argname}={arg!r} to a pandas Timestamp.\\nOriginal error was: {t}: {e}\"", ".", "format", "(", "func", "=", "_qualified_name", "(", "func", ")", ",", "argname", "=", "argname", ",", "arg", "=", "arg", ",", "t", "=", "_qualified_name", "(", "type", "(", "e", ")", ")", ",", "e", "=", "e", ")", ")" ]
argument preprocessor that converts the input into a pandas timestamp object .
train
true
52,647
def get_connections_viewer(imgs, W1, W2): W2 = sort_layer2(W2) N1 = W1.shape[1] N = W2.shape[1] N = min(N, 100) count = get_elements_count(N, N1, W2) pv = create_connect_viewer(N, N1, imgs, count, W2) return pv
[ "def", "get_connections_viewer", "(", "imgs", ",", "W1", ",", "W2", ")", ":", "W2", "=", "sort_layer2", "(", "W2", ")", "N1", "=", "W1", ".", "shape", "[", "1", "]", "N", "=", "W2", ".", "shape", "[", "1", "]", "N", "=", "min", "(", "N", ",", "100", ")", "count", "=", "get_elements_count", "(", "N", ",", "N1", ",", "W2", ")", "pv", "=", "create_connect_viewer", "(", "N", ",", "N1", ",", "imgs", ",", "count", ",", "W2", ")", "return", "pv" ]
show connections between 2 hidden layers .
train
false
52,648
def upsert(context, data_dict): engine = _get_engine(data_dict) context['connection'] = engine.connect() timeout = context.get('query_timeout', _TIMEOUT) trans = context['connection'].begin() try: context['connection'].execute(u'SET LOCAL statement_timeout TO {0}'.format(timeout)) upsert_data(context, data_dict) trans.commit() return _unrename_json_field(data_dict) except IntegrityError as e: if (e.orig.pgcode == _PG_ERR_CODE['unique_violation']): raise ValidationError({'constraints': ['Cannot insert records or create index because of uniqueness constraint'], 'info': {'orig': str(e.orig), 'pgcode': e.orig.pgcode}}) raise except DataError as e: raise ValidationError({'data': e.message, 'info': {'orig': [str(e.orig)]}}) except DBAPIError as e: if (e.orig.pgcode == _PG_ERR_CODE['query_canceled']): raise ValidationError({'query': ['Query took too long']}) raise except Exception as e: trans.rollback() raise finally: context['connection'].close()
[ "def", "upsert", "(", "context", ",", "data_dict", ")", ":", "engine", "=", "_get_engine", "(", "data_dict", ")", "context", "[", "'connection'", "]", "=", "engine", ".", "connect", "(", ")", "timeout", "=", "context", ".", "get", "(", "'query_timeout'", ",", "_TIMEOUT", ")", "trans", "=", "context", "[", "'connection'", "]", ".", "begin", "(", ")", "try", ":", "context", "[", "'connection'", "]", ".", "execute", "(", "u'SET LOCAL statement_timeout TO {0}'", ".", "format", "(", "timeout", ")", ")", "upsert_data", "(", "context", ",", "data_dict", ")", "trans", ".", "commit", "(", ")", "return", "_unrename_json_field", "(", "data_dict", ")", "except", "IntegrityError", "as", "e", ":", "if", "(", "e", ".", "orig", ".", "pgcode", "==", "_PG_ERR_CODE", "[", "'unique_violation'", "]", ")", ":", "raise", "ValidationError", "(", "{", "'constraints'", ":", "[", "'Cannot insert records or create index because of uniqueness constraint'", "]", ",", "'info'", ":", "{", "'orig'", ":", "str", "(", "e", ".", "orig", ")", ",", "'pgcode'", ":", "e", ".", "orig", ".", "pgcode", "}", "}", ")", "raise", "except", "DataError", "as", "e", ":", "raise", "ValidationError", "(", "{", "'data'", ":", "e", ".", "message", ",", "'info'", ":", "{", "'orig'", ":", "[", "str", "(", "e", ".", "orig", ")", "]", "}", "}", ")", "except", "DBAPIError", "as", "e", ":", "if", "(", "e", ".", "orig", ".", "pgcode", "==", "_PG_ERR_CODE", "[", "'query_canceled'", "]", ")", ":", "raise", "ValidationError", "(", "{", "'query'", ":", "[", "'Query took too long'", "]", "}", ")", "raise", "except", "Exception", "as", "e", ":", "trans", ".", "rollback", "(", ")", "raise", "finally", ":", "context", "[", "'connection'", "]", ".", "close", "(", ")" ]
this method combines upsert, insert and update on the datastore .
train
false
52,649
def getNumberOfBezierPoints(begin, end, xmlElement): numberOfBezierPoints = int(math.ceil((0.5 * evaluate.getSidesMinimumThreeBasedOnPrecision(abs((end - begin)), xmlElement)))) return evaluate.getEvaluatedIntDefault(numberOfBezierPoints, 'sides', xmlElement)
[ "def", "getNumberOfBezierPoints", "(", "begin", ",", "end", ",", "xmlElement", ")", ":", "numberOfBezierPoints", "=", "int", "(", "math", ".", "ceil", "(", "(", "0.5", "*", "evaluate", ".", "getSidesMinimumThreeBasedOnPrecision", "(", "abs", "(", "(", "end", "-", "begin", ")", ")", ",", "xmlElement", ")", ")", ")", ")", "return", "evaluate", ".", "getEvaluatedIntDefault", "(", "numberOfBezierPoints", ",", "'sides'", ",", "xmlElement", ")" ]
get the number of bezier points .
train
false
52,653
def treeify(seq): ret = {} for path in seq: cur = ret for node in path: cur = cur.setdefault(node, {}) return ret
[ "def", "treeify", "(", "seq", ")", ":", "ret", "=", "{", "}", "for", "path", "in", "seq", ":", "cur", "=", "ret", "for", "node", "in", "path", ":", "cur", "=", "cur", ".", "setdefault", "(", "node", ",", "{", "}", ")", "return", "ret" ]
build a nested dictionary tree from a sequence of paths .
train
false
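a quick illustration of treeify on a small input, assuming each element of seq is itself an iterable of path nodes:

def treeify(seq):
    # fold each path into a nested dict, sharing common prefixes
    ret = {}
    for path in seq:
        cur = ret
        for node in path:
            cur = cur.setdefault(node, {})
    return ret

paths = [('a', 'b', 'c'), ('a', 'b', 'd'), ('e',)]
print(treeify(paths))
# {'a': {'b': {'c': {}, 'd': {}}}, 'e': {}}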
52,654
def from_string(s): return reduce((lambda a, b: ((a << 8) | b)), map(int, s.split('.')))
[ "def", "from_string", "(", "s", ")", ":", "return", "reduce", "(", "(", "lambda", "a", ",", "b", ":", "(", "(", "a", "<<", "8", ")", "|", "b", ")", ")", ",", "map", "(", "int", ",", "s", ".", "split", "(", "'.'", ")", ")", ")" ]
convert dotted ipv4 address to integer .
train
false
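a quick check of from_string; under python 3 the reduce import below is required, which the snippet omits:

from functools import reduce

def from_string(s):
    # fold the four dotted octets into one 32-bit integer, big-endian
    return reduce(lambda a, b: (a << 8) | b, map(int, s.split('.')))

assert from_string('127.0.0.1') == 0x7F000001
assert from_string('192.168.1.1') == 0xC0A80101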
52,655
def _is_hidden_osx(path): file_stat = os.lstat(beets.util.syspath(path)) if (hasattr(file_stat, 'st_flags') and hasattr(stat, 'UF_HIDDEN')): return bool((file_stat.st_flags & stat.UF_HIDDEN)) else: return False
[ "def", "_is_hidden_osx", "(", "path", ")", ":", "file_stat", "=", "os", ".", "lstat", "(", "beets", ".", "util", ".", "syspath", "(", "path", ")", ")", "if", "(", "hasattr", "(", "file_stat", ",", "'st_flags'", ")", "and", "hasattr", "(", "stat", ",", "'UF_HIDDEN'", ")", ")", ":", "return", "bool", "(", "(", "file_stat", ".", "st_flags", "&", "stat", ".", "UF_HIDDEN", ")", ")", "else", ":", "return", "False" ]
return whether or not a file is hidden on os x .
train
false
52,656
@decorators.must_be_logged_in def prereg_draft_registrations(auth, **kwargs): campaign = kwargs.get('campaign', 'prereg') drafts = utils.drafts_for_user(auth.user, campaign) return {'draftRegistrations': [{'dateUpdated': iso8601format(draft.datetime_updated), 'dateInitiated': iso8601format(draft.datetime_initiated), 'node': {'title': draft.branched_from.title}, 'initiator': {'name': draft.initiator.fullname}, 'url': draft.branched_from.web_url_for('edit_draft_registration_page', draft_id=draft._id)} for draft in drafts]}
[ "@", "decorators", ".", "must_be_logged_in", "def", "prereg_draft_registrations", "(", "auth", ",", "**", "kwargs", ")", ":", "campaign", "=", "kwargs", ".", "get", "(", "'campaign'", ",", "'prereg'", ")", "drafts", "=", "utils", ".", "drafts_for_user", "(", "auth", ".", "user", ",", "campaign", ")", "return", "{", "'draftRegistrations'", ":", "[", "{", "'dateUpdated'", ":", "iso8601format", "(", "draft", ".", "datetime_updated", ")", ",", "'dateInitiated'", ":", "iso8601format", "(", "draft", ".", "datetime_initiated", ")", ",", "'node'", ":", "{", "'title'", ":", "draft", ".", "branched_from", ".", "title", "}", ",", "'initiator'", ":", "{", "'name'", ":", "draft", ".", "initiator", ".", "fullname", "}", ",", "'url'", ":", "draft", ".", "branched_from", ".", "web_url_for", "(", "'edit_draft_registration_page'", ",", "draft_id", "=", "draft", ".", "_id", ")", "}", "for", "draft", "in", "drafts", "]", "}" ]
api endpoint; returns prereg draft registrations the user can resume .
train
false
52,657
def get_formatd(r, formatd=None): if (formatd is None): formatd = dict() for (i, name) in enumerate(r.dtype.names): dt = r.dtype[name] format = formatd.get(name) if (format is None): format = defaultformatd.get(dt.type, FormatObj()) formatd[name] = format return formatd
[ "def", "get_formatd", "(", "r", ",", "formatd", "=", "None", ")", ":", "if", "(", "formatd", "is", "None", ")", ":", "formatd", "=", "dict", "(", ")", "for", "(", "i", ",", "name", ")", "in", "enumerate", "(", "r", ".", "dtype", ".", "names", ")", ":", "dt", "=", "r", ".", "dtype", "[", "name", "]", "format", "=", "formatd", ".", "get", "(", "name", ")", "if", "(", "format", "is", "None", ")", ":", "format", "=", "defaultformatd", ".", "get", "(", "dt", ".", "type", ",", "FormatObj", "(", ")", ")", "formatd", "[", "name", "]", "=", "format", "return", "formatd" ]
build a formatd guaranteed to have a key for every dtype name .
train
false
52,659
def test_scenario_aggregate_all_examples_blocks(): scenario = Scenario.from_string(OUTLINED_SCENARIO_WITH_MORE_THAN_ONE_EXAMPLES_BLOCK) assert_equals(scenario.outlines, [{'input_1': '20', 'input_2': '30', 'button': 'add', 'output': '50'}, {'input_1': '2', 'input_2': '5', 'button': 'add', 'output': '7'}, {'input_1': '0', 'input_2': '40', 'button': 'add', 'output': '40'}, {'input_1': '20', 'input_2': '33', 'button': 'add', 'output': '53'}, {'input_1': '12', 'input_2': '40', 'button': 'add', 'output': '52'}])
[ "def", "test_scenario_aggregate_all_examples_blocks", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "OUTLINED_SCENARIO_WITH_MORE_THAN_ONE_EXAMPLES_BLOCK", ")", "assert_equals", "(", "scenario", ".", "outlines", ",", "[", "{", "'input_1'", ":", "'20'", ",", "'input_2'", ":", "'30'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'50'", "}", ",", "{", "'input_1'", ":", "'2'", ",", "'input_2'", ":", "'5'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'7'", "}", ",", "{", "'input_1'", ":", "'0'", ",", "'input_2'", ":", "'40'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'40'", "}", ",", "{", "'input_1'", ":", "'20'", ",", "'input_2'", ":", "'33'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'53'", "}", ",", "{", "'input_1'", ":", "'12'", ",", "'input_2'", ":", "'40'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'52'", "}", "]", ")" ]
all of a scenario's examples blocks should be translated to outlines .
train
false
52,660
def rewrite_file(filename): with open(filename, 'rU') as file_obj: content_lines = file_obj.read().split('\n') new_content = [] for line in content_lines: new_content.append(transform_line(line)) with open(filename, 'w') as file_obj: file_obj.write('\n'.join(new_content))
[ "def", "rewrite_file", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'rU'", ")", "as", "file_obj", ":", "content_lines", "=", "file_obj", ".", "read", "(", ")", ".", "split", "(", "'\\n'", ")", "new_content", "=", "[", "]", "for", "line", "in", "content_lines", ":", "new_content", ".", "append", "(", "transform_line", "(", "line", ")", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "file_obj", ":", "file_obj", ".", "write", "(", "'\\n'", ".", "join", "(", "new_content", ")", ")" ]
rewrites a given pb2 module .
train
false
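rewrite_file depends on a transform_line helper that is not shown; a runnable sketch with a hypothetical transform, using plain 'r' mode since the 'rU' mode in the snippet was removed in recent python versions:

def transform_line(line):
    # hypothetical stand-in: the real pb2-module rewrite rule is not shown
    return line.replace('old_package', 'new_package')

def rewrite_file(filename):
    # read all lines, transform each, and write the file back in place
    with open(filename, 'r') as file_obj:
        content_lines = file_obj.read().split('\n')
    new_content = [transform_line(line) for line in content_lines]
    with open(filename, 'w') as file_obj:
        file_obj.write('\n'.join(new_content))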
52,661
@requires_application() def test_arrow_reactive(): if (os.getenv('APPVEYOR', '').lower() == 'true'): raise SkipTest('AppVeyor has unknown failure') with TestingCanvas() as c: arrow = visuals.Arrow(pos=vertices, arrows=arrows, connect='segments', parent=c.scene) arrow.arrow_type = 'stealth' assert_image_approved(c.render(), 'visuals/arrow_reactive1.png') arrow.arrow_size = 20 assert_image_approved(c.render(), 'visuals/arrow_reactive2.png')
[ "@", "requires_application", "(", ")", "def", "test_arrow_reactive", "(", ")", ":", "if", "(", "os", ".", "getenv", "(", "'APPVEYOR'", ",", "''", ")", ".", "lower", "(", ")", "==", "'true'", ")", ":", "raise", "SkipTest", "(", "'AppVeyor has unknown failure'", ")", "with", "TestingCanvas", "(", ")", "as", "c", ":", "arrow", "=", "visuals", ".", "Arrow", "(", "pos", "=", "vertices", ",", "arrows", "=", "arrows", ",", "connect", "=", "'segments'", ",", "parent", "=", "c", ".", "scene", ")", "arrow", ".", "arrow_type", "=", "'stealth'", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/arrow_reactive1.png'", ")", "arrow", ".", "arrow_size", "=", "20", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/arrow_reactive2.png'", ")" ]
tests the reactive behaviour of the arrowvisual properties .
train
false
52,663
def save_notebook(work_notebook, write_file): with open(write_file, 'w') as out_nb: json.dump(work_notebook, out_nb, indent=2)
[ "def", "save_notebook", "(", "work_notebook", ",", "write_file", ")", ":", "with", "open", "(", "write_file", ",", "'w'", ")", "as", "out_nb", ":", "json", ".", "dump", "(", "work_notebook", ",", "out_nb", ",", "indent", "=", "2", ")" ]
saves the jupyter work_notebook to write_file .
train
true
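save_notebook is a thin json.dump wrapper; a minimal sketch with a skeletal notebook dict, where the exact schema expected downstream is an assumption:

import json

def save_notebook(work_notebook, write_file):
    # serialize the notebook dict as pretty-printed json
    with open(write_file, 'w') as out_nb:
        json.dump(work_notebook, out_nb, indent=2)

# skeletal nbformat-4 structure; real notebooks carry more metadata
nb = {'cells': [], 'metadata': {}, 'nbformat': 4, 'nbformat_minor': 2}
save_notebook(nb, 'example.ipynb')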
52,664
def getBacktrace(empty='Empty backtrace.'): try: info = sys.exc_info() trace = traceback.format_exception(*info) sys.exc_clear() if (trace[0] != 'None\n'): return ''.join(trace) except: return 'Error while trying to get backtrace' return empty
[ "def", "getBacktrace", "(", "empty", "=", "'Empty backtrace.'", ")", ":", "try", ":", "info", "=", "sys", ".", "exc_info", "(", ")", "trace", "=", "traceback", ".", "format_exception", "(", "*", "info", ")", "sys", ".", "exc_clear", "(", ")", "if", "(", "trace", "[", "0", "]", "!=", "'None\\n'", ")", ":", "return", "''", ".", "join", "(", "trace", ")", "except", ":", "return", "'Error while trying to get backtrace'", "return", "empty" ]
try to get backtrace as string .
train
false
52,666
def _send_soap_request(location, upnp_schema, control_path, soap_fn, soap_message): headers = {'SOAPAction': '"urn:schemas-upnp-org:service:{schema}:1#{fn_name}"'.format(schema=upnp_schema, fn_name=soap_fn), 'Content-Type': 'text/xml'} logging.debug('Sending UPnP request to {0}:{1}...'.format(location.hostname, location.port)) conn = httplib.HTTPConnection(location.hostname, location.port) conn.request('POST', control_path, soap_message, headers) response = conn.getresponse() conn.close() return _parse_for_errors(response)
[ "def", "_send_soap_request", "(", "location", ",", "upnp_schema", ",", "control_path", ",", "soap_fn", ",", "soap_message", ")", ":", "headers", "=", "{", "'SOAPAction'", ":", "'\"urn:schemas-upnp-org:service:{schema}:1#{fn_name}\"'", ".", "format", "(", "schema", "=", "upnp_schema", ",", "fn_name", "=", "soap_fn", ")", ",", "'Content-Type'", ":", "'text/xml'", "}", "logging", ".", "debug", "(", "'Sending UPnP request to {0}:{1}...'", ".", "format", "(", "location", ".", "hostname", ",", "location", ".", "port", ")", ")", "conn", "=", "httplib", ".", "HTTPConnection", "(", "location", ".", "hostname", ",", "location", ".", "port", ")", "conn", ".", "request", "(", "'POST'", ",", "control_path", ",", "soap_message", ",", "headers", ")", "response", "=", "conn", ".", "getresponse", "(", ")", "conn", ".", "close", "(", ")", "return", "_parse_for_errors", "(", "response", ")" ]
send out soap request to upnp device and return a response .
train
false
52,667
def validate_ascii_metadata(params, **kwargs): metadata = params.get('Metadata') if ((not metadata) or (not isinstance(metadata, dict))): return for (key, value) in metadata.items(): try: key.encode('ascii') value.encode('ascii') except UnicodeEncodeError as e: error_msg = ('Non ascii characters found in S3 metadata for key "%s", value: "%s". \nS3 metadata can only contain ASCII characters. ' % (key, value)) raise ParamValidationError(report=error_msg)
[ "def", "validate_ascii_metadata", "(", "params", ",", "**", "kwargs", ")", ":", "metadata", "=", "params", ".", "get", "(", "'Metadata'", ")", "if", "(", "(", "not", "metadata", ")", "or", "(", "not", "isinstance", "(", "metadata", ",", "dict", ")", ")", ")", ":", "return", "for", "(", "key", ",", "value", ")", "in", "metadata", ".", "items", "(", ")", ":", "try", ":", "key", ".", "encode", "(", "'ascii'", ")", "value", ".", "encode", "(", "'ascii'", ")", "except", "UnicodeEncodeError", "as", "e", ":", "error_msg", "=", "(", "'Non ascii characters found in S3 metadata for key \"%s\", value: \"%s\". \\nS3 metadata can only contain ASCII characters. '", "%", "(", "key", ",", "value", ")", ")", "raise", "ParamValidationError", "(", "report", "=", "error_msg", ")" ]
verify s3 metadata only contains ascii characters .
train
false
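a self-contained sketch of the validation behaviour, assuming botocore's ParamValidationError is the exception used by the surrounding module:

from botocore.exceptions import ParamValidationError

def validate_ascii_metadata(params, **kwargs):
    # reject any s3 metadata key or value containing non-ascii characters
    metadata = params.get('Metadata')
    if not metadata or not isinstance(metadata, dict):
        return
    for key, value in metadata.items():
        try:
            key.encode('ascii')
            value.encode('ascii')
        except UnicodeEncodeError:
            raise ParamValidationError(
                report='Non ascii characters found in S3 metadata '
                       'for key "%s", value: "%s".' % (key, value))

validate_ascii_metadata({'Metadata': {'owner': 'alice'}})  # passes silently
try:
    validate_ascii_metadata({'Metadata': {'owner': 'café'}})
except ParamValidationError as err:
    print('rejected:', err)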
52,670
def _getFlacPath(path=None): global FLAC_PATH if (FLAC_PATH is None): if path: FLAC_PATH = path elif prefs.general['flac']: FLAC_PATH = prefs.general['flac'] else: FLAC_PATH = 'flac' try: (version, se) = core.shellCall([FLAC_PATH, '-v'], stderr=True) if se: raise MicrophoneError except Exception: msg = 'flac not installed (or wrong path in prefs); download from https://xiph.org/flac/download.html' logging.error(msg) raise MicrophoneError(msg) logging.info(('Using ' + version)) return FLAC_PATH
[ "def", "_getFlacPath", "(", "path", "=", "None", ")", ":", "global", "FLAC_PATH", "if", "(", "FLAC_PATH", "is", "None", ")", ":", "if", "path", ":", "FLAC_PATH", "=", "path", "elif", "prefs", ".", "general", "[", "'flac'", "]", ":", "FLAC_PATH", "=", "prefs", ".", "general", "[", "'flac'", "]", "else", ":", "FLAC_PATH", "=", "'flac'", "try", ":", "(", "version", ",", "se", ")", "=", "core", ".", "shellCall", "(", "[", "FLAC_PATH", ",", "'-v'", "]", ",", "stderr", "=", "True", ")", "if", "se", ":", "raise", "MicrophoneError", "except", "Exception", ":", "msg", "=", "'flac not installed (or wrong path in prefs); download from https://xiph.org/flac/download.html'", "logging", ".", "error", "(", "msg", ")", "raise", "MicrophoneError", "(", "msg", ")", "logging", ".", "info", "(", "(", "'Using '", "+", "version", ")", ")", "return", "FLAC_PATH" ]
return a path to flac binary .
train
false
52,671
def _cached_roots_legendre(n): if (n in _cached_roots_legendre.cache): return _cached_roots_legendre.cache[n] _cached_roots_legendre.cache[n] = roots_legendre(n) return _cached_roots_legendre.cache[n]
[ "def", "_cached_roots_legendre", "(", "n", ")", ":", "if", "(", "n", "in", "_cached_roots_legendre", ".", "cache", ")", ":", "return", "_cached_roots_legendre", ".", "cache", "[", "n", "]", "_cached_roots_legendre", ".", "cache", "[", "n", "]", "=", "roots_legendre", "(", "n", ")", "return", "_cached_roots_legendre", ".", "cache", "[", "n", "]" ]
cache roots_legendre results to speed up calls of the fixed_quad function .
train
false
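the memoization above hangs the cache off the function object itself; a self-contained sketch, noting that the cache dict must be initialized before the first call (scipy does this right after the definition):

from scipy.special import roots_legendre

def _cached_roots_legendre(n):
    # memoize gauss-legendre nodes and weights on the function object
    if n in _cached_roots_legendre.cache:
        return _cached_roots_legendre.cache[n]
    _cached_roots_legendre.cache[n] = roots_legendre(n)
    return _cached_roots_legendre.cache[n]

_cached_roots_legendre.cache = dict()  # required before the first call

x, w = _cached_roots_legendre(5)    # computed once and cached
x2, w2 = _cached_roots_legendre(5)  # repeat call is a pure dict lookup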
52,672
def test_randomize_corrmat(): a = rs.randn(30) b = (a + (rs.rand(30) * 3)) c = rs.randn(30) d = [a, b, c] (p_mat, dist) = algo.randomize_corrmat(d, tail='upper', corrected=False, return_dist=True) nose.tools.assert_greater(p_mat[(2, 0)], p_mat[(1, 0)]) corrmat = np.corrcoef(d) pctile = (100 - stats.percentileofscore(dist[(2, 1)], corrmat[(2, 1)])) nose.tools.assert_almost_equal((p_mat[(2, 1)] * 100), pctile) d[1] = ((- a) + rs.rand(30)) p_mat = algo.randomize_corrmat(d) nose.tools.assert_greater(0.05, p_mat[(1, 0)])
[ "def", "test_randomize_corrmat", "(", ")", ":", "a", "=", "rs", ".", "randn", "(", "30", ")", "b", "=", "(", "a", "+", "(", "rs", ".", "rand", "(", "30", ")", "*", "3", ")", ")", "c", "=", "rs", ".", "randn", "(", "30", ")", "d", "=", "[", "a", ",", "b", ",", "c", "]", "(", "p_mat", ",", "dist", ")", "=", "algo", ".", "randomize_corrmat", "(", "d", ",", "tail", "=", "'upper'", ",", "corrected", "=", "False", ",", "return_dist", "=", "True", ")", "nose", ".", "tools", ".", "assert_greater", "(", "p_mat", "[", "(", "2", ",", "0", ")", "]", ",", "p_mat", "[", "(", "1", ",", "0", ")", "]", ")", "corrmat", "=", "np", ".", "corrcoef", "(", "d", ")", "pctile", "=", "(", "100", "-", "stats", ".", "percentileofscore", "(", "dist", "[", "(", "2", ",", "1", ")", "]", ",", "corrmat", "[", "(", "2", ",", "1", ")", "]", ")", ")", "nose", ".", "tools", ".", "assert_almost_equal", "(", "(", "p_mat", "[", "(", "2", ",", "1", ")", "]", "*", "100", ")", ",", "pctile", ")", "d", "[", "1", "]", "=", "(", "(", "-", "a", ")", "+", "rs", ".", "rand", "(", "30", ")", ")", "p_mat", "=", "algo", ".", "randomize_corrmat", "(", "d", ")", "nose", ".", "tools", ".", "assert_greater", "(", "0.05", ",", "p_mat", "[", "(", "1", ",", "0", ")", "]", ")" ]
test the correctness of the correlation matrix p values .
train
false