Column summary:
id_within_dataset (int64): 1 to 55.5k
snippet (string lengths): 19 to 14.2k
tokens (list lengths): 6 to 1.63k
nl (string lengths): 6 to 352
split_within_dataset (string classes): 1 value
is_duplicated (bool): 2 classes
3,627
def hex_version():
    try:
        return _dot2int(__version__.split('-')[(-1)])
    except (NameError, ValueError):
        return 0
[ "def", "hex_version", "(", ")", ":", "try", ":", "return", "_dot2int", "(", "__version__", ".", "split", "(", "'-'", ")", "[", "(", "-", "1", ")", "]", ")", "except", "(", "NameError", ",", "ValueError", ")", ":", "return", "0" ]
return the version of these bindings in hex or 0 if unavailable .
train
false
3,628
def load_from_tree(name, version, release, arch, package_type, path):
    distro_def = DistroDef(name, version, release, arch)
    loader_class = DISTRO_PKG_INFO_LOADERS.get(package_type, None)
    if (loader_class is not None):
        loader = loader_class(path)
        distro_def.software_packages = [SoftwarePackage(*args) for args in loader.get_packages_info()]
        distro_def.software_packages_type = package_type
    return distro_def
[ "def", "load_from_tree", "(", "name", ",", "version", ",", "release", ",", "arch", ",", "package_type", ",", "path", ")", ":", "distro_def", "=", "DistroDef", "(", "name", ",", "version", ",", "release", ",", "arch", ")", "loader_class", "=", "DISTRO_PKG_INFO_LOADERS", ".", "get", "(", "package_type", ",", "None", ")", "if", "(", "loader_class", "is", "not", "None", ")", ":", "loader", "=", "loader_class", "(", "path", ")", "distro_def", ".", "software_packages", "=", "[", "SoftwarePackage", "(", "*", "args", ")", "for", "args", "in", "loader", ".", "get_packages_info", "(", ")", "]", "distro_def", ".", "software_packages_type", "=", "package_type", "return", "distro_def" ]
loads a distrodef from an installable tree .
train
false
3,629
def empty_urlconf(request):
    t = Template(EMPTY_URLCONF_TEMPLATE, name='Empty URLConf template')
    c = Context({'project_name': settings.SETTINGS_MODULE.split('.')[0]})
    return HttpResponse(t.render(c), mimetype='text/html')
[ "def", "empty_urlconf", "(", "request", ")", ":", "t", "=", "Template", "(", "EMPTY_URLCONF_TEMPLATE", ",", "name", "=", "'Empty URLConf template'", ")", "c", "=", "Context", "(", "{", "'project_name'", ":", "settings", ".", "SETTINGS_MODULE", ".", "split", "(", "'.'", ")", "[", "0", "]", "}", ")", "return", "HttpResponse", "(", "t", ".", "render", "(", "c", ")", ",", "mimetype", "=", "'text/html'", ")" ]
create an empty urlconf 404 error response .
train
false
3,630
def test_message_delete(db, gmail_account):
    api_client = new_api_client(db, gmail_account.namespace)
    generic_thread = add_fake_thread(db.session, gmail_account.namespace.id)
    gen_message = add_fake_message(db.session, gmail_account.namespace.id, generic_thread)
    category_ids = []
    for i in xrange(10):
        po_data = api_client.post_data('/labels/', {'display_name': str(i)})
        assert (po_data.status_code == 200)
        category_ids.append(json.loads(po_data.data)['id'])
    data = {'label_ids': category_ids}
    resp = api_client.put_data('/messages/{}'.format(gen_message.public_id), data)
    assert (resp.status_code == 200)
    associated_mcs = db.session.query(MessageCategory).filter((MessageCategory.message_id == gen_message.id)).all()
    assert (len(associated_mcs) == 10)
    db.session.delete(gen_message)
    db.session.commit()
    assert (db.session.query(MessageCategory).filter((MessageCategory.message_id == gen_message.id)).all() == [])
[ "def", "test_message_delete", "(", "db", ",", "gmail_account", ")", ":", "api_client", "=", "new_api_client", "(", "db", ",", "gmail_account", ".", "namespace", ")", "generic_thread", "=", "add_fake_thread", "(", "db", ".", "session", ",", "gmail_account", ".", "namespace", ".", "id", ")", "gen_message", "=", "add_fake_message", "(", "db", ".", "session", ",", "gmail_account", ".", "namespace", ".", "id", ",", "generic_thread", ")", "category_ids", "=", "[", "]", "for", "i", "in", "xrange", "(", "10", ")", ":", "po_data", "=", "api_client", ".", "post_data", "(", "'/labels/'", ",", "{", "'display_name'", ":", "str", "(", "i", ")", "}", ")", "assert", "(", "po_data", ".", "status_code", "==", "200", ")", "category_ids", ".", "append", "(", "json", ".", "loads", "(", "po_data", ".", "data", ")", "[", "'id'", "]", ")", "data", "=", "{", "'label_ids'", ":", "category_ids", "}", "resp", "=", "api_client", ".", "put_data", "(", "'/messages/{}'", ".", "format", "(", "gen_message", ".", "public_id", ")", ",", "data", ")", "assert", "(", "resp", ".", "status_code", "==", "200", ")", "associated_mcs", "=", "db", ".", "session", ".", "query", "(", "MessageCategory", ")", ".", "filter", "(", "(", "MessageCategory", ".", "message_id", "==", "gen_message", ".", "id", ")", ")", ".", "all", "(", ")", "assert", "(", "len", "(", "associated_mcs", ")", "==", "10", ")", "db", ".", "session", ".", "delete", "(", "gen_message", ")", "db", ".", "session", ".", "commit", "(", ")", "assert", "(", "db", ".", "session", ".", "query", "(", "MessageCategory", ")", ".", "filter", "(", "(", "MessageCategory", ".", "message_id", "==", "gen_message", ".", "id", ")", ")", ".", "all", "(", ")", "==", "[", "]", ")" ]
ensure that all associated messagecategories are deleted when a message is deleted .
train
false
3,632
def hash_timestamp(afile):
    md5hex = None
    if os.path.isfile(afile):
        md5obj = md5()
        stat = os.stat(afile)
        md5obj.update(str(stat.st_size).encode())
        md5obj.update(str(stat.st_mtime).encode())
        md5hex = md5obj.hexdigest()
    return md5hex
[ "def", "hash_timestamp", "(", "afile", ")", ":", "md5hex", "=", "None", "if", "os", ".", "path", ".", "isfile", "(", "afile", ")", ":", "md5obj", "=", "md5", "(", ")", "stat", "=", "os", ".", "stat", "(", "afile", ")", "md5obj", ".", "update", "(", "str", "(", "stat", ".", "st_size", ")", ".", "encode", "(", ")", ")", "md5obj", ".", "update", "(", "str", "(", "stat", ".", "st_mtime", ")", ".", "encode", "(", ")", ")", "md5hex", "=", "md5obj", ".", "hexdigest", "(", ")", "return", "md5hex" ]
computes md5 hash of the timestamp of a file .
train
false
3,633
def isLineIntersectingLoop(loop, pointBegin, pointEnd):
    normalizedSegment = (pointEnd - pointBegin)
    normalizedSegmentLength = abs(normalizedSegment)
    if (normalizedSegmentLength > 0.0):
        normalizedSegment /= normalizedSegmentLength
        segmentYMirror = complex(normalizedSegment.real, (- normalizedSegment.imag))
        pointBeginRotated = (segmentYMirror * pointBegin)
        pointEndRotated = (segmentYMirror * pointEnd)
        if isLoopIntersectingInsideXSegment(loop, pointBeginRotated.real, pointEndRotated.real, segmentYMirror, pointBeginRotated.imag):
            return True
    return False
[ "def", "isLineIntersectingLoop", "(", "loop", ",", "pointBegin", ",", "pointEnd", ")", ":", "normalizedSegment", "=", "(", "pointEnd", "-", "pointBegin", ")", "normalizedSegmentLength", "=", "abs", "(", "normalizedSegment", ")", "if", "(", "normalizedSegmentLength", ">", "0.0", ")", ":", "normalizedSegment", "/=", "normalizedSegmentLength", "segmentYMirror", "=", "complex", "(", "normalizedSegment", ".", "real", ",", "(", "-", "normalizedSegment", ".", "imag", ")", ")", "pointBeginRotated", "=", "(", "segmentYMirror", "*", "pointBegin", ")", "pointEndRotated", "=", "(", "segmentYMirror", "*", "pointEnd", ")", "if", "isLoopIntersectingInsideXSegment", "(", "loop", ",", "pointBeginRotated", ".", "real", ",", "pointEndRotated", ".", "real", ",", "segmentYMirror", ",", "pointBeginRotated", ".", "imag", ")", ":", "return", "True", "return", "False" ]
determine if the line is intersecting loops .
train
false
3,636
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
create a new des cipher .
train
false
3,637
def basic_process_run_results_f(f):
    (infiles_lists, out_filepaths) = parse_tmp_to_final_filepath_map_file(f)
    for (infiles_list, out_filepath) in zip(infiles_lists, out_filepaths):
        try:
            of = open(out_filepath, 'w')
        except IOError:
            raise IOError((("Poller can't open final output file: %s" % out_filepath) + '\nLeaving individual jobs output.\n Do you have write access?'))
        for fp in infiles_list:
            for line in open(fp):
                of.write(('%s\n' % line.strip('\n')))
        of.close()
    return True
[ "def", "basic_process_run_results_f", "(", "f", ")", ":", "(", "infiles_lists", ",", "out_filepaths", ")", "=", "parse_tmp_to_final_filepath_map_file", "(", "f", ")", "for", "(", "infiles_list", ",", "out_filepath", ")", "in", "zip", "(", "infiles_lists", ",", "out_filepaths", ")", ":", "try", ":", "of", "=", "open", "(", "out_filepath", ",", "'w'", ")", "except", "IOError", ":", "raise", "IOError", "(", "(", "(", "\"Poller can't open final output file: %s\"", "%", "out_filepath", ")", "+", "'\\nLeaving individual jobs output.\\n Do you have write access?'", ")", ")", "for", "fp", "in", "infiles_list", ":", "for", "line", "in", "open", "(", "fp", ")", ":", "of", ".", "write", "(", "(", "'%s\\n'", "%", "line", ".", "strip", "(", "'\\n'", ")", ")", ")", "of", ".", "close", "(", ")", "return", "True" ]
copy each list of infiles to each outfile and delete infiles f: file containing one set of mapping instructions per line example f: f1 .
train
false
3,638
def register_namespace_handler(importer_type, namespace_handler): _namespace_handlers[importer_type] = namespace_handler
[ "def", "register_namespace_handler", "(", "importer_type", ",", "namespace_handler", ")", ":", "_namespace_handlers", "[", "importer_type", "]", "=", "namespace_handler" ]
register namespace_handler to declare namespace packages importer_type is the type or class of a pep 302 "importer" .
train
false
3,640
def _convert_other(other, raiseit=False, allow_float=False):
    if isinstance(other, Decimal):
        return other
    if isinstance(other, (int, long)):
        return Decimal(other)
    if (allow_float and isinstance(other, float)):
        return Decimal.from_float(other)
    import sys
    if (sys.platform == 'cli'):
        import System
        if isinstance(other, System.Decimal):
            return Decimal(other)
    if raiseit:
        raise TypeError(('Unable to convert %s to Decimal' % other))
    return NotImplemented
[ "def", "_convert_other", "(", "other", ",", "raiseit", "=", "False", ",", "allow_float", "=", "False", ")", ":", "if", "isinstance", "(", "other", ",", "Decimal", ")", ":", "return", "other", "if", "isinstance", "(", "other", ",", "(", "int", ",", "long", ")", ")", ":", "return", "Decimal", "(", "other", ")", "if", "(", "allow_float", "and", "isinstance", "(", "other", ",", "float", ")", ")", ":", "return", "Decimal", ".", "from_float", "(", "other", ")", "import", "sys", "if", "(", "sys", ".", "platform", "==", "'cli'", ")", ":", "import", "System", "if", "isinstance", "(", "other", ",", "System", ".", "Decimal", ")", ":", "return", "Decimal", "(", "other", ")", "if", "raiseit", ":", "raise", "TypeError", "(", "(", "'Unable to convert %s to Decimal'", "%", "other", ")", ")", "return", "NotImplemented" ]
convert other to decimal .
train
false
3,641
def language_changer_decorator(language_changer):
    def _decorator(func):
        def _wrapped(request, *args, **kwargs):
            set_language_changer(request, language_changer)
            return func(request, *args, **kwargs)
        _wrapped.__name__ = func.__name__
        _wrapped.__doc__ = func.__doc__
        return _wrapped
    return _decorator
[ "def", "language_changer_decorator", "(", "language_changer", ")", ":", "def", "_decorator", "(", "func", ")", ":", "def", "_wrapped", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "set_language_changer", "(", "request", ",", "language_changer", ")", "return", "func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "_wrapped", ".", "__name__", "=", "func", ".", "__name__", "_wrapped", ".", "__doc__", "=", "func", ".", "__doc__", "return", "_wrapped", "return", "_decorator" ]
a decorator wrapper for set_language_changer .
train
false
3,642
def ThrottledHttpRpcServerFactory(throttle, throttle_class=None):
    def MakeRpcServer(*args, **kwargs):
        'Factory to produce a ThrottledHttpRpcServer.\n\n    Args:\n      args: Positional args to pass to ThrottledHttpRpcServer.\n      kwargs: Keyword args to pass to ThrottledHttpRpcServer.\n\n    Returns:\n      A ThrottledHttpRpcServer instance.\n    '
        kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
        kwargs['save_cookies'] = True
        if throttle_class:
            rpc_server = throttle_class(throttle, *args, **kwargs)
        else:
            rpc_server = ThrottledHttpRpcServer(throttle, *args, **kwargs)
        return rpc_server
    return MakeRpcServer
[ "def", "ThrottledHttpRpcServerFactory", "(", "throttle", ",", "throttle_class", "=", "None", ")", ":", "def", "MakeRpcServer", "(", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'account_type'", "]", "=", "'HOSTED_OR_GOOGLE'", "kwargs", "[", "'save_cookies'", "]", "=", "True", "if", "throttle_class", ":", "rpc_server", "=", "throttle_class", "(", "throttle", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "rpc_server", "=", "ThrottledHttpRpcServer", "(", "throttle", ",", "*", "args", ",", "**", "kwargs", ")", "return", "rpc_server", "return", "MakeRpcServer" ]
create a factory to produce throttledhttprpcserver for a given throttle .
train
false
3,643
@contextmanager
def pelican_open(filename, mode=u'rb', strip_crs=(sys.platform == u'win32')):
    with codecs.open(filename, mode, encoding=u'utf-8') as infile:
        content = infile.read()
    if (content[:1] == codecs.BOM_UTF8.decode(u'utf8')):
        content = content[1:]
    if strip_crs:
        content = content.replace(u'\r\n', u'\n')
    (yield content)
[ "@", "contextmanager", "def", "pelican_open", "(", "filename", ",", "mode", "=", "u'rb'", ",", "strip_crs", "=", "(", "sys", ".", "platform", "==", "u'win32'", ")", ")", ":", "with", "codecs", ".", "open", "(", "filename", ",", "mode", ",", "encoding", "=", "u'utf-8'", ")", "as", "infile", ":", "content", "=", "infile", ".", "read", "(", ")", "if", "(", "content", "[", ":", "1", "]", "==", "codecs", ".", "BOM_UTF8", ".", "decode", "(", "u'utf8'", ")", ")", ":", "content", "=", "content", "[", "1", ":", "]", "if", "strip_crs", ":", "content", "=", "content", ".", "replace", "(", "u'\\r\\n'", ",", "u'\\n'", ")", "(", "yield", "content", ")" ]
open a file and return its content .
train
false
3,644
def get_tmp_dir():
    dir_name = tempfile.mkdtemp(prefix='autoserv-')
    pid = os.getpid()
    if (pid not in __tmp_dirs):
        __tmp_dirs[pid] = []
    __tmp_dirs[pid].append(dir_name)
    return dir_name
[ "def", "get_tmp_dir", "(", ")", ":", "dir_name", "=", "tempfile", ".", "mkdtemp", "(", "prefix", "=", "'autoserv-'", ")", "pid", "=", "os", ".", "getpid", "(", ")", "if", "(", "pid", "not", "in", "__tmp_dirs", ")", ":", "__tmp_dirs", "[", "pid", "]", "=", "[", "]", "__tmp_dirs", "[", "pid", "]", ".", "append", "(", "dir_name", ")", "return", "dir_name" ]
return the pathname of a directory on the host suitable for temporary file storage .
train
false
3,645
def _make_link(volume_path, backup_path, vol_id):
    try:
        utils.execute('ln', volume_path, backup_path, run_as_root=True, check_exit_code=True)
    except processutils.ProcessExecutionError as exc:
        err = (_('backup: %(vol_id)s failed to create device hardlink from %(vpath)s to %(bpath)s.\nstdout: %(out)s\n stderr: %(err)s') % {'vol_id': vol_id, 'vpath': volume_path, 'bpath': backup_path, 'out': exc.stdout, 'err': exc.stderr})
        LOG.error(err)
        raise exception.InvalidBackup(reason=err)
[ "def", "_make_link", "(", "volume_path", ",", "backup_path", ",", "vol_id", ")", ":", "try", ":", "utils", ".", "execute", "(", "'ln'", ",", "volume_path", ",", "backup_path", ",", "run_as_root", "=", "True", ",", "check_exit_code", "=", "True", ")", "except", "processutils", ".", "ProcessExecutionError", "as", "exc", ":", "err", "=", "(", "_", "(", "'backup: %(vol_id)s failed to create device hardlink from %(vpath)s to %(bpath)s.\\nstdout: %(out)s\\n stderr: %(err)s'", ")", "%", "{", "'vol_id'", ":", "vol_id", ",", "'vpath'", ":", "volume_path", ",", "'bpath'", ":", "backup_path", ",", "'out'", ":", "exc", ".", "stdout", ",", "'err'", ":", "exc", ".", "stderr", "}", ")", "LOG", ".", "error", "(", "err", ")", "raise", "exception", ".", "InvalidBackup", "(", "reason", "=", "err", ")" ]
create a hard link for the volume block device .
train
false
3,646
def pretty_bool(value):
    bool_dict = [True, 'True', 'true', 'T', 't', '1']
    return (value in bool_dict)
[ "def", "pretty_bool", "(", "value", ")", ":", "bool_dict", "=", "[", "True", ",", "'True'", ",", "'true'", ",", "'T'", ",", "'t'", ",", "'1'", "]", "return", "(", "value", "in", "bool_dict", ")" ]
check value for possible true value .
train
false
3,648
def _update_global_secondary_indexes(ret, changes_old, changes_new, comments, existing_index_names, provisioned_gsi_config, gsi_config, name, region, key, keyid, profile):
    try:
        (provisioned_throughputs, index_updates) = _determine_gsi_updates(existing_index_names, provisioned_gsi_config, gsi_config)
    except GsiNotUpdatableError as e:
        ret['result'] = False
        ret['comment'] = str(e)
        return
    if index_updates:
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = 'Dynamo table {0} will have GSIs updated: {1}'.format(name, ', '.join(index_updates.keys()))
            return
        changes_old.setdefault('global_indexes', {})
        changes_new.setdefault('global_indexes', {})
        success = __salt__['boto_dynamodb.update_global_secondary_index'](name, index_updates, region=region, key=key, keyid=keyid, profile=profile)
        if success:
            comments.append('Updated GSIs with new throughputs {0}'.format(str(index_updates)))
            for index_name in index_updates.keys():
                changes_old['global_indexes'][index_name] = provisioned_throughputs[index_name]
                changes_new['global_indexes'][index_name] = index_updates[index_name]
        else:
            ret['result'] = False
            ret['comment'] = 'Failed to update GSI throughputs {0}'.format(str(index_updates))
[ "def", "_update_global_secondary_indexes", "(", "ret", ",", "changes_old", ",", "changes_new", ",", "comments", ",", "existing_index_names", ",", "provisioned_gsi_config", ",", "gsi_config", ",", "name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", ":", "try", ":", "(", "provisioned_throughputs", ",", "index_updates", ")", "=", "_determine_gsi_updates", "(", "existing_index_names", ",", "provisioned_gsi_config", ",", "gsi_config", ")", "except", "GsiNotUpdatableError", "as", "e", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "str", "(", "e", ")", "return", "if", "index_updates", ":", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'Dynamo table {0} will have GSIs updated: {1}'", ".", "format", "(", "name", ",", "', '", ".", "join", "(", "index_updates", ".", "keys", "(", ")", ")", ")", "return", "changes_old", ".", "setdefault", "(", "'global_indexes'", ",", "{", "}", ")", "changes_new", ".", "setdefault", "(", "'global_indexes'", ",", "{", "}", ")", "success", "=", "__salt__", "[", "'boto_dynamodb.update_global_secondary_index'", "]", "(", "name", ",", "index_updates", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "success", ":", "comments", ".", "append", "(", "'Updated GSIs with new throughputs {0}'", ".", "format", "(", "str", "(", "index_updates", ")", ")", ")", "for", "index_name", "in", "index_updates", ".", "keys", "(", ")", ":", "changes_old", "[", "'global_indexes'", "]", "[", "index_name", "]", "=", "provisioned_throughputs", "[", "index_name", "]", "changes_new", "[", "'global_indexes'", "]", "[", "index_name", "]", "=", "index_updates", "[", "index_name", "]", "else", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to update GSI throughputs {0}'", ".", "format", "(", "str", "(", "index_updates", ")", ")" ]
updates ret iff there was a failure or in test mode .
train
true
3,649
def _buffered_read_file(fobj):
    if (PY26 or (PY27 and isinstance(fobj, bz2.BZ2File))):
        return fobj
    else:
        return io.BufferedReader(fobj, buffer_size=_IO_BUFFER_SIZE)
[ "def", "_buffered_read_file", "(", "fobj", ")", ":", "if", "(", "PY26", "or", "(", "PY27", "and", "isinstance", "(", "fobj", ",", "bz2", ".", "BZ2File", ")", ")", ")", ":", "return", "fobj", "else", ":", "return", "io", ".", "BufferedReader", "(", "fobj", ",", "buffer_size", "=", "_IO_BUFFER_SIZE", ")" ]
return a buffered version of a read file object .
train
false
3,650
def _get_query_handle_and_state(query_history):
    handle = query_history.get_handle()
    if (handle is None):
        raise PopupException(_('Failed to retrieve query state from the Query Server.'))
    state = dbms.get(query_history.owner, query_history.get_query_server_config()).get_state(handle)
    if (state is None):
        raise PopupException(_('Failed to contact Server to check query status.'))
    return (handle, state)
[ "def", "_get_query_handle_and_state", "(", "query_history", ")", ":", "handle", "=", "query_history", ".", "get_handle", "(", ")", "if", "(", "handle", "is", "None", ")", ":", "raise", "PopupException", "(", "_", "(", "'Failed to retrieve query state from the Query Server.'", ")", ")", "state", "=", "dbms", ".", "get", "(", "query_history", ".", "owner", ",", "query_history", ".", "get_query_server_config", "(", ")", ")", ".", "get_state", "(", "handle", ")", "if", "(", "state", "is", "None", ")", ":", "raise", "PopupException", "(", "_", "(", "'Failed to contact Server to check query status.'", ")", ")", "return", "(", "handle", ",", "state", ")" ]
front-end wrapper to handle exceptions .
train
false
3,651
def parse_http_basic(authorization_header):
    (auth_scheme, auth_token) = require_split(authorization_header, 2)
    require((auth_scheme.lower() == 'basic'))
    try:
        auth_data = base64.b64decode(auth_token)
    except TypeError:
        raise RequirementException
    return require_split(auth_data, 2, ':')
[ "def", "parse_http_basic", "(", "authorization_header", ")", ":", "(", "auth_scheme", ",", "auth_token", ")", "=", "require_split", "(", "authorization_header", ",", "2", ")", "require", "(", "(", "auth_scheme", ".", "lower", "(", ")", "==", "'basic'", ")", ")", "try", ":", "auth_data", "=", "base64", ".", "b64decode", "(", "auth_token", ")", "except", "TypeError", ":", "raise", "RequirementException", "return", "require_split", "(", "auth_data", ",", "2", ",", "':'", ")" ]
parse the username/credentials out of an http basic auth header .
train
false
3,652
def consistencygroup_get_all_by_project(context, project_id, filters=None, marker=None, limit=None, offset=None, sort_keys=None, sort_dirs=None): return IMPL.consistencygroup_get_all_by_project(context, project_id, filters=filters, marker=marker, limit=limit, offset=offset, sort_keys=sort_keys, sort_dirs=sort_dirs)
[ "def", "consistencygroup_get_all_by_project", "(", "context", ",", "project_id", ",", "filters", "=", "None", ",", "marker", "=", "None", ",", "limit", "=", "None", ",", "offset", "=", "None", ",", "sort_keys", "=", "None", ",", "sort_dirs", "=", "None", ")", ":", "return", "IMPL", ".", "consistencygroup_get_all_by_project", "(", "context", ",", "project_id", ",", "filters", "=", "filters", ",", "marker", "=", "marker", ",", "limit", "=", "limit", ",", "offset", "=", "offset", ",", "sort_keys", "=", "sort_keys", ",", "sort_dirs", "=", "sort_dirs", ")" ]
retrieves all consistency groups in a project .
train
false
3,653
def add_cell_to_service(service, cell_name):
    _add_cell_to_service(service, cell_name)
    compute_node = service.get('compute_node')
    if compute_node:
        add_cell_to_compute_node(compute_node[0], cell_name)
[ "def", "add_cell_to_service", "(", "service", ",", "cell_name", ")", ":", "_add_cell_to_service", "(", "service", ",", "cell_name", ")", "compute_node", "=", "service", ".", "get", "(", "'compute_node'", ")", "if", "compute_node", ":", "add_cell_to_compute_node", "(", "compute_node", "[", "0", "]", ",", "cell_name", ")" ]
fix service attributes that should be unique .
train
false
3,654
def expanding_apply(arg, func, min_periods=1, freq=None, args=(), kwargs={}): return ensure_compat('expanding', 'apply', arg, freq=freq, min_periods=min_periods, func_kw=['func', 'args', 'kwargs'], func=func, args=args, kwargs=kwargs)
[ "def", "expanding_apply", "(", "arg", ",", "func", ",", "min_periods", "=", "1", ",", "freq", "=", "None", ",", "args", "=", "(", ")", ",", "kwargs", "=", "{", "}", ")", ":", "return", "ensure_compat", "(", "'expanding'", ",", "'apply'", ",", "arg", ",", "freq", "=", "freq", ",", "min_periods", "=", "min_periods", ",", "func_kw", "=", "[", "'func'", ",", "'args'", ",", "'kwargs'", "]", ",", "func", "=", "func", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")" ]
generic expanding function application .
train
false
3,655
@open_file(0, mode='rt')
def read_graph6(path):
    glist = []
    for line in path:
        line = line.strip()
        if (not len(line)):
            continue
        glist.append(parse_graph6(line))
    if (len(glist) == 1):
        return glist[0]
    else:
        return glist
[ "@", "open_file", "(", "0", ",", "mode", "=", "'rt'", ")", "def", "read_graph6", "(", "path", ")", ":", "glist", "=", "[", "]", "for", "line", "in", "path", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "(", "not", "len", "(", "line", ")", ")", ":", "continue", "glist", ".", "append", "(", "parse_graph6", "(", "line", ")", ")", "if", "(", "len", "(", "glist", ")", "==", "1", ")", ":", "return", "glist", "[", "0", "]", "else", ":", "return", "glist" ]
read simple undirected graphs in graph6 format from path .
train
false
3,656
def vote_display(vote, arg=None):
    if (arg is None):
        arg = 'Up,Down'
    bits = arg.split(',')
    if (len(bits) != 2):
        return vote.vote
    (up, down) = bits
    if (vote.vote == 1):
        return up
    return down
[ "def", "vote_display", "(", "vote", ",", "arg", "=", "None", ")", ":", "if", "(", "arg", "is", "None", ")", ":", "arg", "=", "'Up,Down'", "bits", "=", "arg", ".", "split", "(", "','", ")", "if", "(", "len", "(", "bits", ")", "!=", "2", ")", ":", "return", "vote", ".", "vote", "(", "up", ",", "down", ")", "=", "bits", "if", "(", "vote", ".", "vote", "==", "1", ")", ":", "return", "up", "return", "down" ]
given a string mapping values for up and down votes .
train
false
3,657
def p_declaration_specifiers_1(t): pass
[ "def", "p_declaration_specifiers_1", "(", "t", ")", ":", "pass" ]
declaration_specifiers : storage_class_specifier declaration_specifiers .
train
false
3,659
def cmpPeople(p1, p2):
    p1b = (getattr(p1, 'billingPos', None) or _last)
    p2b = (getattr(p2, 'billingPos', None) or _last)
    if (p1b > p2b):
        return 1
    if (p1b < p2b):
        return (-1)
    p1n = p1.get('canonical name', _last)
    p2n = p2.get('canonical name', _last)
    if ((p1n is _last) and (p2n is _last)):
        p1n = p1.get('name', _last)
        p2n = p2.get('name', _last)
    if (p1n > p2n):
        return 1
    if (p1n < p2n):
        return (-1)
    p1i = p1.get('imdbIndex', _last)
    p2i = p2.get('imdbIndex', _last)
    if (p1i > p2i):
        return 1
    if (p1i < p2i):
        return (-1)
    return 0
[ "def", "cmpPeople", "(", "p1", ",", "p2", ")", ":", "p1b", "=", "(", "getattr", "(", "p1", ",", "'billingPos'", ",", "None", ")", "or", "_last", ")", "p2b", "=", "(", "getattr", "(", "p2", ",", "'billingPos'", ",", "None", ")", "or", "_last", ")", "if", "(", "p1b", ">", "p2b", ")", ":", "return", "1", "if", "(", "p1b", "<", "p2b", ")", ":", "return", "(", "-", "1", ")", "p1n", "=", "p1", ".", "get", "(", "'canonical name'", ",", "_last", ")", "p2n", "=", "p2", ".", "get", "(", "'canonical name'", ",", "_last", ")", "if", "(", "(", "p1n", "is", "_last", ")", "and", "(", "p2n", "is", "_last", ")", ")", ":", "p1n", "=", "p1", ".", "get", "(", "'name'", ",", "_last", ")", "p2n", "=", "p2", ".", "get", "(", "'name'", ",", "_last", ")", "if", "(", "p1n", ">", "p2n", ")", ":", "return", "1", "if", "(", "p1n", "<", "p2n", ")", ":", "return", "(", "-", "1", ")", "p1i", "=", "p1", ".", "get", "(", "'imdbIndex'", ",", "_last", ")", "p2i", "=", "p2", ".", "get", "(", "'imdbIndex'", ",", "_last", ")", "if", "(", "p1i", ">", "p2i", ")", ":", "return", "1", "if", "(", "p1i", "<", "p2i", ")", ":", "return", "(", "-", "1", ")", "return", "0" ]
compare two people by billingpos .
train
false
3,660
def receive_connection():
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(('localhost', 8080))
    server.listen(1)
    client = server.accept()[0]
    server.close()
    return client
[ "def", "receive_connection", "(", ")", ":", "server", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "server", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "server", ".", "bind", "(", "(", "'localhost'", ",", "8080", ")", ")", "server", ".", "listen", "(", "1", ")", "client", "=", "server", ".", "accept", "(", ")", "[", "0", "]", "server", ".", "close", "(", ")", "return", "client" ]
wait for and then return a connected socket .
train
true
3,661
def split_command_line(command_line):
    arg_list = []
    arg = ''
    state_basic = 0
    state_esc = 1
    state_singlequote = 2
    state_doublequote = 3
    state_whitespace = 4
    state = state_basic
    for c in command_line:
        if ((state == state_basic) or (state == state_whitespace)):
            if (c == '\\'):
                state = state_esc
            elif (c == "'"):
                state = state_singlequote
            elif (c == '"'):
                state = state_doublequote
            elif c.isspace():
                if (state == state_whitespace):
                    None
                else:
                    arg_list.append(arg)
                    arg = ''
                    state = state_whitespace
            else:
                arg = (arg + c)
                state = state_basic
        elif (state == state_esc):
            arg = (arg + c)
            state = state_basic
        elif (state == state_singlequote):
            if (c == "'"):
                state = state_basic
            else:
                arg = (arg + c)
        elif (state == state_doublequote):
            if (c == '"'):
                state = state_basic
            else:
                arg = (arg + c)
    if (arg != ''):
        arg_list.append(arg)
    return arg_list
[ "def", "split_command_line", "(", "command_line", ")", ":", "arg_list", "=", "[", "]", "arg", "=", "''", "state_basic", "=", "0", "state_esc", "=", "1", "state_singlequote", "=", "2", "state_doublequote", "=", "3", "state_whitespace", "=", "4", "state", "=", "state_basic", "for", "c", "in", "command_line", ":", "if", "(", "(", "state", "==", "state_basic", ")", "or", "(", "state", "==", "state_whitespace", ")", ")", ":", "if", "(", "c", "==", "'\\\\'", ")", ":", "state", "=", "state_esc", "elif", "(", "c", "==", "\"'\"", ")", ":", "state", "=", "state_singlequote", "elif", "(", "c", "==", "'\"'", ")", ":", "state", "=", "state_doublequote", "elif", "c", ".", "isspace", "(", ")", ":", "if", "(", "state", "==", "state_whitespace", ")", ":", "None", "else", ":", "arg_list", ".", "append", "(", "arg", ")", "arg", "=", "''", "state", "=", "state_whitespace", "else", ":", "arg", "=", "(", "arg", "+", "c", ")", "state", "=", "state_basic", "elif", "(", "state", "==", "state_esc", ")", ":", "arg", "=", "(", "arg", "+", "c", ")", "state", "=", "state_basic", "elif", "(", "state", "==", "state_singlequote", ")", ":", "if", "(", "c", "==", "\"'\"", ")", ":", "state", "=", "state_basic", "else", ":", "arg", "=", "(", "arg", "+", "c", ")", "elif", "(", "state", "==", "state_doublequote", ")", ":", "if", "(", "c", "==", "'\"'", ")", ":", "state", "=", "state_basic", "else", ":", "arg", "=", "(", "arg", "+", "c", ")", "if", "(", "arg", "!=", "''", ")", ":", "arg_list", ".", "append", "(", "arg", ")", "return", "arg_list" ]
this splits a command line into a list of arguments .
train
true
3,664
def copy_modified_jars(app_name):
    appscale_home = constants.APPSCALE_HOME
    app_dir = (('/var/apps/' + app_name) + '/app/')
    lib_dir = locate_dir(app_dir, 'lib')
    if (not lib_dir):
        web_inf_dir = locate_dir(app_dir, 'WEB-INF')
        lib_dir = ((web_inf_dir + os.sep) + 'lib')
        logging.info('Creating lib directory at: {0}'.format(lib_dir))
        mkdir_result = subprocess.call(('mkdir ' + lib_dir), shell=True)
        if (mkdir_result != 0):
            logging.error('Failed to create missing lib directory in: {0}.'.format(web_inf_dir))
            return False
    try:
        copy_files_matching_pattern(((appscale_home + '/AppServer_Java/') + 'appengine-java-sdk-repacked/lib/user/*.jar'), lib_dir)
        copy_files_matching_pattern(((appscale_home + '/AppServer_Java/') + 'appengine-java-sdk-repacked/lib/impl/appscale-*.jar'), lib_dir)
        copy_files_matching_pattern('/usr/share/appscale/ext/*', lib_dir)
    except IOError as io_error:
        logging.error(((('Failed to copy modified jar files to lib directory of ' + app_name) + ' due to:') + str(io_error)))
        return False
    return True
[ "def", "copy_modified_jars", "(", "app_name", ")", ":", "appscale_home", "=", "constants", ".", "APPSCALE_HOME", "app_dir", "=", "(", "(", "'/var/apps/'", "+", "app_name", ")", "+", "'/app/'", ")", "lib_dir", "=", "locate_dir", "(", "app_dir", ",", "'lib'", ")", "if", "(", "not", "lib_dir", ")", ":", "web_inf_dir", "=", "locate_dir", "(", "app_dir", ",", "'WEB-INF'", ")", "lib_dir", "=", "(", "(", "web_inf_dir", "+", "os", ".", "sep", ")", "+", "'lib'", ")", "logging", ".", "info", "(", "'Creating lib directory at: {0}'", ".", "format", "(", "lib_dir", ")", ")", "mkdir_result", "=", "subprocess", ".", "call", "(", "(", "'mkdir '", "+", "lib_dir", ")", ",", "shell", "=", "True", ")", "if", "(", "mkdir_result", "!=", "0", ")", ":", "logging", ".", "error", "(", "'Failed to create missing lib directory in: {0}.'", ".", "format", "(", "web_inf_dir", ")", ")", "return", "False", "try", ":", "copy_files_matching_pattern", "(", "(", "(", "appscale_home", "+", "'/AppServer_Java/'", ")", "+", "'appengine-java-sdk-repacked/lib/user/*.jar'", ")", ",", "lib_dir", ")", "copy_files_matching_pattern", "(", "(", "(", "appscale_home", "+", "'/AppServer_Java/'", ")", "+", "'appengine-java-sdk-repacked/lib/impl/appscale-*.jar'", ")", ",", "lib_dir", ")", "copy_files_matching_pattern", "(", "'/usr/share/appscale/ext/*'", ",", "lib_dir", ")", "except", "IOError", "as", "io_error", ":", "logging", ".", "error", "(", "(", "(", "(", "'Failed to copy modified jar files to lib directory of '", "+", "app_name", ")", "+", "' due to:'", ")", "+", "str", "(", "io_error", ")", ")", ")", "return", "False", "return", "True" ]
copies the changes made to the java sdk for appscale into the apps lib folder .
train
false
3,665
def is_full_path(file):
    if (file.startswith('\\') or file.startswith('/')):
        return True
    try:
        if (file[1:3] == ':\\'):
            return True
    except:
        pass
    return False
[ "def", "is_full_path", "(", "file", ")", ":", "if", "(", "file", ".", "startswith", "(", "'\\\\'", ")", "or", "file", ".", "startswith", "(", "'/'", ")", ")", ":", "return", "True", "try", ":", "if", "(", "file", "[", "1", ":", "3", "]", "==", "':\\\\'", ")", ":", "return", "True", "except", ":", "pass", "return", "False" ]
return true if path is absolute .
train
false
3,667
def _fake_exists(path): return False
[ "def", "_fake_exists", "(", "path", ")", ":", "return", "False" ]
assume the path does not exist .
train
false
3,668
def guess_language(lang_list=None):
    lang_codes = frappe.request.accept_languages.values()
    if (not lang_codes):
        return frappe.local.lang
    guess = None
    if (not lang_list):
        lang_list = (get_all_languages() or [])
    for l in lang_codes:
        code = l.strip()
        if (not isinstance(code, unicode)):
            code = unicode(code, u'utf-8')
        if ((code in lang_list) or (code == u'en')):
            guess = code
            break
        if (u'-' in code):
            code = code.split(u'-')[0]
            if (code in lang_list):
                guess = code
                break
    return (guess or frappe.local.lang)
[ "def", "guess_language", "(", "lang_list", "=", "None", ")", ":", "lang_codes", "=", "frappe", ".", "request", ".", "accept_languages", ".", "values", "(", ")", "if", "(", "not", "lang_codes", ")", ":", "return", "frappe", ".", "local", ".", "lang", "guess", "=", "None", "if", "(", "not", "lang_list", ")", ":", "lang_list", "=", "(", "get_all_languages", "(", ")", "or", "[", "]", ")", "for", "l", "in", "lang_codes", ":", "code", "=", "l", ".", "strip", "(", ")", "if", "(", "not", "isinstance", "(", "code", ",", "unicode", ")", ")", ":", "code", "=", "unicode", "(", "code", ",", "u'utf-8'", ")", "if", "(", "(", "code", "in", "lang_list", ")", "or", "(", "code", "==", "u'en'", ")", ")", ":", "guess", "=", "code", "break", "if", "(", "u'-'", "in", "code", ")", ":", "code", "=", "code", ".", "split", "(", "u'-'", ")", "[", "0", "]", "if", "(", "code", "in", "lang_list", ")", ":", "guess", "=", "code", "break", "return", "(", "guess", "or", "frappe", ".", "local", ".", "lang", ")" ]
passed a string .
train
false
3,670
def get_all_results(repo_id=53976):
    from collections import OrderedDict

    def get_results_from_builds(builds):
        dfs = OrderedDict()
        for build in builds:
            build_id = build['id']
            build_number = build['number']
            print(build_number)
            res = get_build_results(build_id)
            if (res is not None):
                dfs[build_number] = res
        return dfs

    base_url = 'https://api.travis-ci.org/builds?url=%2Fbuilds&repository_id={repo_id}'
    url = base_url.format(repo_id=repo_id)
    url_after = (url + '&after_number={after}')
    dfs = OrderedDict()
    while True:
        with closing(urlopen(url)) as r:
            if (not (200 <= r.getcode() < 300)):
                break
            builds = json.loads(r.read())
        res = get_results_from_builds(builds)
        if (not res):
            break
        last_build_number = min(res.keys())
        dfs.update(res)
        url = url_after.format(after=last_build_number)
    return dfs
[ "def", "get_all_results", "(", "repo_id", "=", "53976", ")", ":", "from", "collections", "import", "OrderedDict", "def", "get_results_from_builds", "(", "builds", ")", ":", "dfs", "=", "OrderedDict", "(", ")", "for", "build", "in", "builds", ":", "build_id", "=", "build", "[", "'id'", "]", "build_number", "=", "build", "[", "'number'", "]", "print", "(", "build_number", ")", "res", "=", "get_build_results", "(", "build_id", ")", "if", "(", "res", "is", "not", "None", ")", ":", "dfs", "[", "build_number", "]", "=", "res", "return", "dfs", "base_url", "=", "'https://api.travis-ci.org/builds?url=%2Fbuilds&repository_id={repo_id}'", "url", "=", "base_url", ".", "format", "(", "repo_id", "=", "repo_id", ")", "url_after", "=", "(", "url", "+", "'&after_number={after}'", ")", "dfs", "=", "OrderedDict", "(", ")", "while", "True", ":", "with", "closing", "(", "urlopen", "(", "url", ")", ")", "as", "r", ":", "if", "(", "not", "(", "200", "<=", "r", ".", "getcode", "(", ")", "<", "300", ")", ")", ":", "break", "builds", "=", "json", ".", "loads", "(", "r", ".", "read", "(", ")", ")", "res", "=", "get_results_from_builds", "(", "builds", ")", "if", "(", "not", "res", ")", ":", "break", "last_build_number", "=", "min", "(", "res", ".", "keys", "(", ")", ")", "dfs", ".", "update", "(", "res", ")", "url", "=", "url_after", ".", "format", "(", "after", "=", "last_build_number", ")", "return", "dfs" ]
fetches the vbench results for all travis builds .
train
false
3,671
def load_config(config_file):
    if ((not config_file) or (not os.path.isfile(config_file))):
        raise ApplicationException(desc=('Invalid configuration file: %s' % config_file))
    try:
        return load_source('bgpspeaker.application.settings', config_file)
    except Exception as e:
        raise ApplicationException(desc=str(e))
[ "def", "load_config", "(", "config_file", ")", ":", "if", "(", "(", "not", "config_file", ")", "or", "(", "not", "os", ".", "path", ".", "isfile", "(", "config_file", ")", ")", ")", ":", "raise", "ApplicationException", "(", "desc", "=", "(", "'Invalid configuration file: %s'", "%", "config_file", ")", ")", "try", ":", "return", "load_source", "(", "'bgpspeaker.application.settings'", ",", "config_file", ")", "except", "Exception", "as", "e", ":", "raise", "ApplicationException", "(", "desc", "=", "str", "(", "e", ")", ")" ]
load a configuration file and create importer and exporter classes .
train
true
3,672
def _find_label_paths(subject='fsaverage', pattern=None, subjects_dir=None):
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    subject_dir = os.path.join(subjects_dir, subject)
    lbl_dir = os.path.join(subject_dir, 'label')
    if (pattern is None):
        paths = []
        for (dirpath, _, filenames) in os.walk(lbl_dir):
            rel_dir = os.path.relpath(dirpath, lbl_dir)
            for filename in fnmatch.filter(filenames, '*.label'):
                path = os.path.join(rel_dir, filename)
                paths.append(path)
    else:
        paths = [os.path.relpath(path, lbl_dir) for path in iglob(pattern)]
    return paths
[ "def", "_find_label_paths", "(", "subject", "=", "'fsaverage'", ",", "pattern", "=", "None", ",", "subjects_dir", "=", "None", ")", ":", "subjects_dir", "=", "get_subjects_dir", "(", "subjects_dir", ",", "raise_error", "=", "True", ")", "subject_dir", "=", "os", ".", "path", ".", "join", "(", "subjects_dir", ",", "subject", ")", "lbl_dir", "=", "os", ".", "path", ".", "join", "(", "subject_dir", ",", "'label'", ")", "if", "(", "pattern", "is", "None", ")", ":", "paths", "=", "[", "]", "for", "(", "dirpath", ",", "_", ",", "filenames", ")", "in", "os", ".", "walk", "(", "lbl_dir", ")", ":", "rel_dir", "=", "os", ".", "path", ".", "relpath", "(", "dirpath", ",", "lbl_dir", ")", "for", "filename", "in", "fnmatch", ".", "filter", "(", "filenames", ",", "'*.label'", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "rel_dir", ",", "filename", ")", "paths", ".", "append", "(", "path", ")", "else", ":", "paths", "=", "[", "os", ".", "path", ".", "relpath", "(", "path", ",", "lbl_dir", ")", "for", "path", "in", "iglob", "(", "pattern", ")", "]", "return", "paths" ]
find paths to label files in a subjects label directory .
train
false
3,676
def single_source_dijkstra_path_length(G, source, cutoff=None, weight='weight'): return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, weight=weight)
[ "def", "single_source_dijkstra_path_length", "(", "G", ",", "source", ",", "cutoff", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "return", "multi_source_dijkstra_path_length", "(", "G", ",", "{", "source", "}", ",", "cutoff", "=", "cutoff", ",", "weight", "=", "weight", ")" ]
find shortest weighted path lengths in g from a source node .
train
false
3,678
def _fetch_latest_from_memcache(app_version):
    proto_string = memcache.get(app_version, namespace=NAMESPACE)
    if proto_string:
        logging.debug('Loaded most recent conf data from memcache.')
        return db.model_from_protobuf(proto_string)
    logging.debug('Tried to load conf data from memcache, but found nothing.')
    return None
[ "def", "_fetch_latest_from_memcache", "(", "app_version", ")", ":", "proto_string", "=", "memcache", ".", "get", "(", "app_version", ",", "namespace", "=", "NAMESPACE", ")", "if", "proto_string", ":", "logging", ".", "debug", "(", "'Loaded most recent conf data from memcache.'", ")", "return", "db", ".", "model_from_protobuf", "(", "proto_string", ")", "logging", ".", "debug", "(", "'Tried to load conf data from memcache, but found nothing.'", ")", "return", "None" ]
get the latest configuration data for this app-version from memcache .
train
false
3,679
def _triage_segments(window, nperseg, input_length):
    if (isinstance(window, string_types) or isinstance(window, tuple)):
        if (nperseg is None):
            nperseg = 256
        if (nperseg > input_length):
            warnings.warn('nperseg = {0:d} is greater than input length = {1:d}, using nperseg = {1:d}'.format(nperseg, input_length))
            nperseg = input_length
        win = get_window(window, nperseg)
    else:
        win = np.asarray(window)
        if (len(win.shape) != 1):
            raise ValueError('window must be 1-D')
        if (input_length < win.shape[(-1)]):
            raise ValueError('window is longer than input signal')
        if (nperseg is None):
            nperseg = win.shape[0]
        elif (nperseg is not None):
            if (nperseg != win.shape[0]):
                raise ValueError('value specified for nperseg is different from length of window')
    return (win, nperseg)
[ "def", "_triage_segments", "(", "window", ",", "nperseg", ",", "input_length", ")", ":", "if", "(", "isinstance", "(", "window", ",", "string_types", ")", "or", "isinstance", "(", "window", ",", "tuple", ")", ")", ":", "if", "(", "nperseg", "is", "None", ")", ":", "nperseg", "=", "256", "if", "(", "nperseg", ">", "input_length", ")", ":", "warnings", ".", "warn", "(", "'nperseg = {0:d} is greater than input length = {1:d}, using nperseg = {1:d}'", ".", "format", "(", "nperseg", ",", "input_length", ")", ")", "nperseg", "=", "input_length", "win", "=", "get_window", "(", "window", ",", "nperseg", ")", "else", ":", "win", "=", "np", ".", "asarray", "(", "window", ")", "if", "(", "len", "(", "win", ".", "shape", ")", "!=", "1", ")", ":", "raise", "ValueError", "(", "'window must be 1-D'", ")", "if", "(", "input_length", "<", "win", ".", "shape", "[", "(", "-", "1", ")", "]", ")", ":", "raise", "ValueError", "(", "'window is longer than input signal'", ")", "if", "(", "nperseg", "is", "None", ")", ":", "nperseg", "=", "win", ".", "shape", "[", "0", "]", "elif", "(", "nperseg", "is", "not", "None", ")", ":", "if", "(", "nperseg", "!=", "win", ".", "shape", "[", "0", "]", ")", ":", "raise", "ValueError", "(", "'value specified for nperseg is different from length of window'", ")", "return", "(", "win", ",", "nperseg", ")" ]
parses window and nperseg arguments for spectrogram and _spectral_helper .
train
false
3,680
def to_string(ip): return '.'.join(map((lambda n: str(((ip >> n) & 255))), [24, 16, 8, 0]))
[ "def", "to_string", "(", "ip", ")", ":", "return", "'.'", ".", "join", "(", "map", "(", "(", "lambda", "n", ":", "str", "(", "(", "(", "ip", ">>", "n", ")", "&", "255", ")", ")", ")", ",", "[", "24", ",", "16", ",", "8", ",", "0", "]", ")", ")" ]
serializes an object to a string .
train
false
3,681
def list_security_rules(call=None, kwargs=None):
    global netconn
    if (not netconn):
        netconn = get_conn(NetworkManagementClient)
    if (kwargs is None):
        kwargs = {}
    if (kwargs.get('resource_group') is None):
        kwargs['resource_group'] = config.get_cloud_config_value('resource_group', {}, __opts__, search_global=True)
    if (kwargs.get('security_group') is None):
        kwargs['security_group'] = config.get_cloud_config_value('security_group', {}, __opts__, search_global=True)
    region = get_location()
    bank = 'cloud/metadata/azurearm/{0}'.format(region)
    security_rules = cache.cache(bank, 'security_rules', netconn.security_rules.list, loop_fun=make_safe, expire=config.get_cloud_config_value('expire_security_rule_cache', get_configured_provider(), __opts__, search_global=False, default=86400), resource_group_name=kwargs['resource_group'], network_security_group_name=kwargs['security_group'])
    ret = {}
    for group in security_rules:
        ret[group['name']] = group
    return ret
[ "def", "list_security_rules", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "global", "netconn", "if", "(", "not", "netconn", ")", ":", "netconn", "=", "get_conn", "(", "NetworkManagementClient", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "if", "(", "kwargs", ".", "get", "(", "'resource_group'", ")", "is", "None", ")", ":", "kwargs", "[", "'resource_group'", "]", "=", "config", ".", "get_cloud_config_value", "(", "'resource_group'", ",", "{", "}", ",", "__opts__", ",", "search_global", "=", "True", ")", "if", "(", "kwargs", ".", "get", "(", "'security_group'", ")", "is", "None", ")", ":", "kwargs", "[", "'security_group'", "]", "=", "config", ".", "get_cloud_config_value", "(", "'security_group'", ",", "{", "}", ",", "__opts__", ",", "search_global", "=", "True", ")", "region", "=", "get_location", "(", ")", "bank", "=", "'cloud/metadata/azurearm/{0}'", ".", "format", "(", "region", ")", "security_rules", "=", "cache", ".", "cache", "(", "bank", ",", "'security_rules'", ",", "netconn", ".", "security_rules", ".", "list", ",", "loop_fun", "=", "make_safe", ",", "expire", "=", "config", ".", "get_cloud_config_value", "(", "'expire_security_rule_cache'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "86400", ")", ",", "resource_group_name", "=", "kwargs", "[", "'resource_group'", "]", ",", "network_security_group_name", "=", "kwargs", "[", "'security_group'", "]", ")", "ret", "=", "{", "}", "for", "group", "in", "security_rules", ":", "ret", "[", "group", "[", "'name'", "]", "]", "=", "group", "return", "ret" ]
lists network security rules .
train
false
3,682
def small_testing_registry():
    from ..description import WidgetDescription, CategoryDescription
    from .. import WidgetRegistry
    registry = WidgetRegistry()
    data_desc = CategoryDescription.from_package('Orange.widgets.data')
    file_desc = WidgetDescription.from_module('Orange.widgets.data.owfile')
    discretize_desc = WidgetDescription.from_module('Orange.widgets.data.owdiscretize')
    classify_desc = CategoryDescription.from_package('Orange.widgets.classify')
    bayes_desc = WidgetDescription.from_module('Orange.widgets.classify.ownaivebayes')
    registry.register_category(data_desc)
    registry.register_category(classify_desc)
    registry.register_widget(file_desc)
    registry.register_widget(discretize_desc)
    registry.register_widget(bayes_desc)
    return registry
[ "def", "small_testing_registry", "(", ")", ":", "from", ".", ".", "description", "import", "WidgetDescription", ",", "CategoryDescription", "from", ".", ".", "import", "WidgetRegistry", "registry", "=", "WidgetRegistry", "(", ")", "data_desc", "=", "CategoryDescription", ".", "from_package", "(", "'Orange.widgets.data'", ")", "file_desc", "=", "WidgetDescription", ".", "from_module", "(", "'Orange.widgets.data.owfile'", ")", "discretize_desc", "=", "WidgetDescription", ".", "from_module", "(", "'Orange.widgets.data.owdiscretize'", ")", "classify_desc", "=", "CategoryDescription", ".", "from_package", "(", "'Orange.widgets.classify'", ")", "bayes_desc", "=", "WidgetDescription", ".", "from_module", "(", "'Orange.widgets.classify.ownaivebayes'", ")", "registry", ".", "register_category", "(", "data_desc", ")", "registry", ".", "register_category", "(", "classify_desc", ")", "registry", ".", "register_widget", "(", "file_desc", ")", "registry", ".", "register_widget", "(", "discretize_desc", ")", "registry", ".", "register_widget", "(", "bayes_desc", ")", "return", "registry" ]
return a small registry with a few widgets for testing .
train
false
3,683
def _jittered_backoff(attempt, max_retry_delay): return min((random.random() * (2 ** attempt)), max_retry_delay)
[ "def", "_jittered_backoff", "(", "attempt", ",", "max_retry_delay", ")", ":", "return", "min", "(", "(", "random", ".", "random", "(", ")", "*", "(", "2", "**", "attempt", ")", ")", ",", "max_retry_delay", ")" ]
basic exponential backoff cli example:: salt myminion boto_kinesis .
train
false
3,684
def libvlc_media_player_get_hwnd(p_mi):
    f = (_Cfunctions.get('libvlc_media_player_get_hwnd', None) or _Cfunction('libvlc_media_player_get_hwnd', ((1,),), None, ctypes.c_void_p, MediaPlayer))
    return f(p_mi)
[ "def", "libvlc_media_player_get_hwnd", "(", "p_mi", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_player_get_hwnd'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_player_get_hwnd'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "ctypes", ".", "c_void_p", ",", "MediaPlayer", ")", ")", "return", "f", "(", "p_mi", ")" ]
get the windows api window handle previously set with l{libvlc_media_player_set_hwnd}() .
train
true
3,685
def _clean_data_if(match):
    quote = match.group(1)
    condition = match.group(2)
    for (entity, replace) in _CLEAN_ENTITIES.iteritems():
        condition = condition.replace(entity, replace)
    return ('data-if=%s%s%s' % (quote, condition, quote))
[ "def", "_clean_data_if", "(", "match", ")", ":", "quote", "=", "match", ".", "group", "(", "1", ")", "condition", "=", "match", ".", "group", "(", "2", ")", "for", "(", "entity", ",", "replace", ")", "in", "_CLEAN_ENTITIES", ".", "iteritems", "(", ")", ":", "condition", "=", "condition", ".", "replace", "(", "entity", ",", "replace", ")", "return", "(", "'data-if=%s%s%s'", "%", "(", "quote", ",", "condition", ",", "quote", ")", ")" ]
clean up entities in data-if attributes .
train
false
3,687
def inport(port_name='', props=[], mac_name=None): return __create_port_dict('in', port_name, mac_name, props)
[ "def", "inport", "(", "port_name", "=", "''", ",", "props", "=", "[", "]", ",", "mac_name", "=", "None", ")", ":", "return", "__create_port_dict", "(", "'in'", ",", "port_name", ",", "mac_name", ",", "props", ")" ]
generate a .
train
false
3,689
def print_result(results, file_dict, retval, print_results, section, log_printer, file_diff_dict, ignore_ranges, console_printer):
    min_severity_str = str(section.get('min_severity', 'INFO')).upper()
    min_severity = RESULT_SEVERITY.str_dict.get(min_severity_str, 'INFO')
    results = list(filter((lambda result: ((type(result) is Result) and (result.severity >= min_severity) and (not check_result_ignore(result, ignore_ranges)))), results))
    patched_results = autoapply_actions(results, file_dict, file_diff_dict, section, log_printer)
    print_results(log_printer, section, patched_results, file_dict, file_diff_dict, console_printer)
    return ((retval or (len(results) > 0)), patched_results)
[ "def", "print_result", "(", "results", ",", "file_dict", ",", "retval", ",", "print_results", ",", "section", ",", "log_printer", ",", "file_diff_dict", ",", "ignore_ranges", ",", "console_printer", ")", ":", "min_severity_str", "=", "str", "(", "section", ".", "get", "(", "'min_severity'", ",", "'INFO'", ")", ")", ".", "upper", "(", ")", "min_severity", "=", "RESULT_SEVERITY", ".", "str_dict", ".", "get", "(", "min_severity_str", ",", "'INFO'", ")", "results", "=", "list", "(", "filter", "(", "(", "lambda", "result", ":", "(", "(", "type", "(", "result", ")", "is", "Result", ")", "and", "(", "result", ".", "severity", ">=", "min_severity", ")", "and", "(", "not", "check_result_ignore", "(", "result", ",", "ignore_ranges", ")", ")", ")", ")", ",", "results", ")", ")", "patched_results", "=", "autoapply_actions", "(", "results", ",", "file_dict", ",", "file_diff_dict", ",", "section", ",", "log_printer", ")", "print_results", "(", "log_printer", ",", "section", ",", "patched_results", ",", "file_dict", ",", "file_diff_dict", ",", "console_printer", ")", "return", "(", "(", "retval", "or", "(", "len", "(", "results", ")", ">", "0", ")", ")", ",", "patched_results", ")" ]
nicely print a single autotest result .
train
false
3,691
@require_GET
def contributors_detail(request, readout_slug):
    product = _get_product(request)
    return _kb_detail(request, readout_slug, CONTRIBUTOR_READOUTS, 'dashboards.contributors', _('Knowledge Base Dashboard'), locale=settings.WIKI_DEFAULT_LANGUAGE, product=product)
[ "@", "require_GET", "def", "contributors_detail", "(", "request", ",", "readout_slug", ")", ":", "product", "=", "_get_product", "(", "request", ")", "return", "_kb_detail", "(", "request", ",", "readout_slug", ",", "CONTRIBUTOR_READOUTS", ",", "'dashboards.contributors'", ",", "_", "(", "'Knowledge Base Dashboard'", ")", ",", "locale", "=", "settings", ".", "WIKI_DEFAULT_LANGUAGE", ",", "product", "=", "product", ")" ]
show all the rows for the given contributor dashboard table .
train
false
3,692
def TimerReset(*args, **kwargs): return _TimerReset(*args, **kwargs)
[ "def", "TimerReset", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "_TimerReset", "(", "*", "args", ",", "**", "kwargs", ")" ]
global function for timer .
train
false
3,693
def register_check(check, codes=None):
    def _add_check(check, kind, codes, args):
        if (check in _checks[kind]):
            _checks[kind][check][0].extend((codes or []))
        else:
            _checks[kind][check] = ((codes or ['']), args)
    if inspect.isfunction(check):
        args = inspect.getargspec(check)[0]
        if (args and (args[0] in ('physical_line', 'logical_line'))):
            if (codes is None):
                codes = ERRORCODE_REGEX.findall((check.__doc__ or ''))
            _add_check(check, args[0], codes, args)
    elif inspect.isclass(check):
        if (inspect.getargspec(check.__init__)[0][:2] == ['self', 'tree']):
            _add_check(check, 'tree', codes, None)
[ "def", "register_check", "(", "check", ",", "codes", "=", "None", ")", ":", "def", "_add_check", "(", "check", ",", "kind", ",", "codes", ",", "args", ")", ":", "if", "(", "check", "in", "_checks", "[", "kind", "]", ")", ":", "_checks", "[", "kind", "]", "[", "check", "]", "[", "0", "]", ".", "extend", "(", "(", "codes", "or", "[", "]", ")", ")", "else", ":", "_checks", "[", "kind", "]", "[", "check", "]", "=", "(", "(", "codes", "or", "[", "''", "]", ")", ",", "args", ")", "if", "inspect", ".", "isfunction", "(", "check", ")", ":", "args", "=", "inspect", ".", "getargspec", "(", "check", ")", "[", "0", "]", "if", "(", "args", "and", "(", "args", "[", "0", "]", "in", "(", "'physical_line'", ",", "'logical_line'", ")", ")", ")", ":", "if", "(", "codes", "is", "None", ")", ":", "codes", "=", "ERRORCODE_REGEX", ".", "findall", "(", "(", "check", ".", "__doc__", "or", "''", ")", ")", "_add_check", "(", "check", ",", "args", "[", "0", "]", ",", "codes", ",", "args", ")", "elif", "inspect", ".", "isclass", "(", "check", ")", ":", "if", "(", "inspect", ".", "getargspec", "(", "check", ".", "__init__", ")", "[", "0", "]", "[", ":", "2", "]", "==", "[", "'self'", ",", "'tree'", "]", ")", ":", "_add_check", "(", "check", ",", "'tree'", ",", "codes", ",", "None", ")" ]
register a new check object .
train
true
3,698
def _format_info(data):
    gecos_field = data.pw_gecos.split(',', 3)
    while (len(gecos_field) < 4):
        gecos_field.append('')
    return {'gid': data.pw_gid, 'groups': list_groups(data.pw_name), 'home': data.pw_dir, 'name': data.pw_name, 'passwd': data.pw_passwd, 'shell': data.pw_shell, 'uid': data.pw_uid, 'fullname': gecos_field[0], 'roomnumber': gecos_field[1], 'workphone': gecos_field[2], 'homephone': gecos_field[3]}
[ "def", "_format_info", "(", "data", ")", ":", "gecos_field", "=", "data", ".", "pw_gecos", ".", "split", "(", "','", ",", "3", ")", "while", "(", "len", "(", "gecos_field", ")", "<", "4", ")", ":", "gecos_field", ".", "append", "(", "''", ")", "return", "{", "'gid'", ":", "data", ".", "pw_gid", ",", "'groups'", ":", "list_groups", "(", "data", ".", "pw_name", ")", ",", "'home'", ":", "data", ".", "pw_dir", ",", "'name'", ":", "data", ".", "pw_name", ",", "'passwd'", ":", "data", ".", "pw_passwd", ",", "'shell'", ":", "data", ".", "pw_shell", ",", "'uid'", ":", "data", ".", "pw_uid", ",", "'fullname'", ":", "gecos_field", "[", "0", "]", ",", "'roomnumber'", ":", "gecos_field", "[", "1", "]", ",", "'workphone'", ":", "gecos_field", "[", "2", "]", ",", "'homephone'", ":", "gecos_field", "[", "3", "]", "}" ]
return user information in a pretty way .
train
false
3,699
def ToUtf8(s): if isinstance(s, unicode): return s.encode('utf-8') else: return s
[ "def", "ToUtf8", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "return", "s", ".", "encode", "(", "'utf-8'", ")", "else", ":", "return", "s" ]
encode s in utf-8 if it is a unicode string .
train
false
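A minimal usage sketch for the ToUtf8 record above, assuming the definition from the snippet is in scope; Python 2 only, since unicode is a builtin there.

# illustrative calls only -- output comments show the expected results
print repr(ToUtf8(u'caf\xe9'))   # 'caf\xc3\xa9', the UTF-8 encoded bytes
print ToUtf8('already-bytes')    # non-unicode strings are returned unchanged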
3,700
def anonymize_info(info): if (not isinstance(info, Info)): raise ValueError('self must be an Info instance.') if (info.get('subject_info') is not None): del info['subject_info'] info['meas_date'] = [0, 0] for key_1 in ('file_id', 'meas_id'): key = info.get(key_1) if (key is None): continue for key_2 in ('secs', 'msecs', 'usecs'): if (key_2 not in key): continue info[key_1][key_2] = 0 return info
[ "def", "anonymize_info", "(", "info", ")", ":", "if", "(", "not", "isinstance", "(", "info", ",", "Info", ")", ")", ":", "raise", "ValueError", "(", "'self must be an Info instance.'", ")", "if", "(", "info", ".", "get", "(", "'subject_info'", ")", "is", "not", "None", ")", ":", "del", "info", "[", "'subject_info'", "]", "info", "[", "'meas_date'", "]", "=", "[", "0", ",", "0", "]", "for", "key_1", "in", "(", "'file_id'", ",", "'meas_id'", ")", ":", "key", "=", "info", ".", "get", "(", "key_1", ")", "if", "(", "key", "is", "None", ")", ":", "continue", "for", "key_2", "in", "(", "'secs'", ",", "'msecs'", ",", "'usecs'", ")", ":", "if", "(", "key_2", "not", "in", "key", ")", ":", "continue", "info", "[", "key_1", "]", "[", "key_2", "]", "=", "0", "return", "info" ]
anonymize measurement information in place .
train
false
3,701
def patch_collection_2d_to_3d(col, zs=0, zdir=u'z', depthshade=True): if isinstance(col, PathCollection): col.__class__ = Path3DCollection elif isinstance(col, PatchCollection): col.__class__ = Patch3DCollection col._depthshade = depthshade col.set_3d_properties(zs, zdir)
[ "def", "patch_collection_2d_to_3d", "(", "col", ",", "zs", "=", "0", ",", "zdir", "=", "u'z'", ",", "depthshade", "=", "True", ")", ":", "if", "isinstance", "(", "col", ",", "PathCollection", ")", ":", "col", ".", "__class__", "=", "Path3DCollection", "elif", "isinstance", "(", "col", ",", "PatchCollection", ")", ":", "col", ".", "__class__", "=", "Patch3DCollection", "col", ".", "_depthshade", "=", "depthshade", "col", ".", "set_3d_properties", "(", "zs", ",", "zdir", ")" ]
convert a :class:~matplotlib.collections.PatchCollection into a Patch3DCollection object (or a PathCollection into a Path3DCollection) .
train
false
3,702
def _is_nth_color(c): return (isinstance(c, six.string_types) and re.match(u'\\AC[0-9]\\Z', c))
[ "def", "_is_nth_color", "(", "c", ")", ":", "return", "(", "isinstance", "(", "c", ",", "six", ".", "string_types", ")", "and", "re", ".", "match", "(", "u'\\\\AC[0-9]\\\\Z'", ",", "c", ")", ")" ]
return whether c can be interpreted as an item in the color cycle .
train
false
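A hedged sketch of the _is_nth_color record above in use, assuming the snippet's definition (with its re and six imports) is available; only the single-digit 'C0'..'C9' shorthands match.

for c in ('C0', 'C9', 'C10', 'blue', 0):
    print(c, bool(_is_nth_color(c)))
# C0 True, C9 True, C10 False (two digits), blue False, 0 False (not a string)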
3,703
def _makeOperator(operatorInput, expectedShape): if (operatorInput is None): def ident(x): return x operator = LinearOperator(expectedShape, ident, matmat=ident) else: operator = aslinearoperator(operatorInput) if (operator.shape != expectedShape): raise ValueError('operator has invalid shape') return operator
[ "def", "_makeOperator", "(", "operatorInput", ",", "expectedShape", ")", ":", "if", "(", "operatorInput", "is", "None", ")", ":", "def", "ident", "(", "x", ")", ":", "return", "x", "operator", "=", "LinearOperator", "(", "expectedShape", ",", "ident", ",", "matmat", "=", "ident", ")", "else", ":", "operator", "=", "aslinearoperator", "(", "operatorInput", ")", "if", "(", "operator", ".", "shape", "!=", "expectedShape", ")", ":", "raise", "ValueError", "(", "'operator has invalid shape'", ")", "return", "operator" ]
takes a dense numpy array or a sparse matrix or a function and makes an operator performing matrix * blockvector products .
train
false
3,704
def update_from_whitelist(whitelist_set, add_experimental, add_noncompliant, autotest_dir): tests = {} profilers = {} for file_path in whitelist_set: if (file_path.find('client/profilers') == (-1)): try: found_test = control_data.parse_control(file_path, raise_warnings=True) tests[file_path] = found_test except control_data.ControlVariableException as e: logging.warn('Skipping %s\n%s', file, e) else: profilers[file_path] = compiler.parseFile(file_path).doc if (len(tests) > 0): update_tests_in_db(tests, add_experimental=add_experimental, add_noncompliant=add_noncompliant, autotest_dir=autotest_dir) if (len(profilers) > 0): update_profilers_in_db(profilers, add_noncompliant=add_noncompliant, description='NA')
[ "def", "update_from_whitelist", "(", "whitelist_set", ",", "add_experimental", ",", "add_noncompliant", ",", "autotest_dir", ")", ":", "tests", "=", "{", "}", "profilers", "=", "{", "}", "for", "file_path", "in", "whitelist_set", ":", "if", "(", "file_path", ".", "find", "(", "'client/profilers'", ")", "==", "(", "-", "1", ")", ")", ":", "try", ":", "found_test", "=", "control_data", ".", "parse_control", "(", "file_path", ",", "raise_warnings", "=", "True", ")", "tests", "[", "file_path", "]", "=", "found_test", "except", "control_data", ".", "ControlVariableException", "as", "e", ":", "logging", ".", "warn", "(", "'Skipping %s\\n%s'", ",", "file", ",", "e", ")", "else", ":", "profilers", "[", "file_path", "]", "=", "compiler", ".", "parseFile", "(", "file_path", ")", ".", "doc", "if", "(", "len", "(", "tests", ")", ">", "0", ")", ":", "update_tests_in_db", "(", "tests", ",", "add_experimental", "=", "add_experimental", ",", "add_noncompliant", "=", "add_noncompliant", ",", "autotest_dir", "=", "autotest_dir", ")", "if", "(", "len", "(", "profilers", ")", ">", "0", ")", ":", "update_profilers_in_db", "(", "profilers", ",", "add_noncompliant", "=", "add_noncompliant", ",", "description", "=", "'NA'", ")" ]
scans through all tests in the whitelist and adds them to the database .
train
false
3,705
def test_nm_fit_invalid_ratio(): ratio = (1.0 / 10000.0) nm = NearMiss(ratio=ratio, random_state=RND_SEED) assert_raises(RuntimeError, nm.fit, X, Y)
[ "def", "test_nm_fit_invalid_ratio", "(", ")", ":", "ratio", "=", "(", "1.0", "/", "10000.0", ")", "nm", "=", "NearMiss", "(", "ratio", "=", "ratio", ",", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "RuntimeError", ",", "nm", ".", "fit", ",", "X", ",", "Y", ")" ]
test whether an error is raised when the balancing ratio to fit is smaller than that of the data .
train
false
3,706
def dependent_nodes(scheme, node): def expand(node): return [link.sink_node for link in scheme.find_links(source_node=node) if link.enabled] nodes = list(traverse_bf(node, expand)) assert (nodes[0] is node) return nodes[1:]
[ "def", "dependent_nodes", "(", "scheme", ",", "node", ")", ":", "def", "expand", "(", "node", ")", ":", "return", "[", "link", ".", "sink_node", "for", "link", "in", "scheme", ".", "find_links", "(", "source_node", "=", "node", ")", "if", "link", ".", "enabled", "]", "nodes", "=", "list", "(", "traverse_bf", "(", "node", ",", "expand", ")", ")", "assert", "(", "nodes", "[", "0", "]", "is", "node", ")", "return", "nodes", "[", "1", ":", "]" ]
return a list of all nodes in scheme that are dependent on node .
train
false
3,707
def test_appgroup(): @click.group(cls=AppGroup) def cli(): pass @cli.command(with_appcontext=True) def test(): click.echo(current_app.name) @cli.group() def subgroup(): pass @subgroup.command(with_appcontext=True) def test2(): click.echo(current_app.name) obj = ScriptInfo(create_app=(lambda info: Flask('testappgroup'))) runner = CliRunner() result = runner.invoke(cli, ['test'], obj=obj) assert (result.exit_code == 0) assert (result.output == 'testappgroup\n') result = runner.invoke(cli, ['subgroup', 'test2'], obj=obj) assert (result.exit_code == 0) assert (result.output == 'testappgroup\n')
[ "def", "test_appgroup", "(", ")", ":", "@", "click", ".", "group", "(", "cls", "=", "AppGroup", ")", "def", "cli", "(", ")", ":", "pass", "@", "cli", ".", "command", "(", "with_appcontext", "=", "True", ")", "def", "test", "(", ")", ":", "click", ".", "echo", "(", "current_app", ".", "name", ")", "@", "cli", ".", "group", "(", ")", "def", "subgroup", "(", ")", ":", "pass", "@", "subgroup", ".", "command", "(", "with_appcontext", "=", "True", ")", "def", "test2", "(", ")", ":", "click", ".", "echo", "(", "current_app", ".", "name", ")", "obj", "=", "ScriptInfo", "(", "create_app", "=", "(", "lambda", "info", ":", "Flask", "(", "'testappgroup'", ")", ")", ")", "runner", "=", "CliRunner", "(", ")", "result", "=", "runner", ".", "invoke", "(", "cli", ",", "[", "'test'", "]", ",", "obj", "=", "obj", ")", "assert", "(", "result", ".", "exit_code", "==", "0", ")", "assert", "(", "result", ".", "output", "==", "'testappgroup\\n'", ")", "result", "=", "runner", ".", "invoke", "(", "cli", ",", "[", "'subgroup'", ",", "'test2'", "]", ",", "obj", "=", "obj", ")", "assert", "(", "result", ".", "exit_code", "==", "0", ")", "assert", "(", "result", ".", "output", "==", "'testappgroup\\n'", ")" ]
test of with_appcontext .
train
false
3,708
@task def mongodump(ctx, path): db = settings.DB_NAME port = settings.DB_PORT cmd = 'mongodump --db {db} --port {port} --out {path}'.format(db=db, port=port, path=path, pty=True) if settings.DB_USER: cmd += ' --username {0}'.format(settings.DB_USER) if settings.DB_PASS: cmd += ' --password {0}'.format(settings.DB_PASS) ctx.run(cmd, echo=True) print () print 'To restore from the dumped database, run `invoke mongorestore {0}`'.format(os.path.join(path, settings.DB_NAME))
[ "@", "task", "def", "mongodump", "(", "ctx", ",", "path", ")", ":", "db", "=", "settings", ".", "DB_NAME", "port", "=", "settings", ".", "DB_PORT", "cmd", "=", "'mongodump --db {db} --port {port} --out {path}'", ".", "format", "(", "db", "=", "db", ",", "port", "=", "port", ",", "path", "=", "path", ",", "pty", "=", "True", ")", "if", "settings", ".", "DB_USER", ":", "cmd", "+=", "' --username {0}'", ".", "format", "(", "settings", ".", "DB_USER", ")", "if", "settings", ".", "DB_PASS", ":", "cmd", "+=", "' --password {0}'", ".", "format", "(", "settings", ".", "DB_PASS", ")", "ctx", ".", "run", "(", "cmd", ",", "echo", "=", "True", ")", "print", "(", ")", "print", "'To restore from the dumped database, run `invoke mongorestore {0}`'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "path", ",", "settings", ".", "DB_NAME", ")", ")" ]
back up the contents of the running osf database .
train
false
3,709
@util.positional(2) def flow_from_clientsecrets(filename, scope, redirect_uri=None, message=None, cache=None, login_hint=None, device_uri=None): try: (client_type, client_info) = clientsecrets.loadfile(filename, cache=cache) if (client_type in (clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED)): constructor_kwargs = {'redirect_uri': redirect_uri, 'auth_uri': client_info['auth_uri'], 'token_uri': client_info['token_uri'], 'login_hint': login_hint} revoke_uri = client_info.get('revoke_uri') if (revoke_uri is not None): constructor_kwargs['revoke_uri'] = revoke_uri if (device_uri is not None): constructor_kwargs['device_uri'] = device_uri return OAuth2WebServerFlow(client_info['client_id'], client_info['client_secret'], scope, **constructor_kwargs) except clientsecrets.InvalidClientSecretsError: if message: sys.exit(message) else: raise else: raise UnknownClientSecretsFlowError(('This OAuth 2.0 flow is unsupported: %r' % client_type))
[ "@", "util", ".", "positional", "(", "2", ")", "def", "flow_from_clientsecrets", "(", "filename", ",", "scope", ",", "redirect_uri", "=", "None", ",", "message", "=", "None", ",", "cache", "=", "None", ",", "login_hint", "=", "None", ",", "device_uri", "=", "None", ")", ":", "try", ":", "(", "client_type", ",", "client_info", ")", "=", "clientsecrets", ".", "loadfile", "(", "filename", ",", "cache", "=", "cache", ")", "if", "(", "client_type", "in", "(", "clientsecrets", ".", "TYPE_WEB", ",", "clientsecrets", ".", "TYPE_INSTALLED", ")", ")", ":", "constructor_kwargs", "=", "{", "'redirect_uri'", ":", "redirect_uri", ",", "'auth_uri'", ":", "client_info", "[", "'auth_uri'", "]", ",", "'token_uri'", ":", "client_info", "[", "'token_uri'", "]", ",", "'login_hint'", ":", "login_hint", "}", "revoke_uri", "=", "client_info", ".", "get", "(", "'revoke_uri'", ")", "if", "(", "revoke_uri", "is", "not", "None", ")", ":", "constructor_kwargs", "[", "'revoke_uri'", "]", "=", "revoke_uri", "if", "(", "device_uri", "is", "not", "None", ")", ":", "constructor_kwargs", "[", "'device_uri'", "]", "=", "device_uri", "return", "OAuth2WebServerFlow", "(", "client_info", "[", "'client_id'", "]", ",", "client_info", "[", "'client_secret'", "]", ",", "scope", ",", "**", "constructor_kwargs", ")", "except", "clientsecrets", ".", "InvalidClientSecretsError", ":", "if", "message", ":", "sys", ".", "exit", "(", "message", ")", "else", ":", "raise", "else", ":", "raise", "UnknownClientSecretsFlowError", "(", "(", "'This OAuth 2.0 flow is unsupported: %r'", "%", "client_type", ")", ")" ]
create a flow from a clientsecrets file .
train
false
3,715
def generateColorMap(): Map = cm.jet(np.arange(256)) stringColors = [] for i in range(Map.shape[0]): rgb = (int((255 * Map[i][0])), int((255 * Map[i][1])), int((255 * Map[i][2]))) stringColors.append(struct.pack('BBB', *rgb).encode('hex')) return stringColors
[ "def", "generateColorMap", "(", ")", ":", "Map", "=", "cm", ".", "jet", "(", "np", ".", "arange", "(", "256", ")", ")", "stringColors", "=", "[", "]", "for", "i", "in", "range", "(", "Map", ".", "shape", "[", "0", "]", ")", ":", "rgb", "=", "(", "int", "(", "(", "255", "*", "Map", "[", "i", "]", "[", "0", "]", ")", ")", ",", "int", "(", "(", "255", "*", "Map", "[", "i", "]", "[", "1", "]", ")", ")", ",", "int", "(", "(", "255", "*", "Map", "[", "i", "]", "[", "2", "]", ")", ")", ")", "stringColors", ".", "append", "(", "struct", ".", "pack", "(", "'BBB'", ",", "*", "rgb", ")", ".", "encode", "(", "'hex'", ")", ")", "return", "stringColors" ]
this function generates a 256-entry jet colormap of html-like hex string colors .
train
true
3,716
def backupdriver(cls): _backup_register.append(cls) return cls
[ "def", "backupdriver", "(", "cls", ")", ":", "_backup_register", ".", "append", "(", "cls", ")", "return", "cls" ]
decorator for concrete backup driver implementations .
train
false
3,717
def _get_typing_replacement_module(): global _typing_module if (_typing_module is None): typing_path = os.path.abspath(os.path.join(__file__, '../jedi_typing.py')) with open(typing_path) as f: code = _compatibility.unicode(f.read()) p = ParserWithRecovery(load_grammar(), code) _typing_module = p.module return _typing_module
[ "def", "_get_typing_replacement_module", "(", ")", ":", "global", "_typing_module", "if", "(", "_typing_module", "is", "None", ")", ":", "typing_path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "__file__", ",", "'../jedi_typing.py'", ")", ")", "with", "open", "(", "typing_path", ")", "as", "f", ":", "code", "=", "_compatibility", ".", "unicode", "(", "f", ".", "read", "(", ")", ")", "p", "=", "ParserWithRecovery", "(", "load_grammar", "(", ")", ",", "code", ")", "_typing_module", "=", "p", ".", "module", "return", "_typing_module" ]
the idea is to return our jedi replacement for the pep-0484 typing module as discussed at URL .
train
false
3,719
def HttpRequest(service, operation, data, uri, extra_headers=None, url_params=None, escape_params=True, content_type='application/atom+xml'): full_uri = atom.service.BuildUri(uri, url_params, escape_params) (server, port, ssl, partial_uri) = atom.service.ProcessUrl(service, full_uri) if ssl: full_url = ('https://%s%s' % (server, partial_uri)) else: full_url = ('http://%s%s' % (server, partial_uri)) data_str = data if data: if isinstance(data, list): converted_parts = [__ConvertDataPart(x) for x in data] data_str = ''.join(converted_parts) else: data_str = __ConvertDataPart(data) headers = {} if isinstance(service.additional_headers, dict): headers = service.additional_headers.copy() if isinstance(extra_headers, dict): for (header, value) in extra_headers.iteritems(): headers[header] = value if content_type: headers['Content-Type'] = content_type if (operation == 'GET'): method = urlfetch.GET elif (operation == 'POST'): method = urlfetch.POST elif (operation == 'PUT'): method = urlfetch.PUT elif (operation == 'DELETE'): method = urlfetch.DELETE else: method = None return HttpResponse(urlfetch.Fetch(url=full_url, payload=data_str, method=method, headers=headers))
[ "def", "HttpRequest", "(", "service", ",", "operation", ",", "data", ",", "uri", ",", "extra_headers", "=", "None", ",", "url_params", "=", "None", ",", "escape_params", "=", "True", ",", "content_type", "=", "'application/atom+xml'", ")", ":", "full_uri", "=", "atom", ".", "service", ".", "BuildUri", "(", "uri", ",", "url_params", ",", "escape_params", ")", "(", "server", ",", "port", ",", "ssl", ",", "partial_uri", ")", "=", "atom", ".", "service", ".", "ProcessUrl", "(", "service", ",", "full_uri", ")", "if", "ssl", ":", "full_url", "=", "(", "'https://%s%s'", "%", "(", "server", ",", "partial_uri", ")", ")", "else", ":", "full_url", "=", "(", "'http://%s%s'", "%", "(", "server", ",", "partial_uri", ")", ")", "data_str", "=", "data", "if", "data", ":", "if", "isinstance", "(", "data", ",", "list", ")", ":", "converted_parts", "=", "[", "__ConvertDataPart", "(", "x", ")", "for", "x", "in", "data", "]", "data_str", "=", "''", ".", "join", "(", "converted_parts", ")", "else", ":", "data_str", "=", "__ConvertDataPart", "(", "data", ")", "headers", "=", "{", "}", "if", "isinstance", "(", "service", ".", "additional_headers", ",", "dict", ")", ":", "headers", "=", "service", ".", "additional_headers", ".", "copy", "(", ")", "if", "isinstance", "(", "extra_headers", ",", "dict", ")", ":", "for", "(", "header", ",", "value", ")", "in", "extra_headers", ".", "iteritems", "(", ")", ":", "headers", "[", "header", "]", "=", "value", "if", "content_type", ":", "headers", "[", "'Content-Type'", "]", "=", "content_type", "if", "(", "operation", "==", "'GET'", ")", ":", "method", "=", "urlfetch", ".", "GET", "elif", "(", "operation", "==", "'POST'", ")", ":", "method", "=", "urlfetch", ".", "POST", "elif", "(", "operation", "==", "'PUT'", ")", ":", "method", "=", "urlfetch", ".", "PUT", "elif", "(", "operation", "==", "'DELETE'", ")", ":", "method", "=", "urlfetch", ".", "DELETE", "else", ":", "method", "=", "None", "return", "HttpResponse", "(", "urlfetch", ".", "Fetch", "(", "url", "=", "full_url", ",", "payload", "=", "data_str", ",", "method", "=", "method", ",", "headers", "=", "headers", ")", ")" ]
simulates an http call to the server .
train
false
3,722
def gitignore_templates(): return gh.gitignore_templates()
[ "def", "gitignore_templates", "(", ")", ":", "return", "gh", ".", "gitignore_templates", "(", ")" ]
return the list of available templates .
train
false
3,724
def fit_inside(image, shape): assert (len(image.shape) == 3) assert (len(shape) == 2) if ((image.shape[0] <= shape[0]) and (image.shape[1] <= shape[1])): return image.copy() row_ratio = (float(image.shape[0]) / float(shape[0])) col_ratio = (float(image.shape[1]) / float(shape[1])) if (row_ratio > col_ratio): target_shape = [shape[0], min((image.shape[1] / row_ratio), shape[1])] else: target_shape = [min((image.shape[0] / col_ratio), shape[0]), shape[1]] assert (target_shape[0] <= shape[0]) assert (target_shape[1] <= shape[1]) assert ((target_shape[0] == shape[0]) or (target_shape[1] == shape[1])) rval = rescale(image, target_shape) return rval
[ "def", "fit_inside", "(", "image", ",", "shape", ")", ":", "assert", "(", "len", "(", "image", ".", "shape", ")", "==", "3", ")", "assert", "(", "len", "(", "shape", ")", "==", "2", ")", "if", "(", "(", "image", ".", "shape", "[", "0", "]", "<=", "shape", "[", "0", "]", ")", "and", "(", "image", ".", "shape", "[", "1", "]", "<=", "shape", "[", "1", "]", ")", ")", ":", "return", "image", ".", "copy", "(", ")", "row_ratio", "=", "(", "float", "(", "image", ".", "shape", "[", "0", "]", ")", "/", "float", "(", "shape", "[", "0", "]", ")", ")", "col_ratio", "=", "(", "float", "(", "image", ".", "shape", "[", "1", "]", ")", "/", "float", "(", "shape", "[", "1", "]", ")", ")", "if", "(", "row_ratio", ">", "col_ratio", ")", ":", "target_shape", "=", "[", "shape", "[", "0", "]", ",", "min", "(", "(", "image", ".", "shape", "[", "1", "]", "/", "row_ratio", ")", ",", "shape", "[", "1", "]", ")", "]", "else", ":", "target_shape", "=", "[", "min", "(", "(", "image", ".", "shape", "[", "0", "]", "/", "col_ratio", ")", ",", "shape", "[", "0", "]", ")", ",", "shape", "[", "1", "]", "]", "assert", "(", "target_shape", "[", "0", "]", "<=", "shape", "[", "0", "]", ")", "assert", "(", "target_shape", "[", "1", "]", "<=", "shape", "[", "1", "]", ")", "assert", "(", "(", "target_shape", "[", "0", "]", "==", "shape", "[", "0", "]", ")", "or", "(", "target_shape", "[", "1", "]", "==", "shape", "[", "1", "]", ")", ")", "rval", "=", "rescale", "(", "image", ",", "target_shape", ")", "return", "rval" ]
scales image down to fit inside shape; preserves the proportions of the image .
train
false
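A usage sketch for fit_inside under stated assumptions: the snippet's definition and the rescale helper it calls (not shown here) are importable, and images are (rows, cols, channels) numpy arrays.

import numpy as np
image = np.zeros((200, 100, 3))       # taller than the target box
small = fit_inside(image, (50, 50))   # row_ratio 4.0 beats col_ratio 2.0
# target shape becomes (50, 25): scaled down with proportions preserved
copy = fit_inside(np.zeros((10, 10, 3)), (50, 50))  # already fits, copied as-is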
3,725
@set_database def delete_instances(ids, **kwargs): if ids: for item in Item.select().where(Item.id.in_(ids)): item.delete_instance()
[ "@", "set_database", "def", "delete_instances", "(", "ids", ",", "**", "kwargs", ")", ":", "if", "ids", ":", "for", "item", "in", "Item", ".", "select", "(", ")", ".", "where", "(", "Item", ".", "id", ".", "in_", "(", "ids", ")", ")", ":", "item", ".", "delete_instance", "(", ")" ]
given a list of item ids, delete the corresponding item instances .
train
false
3,727
def create_mashup_dict(image_meta): d = {} for (key, value) in six.iteritems(image_meta): if isinstance(value, dict): for (subkey, subvalue) in six.iteritems(create_mashup_dict(value)): if (subkey not in image_meta): d[subkey] = subvalue else: d[key] = value return d
[ "def", "create_mashup_dict", "(", "image_meta", ")", ":", "d", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "image_meta", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "for", "(", "subkey", ",", "subvalue", ")", "in", "six", ".", "iteritems", "(", "create_mashup_dict", "(", "value", ")", ")", ":", "if", "(", "subkey", "not", "in", "image_meta", ")", ":", "d", "[", "subkey", "]", "=", "subvalue", "else", ":", "d", "[", "key", "]", "=", "value", "return", "d" ]
returns a dictionary-like mashup of the image core properties and the image custom properties from the given image metadata .
train
false
3,728
def matroska_date_to_datetime(date): format = re.split('([-:. ])', '%Y-%m-%d %H:%M:%S.%f') while format: try: return datetime.strptime(date, ''.join(format)) except ValueError: format = format[:(-2)] return date
[ "def", "matroska_date_to_datetime", "(", "date", ")", ":", "format", "=", "re", ".", "split", "(", "'([-:. ])'", ",", "'%Y-%m-%d %H:%M:%S.%f'", ")", "while", "format", ":", "try", ":", "return", "datetime", ".", "strptime", "(", "date", ",", "''", ".", "join", "(", "format", ")", ")", "except", "ValueError", ":", "format", "=", "format", "[", ":", "(", "-", "2", ")", "]", "return", "date" ]
converts a date in matroska's date format to a python datetime object .
train
false
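A sketch of the fallback parsing in the matroska_date_to_datetime record above, assuming the snippet's definition (with its re and datetime imports) is in scope.

print(matroska_date_to_datetime('2010-08-07 14:22:51.300'))  # full format parses
print(matroska_date_to_datetime('2010-08-07'))   # trailing fields dropped until a match
print(matroska_date_to_datetime('not a date'))   # nothing matches, returned unchanged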
3,729
def is_possible_number_with_reason(numobj): national_number = national_significant_number(numobj) country_code = numobj.country_code if (not _has_valid_country_calling_code(country_code)): return ValidationResult.INVALID_COUNTRY_CODE region_code = region_code_for_country_code(country_code) metadata = PhoneMetadata.metadata_for_region_or_calling_code(country_code, region_code) possible_re = re.compile((metadata.general_desc.possible_number_pattern or U_EMPTY_STRING)) return _test_number_length_against_pattern(possible_re, national_number)
[ "def", "is_possible_number_with_reason", "(", "numobj", ")", ":", "national_number", "=", "national_significant_number", "(", "numobj", ")", "country_code", "=", "numobj", ".", "country_code", "if", "(", "not", "_has_valid_country_calling_code", "(", "country_code", ")", ")", ":", "return", "ValidationResult", ".", "INVALID_COUNTRY_CODE", "region_code", "=", "region_code_for_country_code", "(", "country_code", ")", "metadata", "=", "PhoneMetadata", ".", "metadata_for_region_or_calling_code", "(", "country_code", ",", "region_code", ")", "possible_re", "=", "re", ".", "compile", "(", "(", "metadata", ".", "general_desc", ".", "possible_number_pattern", "or", "U_EMPTY_STRING", ")", ")", "return", "_test_number_length_against_pattern", "(", "possible_re", ",", "national_number", ")" ]
check whether a phone number is a possible number .
train
false
3,732
def getChainTextFromProcedures(fileName, procedures, text): lastProcedureTime = time.time() for procedure in procedures: craftModule = getCraftModule(procedure) if (craftModule != None): text = craftModule.getCraftedText(fileName, text) if gcodec.isProcedureDone(text, procedure): print ('%s procedure took %s.' % (procedure.capitalize(), euclidean.getDurationString((time.time() - lastProcedureTime)))) lastProcedureTime = time.time() return text
[ "def", "getChainTextFromProcedures", "(", "fileName", ",", "procedures", ",", "text", ")", ":", "lastProcedureTime", "=", "time", ".", "time", "(", ")", "for", "procedure", "in", "procedures", ":", "craftModule", "=", "getCraftModule", "(", "procedure", ")", "if", "(", "craftModule", "!=", "None", ")", ":", "text", "=", "craftModule", ".", "getCraftedText", "(", "fileName", ",", "text", ")", "if", "gcodec", ".", "isProcedureDone", "(", "text", ",", "procedure", ")", ":", "print", "(", "'%s procedure took %s.'", "%", "(", "procedure", ".", "capitalize", "(", ")", ",", "euclidean", ".", "getDurationString", "(", "(", "time", ".", "time", "(", ")", "-", "lastProcedureTime", ")", ")", ")", ")", "lastProcedureTime", "=", "time", ".", "time", "(", ")", "return", "text" ]
get a crafted shape file from a list of procedures .
train
false
3,733
def _https_verify_certificates(enable=True): global _create_default_https_context if enable: _create_default_https_context = create_default_context else: _create_default_https_context = _create_unverified_context
[ "def", "_https_verify_certificates", "(", "enable", "=", "True", ")", ":", "global", "_create_default_https_context", "if", "enable", ":", "_create_default_https_context", "=", "create_default_context", "else", ":", "_create_default_https_context", "=", "_create_unverified_context" ]
verify server https certificates by default .
train
false
3,735
def evaluate_filter_op(value, operator, threshold): if (operator == 'gt'): return (value > threshold) elif (operator == 'gte'): return (value >= threshold) elif (operator == 'lt'): return (value < threshold) elif (operator == 'lte'): return (value <= threshold) elif (operator == 'neq'): return (value != threshold) elif (operator == 'eq'): return (value == threshold) msg = _('Unable to filter on a unknown operator.') raise exception.InvalidFilterOperatorValue(msg)
[ "def", "evaluate_filter_op", "(", "value", ",", "operator", ",", "threshold", ")", ":", "if", "(", "operator", "==", "'gt'", ")", ":", "return", "(", "value", ">", "threshold", ")", "elif", "(", "operator", "==", "'gte'", ")", ":", "return", "(", "value", ">=", "threshold", ")", "elif", "(", "operator", "==", "'lt'", ")", ":", "return", "(", "value", "<", "threshold", ")", "elif", "(", "operator", "==", "'lte'", ")", ":", "return", "(", "value", "<=", "threshold", ")", "elif", "(", "operator", "==", "'neq'", ")", ":", "return", "(", "value", "!=", "threshold", ")", "elif", "(", "operator", "==", "'eq'", ")", ":", "return", "(", "value", "==", "threshold", ")", "msg", "=", "_", "(", "'Unable to filter on a unknown operator.'", ")", "raise", "exception", ".", "InvalidFilterOperatorValue", "(", "msg", ")" ]
evaluate a comparison operator .
train
false
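A hedged sketch of evaluate_filter_op, assuming the snippet's definition plus the exception module and _ translator of its home project are importable.

print(evaluate_filter_op(5, 'gt', 3))    # True
print(evaluate_filter_op(5, 'lte', 5))   # True
print(evaluate_filter_op(5, 'neq', 5))   # False
# any other operator string raises InvalidFilterOperatorValue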
3,736
def GetGoogleSqlOAuth2RefreshToken(oauth_file_path): if (not os.path.exists(oauth_file_path)): return None try: with open(oauth_file_path) as oauth_file: token = simplejson.load(oauth_file) return token['refresh_token'] except (IOError, KeyError, simplejson.decoder.JSONDecodeError): logging.exception('Could not read OAuth2.0 token from %s', oauth_file_path) return None
[ "def", "GetGoogleSqlOAuth2RefreshToken", "(", "oauth_file_path", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "oauth_file_path", ")", ")", ":", "return", "None", "try", ":", "with", "open", "(", "oauth_file_path", ")", "as", "oauth_file", ":", "token", "=", "simplejson", ".", "load", "(", "oauth_file", ")", "return", "token", "[", "'refresh_token'", "]", "except", "(", "IOError", ",", "KeyError", ",", "simplejson", ".", "decoder", ".", "JSONDecodeError", ")", ":", "logging", ".", "exception", "(", "'Could not read OAuth2.0 token from %s'", ",", "oauth_file_path", ")", "return", "None" ]
reads the user's google cloud sql oauth2.0 refresh token from the given file .
train
false
3,737
def qr_destroy(la): a = np.asfortranarray(la[0]) del la[0], la (m, n) = a.shape logger.debug(('computing QR of %s dense matrix' % str(a.shape))) (geqrf,) = get_lapack_funcs(('geqrf',), (a,)) (qr, tau, work, info) = geqrf(a, lwork=(-1), overwrite_a=True) (qr, tau, work, info) = geqrf(a, lwork=work[0], overwrite_a=True) del a assert (info >= 0) r = triu(qr[:n, :n]) if (m < n): qr = qr[:, :m] (gorgqr,) = get_lapack_funcs(('orgqr',), (qr,)) (q, work, info) = gorgqr(qr, tau, lwork=(-1), overwrite_a=True) (q, work, info) = gorgqr(qr, tau, lwork=work[0], overwrite_a=True) assert (info >= 0), 'qr failed' assert q.flags.f_contiguous return (q, r)
[ "def", "qr_destroy", "(", "la", ")", ":", "a", "=", "np", ".", "asfortranarray", "(", "la", "[", "0", "]", ")", "del", "la", "[", "0", "]", ",", "la", "(", "m", ",", "n", ")", "=", "a", ".", "shape", "logger", ".", "debug", "(", "(", "'computing QR of %s dense matrix'", "%", "str", "(", "a", ".", "shape", ")", ")", ")", "(", "geqrf", ",", ")", "=", "get_lapack_funcs", "(", "(", "'geqrf'", ",", ")", ",", "(", "a", ",", ")", ")", "(", "qr", ",", "tau", ",", "work", ",", "info", ")", "=", "geqrf", "(", "a", ",", "lwork", "=", "(", "-", "1", ")", ",", "overwrite_a", "=", "True", ")", "(", "qr", ",", "tau", ",", "work", ",", "info", ")", "=", "geqrf", "(", "a", ",", "lwork", "=", "work", "[", "0", "]", ",", "overwrite_a", "=", "True", ")", "del", "a", "assert", "(", "info", ">=", "0", ")", "r", "=", "triu", "(", "qr", "[", ":", "n", ",", ":", "n", "]", ")", "if", "(", "m", "<", "n", ")", ":", "qr", "=", "qr", "[", ":", ",", ":", "m", "]", "(", "gorgqr", ",", ")", "=", "get_lapack_funcs", "(", "(", "'orgqr'", ",", ")", ",", "(", "qr", ",", ")", ")", "(", "q", ",", "work", ",", "info", ")", "=", "gorgqr", "(", "qr", ",", "tau", ",", "lwork", "=", "(", "-", "1", ")", ",", "overwrite_a", "=", "True", ")", "(", "q", ",", "work", ",", "info", ")", "=", "gorgqr", "(", "qr", ",", "tau", ",", "lwork", "=", "work", "[", "0", "]", ",", "overwrite_a", "=", "True", ")", "assert", "(", "info", ">=", "0", ")", ",", "'qr failed'", "assert", "q", ".", "flags", ".", "f_contiguous", "return", "(", "q", ",", "r", ")" ]
return qr decomposition of la[0] .
train
false
3,739
def submit_rescore_problem_for_all_students(request, usage_key, only_if_higher=False): check_arguments_for_rescoring(usage_key) task_type = ('rescore_problem_if_higher' if only_if_higher else 'rescore_problem') task_class = rescore_problem (task_input, task_key) = encode_problem_and_student_input(usage_key) task_input.update({'only_if_higher': only_if_higher}) return submit_task(request, task_type, task_class, usage_key.course_key, task_input, task_key)
[ "def", "submit_rescore_problem_for_all_students", "(", "request", ",", "usage_key", ",", "only_if_higher", "=", "False", ")", ":", "check_arguments_for_rescoring", "(", "usage_key", ")", "task_type", "=", "(", "'rescore_problem_if_higher'", "if", "only_if_higher", "else", "'rescore_problem'", ")", "task_class", "=", "rescore_problem", "(", "task_input", ",", "task_key", ")", "=", "encode_problem_and_student_input", "(", "usage_key", ")", "task_input", ".", "update", "(", "{", "'only_if_higher'", ":", "only_if_higher", "}", ")", "return", "submit_task", "(", "request", ",", "task_type", ",", "task_class", ",", "usage_key", ".", "course_key", ",", "task_input", ",", "task_key", ")" ]
request a problem to be rescored as a background task .
train
false
3,740
def _SendRecv(): port = int(os.getenv(DEVSHELL_ENV, 0)) if (port == 0): raise NoDevshellServer() import socket sock = socket.socket() sock.connect(('localhost', port)) data = CREDENTIAL_INFO_REQUEST_JSON msg = ('%s\n%s' % (len(data), data)) sock.sendall(msg.encode()) header = sock.recv(6).decode() if ('\n' not in header): raise CommunicationError('saw no newline in the first 6 bytes') (len_str, json_str) = header.split('\n', 1) to_read = (int(len_str) - len(json_str)) if (to_read > 0): json_str += sock.recv(to_read, socket.MSG_WAITALL).decode() return CredentialInfoResponse(json_str)
[ "def", "_SendRecv", "(", ")", ":", "port", "=", "int", "(", "os", ".", "getenv", "(", "DEVSHELL_ENV", ",", "0", ")", ")", "if", "(", "port", "==", "0", ")", ":", "raise", "NoDevshellServer", "(", ")", "import", "socket", "sock", "=", "socket", ".", "socket", "(", ")", "sock", ".", "connect", "(", "(", "'localhost'", ",", "port", ")", ")", "data", "=", "CREDENTIAL_INFO_REQUEST_JSON", "msg", "=", "(", "'%s\\n%s'", "%", "(", "len", "(", "data", ")", ",", "data", ")", ")", "sock", ".", "sendall", "(", "msg", ".", "encode", "(", ")", ")", "header", "=", "sock", ".", "recv", "(", "6", ")", ".", "decode", "(", ")", "if", "(", "'\\n'", "not", "in", "header", ")", ":", "raise", "CommunicationError", "(", "'saw no newline in the first 6 bytes'", ")", "(", "len_str", ",", "json_str", ")", "=", "header", ".", "split", "(", "'\\n'", ",", "1", ")", "to_read", "=", "(", "int", "(", "len_str", ")", "-", "len", "(", "json_str", ")", ")", "if", "(", "to_read", ">", "0", ")", ":", "json_str", "+=", "sock", ".", "recv", "(", "to_read", ",", "socket", ".", "MSG_WAITALL", ")", ".", "decode", "(", ")", "return", "CredentialInfoResponse", "(", "json_str", ")" ]
communicate with the developer shell server socket .
train
true
3,741
def preconfigure_instance(session, instance, vdi_ref, network_info): key = str(instance['key_data']) net = netutils.get_injected_network_template(network_info) metadata = instance['metadata'] mount_required = (key or net or metadata) if (not mount_required): return with vdi_attached(session, vdi_ref, read_only=False) as dev: _mounted_processing(dev, key, net, metadata)
[ "def", "preconfigure_instance", "(", "session", ",", "instance", ",", "vdi_ref", ",", "network_info", ")", ":", "key", "=", "str", "(", "instance", "[", "'key_data'", "]", ")", "net", "=", "netutils", ".", "get_injected_network_template", "(", "network_info", ")", "metadata", "=", "instance", "[", "'metadata'", "]", "mount_required", "=", "(", "key", "or", "net", "or", "metadata", ")", "if", "(", "not", "mount_required", ")", ":", "return", "with", "vdi_attached", "(", "session", ",", "vdi_ref", ",", "read_only", "=", "False", ")", "as", "dev", ":", "_mounted_processing", "(", "dev", ",", "key", ",", "net", ",", "metadata", ")" ]
makes alterations to the image before launching as part of spawn .
train
false
3,743
@requires_nitime def test_multitaper_psd(): import nitime as ni n_times = 1000 n_channels = 5 data = np.random.RandomState(0).randn(n_channels, n_times) sfreq = 500 info = create_info(n_channels, sfreq, 'eeg') raw = RawArray(data, info) assert_raises(ValueError, psd_multitaper, raw, sfreq, normalization='foo') ni_5 = (LooseVersion(ni.__version__) >= LooseVersion('0.5')) norm = ('full' if ni_5 else 'length') for (adaptive, n_jobs) in zip((False, True, True), (1, 1, 2)): (psd, freqs) = psd_multitaper(raw, adaptive=adaptive, n_jobs=n_jobs, normalization=norm) (freqs_ni, psd_ni, _) = ni.algorithms.spectral.multi_taper_psd(data, sfreq, adaptive=adaptive, jackknife=False) assert_array_almost_equal(psd[:, 1:], psd_ni[:, 1:(-1)], decimal=3) assert_array_almost_equal(freqs, freqs_ni[:(-1)])
[ "@", "requires_nitime", "def", "test_multitaper_psd", "(", ")", ":", "import", "nitime", "as", "ni", "n_times", "=", "1000", "n_channels", "=", "5", "data", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", ".", "randn", "(", "n_channels", ",", "n_times", ")", "sfreq", "=", "500", "info", "=", "create_info", "(", "n_channels", ",", "sfreq", ",", "'eeg'", ")", "raw", "=", "RawArray", "(", "data", ",", "info", ")", "assert_raises", "(", "ValueError", ",", "psd_multitaper", ",", "raw", ",", "sfreq", ",", "normalization", "=", "'foo'", ")", "ni_5", "=", "(", "LooseVersion", "(", "ni", ".", "__version__", ")", ">=", "LooseVersion", "(", "'0.5'", ")", ")", "norm", "=", "(", "'full'", "if", "ni_5", "else", "'length'", ")", "for", "(", "adaptive", ",", "n_jobs", ")", "in", "zip", "(", "(", "False", ",", "True", ",", "True", ")", ",", "(", "1", ",", "1", ",", "2", ")", ")", ":", "(", "psd", ",", "freqs", ")", "=", "psd_multitaper", "(", "raw", ",", "adaptive", "=", "adaptive", ",", "n_jobs", "=", "n_jobs", ",", "normalization", "=", "norm", ")", "(", "freqs_ni", ",", "psd_ni", ",", "_", ")", "=", "ni", ".", "algorithms", ".", "spectral", ".", "multi_taper_psd", "(", "data", ",", "sfreq", ",", "adaptive", "=", "adaptive", ",", "jackknife", "=", "False", ")", "assert_array_almost_equal", "(", "psd", "[", ":", ",", "1", ":", "]", ",", "psd_ni", "[", ":", ",", "1", ":", "(", "-", "1", ")", "]", ",", "decimal", "=", "3", ")", "assert_array_almost_equal", "(", "freqs", ",", "freqs_ni", "[", ":", "(", "-", "1", ")", "]", ")" ]
test multi-taper psd computation .
train
false
3,744
def getBytesFromBits(bitsStream): if (not isinstance(bitsStream, str)): return ((-1), 'The bitsStream must be a string') bytes = '' if re.match('[01]*$', bitsStream): try: for i in range(0, len(bitsStream), 8): bits = bitsStream[i:(i + 8)] byte = chr(int(bits, 2)) bytes += byte except: return ((-1), 'Error in conversion from bits to bytes') return (0, bytes) else: return ((-1), 'The format of the bit stream is not correct')
[ "def", "getBytesFromBits", "(", "bitsStream", ")", ":", "if", "(", "not", "isinstance", "(", "bitsStream", ",", "str", ")", ")", ":", "return", "(", "(", "-", "1", ")", ",", "'The bitsStream must be a string'", ")", "bytes", "=", "''", "if", "re", ".", "match", "(", "'[01]*$'", ",", "bitsStream", ")", ":", "try", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "bitsStream", ")", ",", "8", ")", ":", "bits", "=", "bitsStream", "[", "i", ":", "(", "i", "+", "8", ")", "]", "byte", "=", "chr", "(", "int", "(", "bits", ",", "2", ")", ")", "bytes", "+=", "byte", "except", ":", "return", "(", "(", "-", "1", ")", ",", "'Error in conversion from bits to bytes'", ")", "return", "(", "0", ",", "bytes", ")", "else", ":", "return", "(", "(", "-", "1", ")", ",", "'The format of the bit stream is not correct'", ")" ]
makes the conversion between bits and bytes .
train
false
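A usage sketch for getBytesFromBits (Python 2, matching the snippet's chr/str handling), assuming the definition and its re import are in scope.

status, out = getBytesFromBits('0100100001101001')
print status, repr(out)   # 0 'Hi' -- two 8-bit groups decoded to bytes
status, msg = getBytesFromBits('01x0')
print status, msg         # -1 plus the bad-format error message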
3,745
def makeLDreduced(basename, infpath=None, outfpath=None, plinke='plink', forcerebuild=False, returnFname=False, winsize='60', winmove='40', r2thresh='0.1'): outbase = os.path.join(outfpath, basename) inbase = os.path.join(infpath) plinktasks = [] vclbase = [plinke, '--noweb'] plinktasks += [['--bfile', inbase, ('--indep-pairwise %s %s %s' % (winsize, winmove, r2thresh)), ('--out %s' % outbase)], ['--bfile', inbase, ('--extract %s.prune.in --make-bed --out %s' % (outbase, outbase))]] vclbase = [plinke, '--noweb'] pruneLD(plinktasks=plinktasks, cd=outfpath, vclbase=vclbase)
[ "def", "makeLDreduced", "(", "basename", ",", "infpath", "=", "None", ",", "outfpath", "=", "None", ",", "plinke", "=", "'plink'", ",", "forcerebuild", "=", "False", ",", "returnFname", "=", "False", ",", "winsize", "=", "'60'", ",", "winmove", "=", "'40'", ",", "r2thresh", "=", "'0.1'", ")", ":", "outbase", "=", "os", ".", "path", ".", "join", "(", "outfpath", ",", "basename", ")", "inbase", "=", "os", ".", "path", ".", "join", "(", "infpath", ")", "plinktasks", "=", "[", "]", "vclbase", "=", "[", "plinke", ",", "'--noweb'", "]", "plinktasks", "+=", "[", "[", "'--bfile'", ",", "inbase", ",", "(", "'--indep-pairwise %s %s %s'", "%", "(", "winsize", ",", "winmove", ",", "r2thresh", ")", ")", ",", "(", "'--out %s'", "%", "outbase", ")", "]", ",", "[", "'--bfile'", ",", "inbase", ",", "(", "'--extract %s.prune.in --make-bed --out %s'", "%", "(", "outbase", ",", "outbase", ")", ")", "]", "]", "vclbase", "=", "[", "plinke", ",", "'--noweb'", "]", "pruneLD", "(", "plinktasks", "=", "plinktasks", ",", "cd", "=", "outfpath", ",", "vclbase", "=", "vclbase", ")" ]
the ld-reduced file is not there, so make it and leave it in the output dir for the post job hook to copy back into the input extra files path for next time .
train
false
3,747
def parse_stop_word(source): result = set() for line in source.splitlines(): line = line.split('|')[0] result.update(line.split()) return result
[ "def", "parse_stop_word", "(", "source", ")", ":", "result", "=", "set", "(", ")", "for", "line", "in", "source", ".", "splitlines", "(", ")", ":", "line", "=", "line", ".", "split", "(", "'|'", ")", "[", "0", "]", "result", ".", "update", "(", "line", ".", "split", "(", ")", ")", "return", "result" ]
parse snowball style word list like this: * URL .
train
false
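A sketch of the snowball-style parsing in parse_stop_word above, assuming the snippet's definition is in scope; '|' starts a comment and whitespace separates words.

source = 'i    | subject pronoun\nme my | object and possessive forms\n'
print(sorted(parse_stop_word(source)))   # ['i', 'me', 'my']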
3,748
def run_from_datastore(key): entity = _DeferredTaskEntity.get(key) if (not entity): raise PermanentTaskFailure() try: ret = run(entity.data) entity.delete() except PermanentTaskFailure: entity.delete() raise
[ "def", "run_from_datastore", "(", "key", ")", ":", "entity", "=", "_DeferredTaskEntity", ".", "get", "(", "key", ")", "if", "(", "not", "entity", ")", ":", "raise", "PermanentTaskFailure", "(", ")", "try", ":", "ret", "=", "run", "(", "entity", ".", "data", ")", "entity", ".", "delete", "(", ")", "except", "PermanentTaskFailure", ":", "entity", ".", "delete", "(", ")", "raise" ]
retrieves a task from the datastore and executes it .
train
false
3,749
def rename_process(): try: import setproctitle setproctitle.setproctitle('qtile') except: pass
[ "def", "rename_process", "(", ")", ":", "try", ":", "import", "setproctitle", "setproctitle", ".", "setproctitle", "(", "'qtile'", ")", "except", ":", "pass" ]
try to rename the qtile process if py-setproctitle is installed: URL will fail silently if it's not installed .
train
false
3,751
def get_type_name(type_): if (type_ is None): return '' if isinstance(type_, string_types): return type_ elif isinstance(type_, list): assert (len(type_) == 1) return ('[%s]' % get_type_name(type_[0])) elif isinstance(type_, dict): assert (len(type_) == 1) (key, value) = list(type_.items())[0] return ('{%s: %s}' % (get_type_name(key), get_type_name(value))) elif (type_.__module__ in ('__builtin__', 'builtins')): return type_.__name__ else: return ('%s.%s' % (type_.__module__, type_.__name__))
[ "def", "get_type_name", "(", "type_", ")", ":", "if", "(", "type_", "is", "None", ")", ":", "return", "''", "if", "isinstance", "(", "type_", ",", "string_types", ")", ":", "return", "type_", "elif", "isinstance", "(", "type_", ",", "list", ")", ":", "assert", "(", "len", "(", "type_", ")", "==", "1", ")", "return", "(", "'[%s]'", "%", "get_type_name", "(", "type_", "[", "0", "]", ")", ")", "elif", "isinstance", "(", "type_", ",", "dict", ")", ":", "assert", "(", "len", "(", "type_", ")", "==", "1", ")", "(", "key", ",", "value", ")", "=", "list", "(", "type_", ".", "items", "(", ")", ")", "[", "0", "]", "return", "(", "'{%s: %s}'", "%", "(", "get_type_name", "(", "key", ")", ",", "get_type_name", "(", "value", ")", ")", ")", "elif", "(", "type_", ".", "__module__", "in", "(", "'__builtin__'", ",", "'builtins'", ")", ")", ":", "return", "type_", ".", "__name__", "else", ":", "return", "(", "'%s.%s'", "%", "(", "type_", ".", "__module__", ",", "type_", ".", "__name__", ")", ")" ]
gives a name for a type that is suitable for a docstring .
train
true
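A hedged sketch for get_type_name, assuming the snippet's definition and its six string_types import are available.

import datetime
print(get_type_name(int))             # 'int' -- builtins keep bare names
print(get_type_name([int]))           # '[int]'
print(get_type_name({str: [int]}))    # '{str: [int]}'
print(get_type_name(datetime.date))   # 'datetime.date' -- module-qualified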
3,752
@dec.skipif((execution.profile is None)) def test_prun_special_syntax(): @register_line_magic def lmagic(line): ip = get_ipython() ip.user_ns['lmagic_out'] = line _ip.run_line_magic('prun', '-q %lmagic my line') nt.assert_equal(_ip.user_ns['lmagic_out'], 'my line') _ip.run_cell_magic('prun', '-q', '%lmagic my line2') nt.assert_equal(_ip.user_ns['lmagic_out'], 'my line2')
[ "@", "dec", ".", "skipif", "(", "(", "execution", ".", "profile", "is", "None", ")", ")", "def", "test_prun_special_syntax", "(", ")", ":", "@", "register_line_magic", "def", "lmagic", "(", "line", ")", ":", "ip", "=", "get_ipython", "(", ")", "ip", ".", "user_ns", "[", "'lmagic_out'", "]", "=", "line", "_ip", ".", "run_line_magic", "(", "'prun'", ",", "'-q %lmagic my line'", ")", "nt", ".", "assert_equal", "(", "_ip", ".", "user_ns", "[", "'lmagic_out'", "]", ",", "'my line'", ")", "_ip", ".", "run_cell_magic", "(", "'prun'", ",", "'-q'", ",", "'%lmagic my line2'", ")", "nt", ".", "assert_equal", "(", "_ip", ".", "user_ns", "[", "'lmagic_out'", "]", ",", "'my line2'", ")" ]
test %%prun with ipython special syntax .
train
false
3,753
def ckron(*arrays): return reduce(np.kron, arrays)
[ "def", "ckron", "(", "*", "arrays", ")", ":", "return", "reduce", "(", "np", ".", "kron", ",", "arrays", ")" ]
repeatedly applies the np.kron function to an arbitrary number of input arrays .
train
false
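A usage sketch for ckron, assuming the snippet's definition (and the reduce/np imports of its home module) is importable.

import numpy as np
A = np.eye(2)
B = np.array([[0., 1.], [1., 0.]])
C = ckron(A, B, B)     # equivalent to np.kron(np.kron(A, B), B)
print(C.shape)         # (8, 8)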
3,754
def try_for(fn, timeout=5.0, delay=0.1): until = (time.time() + timeout) v = fn() while ((not v) and (time.time() < until)): time.sleep(delay) v = fn() return v
[ "def", "try_for", "(", "fn", ",", "timeout", "=", "5.0", ",", "delay", "=", "0.1", ")", ":", "until", "=", "(", "time", ".", "time", "(", ")", "+", "timeout", ")", "v", "=", "fn", "(", ")", "while", "(", "(", "not", "v", ")", "and", "(", "time", ".", "time", "(", ")", "<", "until", ")", ")", ":", "time", ".", "sleep", "(", "delay", ")", "v", "=", "fn", "(", ")", "return", "v" ]
calls fn every delay seconds until it returns true or timeout seconds elapse .
train
false
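A sketch of the polling loop in try_for above, assuming the snippet's definition and its time import are in scope.

import time
deadline = time.time() + 0.3
flag = try_for(lambda: time.time() > deadline, timeout=1.0, delay=0.05)
print(flag)   # True once the predicate turns truthy; False only on timeout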
3,755
def _notifications_for_activities(activities, user_dict): if (not activities): return [] if (not user_dict.get('activity_streams_email_notifications')): return [] subject = ungettext('{n} new activity from {site_title}', '{n} new activities from {site_title}', len(activities)).format(site_title=config.get('ckan.site_title'), n=len(activities)) body = base.render('activity_streams/activity_stream_email_notifications.text', extra_vars={'activities': activities}) notifications = [{'subject': subject, 'body': body}] return notifications
[ "def", "_notifications_for_activities", "(", "activities", ",", "user_dict", ")", ":", "if", "(", "not", "activities", ")", ":", "return", "[", "]", "if", "(", "not", "user_dict", ".", "get", "(", "'activity_streams_email_notifications'", ")", ")", ":", "return", "[", "]", "subject", "=", "ungettext", "(", "'{n} new activity from {site_title}'", ",", "'{n} new activities from {site_title}'", ",", "len", "(", "activities", ")", ")", ".", "format", "(", "site_title", "=", "config", ".", "get", "(", "'ckan.site_title'", ")", ",", "n", "=", "len", "(", "activities", ")", ")", "body", "=", "base", ".", "render", "(", "'activity_streams/activity_stream_email_notifications.text'", ",", "extra_vars", "=", "{", "'activities'", ":", "activities", "}", ")", "notifications", "=", "[", "{", "'subject'", ":", "subject", ",", "'body'", ":", "body", "}", "]", "return", "notifications" ]
return one or more email notifications covering the given activities .
train
false
3,756
@anonymous_csrf @mobile_template('questions/{mobile/}marketplace_category.html') def marketplace_category(request, category_slug, template=None): try: category_name = MARKETPLACE_CATEGORIES[category_slug] except KeyError: raise Http404 error_message = None if (request.method == 'GET'): form = MarketplaceAaqForm(request.user) else: form = MarketplaceAaqForm(request.user, request.POST) if form.is_valid(): try: form.submit_ticket() return HttpResponseRedirect(reverse('questions.marketplace_aaq_success')) except ZendeskError: error_message = ZENDESK_ERROR_MESSAGE return render(request, template, {'category': category_name, 'category_slug': category_slug, 'categories': MARKETPLACE_CATEGORIES, 'form': form, 'error_message': error_message})
[ "@", "anonymous_csrf", "@", "mobile_template", "(", "'questions/{mobile/}marketplace_category.html'", ")", "def", "marketplace_category", "(", "request", ",", "category_slug", ",", "template", "=", "None", ")", ":", "try", ":", "category_name", "=", "MARKETPLACE_CATEGORIES", "[", "category_slug", "]", "except", "KeyError", ":", "raise", "Http404", "error_message", "=", "None", "if", "(", "request", ".", "method", "==", "'GET'", ")", ":", "form", "=", "MarketplaceAaqForm", "(", "request", ".", "user", ")", "else", ":", "form", "=", "MarketplaceAaqForm", "(", "request", ".", "user", ",", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "try", ":", "form", ".", "submit_ticket", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'questions.marketplace_aaq_success'", ")", ")", "except", "ZendeskError", ":", "error_message", "=", "ZENDESK_ERROR_MESSAGE", "return", "render", "(", "request", ",", "template", ",", "{", "'category'", ":", "category_name", ",", "'category_slug'", ":", "category_slug", ",", "'categories'", ":", "MARKETPLACE_CATEGORIES", ",", "'form'", ":", "form", ",", "'error_message'", ":", "error_message", "}", ")" ]
aaq category page .
train
false
3,757
def sample_mapped_keys(mapping, min_coverage=50): if (min_coverage == 0): return {} sample_keys = {} for key in mapping.keys(): if (min_coverage > 1): sample_keys[key] = sample(mapping[key], min((min_coverage - 1), len(mapping[key]))) else: sample_keys[key] = [] sample_keys[key].append(key) return sample_keys
[ "def", "sample_mapped_keys", "(", "mapping", ",", "min_coverage", "=", "50", ")", ":", "if", "(", "min_coverage", "==", "0", ")", ":", "return", "{", "}", "sample_keys", "=", "{", "}", "for", "key", "in", "mapping", ".", "keys", "(", ")", ":", "if", "(", "min_coverage", ">", "1", ")", ":", "sample_keys", "[", "key", "]", "=", "sample", "(", "mapping", "[", "key", "]", ",", "min", "(", "(", "min_coverage", "-", "1", ")", ",", "len", "(", "mapping", "[", "key", "]", ")", ")", ")", "else", ":", "sample_keys", "[", "key", "]", "=", "[", "]", "sample_keys", "[", "key", "]", ".", "append", "(", "key", ")", "return", "sample_keys" ]
sample up to min_coverage keys for each key in mapping .
train
false
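A hedged sketch for sample_mapped_keys, assuming the snippet's definition and random.sample (imported as sample in its module) are available.

mapping = {'seq1': ['a', 'b', 'c', 'd'], 'seq2': ['e']}
keys = sample_mapped_keys(mapping, min_coverage=3)
# each entry holds up to min_coverage - 1 sampled members plus the key itself
print(keys['seq2'])        # ['e', 'seq2']
print(len(keys['seq1']))   # 3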
3,758
def generate_anchors(base_size=16, ratios=[0.5, 1, 2], scales=(2 ** np.arange(3, 6))): base_anchor = (np.array([1, 1, base_size, base_size]) - 1) ratio_anchors = _ratio_enum(base_anchor, ratios) anchors = np.vstack([_scale_enum(ratio_anchors[i, :], scales) for i in xrange(ratio_anchors.shape[0])]) return anchors
[ "def", "generate_anchors", "(", "base_size", "=", "16", ",", "ratios", "=", "[", "0.5", ",", "1", ",", "2", "]", ",", "scales", "=", "(", "2", "**", "np", ".", "arange", "(", "3", ",", "6", ")", ")", ")", ":", "base_anchor", "=", "(", "np", ".", "array", "(", "[", "1", ",", "1", ",", "base_size", ",", "base_size", "]", ")", "-", "1", ")", "ratio_anchors", "=", "_ratio_enum", "(", "base_anchor", ",", "ratios", ")", "anchors", "=", "np", ".", "vstack", "(", "[", "_scale_enum", "(", "ratio_anchors", "[", "i", ",", ":", "]", ",", "scales", ")", "for", "i", "in", "xrange", "(", "ratio_anchors", ".", "shape", "[", "0", "]", ")", "]", ")", "return", "anchors" ]
generate anchor windows by enumerating aspect ratios x scales wrt a reference window .
train
true
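A usage sketch for generate_anchors (Python 2, given the snippet's xrange), assuming the definition with its numpy import and _ratio_enum/_scale_enum helpers is importable.

import numpy as np
anchors = generate_anchors(base_size=16, ratios=[0.5, 1, 2], scales=2 ** np.arange(3, 6))
print anchors.shape   # (9, 4): 3 ratios x 3 scales, rows are (x1, y1, x2, y2)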
3,759
def get_nonce_key(server_url, timestamp, salt): return '{prefix}{url}.{ts}.{salt}'.format(prefix=NONCE_KEY_PREFIX, url=server_url, ts=timestamp, salt=salt)
[ "def", "get_nonce_key", "(", "server_url", ",", "timestamp", ",", "salt", ")", ":", "return", "'{prefix}{url}.{ts}.{salt}'", ".", "format", "(", "prefix", "=", "NONCE_KEY_PREFIX", ",", "url", "=", "server_url", ",", "ts", "=", "timestamp", ",", "salt", "=", "salt", ")" ]
returns the nonce for the given parameters .
train
false
3,761
def HMAC_SHA256_128(key, msg): assert (len(key) >= const.SHARED_SECRET_LENGTH) h = Crypto.Hash.HMAC.new(key, msg, Crypto.Hash.SHA256) return h.digest()[:16]
[ "def", "HMAC_SHA256_128", "(", "key", ",", "msg", ")", ":", "assert", "(", "len", "(", "key", ")", ">=", "const", ".", "SHARED_SECRET_LENGTH", ")", "h", "=", "Crypto", ".", "Hash", ".", "HMAC", ".", "new", "(", "key", ",", "msg", ",", "Crypto", ".", "Hash", ".", "SHA256", ")", "return", "h", ".", "digest", "(", ")", "[", ":", "16", "]" ]
return the hmac-sha256-128 of the given msg authenticated by key .
train
false
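A hedged sketch for HMAC_SHA256_128, assuming the snippet's pycrypto imports and a const.SHARED_SECRET_LENGTH of at most 32 bytes -- that constant is not shown in the record.

key = 'k' * 32                        # long enough to pass the length assert
tag = HMAC_SHA256_128(key, 'hello')
print len(tag)                        # 16 bytes == 128 bits of SHA-256 output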
3,762
def CheckScriptExists(cgi_path, handler_path): if handler_path.startswith((PYTHON_LIB_VAR + '/')): return if ((not os.path.isdir(cgi_path)) and (not os.path.isfile(cgi_path)) and os.path.isfile((cgi_path + '.py'))): raise CouldNotFindModuleError(('Perhaps you meant to have the line "script: %s.py" in your app.yaml' % handler_path))
[ "def", "CheckScriptExists", "(", "cgi_path", ",", "handler_path", ")", ":", "if", "handler_path", ".", "startswith", "(", "(", "PYTHON_LIB_VAR", "+", "'/'", ")", ")", ":", "return", "if", "(", "(", "not", "os", ".", "path", ".", "isdir", "(", "cgi_path", ")", ")", "and", "(", "not", "os", ".", "path", ".", "isfile", "(", "cgi_path", ")", ")", "and", "os", ".", "path", ".", "isfile", "(", "(", "cgi_path", "+", "'.py'", ")", ")", ")", ":", "raise", "CouldNotFindModuleError", "(", "(", "'Perhaps you meant to have the line \"script: %s.py\" in your app.yaml'", "%", "handler_path", ")", ")" ]
check that the given handler_path is a file that exists on disk .
train
false
3,764
def get_certificate_for_user(username, course_key): try: cert = GeneratedCertificate.eligible_certificates.get(user__username=username, course_id=course_key) except GeneratedCertificate.DoesNotExist: return None return format_certificate_for_user(username, cert)
[ "def", "get_certificate_for_user", "(", "username", ",", "course_key", ")", ":", "try", ":", "cert", "=", "GeneratedCertificate", ".", "eligible_certificates", ".", "get", "(", "user__username", "=", "username", ",", "course_id", "=", "course_key", ")", "except", "GeneratedCertificate", ".", "DoesNotExist", ":", "return", "None", "return", "format_certificate_for_user", "(", "username", ",", "cert", ")" ]
retrieve certificate information for a particular user for a specific course .
train
false
3,765
def load_market_data(trading_day=None, trading_days=None, bm_symbol='^GSPC'): if (trading_day is None): trading_day = get_calendar('NYSE').trading_day if (trading_days is None): trading_days = get_calendar('NYSE').all_sessions first_date = trading_days[0] now = pd.Timestamp.utcnow() last_date = trading_days[(trading_days.get_loc(now, method='ffill') - 2)] br = ensure_benchmark_data(bm_symbol, first_date, last_date, now, trading_day) tc = ensure_treasury_data(bm_symbol, first_date, last_date, now) benchmark_returns = br[br.index.slice_indexer(first_date, last_date)] treasury_curves = tc[tc.index.slice_indexer(first_date, last_date)] return (benchmark_returns, treasury_curves)
[ "def", "load_market_data", "(", "trading_day", "=", "None", ",", "trading_days", "=", "None", ",", "bm_symbol", "=", "'^GSPC'", ")", ":", "if", "(", "trading_day", "is", "None", ")", ":", "trading_day", "=", "get_calendar", "(", "'NYSE'", ")", ".", "trading_day", "if", "(", "trading_days", "is", "None", ")", ":", "trading_days", "=", "get_calendar", "(", "'NYSE'", ")", ".", "all_sessions", "first_date", "=", "trading_days", "[", "0", "]", "now", "=", "pd", ".", "Timestamp", ".", "utcnow", "(", ")", "last_date", "=", "trading_days", "[", "(", "trading_days", ".", "get_loc", "(", "now", ",", "method", "=", "'ffill'", ")", "-", "2", ")", "]", "br", "=", "ensure_benchmark_data", "(", "bm_symbol", ",", "first_date", ",", "last_date", ",", "now", ",", "trading_day", ")", "tc", "=", "ensure_treasury_data", "(", "bm_symbol", ",", "first_date", ",", "last_date", ",", "now", ")", "benchmark_returns", "=", "br", "[", "br", ".", "index", ".", "slice_indexer", "(", "first_date", ",", "last_date", ")", "]", "treasury_curves", "=", "tc", "[", "tc", ".", "index", ".", "slice_indexer", "(", "first_date", ",", "last_date", ")", "]", "return", "(", "benchmark_returns", ",", "treasury_curves", ")" ]
load benchmark returns and treasury yield curves for the given calendar and benchmark symbol .
train
false
3,766
def t_one_observation(x, sample, tails='two-sided', exp_diff=0): try: sample_mean = mean(sample) sample_std = std(sample, ddof=1) if (sample_std == 0): return (nan, nan) else: n = len(sample) t = ((((x - sample_mean) - exp_diff) / sample_std) / sqrt(((n + 1) / n))) prob = tprob(t, (n - 1), tails) return (float(t), prob) except (ZeroDivisionError, ValueError, AttributeError, TypeError, FloatingPointError): return (nan, nan)
[ "def", "t_one_observation", "(", "x", ",", "sample", ",", "tails", "=", "'two-sided'", ",", "exp_diff", "=", "0", ")", ":", "try", ":", "sample_mean", "=", "mean", "(", "sample", ")", "sample_std", "=", "std", "(", "sample", ",", "ddof", "=", "1", ")", "if", "(", "sample_std", "==", "0", ")", ":", "return", "(", "nan", ",", "nan", ")", "else", ":", "n", "=", "len", "(", "sample", ")", "t", "=", "(", "(", "(", "(", "x", "-", "sample_mean", ")", "-", "exp_diff", ")", "/", "sample_std", ")", "/", "sqrt", "(", "(", "(", "n", "+", "1", ")", "/", "n", ")", ")", ")", "prob", "=", "tprob", "(", "t", ",", "(", "n", "-", "1", ")", ",", "tails", ")", "return", "(", "float", "(", "t", ")", ",", "prob", ")", "except", "(", "ZeroDivisionError", ",", "ValueError", ",", "AttributeError", ",", "TypeError", ",", "FloatingPointError", ")", ":", "return", "(", "nan", ",", "nan", ")" ]
returns t-test for significance of single observation versus a sample .
train
false
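A sketch for t_one_observation, assuming the snippet's definition and the mean/std/sqrt/tprob helpers its module imports are available.

background = [3.1, 2.8, 3.3, 3.0, 2.9]
t, p = t_one_observation(4.5, background)
print(t, p)   # large positive t with a small two-sided p: 4.5 is unusual here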