sentence1 (string, lengths 52 to 3.87M) | sentence2 (string, lengths 1 to 47.2k) | label (string, 1 class) |
|---|---|---|
def secret(prompt=None, empty=False):
"""Prompt a string without echoing.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
Raises
------
getpass.GetPassWarning
If echo free input is unavailable.
See Also
--------
getpass.getpass
"""
if prompt is None:
prompt = PROMPT
s = getpass.getpass(prompt=prompt)
if empty and not s:
return None
else:
if s:
return s
else:
return secret(prompt=prompt, empty=empty)
|
Prompt a string without echoing.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
Raises
------
getpass.GetPassWarning
If echo free input is unavailable.
See Also
--------
getpass.getpass
|
entailment
|
def string(prompt=None, empty=False):
"""Prompt a string.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
if s:
return s
else:
return string(prompt=prompt, empty=empty)
|
Prompt a string.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
|
entailment
|
def _get_cache_plus_key(self):
"""Return a cache region plus key."""
key = getattr(self, '_cache_key', self.key_from_query())
return self._cache.cache, key
|
Return a cache region plus key.
|
entailment
|
def get_value(self, merge=True, createfunc=None,
expiration_time=None, ignore_expiration=False):
"""
Return the value from the cache for this query.
"""
cache, cache_key = self._get_cache_plus_key()
# ignore_expiration means, if the value is in the cache
# but is expired, return it anyway. This doesn't make sense
# with createfunc, which says, if the value is expired, generate
# a new value.
assert not ignore_expiration or not createfunc, \
"Can't ignore expiration and also provide createfunc"
if ignore_expiration or not createfunc:
cached_value = cache.get(cache_key,
expiration_time=expiration_time,
ignore_expiration=ignore_expiration)
else:
cached_value = cache.get(cache_key)
if not cached_value:
cached_value = createfunc()
cache.set(cache_key, cached_value, timeout=expiration_time)
if cached_value and merge:
cached_value = self.merge_result(cached_value, load=False)
return cached_value
|
Return the value from the cache for this query.
|
entailment
|
def set_value(self, value):
"""Set the value in the cache for this query."""
cache, cache_key = self._get_cache_plus_key()
cache.set(cache_key, value)
|
Set the value in the cache for this query.
|
entailment
|
def key_from_query(self, qualifier=None):
"""
Given a Query, create a cache key.
There are many approaches to this; here we use the simplest, which is
to create an md5 hash of the text of the SQL statement, combined with
stringified versions of all the bound parameters within it.
There's a bit of a performance hit with compiling out "query.statement"
here; other approaches include setting up an explicit cache key with a
particular Query, then combining that with the bound parameter values.
"""
stmt = self.with_labels().statement
compiled = stmt.compile()
params = compiled.params
values = [str(compiled)]
for k in sorted(params):
values.append(repr(params[k]))
key = u" ".join(values)
return md5(key.encode('utf8')).hexdigest()
|
Given a Query, create a cache key.
There are many approaches to this; here we use the simplest, which is
to create an md5 hash of the text of the SQL statement, combined with
stringified versions of all the bound parameters within it.
There's a bit of a performance hit with compiling out "query.statement"
here; other approaches include setting up an explicit cache key with a
particular Query, then combining that with the bound parameter values.
|
entailment
|
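A hedged, self-contained sketch of the same keying technique using SQLAlchemy Core (assuming SQLAlchemy 1.4+; the table, columns, and bound value below are made up for illustration):

import sqlalchemy as sa
from hashlib import md5

# Compile a statement, then hash its SQL text plus the repr() of every
# bound parameter, exactly as key_from_query does above.
users = sa.table('users', sa.column('id'), sa.column('name'))
stmt = sa.select(users).where(users.c.id == sa.bindparam('id', 7))
compiled = stmt.compile()
parts = [str(compiled)] + [repr(compiled.params[k]) for k in sorted(compiled.params)]
cache_key = md5(u" ".join(parts).encode('utf8')).hexdigest()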
def process_query_conditionally(self, query):
"""
Process a Query that is used within a lazy loader.
(the process_query_conditionally() method is a SQLAlchemy
hook invoked only within lazyload.)
"""
if query._current_path:
mapper, prop = query._current_path[-2:]
for cls in mapper.class_.__mro__:
k = (cls, prop.key)
relationship_option = self._relationship_options.get(k)
if relationship_option:
query._cache = relationship_option
break
|
Process a Query that is used within a lazy loader.
(the process_query_conditionally() method is a SQLAlchemy
hook invoked only within lazyload.)
|
entailment
|
def fit(self, t, y, dy=1, presorted=False):
"""Fit the smoother
Parameters
----------
t : array_like
time locations of the points to smooth
y : array_like
y locations of the points to smooth
dy : array_like or float (default = 1)
Errors in the y values
presorted : bool (default = False)
If True, then t is assumed to be sorted.
Returns
-------
self : Smoother instance
"""
self.t, self.y, self.dy = self._validate_inputs(t, y, dy, presorted)
self._fit(self.t, self.y, self.dy)
return self
|
Fit the smoother
Parameters
----------
t : array_like
time locations of the points to smooth
y : array_like
y locations of the points to smooth
dy : array_like or float (default = 1)
Errors in the y values
presorted : bool (default = False)
If True, then t is assumed to be sorted.
Returns
-------
self : Smoother instance
|
entailment
|
def predict(self, t):
"""Predict the smoothed function value at time t
Parameters
----------
t : array_like
Times at which to predict the result
Returns
-------
y : ndarray
Smoothed values at time t
"""
t = np.asarray(t)
return self._predict(np.ravel(t)).reshape(t.shape)
|
Predict the smoothed function value at time t
Parameters
----------
t : array_like
Times at which to predict the result
Returns
-------
y : ndarray
Smoothed values at time t
|
entailment
|
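A hedged usage sketch of the fit/predict API above; SuperSmoother is an assumed concrete Smoother subclass (this code appears to come from the supersmoother package) and the data are synthetic:

import numpy as np
from supersmoother import SuperSmoother  # assumed concrete Smoother subclass

rng = np.random.RandomState(0)
t = np.sort(rng.uniform(0, 10, 200))
y = np.sin(t) + 0.1 * rng.randn(200)

model = SuperSmoother().fit(t, y, dy=0.1)  # fit() returns self
y_smooth = model.predict(np.linspace(0, 10, 500))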
def cv_residuals(self, cv=True):
"""Return the residuals of the cross-validation for the fit data"""
vals = self.cv_values(cv)
return (self.y - vals) / self.dy
|
Return the residuals of the cross-validation for the fit data
|
entailment
|
def cv_error(self, cv=True, skip_endpoints=True):
"""Return the sum of cross-validation residuals for the input data"""
resids = self.cv_residuals(cv)
if skip_endpoints:
resids = resids[1:-1]
return np.mean(abs(resids))
|
Return the mean of the absolute cross-validation residuals for the input data
|
entailment
|
def arcfour(key, csbN=1):
'''Return a generator for the ARCFOUR/RC4 pseudorandom keystream for the
key provided. Keys should be byte strings or sequences of ints.'''
if isinstance(key, str):
key = [ord(c) for c in key]
s = list(range(256))  # the key schedule mutates s; range() is immutable in Python 3
j = 0
for n in range(csbN):
for i in range(256):
j = (j + s[i] + key[i % len(key)]) % 256
t = s[i]
s[i] = s[j]
s[j] = t
i = 0
j = 0
while True:
i = (i + 1) % 256
j = (j + s[i]) % 256
t = s[i]
s[i] = s[j]
s[j] = t
yield s[(s[i] + s[j]) % 256]
|
Return a generator for the ARCFOUR/RC4 pseudorandom keystream for the
key provided. Keys should be byte strings or sequences of ints.
|
entailment
|
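Because RC4 is a stream cipher, encryption and decryption are the same operation: XOR the data against the keystream. A minimal sketch using the generator above (key and plaintext are made up):

def rc4_xor(key, data):
    ks = arcfour(key)
    # XOR each data byte with the next keystream byte
    return bytes(b ^ next(ks) for b in data)

ciphertext = rc4_xor('Key', b'Plaintext')
assert rc4_xor('Key', ciphertext) == b'Plaintext'  # symmetric by construction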
def arcfour_drop(key, n=3072):
'''Return a generator for the RC4-drop pseudorandom keystream given by
the key and number of bytes to drop passed as arguments. Dropped bytes
default to the more conservative 3072, NOT the SCAN default of 768.'''
af = arcfour(key)
for _ in range(n):  # discard the first n keystream bytes; Python 2's af.next() is next(af) in Python 3
    next(af)
return af
|
Return a generator for the RC4-drop pseudorandom keystream given by
the key and number of bytes to drop passed as arguments. Dropped bytes
default to the more conservative 3072, NOT the SCAN default of 768.
|
entailment
|
def resolve_ssl_protocol_version(version=None):
"""
Look up an SSL protocol version by name. If *version* is not specified, then
the strongest protocol available will be returned.
:param str version: The name of the version to look up.
:return: A protocol constant from the :py:mod:`ssl` module.
:rtype: int
"""
if version is None:
protocol_preference = ('TLSv1_2', 'TLSv1_1', 'TLSv1', 'SSLv3', 'SSLv23', 'SSLv2')
for protocol in protocol_preference:
if hasattr(ssl, 'PROTOCOL_' + protocol):
return getattr(ssl, 'PROTOCOL_' + protocol)
raise RuntimeError('could not find a suitable ssl PROTOCOL_ version constant')
elif isinstance(version, str):
if not hasattr(ssl, 'PROTOCOL_' + version):
raise ValueError('invalid ssl protocol version: ' + version)
return getattr(ssl, 'PROTOCOL_' + version)
raise TypeError("ssl_version() argument 1 must be str, not {0}".format(type(version).__name__))
|
Look up an SSL protocol version by name. If *version* is not specified, then
the strongest protocol available will be returned.
:param str version: The name of the version to look up.
:return: A protocol constant from the :py:mod:`ssl` module.
:rtype: int
|
entailment
|
def build_server_from_argparser(description=None, server_klass=None, handler_klass=None):
"""
Build a server from command line arguments. If a ServerClass or
HandlerClass is specified, then the object must inherit from the
corresponding AdvancedHTTPServer base class.
:param str description: Description string to be passed to the argument parser.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
"""
import argparse
def _argp_dir_type(arg):
if not os.path.isdir(arg):
raise argparse.ArgumentTypeError("{0} is not a valid directory".format(repr(arg)))
return arg
def _argp_port_type(arg):
if not arg.isdigit():
raise argparse.ArgumentTypeError("{0} is not a valid port".format(repr(arg)))
arg = int(arg)
if arg < 0 or arg > 65535:
raise argparse.ArgumentTypeError("{0} is not a valid port".format(repr(arg)))
return arg
description = (description or 'HTTP Server')
server_klass = (server_klass or AdvancedHTTPServer)
handler_klass = (handler_klass or RequestHandler)
parser = argparse.ArgumentParser(conflict_handler='resolve', description=description, fromfile_prefix_chars='@')
parser.epilog = 'When a config file is specified with --config only the --log, --log-file and --password options will be used.'
parser.add_argument('-c', '--conf', dest='config', type=argparse.FileType('r'), help='read settings from a config file')
parser.add_argument('-i', '--ip', dest='ip', default='0.0.0.0', help='the ip address to serve on')
parser.add_argument('-L', '--log', dest='loglvl', choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'), default='INFO', help='set the logging level')
parser.add_argument('-p', '--port', dest='port', default=8080, type=_argp_port_type, help='port to serve on')
parser.add_argument('-v', '--version', action='version', version=parser.prog + ' Version: ' + __version__)
parser.add_argument('-w', '--web-root', dest='web_root', default='.', type=_argp_dir_type, help='path to the web root directory')
parser.add_argument('--log-file', dest='log_file', help='log information to a file')
parser.add_argument('--no-threads', dest='use_threads', action='store_false', default=True, help='disable threading')
parser.add_argument('--password', dest='password', help='password to use for basic authentication')
ssl_group = parser.add_argument_group('ssl options')
ssl_group.add_argument('--ssl-cert', dest='ssl_cert', help='the ssl cert to use')
ssl_group.add_argument('--ssl-key', dest='ssl_key', help='the ssl key to use')
ssl_group.add_argument('--ssl-version', dest='ssl_version', choices=[p[9:] for p in dir(ssl) if p.startswith('PROTOCOL_')], help='the version of ssl to use')
arguments = parser.parse_args()
logging.getLogger('').setLevel(logging.DEBUG)
console_log_handler = logging.StreamHandler()
console_log_handler.setLevel(getattr(logging, arguments.loglvl))
console_log_handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)-8s %(message)s"))
logging.getLogger('').addHandler(console_log_handler)
if arguments.log_file:
main_file_handler = logging.handlers.RotatingFileHandler(arguments.log_file, maxBytes=262144, backupCount=5)
main_file_handler.setLevel(logging.DEBUG)
main_file_handler.setFormatter(logging.Formatter("%(asctime)s %(name)-30s %(levelname)-10s %(message)s"))
logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('').addHandler(main_file_handler)
if arguments.config:
config = ConfigParser()
config.read_file(arguments.config)
server = build_server_from_config(
config,
'server',
server_klass=server_klass,
handler_klass=handler_klass
)
else:
server = server_klass(
handler_klass,
address=(arguments.ip, arguments.port),
use_threads=arguments.use_threads,
ssl_certfile=arguments.ssl_cert,
ssl_keyfile=arguments.ssl_key,
ssl_version=arguments.ssl_version
)
server.serve_files_root = arguments.web_root
if arguments.password:
server.auth_add_creds('', arguments.password)
return server
|
Build a server from command line arguments. If a ServerClass or
HandlerClass is specified, then the object must inherit from the
corresponding AdvancedHTTPServer base class.
:param str description: Description string to be passed to the argument parser.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
|
entailment
|
def build_server_from_config(config, section_name, server_klass=None, handler_klass=None):
"""
Build a server from a provided :py:class:`configparser.ConfigParser`
instance. If a ServerClass or HandlerClass is specified, then the
object must inherit from the corresponding AdvancedHTTPServer base
class.
:param config: Configuration to retrieve settings from.
:type config: :py:class:`configparser.ConfigParser`
:param str section_name: The section name of the configuration to use.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
"""
server_klass = (server_klass or AdvancedHTTPServer)
handler_klass = (handler_klass or RequestHandler)
port = config.getint(section_name, 'port')
web_root = None
if config.has_option(section_name, 'web_root'):
web_root = config.get(section_name, 'web_root')
if config.has_option(section_name, 'ip'):
ip = config.get(section_name, 'ip')
else:
ip = '0.0.0.0'
ssl_certfile = None
if config.has_option(section_name, 'ssl_cert'):
ssl_certfile = config.get(section_name, 'ssl_cert')
ssl_keyfile = None
if config.has_option(section_name, 'ssl_key'):
ssl_keyfile = config.get(section_name, 'ssl_key')
ssl_version = None
if config.has_option(section_name, 'ssl_version'):
ssl_version = config.get(section_name, 'ssl_version')
server = server_klass(
handler_klass,
address=(ip, port),
ssl_certfile=ssl_certfile,
ssl_keyfile=ssl_keyfile,
ssl_version=ssl_version
)
if config.has_option(section_name, 'password_type'):
password_type = config.get(section_name, 'password_type')
else:
password_type = 'md5'
if config.has_option(section_name, 'password'):
password = config.get(section_name, 'password')
if config.has_option(section_name, 'username'):
username = config.get(section_name, 'username')
else:
username = ''
server.auth_add_creds(username, password, pwtype=password_type)
cred_idx = 0
while config.has_option(section_name, 'password' + str(cred_idx)):
password = config.get(section_name, 'password' + str(cred_idx))
if not config.has_option(section_name, 'username' + str(cred_idx)):
break
username = config.get(section_name, 'username' + str(cred_idx))
server.auth_add_creds(username, password, pwtype=password_type)
cred_idx += 1
if web_root is None:
server.serve_files = False
else:
server.serve_files = True
server.serve_files_root = web_root
if config.has_option(section_name, 'list_directories'):
server.serve_files_list_directories = config.getboolean(section_name, 'list_directories')
return server
|
Build a server from a provided :py:class:`configparser.ConfigParser`
instance. If a ServerClass or HandlerClass is specified, then the
object must inherit from the corresponding AdvancedHTTPServer base
class.
:param config: Configuration to retrieve settings from.
:type config: :py:class:`configparser.ConfigParser`
:param str section_name: The section name of the configuration to use.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
|
entailment
|
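A hedged example of a config section this function can consume; the option names come straight from the code above, while the values (including the pre-hashed password) are made up:

from configparser import ConfigParser

config = ConfigParser()
config.read_string("""
[server]
ip = 127.0.0.1
port = 8080
web_root = /var/www
list_directories = yes
password_type = sha1
username0 = alice
password0 = 40bd001563085fc35165329ea1ff5c5ecbdbbeef
""")
server = build_server_from_config(config, 'server')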
def set_serializer(self, serializer_name, compression=None):
"""
Configure the serializer to use for communication with the server.
The serializer specified must be valid and in the
:py:data:`.g_serializer_drivers` map.
:param str serializer_name: The name of the serializer to use.
:param str compression: The name of a compression library to use.
"""
self.serializer = Serializer(serializer_name, charset='UTF-8', compression=compression)
self.logger.debug('using serializer: ' + serializer_name)
|
Configure the serializer to use for communication with the server.
The serializer specified must be valid and in the
:py:data:`.g_serializer_drivers` map.
:param str serializer_name: The name of the serializer to use.
:param str compression: The name of a compression library to use.
|
entailment
|
def reconnect(self):
"""Reconnect to the remote server."""
self.lock.acquire()
if self.use_ssl:
self.client = http.client.HTTPSConnection(self.host, self.port, context=self.ssl_context)
else:
self.client = http.client.HTTPConnection(self.host, self.port)
self.lock.release()
|
Reconnect to the remote server.
|
entailment
|
def call(self, method, *args, **kwargs):
"""
Issue a call to the remote end point to execute the specified
procedure.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
if kwargs:
options = self.encode(dict(args=args, kwargs=kwargs))
else:
options = self.encode(args)
headers = {}
if self.headers:
headers.update(self.headers)
headers['Content-Type'] = self.serializer.content_type
headers['Content-Length'] = str(len(options))
headers['Connection'] = 'close'
if self.username is not None and self.password is not None:
headers['Authorization'] = 'Basic ' + base64.b64encode((self.username + ':' + self.password).encode('UTF-8')).decode('UTF-8')
method = os.path.join(self.uri_base, method)
self.logger.debug('calling RPC method: ' + method[1:])
try:
with self.lock:
self.client.request('RPC', method, options, headers)
resp = self.client.getresponse()
except http.client.ImproperConnectionState:
raise RPCConnectionError('improper connection state')
if resp.status != 200:
raise RPCError(resp.reason, resp.status)
resp_data = resp.read()
resp_data = self.decode(resp_data)
if not ('exception_occurred' in resp_data and 'result' in resp_data):
raise RPCError('missing response information', resp.status)
if resp_data['exception_occurred']:
raise RPCError('remote method incurred an exception', resp.status, remote_exception=resp_data['exception'])
return resp_data['result']
|
Issue a call to the remote end point to execute the specified
procedure.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
|
entailment
|
def cache_call(self, method, *options):
"""
Call a remote method and store the result locally. Subsequent
calls to the same method with the same arguments will return the
cached result without invoking the remote procedure. Cached results are
kept indefinitely and must be manually refreshed with a call to
:py:meth:`.cache_call_refresh`.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
options_hash = self.encode(options)
if len(options_hash) > 20:
options_hash = hashlib.new('sha1', options_hash).digest()
options_hash = sqlite3.Binary(options_hash)
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('SELECT return_value FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
return_value = cursor.fetchone()
if return_value:
return_value = bytes(return_value[0])
return self.decode(return_value)
return_value = self.call(method, *options)
store_return_value = sqlite3.Binary(self.encode(return_value))
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
self.cache_db.commit()
return return_value
|
Call a remote method and store the result locally. Subsequent
calls to the same method with the same arguments will return the
cached result without invoking the remote procedure. Cached results are
kept indefinitely and must be manually refreshed with a call to
:py:meth:`.cache_call_refresh`.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
|
entailment
|
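cache_call assumes a SQLite table named cache already exists; a hedged sketch of the schema its SELECT/INSERT statements imply (the real project may create this table elsewhere, and the column types are inferred):

import sqlite3

cache_db = sqlite3.connect(':memory:')
cache_db.execute(
    'CREATE TABLE cache (method TEXT NOT NULL, '
    'options_hash BLOB NOT NULL, return_value BLOB NOT NULL)'
)
cache_db.commit()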
def cache_call_refresh(self, method, *options):
"""
Call a remote method and update the local cache with the result
if it already existed.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
options_hash = self.encode(options)
if len(options_hash) > 20:
options_hash = hashlib.new('sha1', options_hash).digest()
options_hash = sqlite3.Binary(options_hash)
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('DELETE FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
return_value = self.call(method, *options)
store_return_value = sqlite3.Binary(self.encode(return_value))
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
self.cache_db.commit()
return return_value
|
Call a remote method and update the local cache with the result
if it already existed.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
|
entailment
|
def cache_clear(self):
"""Purge the local store of all cached function information."""
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('DELETE FROM cache')
self.cache_db.commit()
self.logger.info('the RPC cache has been purged')
return
|
Purge the local store of all cached function information.
|
entailment
|
def respond_file(self, file_path, attachment=False, query=None):
"""
Respond to the client by serving a file, either directly or as
an attachment.
:param str file_path: The path to the file to serve, this does not need to be in the web root.
:param bool attachment: Whether to serve the file as a download by setting the Content-Disposition header.
"""
del query
file_path = os.path.abspath(file_path)
try:
file_obj = open(file_path, 'rb')
except IOError:
self.respond_not_found()
return
self.send_response(200)
self.send_header('Content-Type', self.guess_mime_type(file_path))
fs = os.fstat(file_obj.fileno())
self.send_header('Content-Length', str(fs[6]))
if attachment:
file_name = os.path.basename(file_path)
self.send_header('Content-Disposition', 'attachment; filename=' + file_name)
self.send_header('Last-Modified', self.date_time_string(fs.st_mtime))
self.end_headers()
shutil.copyfileobj(file_obj, self.wfile)
file_obj.close()
return
|
Respond to the client by serving a file, either directly or as
an attachment.
:param str file_path: The path to the file to serve, this does not need to be in the web root.
:param bool attachment: Whether to serve the file as a download by setting the Content-Disposition header.
|
entailment
|
def respond_list_directory(self, dir_path, query=None):
"""
Respond to the client with an HTML page listing the contents of
the specified directory.
:param str dir_path: The path of the directory to list the contents of.
"""
del query
try:
dir_contents = os.listdir(dir_path)
except os.error:
self.respond_not_found()
return
if os.path.normpath(dir_path) != self.__config['serve_files_root']:
dir_contents.append('..')
dir_contents.sort(key=lambda a: a.lower())
displaypath = html.escape(urllib.parse.unquote(self.path), quote=True)
f = io.BytesIO()
encoding = sys.getfilesystemencoding()
f.write(b'<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n')
f.write(b'<html>\n<title>Directory listing for ' + displaypath.encode(encoding) + b'</title>\n')
f.write(b'<body>\n<h2>Directory listing for ' + displaypath.encode(encoding) + b'</h2>\n')
f.write(b'<hr>\n<ul>\n')
for name in dir_contents:
fullname = os.path.join(dir_path, name)
displayname = linkname = name
# Append / for directories or @ for symbolic links
if os.path.isdir(fullname):
displayname = name + "/"
linkname = name + "/"
if os.path.islink(fullname):
displayname = name + "@"
# Note: a link to a directory displays with @ and links with /
f.write(('<li><a href="' + urllib.parse.quote(linkname) + '">' + html.escape(displayname, quote=True) + '</a>\n').encode(encoding))
f.write(b'</ul>\n<hr>\n</body>\n</html>\n')
length = f.tell()
f.seek(0)
self.send_response(200)
self.send_header('Content-Type', 'text/html; charset=' + encoding)
self.send_header('Content-Length', length)
self.end_headers()
shutil.copyfileobj(f, self.wfile)
f.close()
return
|
Respond to the client with an HTML page listing the contents of
the specified directory.
:param str dir_path: The path of the directory to list the contents of.
|
entailment
|
def respond_redirect(self, location='/'):
"""
Respond to the client with a 301 message and redirect them with
a Location header.
:param str location: The new location to redirect the client to.
"""
self.send_response(301)
self.send_header('Content-Length', 0)
self.send_header('Location', location)
self.end_headers()
return
|
Respond to the client with a 301 message and redirect them with
a Location header.
:param str location: The new location to redirect the client to.
|
entailment
|
def respond_server_error(self, status=None, status_line=None, message=None):
"""
Handle an internal server error, logging a traceback if executed
within an exception handler.
:param int status: The status code to respond to the client with.
:param str status_line: The status message to respond to the client with.
:param str message: The body of the response that is sent to the client.
"""
(ex_type, ex_value, ex_traceback) = sys.exc_info()
if ex_type:
(ex_file_name, ex_line, _, _) = traceback.extract_tb(ex_traceback)[-1]
line_info = "{0}:{1}".format(ex_file_name, ex_line)
log_msg = "encountered {0} in {1}".format(repr(ex_value), line_info)
self.server.logger.error(log_msg, exc_info=True)
status = (status or 500)
status_line = (status_line or http.client.responses.get(status, 'Internal Server Error')).strip()
self.send_response(status, status_line)
message = (message or status_line)
if isinstance(message, (str, bytes)):
self.send_header('Content-Length', len(message))
self.end_headers()
if isinstance(message, str):
self.wfile.write(message.encode(sys.getdefaultencoding()))
else:
self.wfile.write(message)
elif hasattr(message, 'fileno'):
fs = os.fstat(message.fileno())
self.send_header('Content-Length', fs[6])
self.end_headers()
shutil.copyfileobj(message, self.wfile)
else:
self.end_headers()
return
|
Handle an internal server error, logging a traceback if executed
within an exception handler.
:param int status: The status code to respond to the client with.
:param str status_line: The status message to respond to the client with.
:param str message: The body of the response that is sent to the client.
|
entailment
|
def respond_unauthorized(self, request_authentication=False):
"""
Respond to the client that the request is unauthorized.
:param bool request_authentication: Whether to request basic authentication information by sending a WWW-Authenticate header.
"""
headers = {}
if request_authentication:
headers['WWW-Authenticate'] = 'Basic realm="' + self.__config['server_version'] + '"'
self.send_response_full(b'Unauthorized', status=401, headers=headers)
return
|
Respond to the client that the request is unauthorized.
:param bool request_authentication: Whether to request basic authentication information by sending a WWW-Authenticate header.
|
entailment
|
def dispatch_handler(self, query=None):
"""
Dispatch functions based on the established handler_map. It is
generally not necessary to override this function and doing so
will prevent any handlers from being executed. This function is
executed automatically when a GET, HEAD, or POST request is received.
:param dict query: Parsed query parameters from the corresponding request.
"""
query = (query or {})
# normalize the path
# abandon query parameters
self.path = self.path.split('?', 1)[0]
self.path = self.path.split('#', 1)[0]
original_path = urllib.parse.unquote(self.path)
self.path = posixpath.normpath(original_path)
words = self.path.split('/')
words = filter(None, words)
tmp_path = ''
for word in words:
_, word = os.path.splitdrive(word)
_, word = os.path.split(word)
if word in (os.curdir, os.pardir):
continue
tmp_path = os.path.join(tmp_path, word)
self.path = tmp_path
if self.path == 'robots.txt' and self.__config['serve_robots_txt']:
self.send_response_full(self.__config['robots_txt'])
return
self.cookies = http.cookies.SimpleCookie(self.headers.get('cookie', ''))
handler, is_method = self.__get_handler(is_rpc=False)
if handler is not None:
try:
handler(*((query,) if is_method else (self, query)))
except Exception:
self.respond_server_error()
return
if not self.__config['serve_files']:
self.respond_not_found()
return
file_path = self.__config['serve_files_root']
file_path = os.path.join(file_path, tmp_path)
if os.path.isfile(file_path) and os.access(file_path, os.R_OK):
self.respond_file(file_path, query=query)
return
elif os.path.isdir(file_path) and os.access(file_path, os.R_OK):
if not original_path.endswith('/'):
# redirect browser, doing what apache does
destination = self.path + '/'
if self.command == 'GET' and self.query_data:
destination += '?' + urllib.parse.urlencode(self.query_data, True)
self.respond_redirect(destination)
return
for index in ['index.html', 'index.htm']:
index = os.path.join(file_path, index)
if os.path.isfile(index) and os.access(index, os.R_OK):
self.respond_file(index, query=query)
return
if self.__config['serve_files_list_directories']:
self.respond_list_directory(file_path, query=query)
return
self.respond_not_found()
return
|
Dispatch functions based on the established handler_map. It is
generally not necessary to override this function and doing so
will prevent any handlers from being executed. This function is
executed automatically when a GET, HEAD, or POST request is received.
:param dict query: Parsed query parameters from the corresponding request.
|
entailment
|
def guess_mime_type(self, path):
"""
Guess an appropriate MIME type based on the extension of the
provided path.
:param str path: The path of the file to analyze.
:return: The guessed MIME type, or the default if none is found.
:rtype: str
"""
_, ext = posixpath.splitext(path)
if ext in self.extensions_map:
return self.extensions_map[ext]
ext = ext.lower()
return self.extensions_map[ext if ext in self.extensions_map else '']
|
Guess an appropriate MIME type based on the extension of the
provided path.
:param str path: The path of the file to analyze.
:return: The guessed MIME type, or the default if none is found.
:rtype: str
|
entailment
|
def check_authorization(self):
"""
Check for the presence of a basic auth Authorization header and
whether the credentials contained within it are valid.
:return: Whether or not the credentials are valid.
:rtype: bool
"""
try:
store = self.__config.get('basic_auth')
if store is None:
return True
auth_info = self.headers.get('Authorization')
if not auth_info:
return False
auth_info = auth_info.split()
if len(auth_info) != 2 or auth_info[0] != 'Basic':
return False
auth_info = base64.b64decode(auth_info[1]).decode(sys.getdefaultencoding())
username = auth_info.split(':')[0]
password = ':'.join(auth_info.split(':')[1:])
password_bytes = password.encode(sys.getdefaultencoding())
if hasattr(self, 'custom_authentication'):
if self.custom_authentication(username, password):
self.basic_auth_user = username
return True
return False
if username not in store:
self.server.logger.warning('received invalid username: ' + username)
return False
password_data = store[username]
if password_data['type'] == 'plain':
if password == password_data['value']:
self.basic_auth_user = username
return True
elif hashlib.new(password_data['type'], password_bytes).digest() == password_data['value']:
self.basic_auth_user = username
return True
self.server.logger.warning('received invalid password from user: ' + username)
except Exception:
pass
return False
|
Check for the presence of a basic auth Authorization header and
whether the credentials contained within it are valid.
:return: Whether or not the credentials are valid.
:rtype: bool
|
entailment
|
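A hedged illustration of the Basic Authorization header format this method parses; the credentials are made up:

import base64

token = base64.b64encode(b'alice:s3cret').decode('ascii')
header = 'Basic ' + token  # the Authorization header value sent by the client
# The server-side reversal, as in check_authorization:
user, _, password = base64.b64decode(token).decode().partition(':')
assert (user, password) == ('alice', 's3cret')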
def cookie_get(self, name):
"""
Check for a cookie value by name.
:param str name: Name of the cookie value to retrieve.
:return: Returns the cookie value if it's set or None if it's not found.
"""
if not hasattr(self, 'cookies'):
return None
if self.cookies.get(name):
return self.cookies.get(name).value
return None
|
Check for a cookie value by name.
:param str name: Name of the cookie value to retrieve.
:return: Returns the cookie value if it's set or None if it's not found.
|
entailment
|
def cookie_set(self, name, value):
"""
Set the value of a client cookie. This can only be called while
headers can be sent.
:param str name: The name of the cookie value to set.
:param str value: The value of the cookie to set.
"""
if not self.headers_active:
raise RuntimeError('headers have already been ended')
cookie = "{0}={1}; Path=/; HttpOnly".format(name, value)
self.send_header('Set-Cookie', cookie)
|
Set the value of a client cookie. This can only be called while
headers can be sent.
:param str name: The name of the cookie value to set.
:param str value: The value of the cookie to set.
|
entailment
|
def get_content_type_charset(self, default='UTF-8'):
"""
Inspect the Content-Type header to retrieve the charset that the client
has specified.
:param str default: The default charset to return if none exists.
:return: The charset of the request.
:rtype: str
"""
encoding = default
header = self.headers.get('Content-Type', '')
idx = header.find('charset=')
if idx > 0:
encoding = (header[idx + 8:].split(' ', 1)[0] or encoding)
return encoding
|
Inspect the Content-Type header to retrieve the charset that the client
has specified.
:param str default: The default charset to return if none exists.
:return: The charset of the request.
:rtype: str
|
entailment
|
def close(self):
"""
Close the web socket connection and stop processing results. If the
connection is still open, a WebSocket close message will be sent to the
peer.
"""
if not self.connected:
return
self.connected = False
if self.handler.wfile.closed:
return
if select.select([], [self.handler.wfile], [], 0)[1]:
with self.lock:
self.handler.wfile.write(b'\x88\x00')
self.handler.wfile.flush()
self.on_closed()
|
Close the web socket connection and stop processing results. If the
connection is still open, a WebSocket close message will be sent to the
peer.
|
entailment
|
def send_message(self, opcode, message):
"""
Send a message to the peer over the socket.
:param int opcode: The opcode for the message to send.
:param bytes message: The message data to send.
"""
if not isinstance(message, bytes):
message = message.encode('utf-8')
length = len(message)
if not select.select([], [self.handler.wfile], [], 0)[1]:
self.logger.error('the socket is not ready for writing')
self.close()
return
buffer = b''
buffer += struct.pack('B', 0x80 + opcode)
if length <= 125:
buffer += struct.pack('B', length)
elif 126 <= length <= 65535:
buffer += struct.pack('>BH', 126, length)
else:
buffer += struct.pack('>BQ', 127, length)
buffer += message
self._last_sent_opcode = opcode
self.lock.acquire()
try:
self.handler.wfile.write(buffer)
self.handler.wfile.flush()
except Exception:
self.logger.error('an error occurred while sending a message', exc_info=True)
self.close()
finally:
self.lock.release()
|
Send a message to the peer over the socket.
:param int opcode: The opcode for the message to send.
:param bytes message: The message data to send.
|
entailment
|
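The struct.pack branches above implement the RFC 6455 payload-length encoding: lengths up to 125 fit in the second header byte, lengths up to 65535 use marker 126 plus a 16-bit field, and anything larger uses marker 127 plus a 64-bit field. A standalone sketch of just the header logic:

import struct

def frame_header(opcode, length):
    head = struct.pack('B', 0x80 + opcode)  # FIN bit set, single-frame message
    if length <= 125:
        return head + struct.pack('B', length)
    elif length <= 65535:
        return head + struct.pack('>BH', 126, length)
    return head + struct.pack('>BQ', 127, length)

assert frame_header(0x01, 5) == b'\x81\x05'  # header for a 5-byte text frame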
def on_message(self, opcode, message):
"""
The primary dispatch function to handle incoming WebSocket messages.
:param int opcode: The opcode of the message that was received.
:param bytes message: The data contained within the message.
"""
self.logger.debug("processing {0} (opcode: 0x{1:02x}) message".format(self._opcode_names.get(opcode, 'UNKNOWN'), opcode))
if opcode == self._opcode_close:
self.close()
elif opcode == self._opcode_ping:
if len(message) > 125:
self.close()
return
self.send_message(self._opcode_pong, message)
elif opcode == self._opcode_pong:
pass
elif opcode == self._opcode_binary:
self.on_message_binary(message)
elif opcode == self._opcode_text:
try:
message = self._decode_string(message)
except UnicodeDecodeError:
self.logger.warning('closing connection due to invalid unicode within a text message')
self.close()
else:
self.on_message_text(message)
elif opcode == self._opcode_continue:
self.close()
else:
self.logger.warning("received unknown opcode: {0} (0x{0:02x})".format(opcode))
self.close()
|
The primary dispatch function to handle incoming WebSocket messages.
:param int opcode: The opcode of the message that was received.
:param bytes message: The data contained within the message.
|
entailment
|
def from_content_type(cls, content_type):
"""
Build a serializer object from a MIME Content-Type string.
:param str content_type: The Content-Type string to parse.
:return: A new serializer instance.
:rtype: :py:class:`.Serializer`
"""
name = content_type
options = {}
if ';' in content_type:
name, options_str = content_type.split(';', 1)
for part in options_str.split(';'):
part = part.strip()
if '=' in part:
key, value = part.split('=', 1)
else:
key, value = (part, None)
options[key] = value
# old style compatibility
if name.endswith('+zlib'):
options['compression'] = 'zlib'
name = name[:-5]
return cls(name, charset=options.get('charset', 'UTF-8'), compression=options.get('compression'))
|
Build a serializer object from a MIME Content-Type string.
:param str content_type: The Content-Type string to parse.
:return: A new serializer instance.
:rtype: :py:class:`.Serializer`
|
entailment
|
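A hedged usage sketch of the parser above; Serializer is the enclosing class, and the serializer name is assumed to be a key in g_serializer_drivers:

# New style: options carried as MIME parameters.
serializer = Serializer.from_content_type('binary/message-pack; charset=UTF-8; compression=zlib')
# Old-style compatibility: a '+zlib' suffix implies zlib compression.
serializer = Serializer.from_content_type('binary/message-pack+zlib')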
def dumps(self, data):
"""
Serialize a python data type for transmission or storage.
:param data: The python object to serialize.
:return: The serialized representation of the object.
:rtype: bytes
"""
data = g_serializer_drivers[self.name]['dumps'](data)
if sys.version_info[0] == 3 and isinstance(data, str):
data = data.encode(self._charset)
if self._compression == 'zlib':
data = zlib.compress(data)
assert isinstance(data, bytes)
return data
|
Serialize a python data type for transmission or storage.
:param data: The python object to serialize.
:return: The serialized representation of the object.
:rtype: bytes
|
entailment
|
def loads(self, data):
"""
Deserialize the data into its original python object.
:param bytes data: The serialized object to load.
:return: The original python object.
"""
if not isinstance(data, bytes):
raise TypeError("loads() argument 1 must be bytes, not {0}".format(type(data).__name__))
if self._compression == 'zlib':
data = zlib.decompress(data)
if sys.version_info[0] == 3 and self.name.startswith('application/'):
data = data.decode(self._charset)
data = g_serializer_drivers[self.name]['loads'](data, (self._charset if sys.version_info[0] == 3 else None))
if isinstance(data, list):
data = tuple(data)
return data
|
Deserialize the data into its original python object.
:param bytes data: The serialized object to load.
:return: The original python object.
|
entailment
|
def add_sni_cert(self, hostname, ssl_certfile=None, ssl_keyfile=None, ssl_version=None):
"""
Add an SSL certificate for a specific hostname as supported by SSL's
Server Name Indicator (SNI) extension. See :rfc:`3546` for more details
on SSL extensions. In order to use this method, the server instance must
have been initialized with at least one address configured for SSL.
.. warning::
This method will raise a :py:exc:`RuntimeError` if either the SNI
extension is not available in the :py:mod:`ssl` module or if SSL was
not enabled at initialization time through the use of arguments to
:py:meth:`~.__init__`.
.. versionadded:: 2.0.0
:param str hostname: The hostname for this configuration.
:param str ssl_certfile: An SSL certificate file to use, setting this enables SSL.
:param str ssl_keyfile: An SSL key file to use.
:param ssl_version: The SSL protocol version to use.
"""
if not g_ssl_has_server_sni:
raise RuntimeError('the ssl server name indicator extension is unavailable')
if self._ssl_sni_entries is None:
raise RuntimeError('ssl was not enabled on initialization')
if ssl_certfile:
ssl_certfile = os.path.abspath(ssl_certfile)
if ssl_keyfile:
ssl_keyfile = os.path.abspath(ssl_keyfile)
cert_info = SSLSNICertificate(hostname, ssl_certfile, ssl_keyfile)
if ssl_version is None or isinstance(ssl_version, str):
ssl_version = resolve_ssl_protocol_version(ssl_version)
ssl_ctx = ssl.SSLContext(ssl_version)
ssl_ctx.load_cert_chain(ssl_certfile, keyfile=ssl_keyfile)
self._ssl_sni_entries[hostname] = SSLSNIEntry(context=ssl_ctx, certificate=cert_info)
|
Add an SSL certificate for a specific hostname as supported by SSL's
Server Name Indicator (SNI) extension. See :rfc:`3546` for more details
on SSL extensions. In order to use this method, the server instance must
have been initialized with at least one address configured for SSL.
.. warning::
This method will raise a :py:exc:`RuntimeError` if either the SNI
extension is not available in the :py:mod:`ssl` module or if SSL was
not enabled at initialization time through the use of arguments to
:py:meth:`~.__init__`.
.. versionadded:: 2.0.0
:param str hostname: The hostname for this configuration.
:param str ssl_certfile: An SSL certificate file to use, setting this enables SSL.
:param str ssl_keyfile: An SSL key file to use.
:param ssl_version: The SSL protocol version to use.
|
entailment
|
def remove_sni_cert(self, hostname):
"""
Remove the SSL Server Name Indicator (SNI) certificate configuration for
the specified *hostname*.
.. warning::
This method will raise a :py:exc:`RuntimeError` if either the SNI
extension is not available in the :py:mod:`ssl` module or if SSL was
not enabled at initialization time through the use of arguments to
:py:meth:`~.__init__`.
.. versionadded:: 2.2.0
:param str hostname: The hostname to delete the SNI configuration for.
"""
if not g_ssl_has_server_sni:
raise RuntimeError('the ssl server name indicator extension is unavailable')
if self._ssl_sni_entries is None:
raise RuntimeError('ssl was not enabled on initialization')
sni_entry = self._ssl_sni_entries.pop(hostname, None)
if sni_entry is None:
raise ValueError('the specified hostname does not have an sni certificate configuration')
|
Remove the SSL Server Name Indicator (SNI) certificate configuration for
the specified *hostname*.
.. warning::
This method will raise a :py:exc:`RuntimeError` if either the SNI
extension is not available in the :py:mod:`ssl` module or if SSL was
not enabled at initialization time through the use of arguments to
:py:meth:`~.__init__`.
.. versionadded:: 2.2.0
:param str hostname: The hostname to delete the SNI configuration for.
|
entailment
|
def sni_certs(self):
"""
.. versionadded:: 2.2.0
:return: Return a tuple of :py:class:`~.SSLSNICertificate` instances for each of the certificates that are configured.
:rtype: tuple
"""
if not g_ssl_has_server_sni or self._ssl_sni_entries is None:
return tuple()
return tuple(entry.certificate for entry in self._ssl_sni_entries.values())
|
.. versionadded:: 2.2.0
:return: Return a tuple of :py:class:`~.SSLSNICertificate` instances for each of the certificates that are configured.
:rtype: tuple
|
entailment
|
def serve_forever(self, fork=False):
"""
Start handling requests. This method must be called and does not
return unless the :py:meth:`.shutdown` method is called from
another thread.
:param bool fork: Whether to fork or not before serving content.
:return: The child process's PID if *fork* is set to True.
:rtype: int
"""
if fork:
if not hasattr(os, 'fork'):
raise OSError('os.fork is not available')
child_pid = os.fork()
if child_pid != 0:
self.logger.info('forked child process: ' + str(child_pid))
return child_pid
self.__server_thread = threading.current_thread()
self.__wakeup_fd = WakeupFd()
self.__is_shutdown.clear()
self.__should_stop.clear()
self.__is_running.set()
while not self.__should_stop.is_set():
try:
self._serve_ready()
except socket.error:
self.logger.warning('encountered socket error, stopping server')
self.__should_stop.set()
self.__is_shutdown.set()
self.__is_running.clear()
return 0
|
Start handling requests. This method must be called and does not
return unless the :py:meth:`.shutdown` method is called from
another thread.
:param bool fork: Whether to fork or not before serving content.
:return: The child process's PID if *fork* is set to True.
:rtype: int
|
entailment
|
def shutdown(self):
"""Shutdown the server and stop responding to requests."""
self.__should_stop.set()
if self.__server_thread == threading.current_thread():
self.__is_shutdown.set()
self.__is_running.clear()
else:
if self.__wakeup_fd is not None:
os.write(self.__wakeup_fd.write_fd, b'\x00')
self.__is_shutdown.wait()
if self.__wakeup_fd is not None:
self.__wakeup_fd.close()
self.__wakeup_fd = None
for server in self.sub_servers:
server.shutdown()
|
Shutdown the server and stop responding to requests.
|
entailment
|
def auth_set(self, status):
"""
Enable or disable requiring authentication on all incoming requests.
:param bool status: Whether to enable or disable requiring authentication.
"""
if not bool(status):
self.__config['basic_auth'] = None
self.logger.info('basic authentication has been disabled')
else:
self.__config['basic_auth'] = {}
self.logger.info('basic authentication has been enabled')
|
Enable or disable requiring authentication on all incoming requests.
:param bool status: Whether to enable or disable requiring authentication.
|
entailment
|
def auth_delete_creds(self, username=None):
"""
Delete the credentials for a specific username if specified or all
stored credentials.
:param str username: The username of the credentials to delete.
"""
if not username:
self.__config['basic_auth'] = {}
self.logger.info('basic authentication database has been cleared of all entries')
return
del self.__config['basic_auth'][username]
|
Delete the credentials for a specific username if specified or all
stored credentials.
:param str username: The username of the credentials to delete.
|
entailment
|
def auth_add_creds(self, username, password, pwtype='plain'):
"""
Add a valid set of credentials to be accepted for authentication.
Calling this function will automatically enable requiring
authentication. Passwords can be provided in either plaintext or
as a hash by specifying the hash type in the *pwtype* argument.
:param str username: The username of the credentials to be added.
:param password: The password data of the credentials to be added.
:type password: bytes, str
:param str pwtype: The type of the *password* data, (plain, md5, sha1, etc.).
"""
if not isinstance(password, (bytes, str)):
raise TypeError("auth_add_creds() argument 2 must be bytes or str, not {0}".format(type(password).__name__))
pwtype = pwtype.lower()
if pwtype not in ('plain', 'md5', 'sha1', 'sha256', 'sha384', 'sha512'):
raise ValueError('invalid password type, must be \'plain\', or supported by hashlib')
if self.__config.get('basic_auth') is None:
self.__config['basic_auth'] = {}
self.logger.info('basic authentication has been enabled')
if pwtype != 'plain':
algorithms_available = getattr(hashlib, 'algorithms_available', ()) or getattr(hashlib, 'algorithms', ())
if pwtype not in algorithms_available:
raise ValueError('hashlib does not support the desired algorithm')
# only md5 and sha1 hex for backwards compatibility
if pwtype == 'md5' and len(password) == 32:
password = binascii.unhexlify(password)
elif pwtype == 'sha1' and len(password) == 40:
password = binascii.unhexlify(password)
if not isinstance(password, bytes):
password = password.encode('UTF-8')
if len(hashlib.new(pwtype, b'foobar').digest()) != len(password):
raise ValueError('the length of the password hash does not match the type specified')
self.__config['basic_auth'][username] = {'value': password, 'type': pwtype}
|
Add a valid set of credentials to be accepted for authentication.
Calling this function will automatically enable requiring
authentication. Passwords can be provided in either plaintext or
as a hash by specifying the hash type in the *pwtype* argument.
:param str username: The username of the credentials to be added.
:param password: The password data of the credentials to be added.
:type password: bytes, str
:param str pwtype: The type of the *password* data, (plain, md5, sha1, etc.).
|
entailment
|
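A hedged usage sketch showing both plaintext and pre-hashed credentials; server is an assumed AdvancedHTTPServer instance and the credentials are made up:

import hashlib

server.auth_add_creds('alice', 'password123')  # stored as plaintext
# A 40-character hex sha1 digest is unhexlified before storage, per the code above.
server.auth_add_creds('bob', hashlib.sha1(b'hunter2').hexdigest(), pwtype='sha1')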
from contextlib import contextmanager

@contextmanager
def setattr_context(obj, **kwargs):
"""
Context manager to temporarily change the values of object attributes
while executing a function.
Example
-------
>>> class Foo: pass
>>> f = Foo(); f.attr = 'hello'
>>> with setattr_context(f, attr='goodbye'):
... print(f.attr)
goodbye
>>> print(f.attr)
hello
"""
old_kwargs = {key: getattr(obj, key) for key in kwargs}
for key, val in kwargs.items():
    setattr(obj, key, val)
try:
    yield
finally:
    for key, val in old_kwargs.items():
        setattr(obj, key, val)
|
Context manager to temporarily change the values of object attributes
while executing a function.
Example
-------
>>> class Foo: pass
>>> f = Foo(); f.attr = 'hello'
>>> with setattr_context(f, attr='goodbye'):
... print(f.attr)
goodbye
>>> print(f.attr)
hello
|
entailment
|
def validate_inputs(*arrays, **kwargs):
"""Validate input arrays
This checks that
- Arrays are mutually broadcastable
- Broadcasted arrays are one-dimensional
Optionally, arrays are sorted according to the ``sort_by`` argument.
Parameters
----------
*args : ndarrays
All non-keyword arguments are arrays which will be validated
sort_by : array
If specified, sort all inputs by the order given in this array.
"""
arrays = np.broadcast_arrays(*arrays)
sort_by = kwargs.pop('sort_by', None)
if kwargs:
raise ValueError("unrecognized arguments: {0}".format(kwargs.keys()))
if arrays[0].ndim != 1:
raise ValueError("Input arrays should be one-dimensional.")
if sort_by is not None:
isort = np.argsort(sort_by)
if isort.shape != arrays[0].shape:
raise ValueError("sort shape must equal array shape.")
arrays = tuple([a[isort] for a in arrays])
return arrays
|
Validate input arrays
This checks that
- Arrays are mutually broadcastable
- Broadcasted arrays are one-dimensional
Optionally, arrays are sorted according to the ``sort_by`` argument.
Parameters
----------
*args : ndarrays
All non-keyword arguments are arrays which will be validated
sort_by : array
If specified, sort all inputs by the order given in this array.
|
entailment
|
def _prep_smooth(t, y, dy, span, t_out, span_out, period):
"""Private function to prepare & check variables for smooth utilities"""
# If period is provided, sort by phases. Otherwise sort by t
if period:
t = t % period
if t_out is not None:
t_out = t_out % period
t, y, dy = validate_inputs(t, y, dy, sort_by=t)
if span_out is not None:
if t_out is None:
raise ValueError("Must specify t_out when span_out is given")
if span is not None:
raise ValueError("Must specify only one of span, span_out")
span, t_out = np.broadcast_arrays(span_out, t_out)
indices = np.searchsorted(t, t_out)
elif span is None:
raise ValueError("Must specify either span_out or span")
else:
indices = None
return t, y, dy, span, t_out, span_out, indices
|
Private function to prepare & check variables for smooth utilities
|
entailment
|
def moving_average_smooth(t, y, dy, span=None, cv=True,
t_out=None, span_out=None, period=None):
"""Perform a moving-average smooth of the data
Parameters
----------
t, y, dy : array_like
time, value, and error in value of the input data
span : array_like
the integer spans of the data
cv : boolean (default=True)
if True, treat the problem as a cross-validation, i.e. don't use
each point in the evaluation of its own smoothing.
t_out : array_like (optional)
the output times for the moving averages
span_out : array_like (optional)
the spans associated with the output times t_out
period : float
if provided, then consider the inputs periodic with the given period
Returns
-------
y_smooth : array_like
smoothed y values at each time t (or t_out)
"""
prep = _prep_smooth(t, y, dy, span, t_out, span_out, period)
t, y, dy, span, t_out, span_out, indices = prep
w = 1. / (dy ** 2)
w, yw = windowed_sum([w, y * w], t=t, span=span, subtract_mid=cv,
indices=indices, period=period)
if t_out is None or span_out is not None:
return yw / w
else:
i = np.minimum(len(t) - 1, np.searchsorted(t, t_out))
return yw[i] / w[i]
|
Perform a moving-average smooth of the data
Parameters
----------
t, y, dy : array_like
time, value, and error in value of the input data
span : array_like
the integer spans of the data
cv : boolean (default=True)
if True, treat the problem as a cross-validation, i.e. don't use
each point in the evaluation of its own smoothing.
t_out : array_like (optional)
the output times for the moving averages
span_out : array_like (optional)
the spans associated with the output times t_out
period : float
if provided, then consider the inputs periodic with the given period
Returns
-------
y_smooth : array_like
smoothed y values at each time t (or t_out)
|
entailment
|
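A hedged usage sketch of moving_average_smooth with a scalar span (assuming, as the code suggests, that a scalar span is broadcast across the data by windowed_sum); the data are synthetic:

import numpy as np

rng = np.random.RandomState(42)
t = np.sort(rng.uniform(0, 10, 100))
y = np.sin(t) + 0.2 * rng.randn(100)
y_smooth = moving_average_smooth(t, y, dy=0.2, span=10)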
def linear_smooth(t, y, dy, span=None, cv=True,
t_out=None, span_out=None, period=None):
"""Perform a linear smooth of the data
Parameters
----------
t, y, dy : array_like
time, value, and error in value of the input data
span : array_like
the integer spans of the data
cv : boolean (default=True)
if True, treat the problem as a cross-validation, i.e. don't use
each point in the evaluation of its own smoothing.
t_out : array_like (optional)
the output times for the moving averages
span_out : array_like (optional)
the spans associated with the output times t_out
period : float
if provided, then consider the inputs periodic with the given period
Returns
-------
y_smooth : array_like
smoothed y values at each time t or t_out
"""
t_input = t
prep = _prep_smooth(t, y, dy, span, t_out, span_out, period)
t, y, dy, span, t_out, span_out, indices = prep
if period:
t_input = np.asarray(t_input) % period
w = 1. / (dy ** 2)
w, yw, tw, tyw, ttw = windowed_sum([w, y * w, w, y * w, w], t=t,
tpowers=[0, 0, 1, 1, 2],
span=span, indices=indices,
subtract_mid=cv, period=period)
denominator = (w * ttw - tw * tw)
slope = (tyw * w - tw * yw)
intercept = (ttw * yw - tyw * tw)
if np.any(denominator == 0):
raise ValueError("Zero denominator in linear smooth. This usually "
"indicates that the input contains duplicate points.")
if t_out is None:
return (slope * t_input + intercept) / denominator
elif span_out is not None:
return (slope * t_out + intercept) / denominator
else:
i = np.minimum(len(t) - 1, np.searchsorted(t, t_out))
return (slope[i] * t_out + intercept[i]) / denominator[i]
|
Perform a linear smooth of the data
Parameters
----------
t, y, dy : array_like
time, value, and error in value of the input data
span : array_like
the integer spans of the data
cv : boolean (default=True)
if True, treat the problem as a cross-validation, i.e. don't use
each point in the evaluation of its own smoothing.
t_out : array_like (optional)
the output times for the moving averages
span_out : array_like (optional)
the spans associated with the output times t_out
period : float
if provided, then consider the inputs periodic with the given period
Returns
-------
y_smooth : array_like
smoothed y values at each time t or t_out
|
entailment
|
def multinterp(x, y, xquery, slow=False):
"""Multiple linear interpolations
Parameters
----------
x : array_like, shape=(N,)
sorted array of x values
y : array_like, shape=(N, M)
array of y values corresponding to each x value
xquery : array_like, shape=(M,)
array of query values
slow : boolean, default=False
if True, use slow method (used mainly for unit testing)
Returns
-------
yquery : ndarray, shape=(M,)
The interpolated values corresponding to each x query.
"""
x, y, xquery = map(np.asarray, (x, y, xquery))
assert x.ndim == 1
assert xquery.ndim == 1
assert y.shape == x.shape + xquery.shape
# make sure xmin < xquery < xmax in all cases
xquery = np.clip(xquery, x.min(), x.max())
if slow:
from scipy.interpolate import interp1d
return np.array([interp1d(x, y)(xq) for xq, y in zip(xquery, y.T)])
elif len(x) == 3:
# Most common case: use a faster approach
yq_lower = y[0] + (xquery - x[0]) * (y[1] - y[0]) / (x[1] - x[0])
yq_upper = y[1] + (xquery - x[1]) * (y[2] - y[1]) / (x[2] - x[1])
return np.where(xquery < x[1], yq_lower, yq_upper)
else:
i = np.clip(np.searchsorted(x, xquery, side='right') - 1,
0, len(x) - 2)
j = np.arange(len(xquery))
return y[i, j] + ((xquery - x[i]) *
(y[i + 1, j] - y[i, j]) / (x[i + 1] - x[i]))
|
Multiple linear interpolations
Parameters
----------
x : array_like, shape=(N,)
sorted array of x values
y : array_like, shape=(N, M)
array of y values corresponding to each x value
xquery : array_like, shape=(M,)
array of query values
slow : boolean, default=False
if True, use slow method (used mainly for unit testing)
Returns
-------
yquery : ndarray, shape=(M,)
The interpolated values corresponding to each x query.
|
entailment
|
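For illustration, one query point per column of y, with an out-of-range query clipped to the endpoints (this assumes the multinterp above is in scope):

import numpy as np

x = np.array([0.0, 1.0, 2.0])
y = np.array([[0.0, 10.0, 100.0],
              [1.0, 20.0, 200.0],
              [2.0, 30.0, 300.0]])   # shape (N=3, M=3)
xquery = np.array([0.5, 1.5, 2.5])   # 2.5 is clipped to x.max() == 2.0
yq = multinterp(x, y, xquery)
assert np.allclose(yq, [0.5, 25.0, 300.0])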
def _create_session(self, test_connection=False):
"""
Create a consulate.session object, and query for its leader to ensure
that the connection is made.
:param test_connection: call .leader() to ensure that the connection
is valid
:type test_connection: bool
:return consulate.Session instance
"""
session = consulate.Session(host=self.host, port=self.port)
if test_connection:
session.status.leader()
return session
|
Create a consulate.session object, and query for its leader to ensure
that the connection is made.
:param test_connection: call .leader() to ensure that the connection
is valid
:type test_connection: bool
:return consulate.Session instance
|
entailment
|
def apply_remote_config(self, namespace=None):
"""
Applies all config values defined in consul's kv store to self.app.
There is no guarantee that these values will not be overwritten later
elsewhere.
:param namespace: kv namespace/directory. Defaults to
DEFAULT_KV_NAMESPACE
:return: None
"""
if namespace is None:
namespace = "config/{service}/{environment}/".format(
service=os.environ.get('SERVICE', 'generic_service'),
environment=os.environ.get('ENVIRONMENT', 'generic_environment')
)
for k, v in iteritems(self.session.kv.find(namespace)):
k = k.replace(namespace, '')
try:
self.app.config[k] = json.loads(v)
except (TypeError, ValueError):
self.app.logger.warning("Couldn't de-serialize {} to json, using raw value".format(v))
self.app.config[k] = v
msg = "Set {k}={v} from consul kv '{ns}'".format(
k=k,
v=v,
ns=namespace,
)
self.app.logger.debug(msg)
|
Applies all config values defined in consul's kv store to self.app.
There is no guarantee that these values will not be overwritten later
elsewhere.
:param namespace: kv namespace/directory. Defaults to
DEFAULT_KV_NAMESPACE
:return: None
|
entailment
|
def register_service(self, **kwargs):
"""
register this service with consul
kwargs passed to Consul.agent.service.register
"""
kwargs.setdefault('name', self.app.name)
self.session.agent.service.register(**kwargs)
|
register this service with consul
kwargs passed to Consul.agent.service.register
|
entailment
|
def _resolve(self):
"""
Query the consul DNS server for the service IP and port
"""
endpoints = {}
r = self.resolver.query(self.service, 'SRV')
for rec in r.response.additional:
name = rec.name.to_text()
addr = rec.items[0].address
endpoints[name] = {'addr': addr}
for rec in r.response.answer[0].items:
name = '.'.join(rec.target.labels)
endpoints[name]['port'] = rec.port
return [
'http://{ip}:{port}'.format(
ip=v['addr'], port=v['port']
) for v in endpoints.values()
]
|
Query the consul DNS server for the service IP and port
|
entailment
|
def request(self, method, endpoint, **kwargs):
"""
Proxy to requests.request
:param method: str formatted http method
:param endpoint: service endpoint
:param kwargs: kwargs passed directly to requests.request
:return:
"""
kwargs.setdefault('timeout', (1, 30))
return self.session.request(
method,
urljoin(self.base_url, endpoint),
**kwargs
)
|
Proxy to requests.request
:param method: str formatted http method
:param endpoint: service endpoint
:param kwargs: kwargs passed directly to requests.request
:return:
|
entailment
|
def with_retry_connections(max_tries=3, sleep=0.05):
"""
Decorator that wraps an entire function in a try/except clause. On
requests.exceptions.ConnectionError, will re-run the function code
until success or max_tries is reached.
:param max_tries: maximum number of attempts before giving up
:param sleep: time to sleep between tries, or None
"""
def decorator(f):
@functools.wraps(f)
def f_retry(*args, **kwargs):
tries = 0
while True:
try:
return f(*args, **kwargs)
except (ConnectionError, ConnectTimeout) as e:
tries += 1
if tries >= max_tries:
raise ConsulConnectionError(e)
if sleep:
time.sleep(sleep)
return f_retry
return decorator
|
Decorator that wraps an entire function in a try/except clause. On
requests.exceptions.ConnectionError, will re-run the function code
until success or max_tries is reached.
:param max_tries: maximum number of attempts before giving up
:param sleep: time to sleep between tries, or None
|
entailment
|
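A hypothetical exercise of the decorator above (assuming it is in scope): a call that raises ConnectionError twice is retried transparently and succeeds on the third attempt.

from requests.exceptions import ConnectionError

attempts = []

@with_retry_connections(max_tries=3, sleep=None)
def flaky():
    attempts.append(1)
    if len(attempts) < 3:
        raise ConnectionError("simulated outage")
    return "ok"

assert flaky() == "ok" and len(attempts) == 3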
def crop(gens, seconds=5, cropper=None):
'''
Crop the generator to a finite number of frames
Return a generator which outputs the provided generator limited
to enough samples to produce seconds seconds of audio (default 5s)
at the provided frame rate.
'''
if hasattr(gens, "next"):
# single generator
gens = (gens,)
    if cropper is None:
cropper = lambda gen: itertools.islice(gen, 0, seconds * sampler.FRAME_RATE)
cropped = [cropper(gen) for gen in gens]
return cropped[0] if len(cropped) == 1 else cropped
|
Crop the generator to a finite number of frames
Return a generator which outputs the provided generator limited
to enough samples to produce seconds seconds of audio (default 5s)
at the provided frame rate.
|
entailment
|
def crop_at_zero_crossing(gen, seconds=5, error=0.1):
'''
Crop the generator, ending at a zero-crossing
Crop the generator to produce approximately seconds seconds
(default 5s) of audio at the provided FRAME_RATE, attempting
to end the clip at a zero crossing point to avoid clicking.
'''
source = iter(gen)
buffer_length = int(2 * error * sampler.FRAME_RATE)
# split the source into two iterators:
# - start, which contains the bulk of the sound clip
# - and end, which contains the final 100ms, plus 100ms past
# the desired clip length. We may cut the clip anywhere
# within this +/-100ms end buffer.
start = itertools.islice(source, 0, int((seconds - error) * sampler.FRAME_RATE))
end = itertools.islice(source, 0, buffer_length)
for sample in start:
yield sample
# pull end buffer generator into memory so we can work with it
end = list(end)
# find min by sorting buffer samples, first by abs of sample, then by distance from optimal
best = sorted(enumerate(end), key=lambda x: (math.fabs(x[1]),abs((buffer_length/2)-x[0])))
    print(best[:10])
    print(best[0][0])
# todo: better logic when we don't have a perfect zero crossing
#if best[0][1] != 0:
# # we don't have a perfect zero crossing, so let's look for best fit?
# pass
# crop samples at index of best zero crossing
for sample in end[:best[0][0] + 1]:
yield sample
|
Crop the generator, ending at a zero-crossing
Crop the generator to produce approximately seconds seconds
(default 5s) of audio at the provided FRAME_RATE, attempting
to end the clip at a zero crossing point to avoid clicking.
|
entailment
|
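The sort key picks the sample closest to zero amplitude, breaking ties by distance from the middle of the error buffer; in miniature:

buffer_length = 6
end = [0.5, -0.1, 0.2, 0.0, -0.4, 0.3]
best = sorted(enumerate(end),
              key=lambda x: (abs(x[1]), abs(buffer_length // 2 - x[0])))
assert best[0] == (3, 0.0)   # a perfect zero crossing, right at the midpoint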
def volume(gen, dB=0):
    '''Change the volume of gen by dB decibels'''
if not hasattr(dB, 'next'):
# not a generator
scale = 10 ** (dB / 20.)
else:
def scale_gen():
while True:
yield 10 ** (next(dB) / 20.)
scale = scale_gen()
return envelope(gen, scale)
|
Change the volume of gen by dB decibels
|
entailment
|
def mixer(inputs, mix=None):
'''
Mix `inputs` together based on `mix` tuple
`inputs` should be a tuple of *n* generators.
`mix` should be a tuple of *m* tuples, one per desired
output channel. Each of the *m* tuples should contain
*n* generators, corresponding to the time-sequence of
the desired mix levels for each of the *n* input channels.
    That is, to make an output channel contain a 50/50 mix of the
two input channels, the tuple would be:
(constant(0.5), constant(0.5))
The mix generators need not be constant, allowing for time-varying
mix levels:
# 50% from input 1, pulse input 2 over a two second cycle
(constant(0.5), tone(0.5))
The mixer will return a list of *m* generators, each containing
the data from the inputs mixed as specified.
If no `mix` tuple is specified, all of the *n* input channels
will be mixed together into one generator, with the volume of
each reduced *n*-fold.
Example:
# three in, two out;
# 10Hz binaural beat with white noise across both channels
mixer(
(white_noise(), tone(440), tone(450)),
(
(constant(.5), constant(1), constant(0)),
(constant(.5), constant(0), constant(1)),
)
)
'''
    if mix is None:
# by default, mix all inputs down to one channel
mix = ([constant(1.0 / len(inputs))] * len(inputs),)
duped_inputs = zip(*[itertools.tee(i, len(mix)) for i in inputs])
# second zip is backwards
return [\
sum(*[multiply(m,i) for m,i in zip(channel_mix, channel_inputs)])\
for channel_mix, channel_inputs in zip(mix, duped_inputs) \
]
|
Mix `inputs` together based on `mix` tuple
`inputs` should be a tuple of *n* generators.
`mix` should be a tuple of *m* tuples, one per desired
output channel. Each of the *m* tuples should contain
*n* generators, corresponding to the time-sequence of
the desired mix levels for each of the *n* input channels.
That is, to make an output channel contain a 50/50 mix of the
two input channels, the tuple would be:
(constant(0.5), constant(0.5))
The mix generators need not be constant, allowing for time-varying
mix levels:
# 50% from input 1, pulse input 2 over a two second cycle
(constant(0.5), tone(0.5))
The mixer will return a list of *m* generators, each containing
the data from the inputs mixed as specified.
If no `mix` tuple is specified, all of the *n* input channels
will be mixed together into one generator, with the volume of
each reduced *n*-fold.
Example:
# three in, two out;
# 10Hz binaural beat with white noise across both channels
mixer(
(white_noise(), tone(440), tone(450)),
(
(constant(.5), constant(1), constant(0)),
(constant(.5), constant(0), constant(1)),
)
)
|
entailment
|
def channelize(gen, channels):
'''
Break multi-channel generator into one sub-generator per channel
Takes a generator producing n-tuples of samples and returns n generators,
each producing samples for a single channel.
Since multi-channel generators are the only reasonable way to synchronize samples
across channels, and the sampler functions only take tuples of generators,
you must use this function to process synchronized streams for output.
'''
def pick(g, channel):
for samples in g:
yield samples[channel]
return [pick(gen_copy, channel) for channel, gen_copy in enumerate(itertools.tee(gen, channels))]
|
Break multi-channel generator into one sub-generator per channel
Takes a generator producing n-tuples of samples and returns n generators,
each producing samples for a single channel.
Since multi-channel generators are the only reasonable way to synchronize samples
across channels, and the sampler functions only take tuples of generators,
you must use this function to process synchronized streams for output.
|
entailment
|
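Splitting a synchronized stereo (2-tuple) stream into per-channel generators, assuming the channelize above is in scope:

stereo = iter([(0.1, -0.1), (0.2, -0.2), (0.3, -0.3)])
left, right = channelize(stereo, 2)
assert list(left) == [0.1, 0.2, 0.3]
assert list(right) == [-0.1, -0.2, -0.3]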
def file_is_seekable(f):
'''
Returns True if file `f` is seekable, and False if not
Useful to determine, for example, if `f` is STDOUT to
a pipe.
'''
try:
f.tell()
logger.info("File is seekable!")
    except IOError as e:
if e.errno == errno.ESPIPE:
return False
else:
raise
return True
|
Returns True if file `f` is seekable, and False if not
Useful to determine, for example, if `f` is STDOUT to
a pipe.
|
entailment
|
def sample(generator, min=-1, max=1, width=SAMPLE_WIDTH):
'''Convert audio waveform generator into packed sample generator.'''
    # select unsigned char, signed short, or signed int based on sample width
fmt = { 1: '<B', 2: '<h', 4: '<i' }[width]
return (struct.pack(fmt, int(sample)) for sample in \
normalize(hard_clip(generator, min, max),\
min, max, -2**(width * 8 - 1), 2**(width * 8 - 1) - 1))
|
Convert audio waveform generator into packed sample generator.
|
entailment
|
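For the default 16-bit width, normalize() maps the waveform onto the signed short range, which struct then packs little-endian:

import struct

assert struct.pack('<h', 32767) == b'\xff\x7f'    # full-scale positive
assert struct.pack('<h', -32768) == b'\x00\x80'   # full-scale negative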
def sample_all(generators, *args, **kwargs):
'''Convert list of audio waveform generators into list of packed sample generators.'''
return [sample(gen, *args, **kwargs) for gen in generators]
|
Convert list of audio waveform generators into list of packed sample generators.
|
entailment
|
def buffer(stream, buffer_size=BUFFER_SIZE):
'''
Buffer the generator into byte strings of buffer_size samples
Return a generator that outputs reasonably sized byte strings
containing buffer_size samples from the generator stream.
    This allows us to output big chunks of the audio stream to
    disk at once for faster writes.
'''
i = iter(stream)
return iter(lambda: "".join(itertools.islice(i, buffer_size)), "")
|
Buffer the generator into byte strings of buffer_size samples
Return a generator that outputs reasonably sized byte strings
containing buffer_size samples from the generator stream.
This allows us to output big chunks of the audio stream to
disk at once for faster writes.
|
entailment
|
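The `iter(callable, sentinel)` idiom drives the chunking: the lambda is called repeatedly until it returns the empty sentinel. The same trick on a toy stream (using bytes here rather than the Python 2 str above):

import itertools

stream = iter([b"a", b"b", b"c", b"d", b"e"])
chunks = iter(lambda: b"".join(itertools.islice(stream, 2)), b"")
assert list(chunks) == [b"ab", b"cd", b"e"]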
def wave_module_patched():
'''True if wave module can write data size of 0xFFFFFFFF, False otherwise.'''
f = StringIO()
w = wave.open(f, "wb")
w.setparams((1, 2, 44100, 0, "NONE", "no compression"))
patched = True
try:
w.setnframes((0xFFFFFFFF - 36) / w.getnchannels() / w.getsampwidth())
w._ensure_header_written(0)
except struct.error:
patched = False
logger.info("Error setting wave data size to 0xFFFFFFFF; wave module unpatched, setting sata size to 0x7FFFFFFF")
w.setnframes((0x7FFFFFFF - 36) / w.getnchannels() / w.getsampwidth())
w._ensure_header_written(0)
return patched
|
True if wave module can write data size of 0xFFFFFFFF, False otherwise.
|
entailment
|
def cache_finite_samples(f):
'''Decorator to cache audio samples produced by the wrapped generator.'''
cache = {}
def wrap(*args):
key = FRAME_RATE, args
if key not in cache:
cache[key] = [sample for sample in f(*args)]
return (sample for sample in cache[key])
return wrap
|
Decorator to cache audio samples produced by the wrapped generator.
|
entailment
|
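The same memoization pattern in a standalone form, caching the materialized samples and replaying them as a fresh iterator per call:

def cache_samples(f):
    cache = {}
    def wrap(*args):
        if args not in cache:
            cache[args] = list(f(*args))
        return iter(cache[args])
    return wrap

calls = []

@cache_samples
def ramp(n):
    calls.append(n)
    return (i / float(n) for i in range(n))

list(ramp(4)); list(ramp(4))
assert calls == [4]   # the generator body ran only once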
def play(channels, blocking=True, raw_samples=False):
'''
Play the contents of the generator using PyAudio
Play to the system soundcard using PyAudio. PyAudio, an otherwise optional
    dependency, must be installed for this feature to work.
'''
if not pyaudio_loaded:
raise Exception("Soundcard playback requires PyAudio. Install with `pip install pyaudio`.")
channel_count = 1 if hasattr(channels, "next") else len(channels)
wavgen = wav_samples(channels, raw_samples=raw_samples)
p = pyaudio.PyAudio()
stream = p.open(
format=p.get_format_from_width(SAMPLE_WIDTH),
channels=channel_count,
rate=FRAME_RATE,
output=True,
stream_callback=_pyaudio_callback(wavgen) if not blocking else None
)
if blocking:
try:
for chunk in buffer(wavgen, 1024):
stream.write(chunk)
except Exception:
raise
finally:
if not stream.is_stopped():
stream.stop_stream()
try:
stream.close()
except Exception:
pass
else:
return stream
|
Play the contents of the generator using PyAudio
Play to the system soundcard using PyAudio. PyAudio, an otherwise optional
dependency, must be installed for this feature to work.
|
entailment
|
def windowed_sum_slow(arrays, span, t=None, indices=None, tpowers=0,
period=None, subtract_mid=False):
"""Compute the windowed sum of the given arrays.
This is a slow function, used primarily for testing and validation
of the faster version of ``windowed_sum()``
Parameters
----------
arrays : tuple of arrays
arrays to window
span : int or array of ints
The span to use for the sum at each point. If array is provided,
it must be broadcastable with ``indices``
indices : array
the indices of the center of the desired windows. If ``None``,
the indices are assumed to be ``range(len(arrays[0]))`` though
these are not actually instantiated.
t : array (optional)
Times associated with the arrays
tpowers : list (optional)
Powers of t for each array sum
period : float (optional)
Period to use, if times are periodic. If supplied, input times
must be arranged such that (t % period) is sorted!
subtract_mid : boolean
If true, then subtract the middle value from each sum
Returns
-------
arrays : tuple of ndarrays
arrays containing the windowed sum of each input array
"""
span = np.asarray(span, dtype=int)
if not np.all(span > 0):
raise ValueError("span values must be positive")
arrays = tuple(map(np.asarray, arrays))
N = arrays[0].size
if not all(a.shape == (N,) for a in arrays):
raise ValueError("sizes of provided arrays must match")
t_input = t
if t is not None:
t = np.asarray(t)
if t.shape != (N,):
raise ValueError("shape of t must match shape of arrays")
else:
t = np.ones(N)
tpowers = tpowers + np.zeros(len(arrays))
if len(tpowers) != len(arrays):
raise ValueError("tpowers must be broadcastable with number of arrays")
if period:
if t_input is None:
raise ValueError("periodic requires t to be provided")
t = t % period
if indices is None:
indices = np.arange(N)
spans, indices = np.broadcast_arrays(span, indices)
results = []
for tpower, array in zip(tpowers, arrays):
if period:
result = [sum(array[j % N]
* (t[j % N] + (j // N) * period) ** tpower
for j in range(i - s // 2,
i - s // 2 + s)
if not (subtract_mid and j == i))
for i, s in np.broadcast(indices, spans)]
else:
result = [sum(array[j] * t[j] ** tpower
for j in range(max(0, i - s // 2),
min(N, i - s // 2 + s))
if not (subtract_mid and j == i))
for i, s in np.broadcast(indices, spans)]
results.append(np.asarray(result))
return tuple(results)
|
Compute the windowed sum of the given arrays.
This is a slow function, used primarily for testing and validation
of the faster version of ``windowed_sum()``
Parameters
----------
arrays : tuple of arrays
arrays to window
span : int or array of ints
The span to use for the sum at each point. If array is provided,
it must be broadcastable with ``indices``
indices : array
the indices of the center of the desired windows. If ``None``,
the indices are assumed to be ``range(len(arrays[0]))`` though
these are not actually instantiated.
t : array (optional)
Times associated with the arrays
tpowers : list (optional)
Powers of t for each array sum
period : float (optional)
Period to use, if times are periodic. If supplied, input times
must be arranged such that (t % period) is sorted!
subtract_mid : boolean
If true, then subtract the middle value from each sum
Returns
-------
arrays : tuple of ndarrays
arrays containing the windowed sum of each input array
|
entailment
|
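Concretely, with span=3 and no period, each output sums an element with its immediate neighbors, truncating at the edges (assuming the function above is in scope):

import numpy as np

a = np.array([1.0, 2.0, 3.0, 4.0])
(result,) = windowed_sum_slow([a], span=3)
assert np.allclose(result, [3.0, 6.0, 9.0, 7.0])   # [1+2, 1+2+3, 2+3+4, 3+4]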
def windowed_sum(arrays, span, t=None, indices=None, tpowers=0,
period=None, subtract_mid=False):
"""Compute the windowed sum of the given arrays.
Parameters
----------
arrays : tuple of arrays
arrays to window
span : int or array of ints
The span to use for the sum at each point. If array is provided,
it must be broadcastable with ``indices``
indices : array
the indices of the center of the desired windows. If ``None``,
the indices are assumed to be ``range(len(arrays[0]))`` though
these are not actually instantiated.
t : array (optional)
Times associated with the arrays
tpowers : list (optional)
Powers of t for each array sum
period : float (optional)
Period to use, if times are periodic. If supplied, input times
must be arranged such that (t % period) is sorted!
subtract_mid : boolean
If true, then subtract the middle value from each sum
Returns
-------
arrays : tuple of ndarrays
arrays containing the windowed sum of each input array
"""
span = np.asarray(span, dtype=int)
if not np.all(span > 0):
raise ValueError("span values must be positive")
arrays = tuple(map(np.asarray, arrays))
N = arrays[0].size
if not all(a.shape == (N,) for a in arrays):
raise ValueError("sizes of provided arrays must match")
t_input = t
if t is not None:
t = np.asarray(t)
if t.shape != (N,):
raise ValueError("shape of t must match shape of arrays "
"t -> {0} arr -> {1}".format(t.shape,
arrays[0].shape))
else:
# XXX: special-case no t?
t = np.ones(N)
tpowers = np.asarray(tpowers) + np.zeros(len(arrays))
if indices is not None:
span, indices = np.broadcast_arrays(span, indices)
# For the periodic case, re-call the function with padded arrays
if period:
if t_input is None:
raise ValueError("periodic requires t to be provided")
t = t % period
t, arrays, sl = _pad_arrays(t, arrays, indices, span, period)
if len(t) > N:
# arrays are padded. Recursively call windowed_sum() and return.
if span.ndim == 0 and indices is None:
# fixed-span/no index case is done faster this way
arrs = windowed_sum(arrays, span, t=t, indices=indices,
tpowers=tpowers, period=None,
subtract_mid=subtract_mid)
return tuple([a[sl] for a in arrs])
else:
# this works for variable span and general indices
if indices is None:
indices = np.arange(N)
indices = indices + sl.start
return windowed_sum(arrays, span, t=t, indices=indices,
tpowers=tpowers, period=None,
subtract_mid=subtract_mid)
else:
# No padding needed! We can carry-on as if it's a non-periodic case
period = None
# The rest of the algorithm now proceeds without reference to the period
# just as a sanity check...
assert not period
if span.ndim == 0:
# fixed-span case. Because of the checks & manipulations above
# we know here that indices=None
assert indices is None
window = np.ones(span)
def convolve_same(a, window):
if len(window) <= len(a):
res = np.convolve(a, window, mode='same')
else:
res = np.convolve(a, window, mode='full')
start = (len(window) - 1) // 2
res = res[start:start + len(a)]
return res
results = [convolve_same(a * t ** tp, window)
for a, tp in zip(arrays, tpowers)]
indices = slice(None)
else:
# variable-span case. Use reduceat() in a clever way for speed.
if indices is None:
indices = np.arange(len(span))
# we checked this above, but just as a sanity check assert it here...
assert span.shape == indices.shape
mins = np.asarray(indices) - span // 2
results = []
for a, tp in zip(arrays, tpowers):
ranges = np.vstack([np.maximum(0, mins),
np.minimum(len(a), mins+span)]).ravel('F')
results.append(np.add.reduceat(np.append(a * t ** tp, 0),
ranges)[::2])
# Subtract the midpoint if required: this is used in cross-validation
if subtract_mid:
results = [r - a[indices] * t[indices] ** tp
for r, a, tp in zip(results, arrays, tpowers)]
return tuple(results)
|
Compute the windowed sum of the given arrays.
Parameters
----------
arrays : tuple of arrays
arrays to window
span : int or array of ints
The span to use for the sum at each point. If array is provided,
it must be broadcastable with ``indices``
indices : array
the indices of the center of the desired windows. If ``None``,
the indices are assumed to be ``range(len(arrays[0]))`` though
these are not actually instantiated.
t : array (optional)
Times associated with the arrays
tpowers : list (optional)
Powers of t for each array sum
period : float (optional)
Period to use, if times are periodic. If supplied, input times
must be arranged such that (t % period) is sorted!
subtract_mid : boolean
If true, then subtract the middle value from each sum
Returns
-------
arrays : tuple of ndarrays
arrays containing the windowed sum of each input array
|
entailment
|
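A cross-check of the fast implementation against the slow reference on a periodic input (recall t % period must be sorted), assuming both functions are in scope:

import numpy as np

rng = np.random.RandomState(0)
t = np.sort(rng.rand(30))          # already sorted modulo period=1.0
a = rng.randn(30)
fast, = windowed_sum([a], span=5, t=t, period=1.0)
slow, = windowed_sum_slow([a], span=5, t=t, period=1.0)
assert np.allclose(fast, slow)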
def _pad_arrays(t, arrays, indices, span, period):
"""Internal routine to pad arrays for periodic models."""
N = len(t)
if indices is None:
indices = np.arange(N)
pad_left = max(0, 0 - np.min(indices - span // 2))
pad_right = max(0, np.max(indices + span - span // 2) - (N - 1))
if pad_left + pad_right > 0:
Nright, pad_right = divmod(pad_right, N)
Nleft, pad_left = divmod(pad_left, N)
t = np.concatenate([t[N - pad_left:] - (Nleft + 1) * period]
+ [t + i * period
for i in range(-Nleft, Nright + 1)]
+ [t[:pad_right] + (Nright + 1) * period])
arrays = [np.concatenate([a[N - pad_left:]]
+ (Nleft + Nright + 1) * [a]
+ [a[:pad_right]])
for a in arrays]
pad_left = pad_left % N
Nright = pad_right / N
pad_right = pad_right % N
return (t, arrays, slice(pad_left + Nleft * N,
pad_left + (Nleft + 1) * N))
else:
return (t, arrays, slice(None))
|
Internal routine to pad arrays for periodic models.
|
entailment
|
def get_i2c_bus_numbers(glober = glob.glob):
"""Search all the available I2C devices in the system"""
res = []
for device in glober("/dev/i2c-*"):
r = re.match("/dev/i2c-([\d]){1,2}", device)
res.append(int(r.group(1)))
return res
|
Search all the available I2C devices in the system
|
entailment
|
def get_led_register_from_name(self, name):
"""Parse the name for led number
:param name: attribute name, like: led_1
"""
res = re.match('^led_([0-9]{1,2})$', name)
if res is None:
raise AttributeError("Unknown attribute: '%s'" % name)
led_num = int(res.group(1))
if led_num < 0 or led_num > 15:
raise AttributeError("Unknown attribute: '%s'" % name)
return self.calc_led_register(led_num)
|
Parse the name for led number
:param name: attribute name, like: led_1
|
entailment
|
def set_pwm(self, led_num, value):
"""Set PWM value for the specified LED
:param led_num: LED number (0-15)
:param value: the 12 bit value (0-4095)
"""
self.__check_range('led_number', led_num)
self.__check_range('led_value', value)
register_low = self.calc_led_register(led_num)
self.write(register_low, value_low(value))
self.write(register_low + 1, value_high(value))
|
Set PWM value for the specified LED
:param led_num: LED number (0-15)
:param value: the 12 bit value (0-4095)
|
entailment
|
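value_low/value_high are not shown above; plausible definitions (an assumption, not the module's own code) would split the 12-bit value across the two 8-bit registers like this:

def value_low(value):
    return value & 0xFF           # lower 8 bits

def value_high(value):
    return (value >> 8) & 0x0F    # upper 4 bits of a 12-bit value

assert (value_high(0xABC), value_low(0xABC)) == (0x0A, 0xBC)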
def get_pwm(self, led_num):
"""Generic getter for all LED PWM value"""
self.__check_range('led_number', led_num)
register_low = self.calc_led_register(led_num)
return self.__get_led_value(register_low)
|
Generic getter for all LED PWM value
|
entailment
|
def sleep(self):
"""Send the controller to sleep"""
logger.debug("Sleep the controller")
self.write(Registers.MODE_1, self.mode_1 | (1 << Mode1.SLEEP))
|
Send the controller to sleep
|
entailment
|
def write(self, reg, value):
"""Write raw byte value to the specified register
:param reg: the register number (0-69, 250-255)
:param value: byte value
"""
# TODO: check reg: 0-69, 250-255
self.__check_range('register_value', value)
logger.debug("Write '%s' to register '%s'" % (value, reg))
self.__bus.write_byte_data(self.__address, reg, value)
|
Write raw byte value to the specified register
:param reg: the register number (0-69, 250-255)
:param value: byte value
|
entailment
|
def set_pwm_frequency(self, value):
"""Set the frequency for all PWM output
:param value: the frequency in Hz
"""
self.__check_range('pwm_frequency', value)
reg_val = self.calc_pre_scale(value)
logger.debug("Calculated prescale value is %s" % reg_val)
self.sleep()
self.write(Registers.PRE_SCALE, reg_val)
self.wake()
|
Set the frequency for all PWM output
:param value: the frequency in Hz
|
entailment
|
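The register layout here (16 channels, 12-bit values, PRE_SCALE, a MODE_1 sleep bit) matches the common PCA9685 controller; if so, the datasheet prescale formula, sketched as an assumption about calc_pre_scale, is:

def pca9685_pre_scale(freq_hz, osc_hz=25000000):
    # PCA9685 datasheet: prescale = round(osc / (4096 * update_rate)) - 1,
    # with a 25 MHz internal oscillator
    return int(round(osc_hz / (4096.0 * freq_hz))) - 1

assert pca9685_pre_scale(200) == 30   # 0x1E, the datasheet's own example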
def levenshtein_norm(source, target):
"""Calculates the normalized Levenshtein distance between two string
arguments. The result will be a float in the range [0.0, 1.0], with 1.0
signifying the biggest possible distance between strings with these lengths
"""
# Compute Levenshtein distance using helper function. The max is always
# just the length of the longer string, so this is used to normalize result
# before returning it
distance = _levenshtein_compute(source, target, False)
return float(distance) / max(len(source), len(target))
|
Calculates the normalized Levenshtein distance between two string
arguments. The result will be a float in the range [0.0, 1.0], with 1.0
signifying the biggest possible distance between strings with these lengths
|
entailment
|
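A minimal reference distance (not the module's own _levenshtein_compute) to sanity-check the normalization:

def levenshtein(s, t):
    prev = list(range(len(t) + 1))
    for i, cs in enumerate(s, 1):
        cur = [i]
        for j, ct in enumerate(t, 1):
            cur.append(min(prev[j] + 1,                 # deletion
                           cur[j - 1] + 1,              # insertion
                           prev[j - 1] + (cs != ct)))   # substitution
        prev = cur
    return prev[-1]

assert levenshtein("kitten", "sitting") == 3
# so levenshtein_norm("kitten", "sitting") should equal 3 / 7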
def check_valid_color(color):
"""Check if the color provided by the user is valid.
If color is invalid the default is returned.
"""
if color in list(mcolors.CSS4_COLORS.keys()) + ["#4CB391"]:
logging.info("Nanoplotter: Valid color {}.".format(color))
return color
else:
logging.info("Nanoplotter: Invalid color {}, using default.".format(color))
sys.stderr.write("Invalid color {}, using default.\n".format(color))
return "#4CB391"
|
Check if the color provided by the user is valid.
If color is invalid the default is returned.
|
entailment
|
def check_valid_format(figformat):
"""Check if the specified figure format is valid.
If format is invalid the default is returned.
Probably installation-dependent
"""
fig = plt.figure()
if figformat in list(fig.canvas.get_supported_filetypes().keys()):
logging.info("Nanoplotter: valid output format {}".format(figformat))
return figformat
else:
logging.info("Nanoplotter: invalid output format {}".format(figformat))
sys.stderr.write("Invalid format {}, using default.\n".format(figformat))
return "png"
|
Check if the specified figure format is valid.
If format is invalid the default is returned.
Probably installation-dependent
|
entailment
|
def scatter(x, y, names, path, plots, color="#4CB391", figformat="png",
stat=None, log=False, minvalx=0, minvaly=0, title=None, plot_settings=None):
"""Create bivariate plots.
Create four types of bivariate plots of x vs y, containing marginal summaries
-A scatter plot with histograms on axes
-A hexagonal binned plot with histograms on axes
-A kernel density plot with density curves on axes
-A pauvre-style plot using code from https://github.com/conchoecia/pauvre
"""
logging.info("Nanoplotter: Creating {} vs {} plots using statistics from {} reads.".format(
names[0], names[1], x.size))
if not contains_variance([x, y], names):
return []
sns.set(style="ticks", **plot_settings)
maxvalx = np.amax(x)
maxvaly = np.amax(y)
plots_made = []
if plots["hex"]:
hex_plot = Plot(
path=path + "_hex." + figformat,
title="{} vs {} plot using hexagonal bins".format(names[0], names[1]))
plot = sns.jointplot(
x=x,
y=y,
kind="hex",
color=color,
stat_func=stat,
space=0,
xlim=(minvalx, maxvalx),
ylim=(minvaly, maxvaly),
height=10)
plot.set_axis_labels(names[0], names[1])
if log:
hex_plot.title = hex_plot.title + " after log transformation of read lengths"
ticks = [10**i for i in range(10) if not 10**i > 10 * (10**maxvalx)]
plot.ax_joint.set_xticks(np.log10(ticks))
plot.ax_marg_x.set_xticks(np.log10(ticks))
plot.ax_joint.set_xticklabels(ticks)
plt.subplots_adjust(top=0.90)
plot.fig.suptitle(title or "{} vs {} plot".format(names[0], names[1]), fontsize=25)
hex_plot.fig = plot
hex_plot.save(format=figformat)
plots_made.append(hex_plot)
sns.set(style="darkgrid", **plot_settings)
if plots["dot"]:
dot_plot = Plot(
path=path + "_dot." + figformat,
title="{} vs {} plot using dots".format(names[0], names[1]))
plot = sns.jointplot(
x=x,
y=y,
kind="scatter",
color=color,
stat_func=stat,
xlim=(minvalx, maxvalx),
ylim=(minvaly, maxvaly),
space=0,
height=10,
joint_kws={"s": 1})
plot.set_axis_labels(names[0], names[1])
if log:
dot_plot.title = dot_plot.title + " after log transformation of read lengths"
ticks = [10**i for i in range(10) if not 10**i > 10 * (10**maxvalx)]
plot.ax_joint.set_xticks(np.log10(ticks))
plot.ax_marg_x.set_xticks(np.log10(ticks))
plot.ax_joint.set_xticklabels(ticks)
plt.subplots_adjust(top=0.90)
plot.fig.suptitle(title or "{} vs {} plot".format(names[0], names[1]), fontsize=25)
dot_plot.fig = plot
dot_plot.save(format=figformat)
plots_made.append(dot_plot)
if plots["kde"]:
idx = np.random.choice(x.index, min(2000, len(x)), replace=False)
kde_plot = Plot(
path=path + "_kde." + figformat,
title="{} vs {} plot using a kernel density estimation".format(names[0], names[1]))
plot = sns.jointplot(
x=x[idx],
y=y[idx],
kind="kde",
clip=((0, np.Inf), (0, np.Inf)),
xlim=(minvalx, maxvalx),
ylim=(minvaly, maxvaly),
space=0,
color=color,
stat_func=stat,
shade_lowest=False,
height=10)
plot.set_axis_labels(names[0], names[1])
if log:
kde_plot.title = kde_plot.title + " after log transformation of read lengths"
ticks = [10**i for i in range(10) if not 10**i > 10 * (10**maxvalx)]
plot.ax_joint.set_xticks(np.log10(ticks))
plot.ax_marg_x.set_xticks(np.log10(ticks))
plot.ax_joint.set_xticklabels(ticks)
plt.subplots_adjust(top=0.90)
plot.fig.suptitle(title or "{} vs {} plot".format(names[0], names[1]), fontsize=25)
kde_plot.fig = plot
kde_plot.save(format=figformat)
plots_made.append(kde_plot)
if plots["pauvre"] and names == ['Read lengths', 'Average read quality'] and log is False:
pauvre_plot = Plot(
path=path + "_pauvre." + figformat,
title="{} vs {} plot using pauvre-style @conchoecia".format(names[0], names[1]))
sns.set(style="white", **plot_settings)
margin_plot(df=pd.DataFrame({"length": x, "meanQual": y}),
Y_AXES=False,
title=title or "Length vs Quality in Pauvre-style",
plot_maxlen=None,
plot_minlen=0,
plot_maxqual=None,
plot_minqual=0,
lengthbin=None,
qualbin=None,
BASENAME="whatever",
path=pauvre_plot.path,
fileform=[figformat],
dpi=600,
TRANSPARENT=True,
QUIET=True)
plots_made.append(pauvre_plot)
plt.close("all")
return plots_made
|
Create bivariate plots.
Create four types of bivariate plots of x vs y, containing marginal summaries
-A scatter plot with histograms on axes
-A hexagonal binned plot with histograms on axes
-A kernel density plot with density curves on axes
-A pauvre-style plot using code from https://github.com/conchoecia/pauvre
|
entailment
|
def contains_variance(arrays, names):
"""
Make sure both arrays for bivariate ("scatter") plot have a stddev > 0
"""
for ar, name in zip(arrays, names):
if np.std(ar) == 0:
sys.stderr.write(
"No variation in '{}', skipping bivariate plots.\n".format(name.lower()))
logging.info("Nanoplotter: No variation in {}, skipping bivariate plot".format(name))
return False
    return True
|
Make sure both arrays for bivariate ("scatter") plot have a stddev > 0
|
entailment
|
def length_plots(array, name, path, title=None, n50=None, color="#4CB391", figformat="png"):
"""Create histogram of normal and log transformed read lengths."""
logging.info("Nanoplotter: Creating length plots for {}.".format(name))
maxvalx = np.amax(array)
if n50:
logging.info("Nanoplotter: Using {} reads with read length N50 of {}bp and maximum of {}bp."
.format(array.size, n50, maxvalx))
else:
logging.info("Nanoplotter: Using {} reads maximum of {}bp.".format(array.size, maxvalx))
plots = []
HistType = namedtuple('HistType', 'weight name ylabel')
for h_type in [HistType(None, "", "Number of reads"),
HistType(array, "Weighted ", "Number of bases")]:
histogram = Plot(
path=path + h_type.name.replace(" ", "_") + "Histogram"
+ name.replace(' ', '') + "." + figformat,
title=h_type.name + "Histogram of read lengths")
ax = sns.distplot(
a=array,
kde=False,
hist=True,
bins=max(round(int(maxvalx) / 500), 10),
color=color,
hist_kws=dict(weights=h_type.weight,
edgecolor=color,
linewidth=0.2,
alpha=0.8))
if n50:
plt.axvline(n50)
plt.annotate('N50', xy=(n50, np.amax([h.get_height() for h in ax.patches])), size=8)
ax.set(
xlabel='Read length',
ylabel=h_type.ylabel,
title=title or histogram.title)
plt.ticklabel_format(style='plain', axis='y')
histogram.fig = ax.get_figure()
histogram.save(format=figformat)
plt.close("all")
log_histogram = Plot(
path=path + h_type.name.replace(" ", "_") + "LogTransformed_Histogram"
+ name.replace(' ', '') + "." + figformat,
title=h_type.name + "Histogram of read lengths after log transformation")
ax = sns.distplot(
a=np.log10(array),
kde=False,
hist=True,
color=color,
hist_kws=dict(weights=h_type.weight,
edgecolor=color,
linewidth=0.2,
alpha=0.8))
ticks = [10**i for i in range(10) if not 10**i > 10 * maxvalx]
ax.set(
xticks=np.log10(ticks),
xticklabels=ticks,
xlabel='Read length',
ylabel=h_type.ylabel,
title=title or log_histogram.title)
if n50:
plt.axvline(np.log10(n50))
plt.annotate('N50', xy=(np.log10(n50), np.amax(
[h.get_height() for h in ax.patches])), size=8)
plt.ticklabel_format(style='plain', axis='y')
log_histogram.fig = ax.get_figure()
log_histogram.save(format=figformat)
plt.close("all")
plots.extend([histogram, log_histogram])
plots.append(yield_by_minimal_length_plot(array=array,
name=name,
path=path,
title=title,
color=color,
figformat=figformat))
return plots
|
Create histogram of normal and log transformed read lengths.
|
entailment
|
def make_layout(maxval):
"""Make the physical layout of the MinION flowcell.
based on https://bioinformatics.stackexchange.com/a/749/681
returned as a numpy array
"""
if maxval > 512:
return Layout(
structure=np.concatenate([np.array([list(range(10 * i + 1, i * 10 + 11))
for i in range(25)]) + j
for j in range(0, 3000, 250)],
axis=1),
template=np.zeros((25, 120)),
xticks=range(1, 121),
yticks=range(1, 26))
else:
layoutlist = []
for i, j in zip(
[33, 481, 417, 353, 289, 225, 161, 97],
[8, 456, 392, 328, 264, 200, 136, 72]):
for n in range(4):
layoutlist.append(list(range(i + n * 8, (i + n * 8) + 8, 1)) +
list(range(j + n * 8, (j + n * 8) - 8, -1)))
return Layout(
structure=np.array(layoutlist).transpose(),
template=np.zeros((16, 32)),
xticks=range(1, 33),
yticks=range(1, 17))
|
Make the physical layout of the MinION flowcell.
based on https://bioinformatics.stackexchange.com/a/749/681
returned as a numpy array
|
entailment
|
def spatial_heatmap(array, path, title=None, color="Greens", figformat="png"):
"""Taking channel information and creating post run channel activity plots."""
logging.info("Nanoplotter: Creating heatmap of reads per channel using {} reads."
.format(array.size))
activity_map = Plot(
path=path + "." + figformat,
title="Number of reads generated per channel")
layout = make_layout(maxval=np.amax(array))
valueCounts = pd.value_counts(pd.Series(array))
for entry in valueCounts.keys():
layout.template[np.where(layout.structure == entry)] = valueCounts[entry]
plt.figure()
ax = sns.heatmap(
data=pd.DataFrame(layout.template, index=layout.yticks, columns=layout.xticks),
xticklabels="auto",
yticklabels="auto",
square=True,
cbar_kws={"orientation": "horizontal"},
cmap=color,
linewidths=0.20)
ax.set_title(title or activity_map.title)
activity_map.fig = ax.get_figure()
activity_map.save(format=figformat)
plt.close("all")
return [activity_map]
|
Taking channel information and creating post run channel activity plots.
|
entailment
|
def main(database_dir, target_dir):
"""Generate CSV files from a CronosPro/CronosPlus database."""
if not os.path.isdir(database_dir):
raise click.ClickException("Database directory does not exist!")
    try:
        os.makedirs(target_dir)
    except OSError:
        pass
try:
parse(database_dir, target_dir)
except CronosException as ex:
raise click.ClickException(ex.message)
|
Generate CSV files from a CronosPro/CronosPlus database.
|
entailment
|
def check_valid_time_and_sort(df, timescol, days=5, warning=True):
"""Check if the data contains reads created within the same `days` timeframe.
if not, print warning and only return part of the data which is within `days` days
    Resetting the index twice to also get an "index" column for plotting the cum_yield_reads plot
"""
timediff = (df[timescol].max() - df[timescol].min()).days
if timediff < days:
return df.sort_values(timescol).reset_index(drop=True).reset_index()
else:
if warning:
sys.stderr.write(
"\nWarning: data generated is from more than {} days.\n".format(str(days)))
sys.stderr.write("Likely this indicates you are combining multiple runs.\n")
sys.stderr.write(
"Plots based on time are invalid and therefore truncated to first {} days.\n\n"
.format(str(days)))
logging.warning("Time plots truncated to first {} days: invalid timespan: {} days"
.format(str(days), str(timediff)))
return df[df[timescol] < timedelta(days=days)] \
.sort_values(timescol) \
.reset_index(drop=True) \
.reset_index()
|
Check if the data contains reads created within the same `days` timeframe.
if not, print warning and only return part of the data which is within `days` days
Resetting the index twice to also get an "index" column for plotting the cum_yield_reads plot
|
entailment
|
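A toy example of the double reset_index, assuming the function above is in scope and start times are timedeltas since the run began:

import pandas as pd
from datetime import timedelta

df = pd.DataFrame({"start_time": [timedelta(hours=h) for h in (2, 0, 1)]})
out = check_valid_time_and_sort(df, "start_time")
assert out["start_time"].is_monotonic_increasing
assert list(out["index"]) == [0, 1, 2]   # the extra column used for plotting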
def time_plots(df, path, title=None, color="#4CB391", figformat="png",
log_length=False, plot_settings=None):
"""Making plots of time vs read length, time vs quality and cumulative yield."""
dfs = check_valid_time_and_sort(df, "start_time")
logging.info("Nanoplotter: Creating timeplots using {} reads.".format(len(dfs)))
cumyields = cumulative_yield(dfs=dfs.set_index("start_time"),
path=path,
figformat=figformat,
title=title,
color=color)
reads_pores_over_time = plot_over_time(dfs=dfs.set_index("start_time"),
path=path,
figformat=figformat,
title=title,
color=color)
violins = violin_plots_over_time(dfs=dfs,
path=path,
figformat=figformat,
title=title,
log_length=log_length,
plot_settings=plot_settings)
return cumyields + reads_pores_over_time + violins
|
Making plots of time vs read length, time vs quality and cumulative yield.
|
entailment
|
def violin_or_box_plot(df, y, figformat, path, y_name,
title=None, plot="violin", log=False, palette=None):
"""Create a violin or boxplot from the received DataFrame.
The x-axis should be divided based on the 'dataset' column,
the y-axis is specified in the arguments
"""
comp = Plot(path=path + "NanoComp_" + y.replace(' ', '_') + '.' + figformat,
title="Comparing {}".format(y))
if y == "quals":
comp.title = "Comparing base call quality scores"
if plot == 'violin':
logging.info("Nanoplotter: Creating violin plot for {}.".format(y))
process_violin_and_box(ax=sns.violinplot(x="dataset",
y=y,
data=df,
inner=None,
cut=0,
palette=palette,
linewidth=0),
log=log,
plot_obj=comp,
title=title,
y_name=y_name,
figformat=figformat,
ymax=np.amax(df[y]))
elif plot == 'box':
logging.info("Nanoplotter: Creating box plot for {}.".format(y))
process_violin_and_box(ax=sns.boxplot(x="dataset",
y=y,
data=df,
palette=palette),
log=log,
plot_obj=comp,
title=title,
y_name=y_name,
figformat=figformat,
ymax=np.amax(df[y]))
elif plot == 'ridge':
logging.info("Nanoplotter: Creating ridges plot for {}.".format(y))
comp.fig, axes = joypy.joyplot(df,
by="dataset",
column=y,
title=title or comp.title,
x_range=[-0.05, np.amax(df[y])])
if log:
xticks = [float(i.get_text()) for i in axes[-1].get_xticklabels()]
axes[-1].set_xticklabels([10**i for i in xticks])
axes[-1].set_xticklabels(axes[-1].get_xticklabels(), rotation=30, ha='center')
comp.save(format=figformat)
else:
logging.error("Unknown comp plot type {}".format(plot))
sys.exit("Unknown comp plot type {}".format(plot))
plt.close("all")
return [comp]
|
Create a violin or boxplot from the received DataFrame.
The x-axis should be divided based on the 'dataset' column,
the y-axis is specified in the arguments
|
entailment
|
def output_barplot(df, figformat, path, title=None, palette=None):
"""Create barplots based on number of reads and total sum of nucleotides sequenced."""
logging.info("Nanoplotter: Creating barplots for number of reads and total throughput.")
read_count = Plot(path=path + "NanoComp_number_of_reads." + figformat,
title="Comparing number of reads")
ax = sns.countplot(x="dataset",
data=df,
palette=palette)
ax.set(ylabel='Number of reads',
title=title or read_count.title)
plt.xticks(rotation=30, ha='center')
read_count.fig = ax.get_figure()
read_count.save(format=figformat)
plt.close("all")
throughput_bases = Plot(path=path + "NanoComp_total_throughput." + figformat,
title="Comparing throughput in gigabases")
if "aligned_lengths" in df:
throughput = df.groupby('dataset')['aligned_lengths'].sum()
ylabel = 'Total gigabase aligned'
else:
throughput = df.groupby('dataset')['lengths'].sum()
ylabel = 'Total gigabase sequenced'
ax = sns.barplot(x=list(throughput.index),
y=throughput / 1e9,
palette=palette,
order=df["dataset"].unique())
ax.set(ylabel=ylabel,
title=title or throughput_bases.title)
plt.xticks(rotation=30, ha='center')
throughput_bases.fig = ax.get_figure()
throughput_bases.save(format=figformat)
plt.close("all")
return read_count, throughput_bases
|
Create barplots based on number of reads and total sum of nucleotides sequenced.
|
entailment
|
def overlay_histogram(df, path, palette=None):
"""
Use plotly to create an overlay of length histograms
Return html code, but also save as png
Only has 10 colors, which get recycled up to 5 times.
"""
if palette is None:
palette = plotly.colors.DEFAULT_PLOTLY_COLORS * 5
hist = Plot(path=path + "NanoComp_OverlayHistogram.html",
title="Histogram of read lengths")
hist.html, hist.fig = plot_overlay_histogram(df, palette, title=hist.title)
hist.save()
hist_norm = Plot(path=path + "NanoComp_OverlayHistogram_Normalized.html",
title="Normalized histogram of read lengths")
hist_norm.html, hist_norm.fig = plot_overlay_histogram(
df, palette, title=hist_norm.title, histnorm="probability")
hist_norm.save()
log_hist = Plot(path=path + "NanoComp_OverlayLogHistogram.html",
title="Histogram of log transformed read lengths")
log_hist.html, log_hist.fig = plot_log_histogram(df, palette, title=log_hist.title)
log_hist.save()
log_hist_norm = Plot(path=path + "NanoComp_OverlayLogHistogram_Normalized.html",
title="Normalized histogram of log transformed read lengths")
log_hist_norm.html, log_hist_norm.fig = plot_log_histogram(
df, palette, title=log_hist_norm.title, histnorm="probability")
log_hist_norm.save()
return [hist, hist_norm, log_hist, log_hist_norm]
|
Use plotly to create an overlay of length histograms
Return html code, but also save as png
Only has 10 colors, which get recycled up to 5 times.
|
entailment
|
def plot_log_histogram(df, palette, title, histnorm=""):
"""
Plot overlaying histograms with log transformation of length
Return both html and fig for png
"""
data = [go.Histogram(x=np.log10(df.loc[df["dataset"] == d, "lengths"]),
opacity=0.4,
name=d,
histnorm=histnorm,
marker=dict(color=c))
for d, c in zip(df["dataset"].unique(), palette)]
xtickvals = [10**i for i in range(10) if not 10**i > 10 * np.amax(df["lengths"])]
html = plotly.offline.plot(
{"data": data,
"layout": go.Layout(barmode='overlay',
title=title,
xaxis=dict(tickvals=np.log10(xtickvals),
ticktext=xtickvals))},
output_type="div",
show_link=False)
fig = go.Figure(
{"data": data,
"layout": go.Layout(barmode='overlay',
title=title,
xaxis=dict(tickvals=np.log10(xtickvals),
ticktext=xtickvals))})
return html, fig
|
Plot overlaying histograms with log transformation of length
Return both html and fig for png
|
entailment
|
def get_file(db_folder, file_name):
"""Glob for the poor."""
if not os.path.isdir(db_folder):
return
file_name = file_name.lower().strip()
for cand_name in os.listdir(db_folder):
if cand_name.lower().strip() == file_name:
return os.path.join(db_folder, cand_name)
|
Glob for the poor.
|
entailment
|
def parse(db_folder, out_folder):
"""
Parse a cronos database.
Convert the database located in ``db_folder`` into CSV files in the
directory ``out_folder``.
"""
# The database structure, containing table and column definitions as
# well as other data.
stru_dat = get_file(db_folder, 'CroStru.dat')
# Index file for the database, which contains offsets for each record.
data_tad = get_file(db_folder, 'CroBank.tad')
# Actual data records, can only be decoded using CroBank.tad.
data_dat = get_file(db_folder, 'CroBank.dat')
if None in [stru_dat, data_tad, data_dat]:
raise CronosException("Not all database files are present.")
meta, tables = parse_structure(stru_dat)
for table in tables:
# TODO: do we want to export the "FL" table?
if table['abbr'] == 'FL' and table['name'] == 'Files':
continue
fh = open(make_csv_file_name(meta, table, out_folder), 'w')
columns = table.get('columns')
writer = csv.writer(fh)
writer.writerow([encode_cell(c['name']) for c in columns])
for row in parse_data(data_tad, data_dat, table.get('id'), columns):
writer.writerow([encode_cell(c) for c in row])
fh.close()
|
Parse a cronos database.
Convert the database located in ``db_folder`` into CSV files in the
directory ``out_folder``.
|
entailment
|
def encode1(self):
"""Return the base64 encoding of the figure file and insert in html image tag."""
data_uri = b64encode(open(self.path, 'rb').read()).decode('utf-8').replace('\n', '')
return '<img src="data:image/png;base64,{0}">'.format(data_uri)
|
Return the base64 encoding of the figure file and insert in html image tag.
|
entailment
|
def encode2(self):
"""Return the base64 encoding of the fig attribute and insert in html image tag."""
buf = BytesIO()
self.fig.savefig(buf, format='png', bbox_inches='tight', dpi=100)
buf.seek(0)
string = b64encode(buf.read())
return '<img src="data:image/png;base64,{0}">'.format(urlquote(string))
|
Return the base64 encoding of the fig attribute and insert in html image tag.
|
entailment
|
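The embedding pattern in miniature, independent of the Plot class (the bytes here are just the PNG magic header, for illustration):

from base64 import b64encode

png_magic = b"\x89PNG\r\n\x1a\n"
data_uri = b64encode(png_magic).decode('utf-8')
html = '<img src="data:image/png;base64,{0}">'.format(data_uri)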