text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def request(self, method, path, data=None, files=None, json=None, params=None):
    """Return the JSON content from the resource at ``path``.

    :param method: The request verb, e.g. get, post, put.
    :param path: Path combined with the Requestor's ``oauth_url``.
    :param data: Dict, bytes, or file-like object to send in the body.
    :param files: Mapping of ``filename`` to file-like object.
    :param json: Object to be serialized to JSON in the body.
    :param params: The query parameters to send with the request.

    Automatically refreshes the access token if it becomes invalid and a
    refresh token is available; raises InvalidInvocation otherwise.
    """
    # Work on copies so the caller's dicts are never mutated.
    query = deepcopy(params) if params else {}
    query["raw_json"] = 1
    if isinstance(data, dict):
        payload = deepcopy(data)
        payload["api_type"] = "json"
        data = sorted(payload.items())
    return self._request_with_retries(
        data=data,
        files=files,
        json=json,
        method=method,
        params=query,
        url=urljoin(self._requestor.oauth_url, path),
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def request(self, method, url, params=None, **kwargs):
    """Perform a request, or return a cached response if available.

    GET responses are memoized keyed on ``(url, params)``; other verbs
    always reach the remote end.
    """
    # Sort the items so {'a': 1, 'b': 2} and {'b': 2, 'a': 1} map to the
    # same cache key; a plain tuple(params.items()) is insertion-order
    # sensitive and causes needless cache misses.
    params_key = tuple(sorted(params.items())) if params else ()
    is_get = method.upper() == "GET"
    if is_get and (url, params_key) in self.get_cache:
        print("Returning cached response for:", method, url, params)
        return self.get_cache[(url, params_key)]
    result = super().request(method, url, params, **kwargs)
    if is_get:
        self.get_cache[(url, params_key)] = result
        print("Adding entry to the cache:", method, url, params)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_routing_info(cls, records):
    """Parse the records returned from a getServers call and return a
    new RoutingTable instance.
    """
    if len(records) != 1:
        raise RoutingProtocolError("Expected exactly one record")
    record = records[0]
    # Collect addresses per advertised role; unknown roles are ignored.
    role_map = {"ROUTE": [], "READ": [], "WRITE": []}
    try:
        for server in record["servers"]:
            parsed = [SocketAddress.parse(addr, DEFAULT_PORT)
                      for addr in server["addresses"]]
            role = server["role"]
            if role in role_map:
                role_map[role].extend(parsed)
        ttl = record["ttl"]
    except (KeyError, TypeError):
        raise RoutingProtocolError("Cannot parse routing info")
    else:
        return cls(role_map["ROUTE"], role_map["READ"], role_map["WRITE"], ttl)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_fresh(self, access_mode):
    """Indicator for whether routing information is still usable."""
    log_debug("[#0000] C: <ROUTING> Checking table freshness for %r", access_mode)
    # The table is stale once ttl seconds have elapsed since the update.
    expired = self.last_updated_time + self.ttl <= self.timer()
    if access_mode == READ_ACCESS:
        has_server_for_mode = bool(self.readers)
    elif access_mode == WRITE_ACCESS:
        has_server_for_mode = bool(self.writers)
    else:
        has_server_for_mode = False
    log_debug("[#0000] C: <ROUTING> Table expired=%r", expired)
    log_debug("[#0000] C: <ROUTING> Table routers=%r", self.routers)
    log_debug("[#0000] C: <ROUTING> Table has_server_for_mode=%r", has_server_for_mode)
    return not expired and self.routers and has_server_for_mode
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update(self, new_routing_table):
    """Update the current routing table with new routing information
    from a replacement table.
    """
    # Replace each server list in place rather than rebinding attributes.
    for role in ("routers", "readers", "writers"):
        getattr(self, role).replace(getattr(new_routing_table, role))
    self.last_updated_time = self.timer()
    self.ttl = new_routing_table.ttl
    log_debug("[#0000] S: <ROUTING> table=%r", self)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch_routing_info(self, address):
    """Fetch raw routing info from a given router address.

    :param address: router address
    :return: list of routing records, or None if no connection could be
        established
    :raise ServiceUnavailable: if the server does not support routing or
        if routing support is broken
    """
    metadata = {}
    records = []

    def fail(md):
        # on_failure callback: translate the server's failure metadata
        # into a routing-specific error.
        if md.get("code") == "Neo.ClientError.Procedure.ProcedureNotFound":
            raise RoutingProtocolError("Server {!r} does not support routing".format(address))
        else:
            raise RoutingProtocolError("Routing support broken on server {!r}".format(address))

    try:
        with self.acquire_direct(address) as cx:
            # Server agent looks like "Neo4j/<version>"; take the part
            # after the first "/" as the version string.
            _, _, server_version = (cx.server.agent or "").partition("/")
            # TODO 2.0: remove old routing procedure
            if server_version and Version.parse(server_version) >= Version((3, 2)):
                # Newer servers expose the context-aware routing procedure.
                log_debug("[#%04X] C: <ROUTING> query=%r", cx.local_port, self.routing_context or {})
                cx.run("CALL dbms.cluster.routing.getRoutingTable({context})",
                       {"context": self.routing_context}, on_success=metadata.update, on_failure=fail)
            else:
                log_debug("[#%04X] C: <ROUTING> query={}", cx.local_port)
                cx.run("CALL dbms.cluster.routing.getServers", {}, on_success=metadata.update, on_failure=fail)
            cx.pull_all(on_success=metadata.update, on_records=records.extend)
            cx.sync()
            # Zip each record's values against the field names announced
            # in the metadata to produce plain dicts.
            routing_info = [dict(zip(metadata.get("fields", ()), values)) for values in records]
            log_debug("[#%04X] S: <ROUTING> info=%r", cx.local_port, routing_info)
            return routing_info
    except RoutingProtocolError as error:
        # Surface routing-protocol problems as a service failure.
        raise ServiceUnavailable(*error.args)
    except ServiceUnavailable:
        # Connection-level failure: drop the router and report no data.
        self.deactivate(address)
        return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch_routing_table(self, address):
    """Fetch a routing table from a given router address.

    :param address: router address
    :return: a new RoutingTable instance, or None if the given router is
        currently unable to provide routing information
    :raise ServiceUnavailable: if no writers are available
    :raise ProtocolError: if the routing information received is unusable
    """
    new_routing_info = self.fetch_routing_info(address)
    if new_routing_info is None:
        return None
    new_routing_table = RoutingTable.parse_routing_info(new_routing_info)
    # An empty writer list likely indicates a transient state such as a
    # leader switch, so it is flagged rather than treated as an error.
    self.missing_writer = len(new_routing_table.writers) == 0
    if len(new_routing_table.routers) == 0:
        raise RoutingProtocolError("No routing servers returned from server %r" % (address,))
    if len(new_routing_table.readers) == 0:
        raise RoutingProtocolError("No read servers returned from server %r" % (address,))
    # At least one server of every required role: the table is usable.
    return new_routing_table
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_routing_table_from(self, *routers):
    """Try to update the routing table with the given routers.

    :return: True if the routing table was successfully updated,
        otherwise False
    """
    for candidate in routers:
        table = self.fetch_routing_table(candidate)
        if table is None:
            continue
        self.routing_table.update(table)
        return True
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_routing_table(self):
    """Update the routing table from the first router able to provide
    valid routing information.

    :raise ServiceUnavailable: if no router can supply routing data
    """
    # Copied because the table may be mutated while we work through it.
    existing_routers = list(self.routing_table.routers)
    tried_initial = False
    # When reading in the absence of a writer, re-ask the initial router
    # first in the hope that a writer has (re)appeared.
    if self.missing_writer:
        tried_initial = True
        if self.update_routing_table_from(self.initial_address):
            return
    if self.update_routing_table_from(*existing_routers):
        return
    if not tried_initial and self.initial_address not in existing_routers:
        if self.update_routing_table_from(self.initial_address):
            return
    # None of the routers have been successful, so just fail
    raise ServiceUnavailable("Unable to retrieve routing information")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ensure_routing_table_is_fresh(self, access_mode):
    """Update the routing table if stale.

    Two freshness checks are performed: one before and one after
    acquiring the refresh lock (double-checked locking), so that a
    refresh completed by another thread is not repeated. Thread-safe.

    :return: True if an update was required, False otherwise.
    """
    if self.routing_table.is_fresh(access_mode):
        return False
    with self.refresh_lock:
        if not self.routing_table.is_fresh(access_mode):
            self.update_routing_table()
            self.update_connection_pool()
            return True
        # Still fresh after taking the lock: another thread refreshed it.
        if access_mode == READ_ACCESS:
            # Reader is fresh but the writer may not be: record whether
            # we are reading in the absence of a writer.
            self.missing_writer = not self.routing_table.is_fresh(WRITE_ACCESS)
        return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def deactivate(self, address):
    """Deactivate an address from the connection pool: remove it from
    the routing table and close all idle connections to it.
    """
    log_debug("[#0000] C: <ROUTING> Deactivating address %r", address)
    # discard() (rather than remove()) is a no-op when the address has
    # already been dropped from a given server list.
    for server_list in (self.routing_table.routers,
                        self.routing_table.readers,
                        self.routing_table.writers):
        server_list.discard(address)
    log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
    super(RoutingConnectionPool, self).deactivate(address)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove_writer(self, address):
    """Remove a writer address from the routing table, if present."""
    log_debug("[#0000] C: <ROUTING> Removing writer %r", address)
    # discard() is safe when the address is already absent.
    self.routing_table.writers.discard(address)
    log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def handle(self, error, connection):
    """Perform cleanup related to an error occurring on a pooled
    connection.
    """
    # Exact-class matching (not isinstance) mirrors how errors are
    # raised by the protocol layer.
    deactivating = (ConnectionExpired, ServiceUnavailable, DatabaseUnavailableError)
    demoting = (NotALeaderError, ForbiddenOnReadOnlyDatabaseError)
    error_class = error.__class__
    if error_class in deactivating:
        self.deactivate(connection.address)
    elif error_class in demoting:
        self.remove_writer(connection.address)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def point_type(name, fields, srid_map):
    """Dynamically create a Point subclass."""

    def srid(self):
        # SRID is keyed by dimensionality (number of coordinates).
        try:
            return srid_map[len(self)]
        except KeyError:
            return None

    namespace = {"srid": property(srid)}
    for position, field_name in enumerate(fields):

        def accessor(self, i=position, f=field_name):
            # Default-argument binding freezes the loop variables so each
            # property reads its own index.
            try:
                return self[i]
            except IndexError:
                raise AttributeError(f)

        # Expose the coordinate under its own name and its x/y/z alias.
        for alias in {field_name, "xyz"[position]}:
            namespace[alias] = property(accessor)

    point_class = type(name, (Point,), namespace)
    with __srid_table_lock:
        for dimension, srid_value in srid_map.items():
            __srid_table[srid_value] = (point_class, dimension)
    return point_class
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main():
    """Read a directory containing JSON files for Kibana panels, beautify
    them, and replace the size value in aggregations as specified through
    the corresponding command line parameters.
    """
    args = parse_args()
    configure_logging(args.debug)
    src_path = args.src_path
    dest_path = args.dest_path
    # The size value can appear with or without a space after the colon.
    old_str1 = '\\"size\\":' + args.old_size
    old_str2 = '\\"size\\": ' + args.old_size
    new_str = '\\"size\\":' + args.new_size
    logging.info('Input path: %s', src_path)
    logging.info('Output path: %s', dest_path)
    logging.info('old str: %s', old_str1)
    logging.info('old str: %s', old_str2)
    logging.info('new str: %s', new_str)
    if os.path.abspath(src_path) == os.path.abspath(dest_path):
        logging.error('source and destination directiories must be different')
        sys.exit(1)
    # Iterate over input files
    json_files = [f for f in os.listdir(src_path) if f.endswith('.json')]
    for filename in json_files:
        # (The original assigned in_file_path twice; once is enough.)
        in_file_path = os.path.join(src_path, filename)
        out_file_path = os.path.join(dest_path, filename)
        logging.info('INPUT FILE: %s', in_file_path)
        logging.info('OUTPUT FILE: %s', out_file_path)
        # First beautify input
        pretty = utils.beautify(filename=in_file_path)
        # Replace both spacing variants of the old size value.
        pretty_replaced = utils.replace(pretty, old_str1, new_str)
        pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str)
        with open(out_file_path, 'w') as output_file:
            output_file.write(pretty_replaced)
    logging.info('This is the end.')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def signal_handler(signal_name, frame):
    """Quit signal handler.

    Flushes stdout before and after the message so no buffered output is
    lost, then exits with status 0.
    """
    sys.stdout.flush()
    print("\nSIGINT in frame signal received. Quitting...")
    sys.stdout.flush()
    sys.exit(0)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def graph_format(new_mem, old_mem, is_firstiteration=True):
    """Show changes graphically in memory consumption."""
    if is_firstiteration:
        return " n/a "
    delta = new_mem - old_mem
    # Growth thresholds (bytes), largest first, mapped to louder markers.
    for threshold, marker in ((50000000, " +++++"),
                              (20000000, " ++++ "),
                              (5000000, " +++ "),
                              (1000000, " ++ "),
                              (50000, " + ")):
        if delta > threshold:
            return marker
    # Shrink thresholds work on the opposite difference.
    for threshold, marker in ((10000000, "--- "),
                              (2000000, " -- "),
                              (100000, " - ")):
        if -delta > threshold:
            return marker
    return " "
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_cur_mem_use():
    """Return utilization of memory.

    Parses /proc/meminfo and returns a tuple of
    ``(total memory in kB as float, RAM occupancy % as str,
    swap occupancy % as str)``.
    """
    # http://lwn.net/Articles/28345/
    # Use a context manager so the file handle is always closed (the
    # original left the open() handle dangling).
    with open("/proc/meminfo", 'r') as meminfo:
        lines = meminfo.readlines()
    empty_space = re.compile('[ ]+')
    for line in lines:
        if "MemTotal" in line:
            memtotal = float(empty_space.split(line)[1])
        if "SwapFree" in line:
            swapfree = float(empty_space.split(line)[1])
        if "SwapTotal" in line:
            swaptotal = float(empty_space.split(line)[1])
        if "MemFree" in line:
            memfree = float(empty_space.split(line)[1])
        # "Cached" also matches "SwapCached"; exclude the latter.
        if "Cached" in line and "SwapCached" not in line:
            cached = float(empty_space.split(line)[1])
    # Occupancy: fraction of RAM neither free nor used for page cache.
    ramoccup = 1.0 - (memfree + cached) / memtotal
    if swaptotal == 0:
        swapoccup = 0
    else:
        swapoccup = 1.0 - swapfree / swaptotal
    strramoccup = str(round(ramoccup * 100.0, 1))
    strswapoccup = str(round(swapoccup * 100.0, 1))
    return float(memtotal), strramoccup, strswapoccup
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_py_version():
    """Check if a proper Python version is used.

    Returns silently for Python >= 2.7; otherwise prints advice and exits
    with status -1.
    """
    try:
        if sys.version_info >= (2, 7):
            return
    except:  # extremely old interpreters may fail the comparison
        pass
    messages = [
        " ",
        " ERROR - memtop needs python version at least 2.7",
        ("Chances are that you can install newer version from your "
         "repositories, or even that you have some newer version "
         "installed yet."),
        ("(one way to find out which versions are installed is to try "
         "following: 'which python2.7' , 'which python3' and so...)"),
        " ",
    ]
    for message in messages:
        print(message)
    sys.exit(-1)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def character(prompt=None, empty=False):
    """Prompt a single character.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    str or None
        A single-character string, or None if the user pressed only
        Enter and ``empty`` was True.
    """
    response = _prompt_input(prompt)
    if empty and not response:
        return None
    if len(response) == 1:
        return response
    # Anything else is invalid: re-prompt recursively.
    return character(prompt=prompt, empty=empty)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def email(prompt=None, empty=False, mode="simple"):
    """Prompt an email address.

    The check is based on a simple regular expression and does not verify
    whether the email actually exists.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.
    mode : {'simple'}, optional
        'simple' uses a simple regular expression. No other mode is
        implemented yet.

    Returns
    -------
    str or None
        A likely email address, or None if the user pressed only Enter
        and ``empty`` was True.

    Raises
    ------
    ValueError
        If ``mode`` is not a supported validation mode.
    """
    if mode != "simple":
        raise ValueError
    response = _prompt_input(prompt)
    if empty and not response:
        return None
    if RE_EMAIL_SIMPLE.match(response):
        return response
    # Invalid address: re-prompt recursively.
    return email(prompt=prompt, empty=empty, mode=mode)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def integer(prompt=None, empty=False):
    """Prompt an integer.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    int or None
        The entered integer, or None if the user pressed only Enter and
        ``empty`` was True.
    """
    response = _prompt_input(prompt)
    if empty and not response:
        return None
    try:
        return int(response)
    except ValueError:
        # Not a valid integer: ask again.
        return integer(prompt=prompt, empty=empty)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def real(prompt=None, empty=False):
    """Prompt a real number.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    float or None
        The entered real number, or None if the user pressed only Enter
        and ``empty`` was True.
    """
    response = _prompt_input(prompt)
    if empty and not response:
        return None
    try:
        return float(response)
    except ValueError:
        # Not a valid real number: ask again.
        return real(prompt=prompt, empty=empty)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def regex(pattern, prompt=None, empty=False, flags=0):
    """Prompt a string that matches a regular expression.

    Parameters
    ----------
    pattern : str
        A regular expression that must be matched.
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.
    flags : int, optional
        Flags that will be passed to ``re.match``.

    Returns
    -------
    Match or None
        A match object, or None if the user pressed only Enter and
        ``empty`` was True.

    See Also
    --------
    re.match
    """
    response = _prompt_input(prompt)
    if empty and not response:
        return None
    match = re.match(pattern, response, flags=flags)
    if match:
        return match
    # No match: ask again.
    return regex(pattern, prompt=prompt, empty=empty, flags=flags)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def secret(prompt=None, empty=False):
    """Prompt a string without echoing.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    str or None
        A non-empty string, or None if the user pressed only Enter and
        ``empty`` was True.

    Raises
    ------
    getpass.GetPassWarning
        If echo-free input is unavailable.

    See Also
    --------
    getpass.getpass
    """
    if prompt is None:
        prompt = PROMPT
    response = getpass.getpass(prompt=prompt)
    if empty and not response:
        return None
    if response:
        return response
    # Empty input with empty=False: ask again.
    return secret(prompt=prompt, empty=empty)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def string(prompt=None, empty=False):
    """Prompt a string.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    str or None
        A non-empty string, or None if the user pressed only Enter and
        ``empty`` was True.
    """
    response = _prompt_input(prompt)
    if empty and not response:
        return None
    if response:
        return response
    # Empty input with empty=False: ask again.
    return string(prompt=prompt, empty=empty)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_cache_plus_key(self):
"""Return a cache region plus key.""" |
key = getattr(self, '_cache_key', self.key_from_query())
return self._cache.cache, key |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_value(self, merge=True, createfunc=None, expiration_time=None, ignore_expiration=False):
    """Return the value from the cache for this query."""
    cache, cache_key = self._get_cache_plus_key()
    # ignore_expiration means: hand back an expired value anyway. That is
    # incompatible with createfunc, which regenerates expired values.
    assert not ignore_expiration or not createfunc, \
        "Can't ignore expiration and also provide createfunc"
    if createfunc and not ignore_expiration:
        # Generate-and-store path: a missing/expired value is recreated.
        cached_value = cache.get(cache_key)
        if not cached_value:
            cached_value = createfunc()
            cache.set(cache_key, cached_value, timeout=expiration_time)
    else:
        # Plain lookup path, optionally tolerating expired entries.
        cached_value = cache.get(cache_key,
                                 expiration_time=expiration_time,
                                 ignore_expiration=ignore_expiration)
    if cached_value and merge:
        cached_value = self.merge_result(cached_value, load=False)
    return cached_value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_value(self, value):
    """Set the value in the cache for this query."""
    region, key = self._get_cache_plus_key()
    region.set(key, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def key_from_query(self, qualifier=None):
    """Given a Query, create a cache key.

    The simplest of many possible approaches: an MD5 hash of the SQL
    statement text combined with stringified versions of all bound
    parameter values. Compiling ``query.statement`` costs a little;
    alternatives include an explicit per-Query cache key combined with
    the bound parameter values.
    """
    compiled = self.with_labels().statement.compile()
    # Statement text first, then parameter reprs in sorted-key order so
    # the key is deterministic.
    parts = [str(compiled)]
    parts.extend(repr(compiled.params[name]) for name in sorted(compiled.params))
    return md5(u" ".join(parts).encode('utf8')).hexdigest()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process_query_conditionally(self, query):
    """Process a Query that is used within a lazy loader.

    (process_query_conditionally() is a SQLAlchemy hook invoked only
    within lazyload.)
    """
    if not query._current_path:
        return
    mapper, prop = query._current_path[-2:]
    # Walk the MRO so options registered on a base class also apply to
    # its subclasses.
    for klass in mapper.class_.__mro__:
        option = self._relationship_options.get((klass, prop.key))
        if option:
            query._cache = option
            break
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fit(self, t, y, dy=1, presorted=False):
    """Fit the smoother.

    Parameters
    ----------
    t : array_like
        Time locations of the points to smooth.
    y : array_like
        y locations of the points to smooth.
    dy : array_like or float (default = 1)
        Errors in the y values.
    presorted : bool (default = False)
        If True, then t is assumed to be sorted.

    Returns
    -------
    self : Smoother instance
    """
    validated = self._validate_inputs(t, y, dy, presorted)
    self.t, self.y, self.dy = validated
    self._fit(self.t, self.y, self.dy)
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def predict(self, t):
    """Predict the smoothed function value at time t.

    Parameters
    ----------
    t : array_like
        Times at which to predict the result.

    Returns
    -------
    y : ndarray
        Smoothed values at time t, with the same shape as ``t``.
    """
    times = np.asarray(t)
    # The underlying predictor works on a flat array; restore the
    # caller's shape afterwards.
    flat_predictions = self._predict(np.ravel(times))
    return flat_predictions.reshape(times.shape)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cv_residuals(self, cv=True):
    """Return the residuals of the cross-validation for the fit data."""
    predicted = self.cv_values(cv)
    return (self.y - predicted) / self.dy
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cv_error(self, cv=True, skip_endpoints=True):
    """Return the mean absolute cross-validation residual for the
    input data.
    """
    residuals = self.cv_residuals(cv)
    # Endpoints are often poorly constrained, so drop them by default.
    window = residuals[1:-1] if skip_endpoints else residuals
    return np.mean(abs(window))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def arcfour_drop(key, n=3072):
    """Return a generator for the RC4-drop pseudorandom keystream given by
    the key and number of bytes to drop passed as arguments.

    Dropped bytes default to the more conservative 3072, NOT the SCAN
    default of 768.
    """
    keystream = arcfour(key)
    # next() works on Python 2 and 3, unlike the Python-2-only .next()
    # method the original called; a plain loop also avoids building a
    # throwaway list just for its side effects.
    for _ in range(n):
        next(keystream)
    return keystream
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reconnect(self):
    """Reconnect to the remote server.

    The client object is replaced under the lock so concurrent callers
    never observe a half-initialized connection.
    """
    # A with-block guarantees the lock is released even if the connection
    # constructor raises; the original acquire()/release() pair leaked
    # the lock on error.
    with self.lock:
        if self.use_ssl:
            self.client = http.client.HTTPSConnection(self.host, self.port, context=self.ssl_context)
        else:
            self.client = http.client.HTTPConnection(self.host, self.port)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def call(self, method, *args, **kwargs):
    """Issue a call to the remote end point to execute the specified procedure.

    :param str method: The name of the remote procedure to execute.
    :return: The return value from the remote function.
    :raise RPCConnectionError: if the underlying HTTP connection is in an
        improper state.
    :raise RPCError: if the server responds with a non-200 status, a
        malformed body, or a remote exception.
    """
    # Keyword arguments need an envelope so the server can tell them
    # apart from positionals; plain positional calls send the bare tuple.
    if kwargs:
        options = self.encode(dict(args=args, kwargs=kwargs))
    else:
        options = self.encode(args)
    headers = {}
    if self.headers:
        headers.update(self.headers)
    headers['Content-Type'] = self.serializer.content_type
    headers['Content-Length'] = str(len(options))
    headers['Connection'] = 'close'
    if self.username is not None and self.password is not None:
        # HTTP Basic auth, only when both credentials are configured.
        headers['Authorization'] = 'Basic ' + base64.b64encode((self.username + ':' + self.password).encode('UTF-8')).decode('UTF-8')
    method = os.path.join(self.uri_base, method)
    self.logger.debug('calling RPC method: ' + method[1:])
    try:
        # The lock serializes request/response pairs on the one shared
        # HTTP connection.
        with self.lock:
            self.client.request('RPC', method, options, headers)
            resp = self.client.getresponse()
    except http.client.ImproperConnectionState:
        raise RPCConnectionError('improper connection state')
    if resp.status != 200:
        raise RPCError(resp.reason, resp.status)
    resp_data = resp.read()
    resp_data = self.decode(resp_data)
    # A well-formed response always carries both keys.
    if not ('exception_occurred' in resp_data and 'result' in resp_data):
        raise RPCError('missing response information', resp.status)
    if resp_data['exception_occurred']:
        raise RPCError('remote method incurred an exception', resp.status, remote_exception=resp_data['exception'])
    return resp_data['result']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cache_call_refresh(self, method, *options):
    """Call a remote method and update the local cache with the result if
    it already existed.

    Any existing cache entry for the method/options pair is deleted, the
    remote procedure is invoked, and the fresh return value is stored.

    :param str method: The name of the remote procedure to execute.
    :return: The return value from the remote function.
    """
    options_hash = self.encode(options)
    if len(options_hash) > 20:
        # Hash the *encoded* options, not the raw tuple -- hashlib only
        # accepts bytes-like data, so the original `hashlib.new('sha1',
        # options)` raised TypeError for any long argument list.
        if not isinstance(options_hash, bytes):
            options_hash = options_hash.encode('utf-8')
        options_hash = hashlib.new('sha1', options_hash).digest()
    options_hash = sqlite3.Binary(options_hash)
    with self.cache_lock:
        cursor = self.cache_db.cursor()
        cursor.execute('DELETE FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
    return_value = self.call(method, *options)
    store_return_value = sqlite3.Binary(self.encode(return_value))
    with self.cache_lock:
        cursor = self.cache_db.cursor()
        cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
        self.cache_db.commit()
    return return_value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cache_clear(self):
    """Purge the local store of all cached function information."""
    with self.cache_lock:
        cur = self.cache_db.cursor()
        cur.execute('DELETE FROM cache')
        self.cache_db.commit()
    self.logger.info('the RPC cache has been purged')
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def respond_file(self, file_path, attachment=False, query=None):
    """Respond to the client by serving a file, either directly or as an
    attachment.

    :param str file_path: The path to the file to serve; it does not need
        to be in the web root.
    :param bool attachment: Whether to serve the file as a download by
        setting the Content-Disposition header.
    :param query: Ignored; accepted for handler-signature compatibility.
    """
    del query
    file_path = os.path.abspath(file_path)
    try:
        file_obj = open(file_path, 'rb')
    except IOError:
        self.respond_not_found()
        return
    # Close the handle even if header transmission or the body copy
    # raises; the original leaked it on any error after open().
    try:
        self.send_response(200)
        self.send_header('Content-Type', self.guess_mime_type(file_path))
        fs = os.fstat(file_obj.fileno())
        self.send_header('Content-Length', str(fs[6]))
        if attachment:
            file_name = os.path.basename(file_path)
            self.send_header('Content-Disposition', 'attachment; filename=' + file_name)
        self.send_header('Last-Modified', self.date_time_string(fs.st_mtime))
        self.end_headers()
        shutil.copyfileobj(file_obj, self.wfile)
    finally:
        file_obj.close()
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def respond_list_directory(self, dir_path, query=None):
    """
    Respond to the client with an HTML page listing the contents of
    the specified directory.

    :param str dir_path: The path of the directory to list the contents of.
    :param dict query: Unused; accepted for handler-signature compatibility.
    """
    del query
    try:
        dir_contents = os.listdir(dir_path)
    except os.error:
        # The directory is missing or unreadable.
        self.respond_not_found()
        return
    if os.path.normpath(dir_path) != self.__config['serve_files_root']:
        # Offer a parent-directory link everywhere except at the web root.
        dir_contents.append('..')
    dir_contents.sort(key=lambda a: a.lower())
    displaypath = html.escape(urllib.parse.unquote(self.path), quote=True)
    # Build the page in an in-memory buffer first so an accurate
    # Content-Length header can be sent.
    f = io.BytesIO()
    encoding = sys.getfilesystemencoding()
    f.write(b'<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n')
    f.write(b'<html>\n<title>Directory listing for ' + displaypath.encode(encoding) + b'</title>\n')
    f.write(b'<body>\n<h2>Directory listing for ' + displaypath.encode(encoding) + b'</h2>\n')
    f.write(b'<hr>\n<ul>\n')
    for name in dir_contents:
        fullname = os.path.join(dir_path, name)
        displayname = linkname = name
        # Append / for directories or @ for symbolic links
        if os.path.isdir(fullname):
            displayname = name + "/"
            linkname = name + "/"
        if os.path.islink(fullname):
            displayname = name + "@"
        # Note: a link to a directory displays with @ and links with /
        f.write(('<li><a href="' + urllib.parse.quote(linkname) + '">' + html.escape(displayname, quote=True) + '</a>\n').encode(encoding))
    f.write(b'</ul>\n<hr>\n</body>\n</html>\n')
    length = f.tell()
    f.seek(0)
    self.send_response(200)
    self.send_header('Content-Type', 'text/html; charset=' + encoding)
    self.send_header('Content-Length', length)
    self.end_headers()
    shutil.copyfileobj(f, self.wfile)
    f.close()
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def respond_redirect(self, location='/'):
    """
    Respond to the client with a 301 message and redirect them with
    a Location header.

    :param str location: The new location to redirect the client to.
    """
    self.send_response(301)
    # An explicit zero Content-Length tells the client no body follows.
    self.send_header('Content-Length', 0)
    self.send_header('Location', location)
    self.end_headers()
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def respond_server_error(self, status=None, status_line=None, message=None):
    """
    Handle an internal server error, logging a traceback if executed
    within an exception handler.

    :param int status: The status code to respond to the client with.
    :param str status_line: The status message to respond to the client with.
    :param str message: The body of the response that is sent to the client.
    """
    (ex_type, ex_value, ex_traceback) = sys.exc_info()
    if ex_type:
        # Called from inside an except block; log the innermost frame of
        # the active traceback for context.
        (ex_file_name, ex_line, _, _) = traceback.extract_tb(ex_traceback)[-1]
        line_info = "{0}:{1}".format(ex_file_name, ex_line)
        log_msg = "encountered {0} in {1}".format(repr(ex_value), line_info)
        self.server.logger.error(log_msg, exc_info=True)
    status = (status or 500)
    status_line = (status_line or http.client.responses.get(status, 'Internal Server Error')).strip()
    self.send_response(status, status_line)
    message = (message or status_line)
    if isinstance(message, (str, bytes)):
        self.send_header('Content-Length', len(message))
        self.end_headers()
        if isinstance(message, str):
            self.wfile.write(message.encode(sys.getdefaultencoding()))
        else:
            self.wfile.write(message)
    elif hasattr(message, 'fileno'):
        # File-like bodies are streamed directly to the client.
        fs = os.fstat(message.fileno())
        self.send_header('Content-Length', fs[6])
        self.end_headers()
        shutil.copyfileobj(message, self.wfile)
    else:
        # Unknown body type; send headers only.
        self.end_headers()
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def respond_unauthorized(self, request_authentication=False):
    """
    Respond to the client that the request is unauthorized.

    :param bool request_authentication: Whether to request basic authentication information by sending a WWW-Authenticate header.
    """
    if request_authentication:
        realm = self.__config['server_version']
        headers = {'WWW-Authenticate': 'Basic realm="' + realm + '"'}
    else:
        headers = {}
    self.send_response_full(b'Unauthorized', status=401, headers=headers)
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dispatch_handler(self, query=None):
    """
    Dispatch functions based on the established handler_map. It is
    generally not necessary to override this function and doing so will
    prevent any handlers from being executed. This function is executed
    automatically when requests of either GET, HEAD, or POST are
    received.

    :param dict query: Parsed query parameters from the corresponding request.
    """
    query = (query or {})
    # normalize the path
    # abandon query parameters
    self.path = self.path.split('?', 1)[0]
    self.path = self.path.split('#', 1)[0]
    original_path = urllib.parse.unquote(self.path)
    self.path = posixpath.normpath(original_path)
    words = self.path.split('/')
    words = filter(None, words)
    # Rebuild the path component by component, discarding drive prefixes
    # and any '.'/'..' traversal attempts.
    tmp_path = ''
    for word in words:
        _, word = os.path.splitdrive(word)
        _, word = os.path.split(word)
        if word in (os.curdir, os.pardir):
            continue
        tmp_path = os.path.join(tmp_path, word)
    self.path = tmp_path
    if self.path == 'robots.txt' and self.__config['serve_robots_txt']:
        self.send_response_full(self.__config['robots_txt'])
        return
    self.cookies = http.cookies.SimpleCookie(self.headers.get('cookie', ''))
    # Registered handlers take priority over static file serving.
    handler, is_method = self.__get_handler(is_rpc=False)
    if handler is not None:
        try:
            # Bound methods receive only the query; plain functions also
            # receive the handler instance as their first argument.
            handler(*((query,) if is_method else (self, query)))
        except Exception:
            self.respond_server_error()
        return
    if not self.__config['serve_files']:
        self.respond_not_found()
        return
    file_path = self.__config['serve_files_root']
    file_path = os.path.join(file_path, tmp_path)
    if os.path.isfile(file_path) and os.access(file_path, os.R_OK):
        self.respond_file(file_path, query=query)
        return
    elif os.path.isdir(file_path) and os.access(file_path, os.R_OK):
        if not original_path.endswith('/'):
            # redirect browser, doing what apache does
            destination = self.path + '/'
            if self.command == 'GET' and self.query_data:
                destination += '?' + urllib.parse.urlencode(self.query_data, True)
            self.respond_redirect(destination)
            return
        # Serve a directory index file when one exists.
        for index in ['index.html', 'index.htm']:
            index = os.path.join(file_path, index)
            if os.path.isfile(index) and os.access(index, os.R_OK):
                self.respond_file(index, query=query)
                return
        if self.__config['serve_files_list_directories']:
            self.respond_list_directory(file_path, query=query)
            return
    self.respond_not_found()
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def guess_mime_type(self, path):
    """
    Guess an appropriate MIME type based on the extension of the
    provided path.

    :param str path: The path of the file to analyze.
    :return: The guessed MIME type or the default if none is found.
    :rtype: str
    """
    extension = posixpath.splitext(path)[1]
    # Prefer an exact (case-sensitive) match before falling back to the
    # lower-cased extension, then to the catch-all '' entry.
    if extension in self.extensions_map:
        return self.extensions_map[extension]
    extension = extension.lower()
    if extension in self.extensions_map:
        return self.extensions_map[extension]
    return self.extensions_map['']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_authorization(self):
    """
    Check for the presence of a basic auth Authorization header and if
    the credentials contained within it are valid.

    :return: Whether or not the credentials are valid.
    :rtype: bool
    """
    try:
        store = self.__config.get('basic_auth')
        # No credential store configured means authentication is disabled.
        if store is None:
            return True
        auth_info = self.headers.get('Authorization')
        if not auth_info:
            return False
        auth_info = auth_info.split()
        if len(auth_info) != 2 or auth_info[0] != 'Basic':
            return False
        auth_info = base64.b64decode(auth_info[1]).decode(sys.getdefaultencoding())
        # Only the first colon separates the username; the password may
        # itself contain colons.
        username = auth_info.split(':')[0]
        password = ':'.join(auth_info.split(':')[1:])
        password_bytes = password.encode(sys.getdefaultencoding())
        if hasattr(self, 'custom_authentication'):
            # A subclass-provided hook fully replaces the store lookup;
            # its verdict is final.
            if self.custom_authentication(username, password):
                self.basic_auth_user = username
                return True
            return False
        if not username in store:
            self.server.logger.warning('received invalid username: ' + username)
            return False
        password_data = store[username]
        if password_data['type'] == 'plain':
            if password == password_data['value']:
                self.basic_auth_user = username
                return True
        # Otherwise 'type' names a hashlib algorithm and 'value' holds
        # the expected digest of the password.
        elif hashlib.new(password_data['type'], password_bytes).digest() == password_data['value']:
            self.basic_auth_user = username
            return True
        self.server.logger.warning('received invalid password from user: ' + username)
    except Exception:
        # Deliberate: any parsing or crypto error is treated as a failed
        # authentication attempt rather than a server error.
        pass
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cookie_get(self, name):
    """
    Check for a cookie value by name.

    :param str name: Name of the cookie value to retrieve.
    :return: Returns the cookie value if it's set or None if it's not found.
    """
    if not hasattr(self, 'cookies'):
        return None
    morsel = self.cookies.get(name)
    return morsel.value if morsel else None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cookie_set(self, name, value):
    """
    Set the value of a client cookie. This can only be called while
    headers can be sent.

    :param str name: The name of the cookie value to set.
    :param str value: The value of the cookie to set.
    """
    # Headers can no longer be sent once they have been finalized.
    if not self.headers_active:
        raise RuntimeError('headers have already been ended')
    self.send_header('Set-Cookie', "{0}={1}; Path=/; HttpOnly".format(name, value))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_content_type_charset(self, default='UTF-8'):
    """
    Inspect the Content-Type header to retrieve the charset that the
    client has specified.

    :param str default: The default charset to return if none exists.
    :return: The charset of the request.
    :rtype: str
    """
    header = self.headers.get('Content-Type', '')
    marker = header.find('charset=')
    # A marker at position 0 (or absent) is ignored, matching the
    # original behavior of requiring a media type before the parameter.
    if marker <= 0:
        return default
    value = header[marker + 8:].split(' ', 1)[0]
    return value or default
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def close(self):
    """
    Close the web socket connection and stop processing results. If the
    connection is still open, a WebSocket close message will be sent to
    the peer.
    """
    if not self.connected:
        return
    self.connected = False
    if self.handler.wfile.closed:
        return
    # Only attempt to send the close frame if the socket is writable
    # right now; the zero-timeout select avoids blocking during teardown.
    if select.select([], [self.handler.wfile], [], 0)[1]:
        with self.lock:
            # 0x88 = FIN bit + close opcode (0x8); 0x00 = empty payload.
            self.handler.wfile.write(b'\x88\x00')
            self.handler.wfile.flush()
    self.on_closed()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send_message(self, opcode, message):
    """
    Send a message to the peer over the socket.

    :param int opcode: The opcode for the message to send.
    :param bytes message: The message data to send.
    """
    if not isinstance(message, bytes):
        message = message.encode('utf-8')
    length = len(message)
    if not select.select([], [self.handler.wfile], [], 0)[1]:
        # The socket would block on write; treat the peer as dead.
        self.logger.error('the socket is not ready for writing')
        self.close()
        return
    buffer = b''
    # First byte: FIN bit (0x80) plus the frame opcode.
    buffer += struct.pack('B', 0x80 + opcode)
    # Payload length uses the 7-bit, 16-bit, or 64-bit encoding per
    # RFC 6455; frames are unmasked (server-to-client direction).
    if length <= 125:
        buffer += struct.pack('B', length)
    elif 126 <= length <= 65535:
        buffer += struct.pack('>BH', 126, length)
    else:
        buffer += struct.pack('>BQ', 127, length)
    buffer += message
    self._last_sent_opcode = opcode
    # Serialize writers so interleaved frames can not corrupt the stream.
    self.lock.acquire()
    try:
        self.handler.wfile.write(buffer)
        self.handler.wfile.flush()
    except Exception:
        self.logger.error('an error occurred while sending a message', exc_info=True)
        self.close()
    finally:
        self.lock.release()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_message(self, opcode, message):
    """
    The primary dispatch function to handle incoming WebSocket messages.

    :param int opcode: The opcode of the message that was received.
    :param bytes message: The data contained within the message.
    """
    self.logger.debug("processing {0} (opcode: 0x{1:02x}) message".format(self._opcode_names.get(opcode, 'UNKNOWN'), opcode))
    if opcode == self._opcode_close:
        self.close()
    elif opcode == self._opcode_ping:
        # RFC 6455 limits control-frame payloads to 125 bytes.
        if len(message) > 125:
            self.close()
            return
        self.send_message(self._opcode_pong, message)
    elif opcode == self._opcode_pong:
        # Unsolicited pongs are permitted and ignored.
        pass
    elif opcode == self._opcode_binary:
        self.on_message_binary(message)
    elif opcode == self._opcode_text:
        try:
            message = self._decode_string(message)
        except UnicodeDecodeError:
            self.logger.warning('closing connection due to invalid unicode within a text message')
            self.close()
        else:
            self.on_message_text(message)
    elif opcode == self._opcode_continue:
        # Fragmented messages are not supported; drop the connection.
        self.close()
    else:
        self.logger.warning("received unknown opcode: {0} (0x{0:02x})".format(opcode))
        self.close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_content_type(cls, content_type):
    """
    Build a serializer object from a MIME Content-Type string.

    :param str content_type: The Content-Type string to parse.
    :return: A new serializer instance.
    :rtype: :py:class:`.Serializer`
    """
    name = content_type
    options = {}
    if ';' in content_type:
        name, _, params = content_type.partition(';')
        for fragment in params.split(';'):
            fragment = fragment.strip()
            if '=' in fragment:
                key, value = fragment.split('=')
            else:
                key, value = fragment, None
            options[key] = value
    # old style compatibility
    if name.endswith('+zlib'):
        options['compression'] = 'zlib'
        name = name[:-5]
    return cls(name, charset=options.get('charset', 'UTF-8'), compression=options.get('compression'))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dumps(self, data):
    """
    Serialize a python data type for transmission or storage.

    :param data: The python object to serialize.
    :return: The serialized representation of the object.
    :rtype: bytes
    """
    serialized = g_serializer_drivers[self.name]['dumps'](data)
    # Drivers may emit text on Python 3; normalize to bytes first.
    if sys.version_info[0] == 3 and isinstance(serialized, str):
        serialized = serialized.encode(self._charset)
    if self._compression == 'zlib':
        serialized = zlib.compress(serialized)
    assert isinstance(serialized, bytes)
    return serialized
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def loads(self, data):
    """
    Deserialize the data into its original python object.

    :param bytes data: The serialized object to load.
    :return: The original python object.
    """
    if not isinstance(data, bytes):
        raise TypeError("loads() argument 1 must be bytes, not {0}".format(type(data).__name__))
    payload = data
    if self._compression == 'zlib':
        payload = zlib.decompress(payload)
    # Text-based formats ('application/...') expect str on Python 3.
    if sys.version_info[0] == 3 and self.name.startswith('application/'):
        payload = payload.decode(self._charset)
    charset = self._charset if sys.version_info[0] == 3 else None
    result = g_serializer_drivers[self.name]['loads'](payload, charset)
    # Lists round-trip as tuples to keep the result hashable/immutable.
    if isinstance(result, list):
        result = tuple(result)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def shutdown(self):
    """Shutdown the server and stop responding to requests."""
    self.__should_stop.set()
    if self.__server_thread == threading.current_thread():
        # Called from the serving thread itself; it can not wait on its
        # own loop, so mark the state flags directly.
        self.__is_shutdown.set()
        self.__is_running.clear()
    else:
        # Wake the serving thread's select() via the wakeup pipe, then
        # wait for it to acknowledge the shutdown.
        if self.__wakeup_fd is not None:
            os.write(self.__wakeup_fd.write_fd, b'\x00')
        self.__is_shutdown.wait()
        if self.__wakeup_fd is not None:
            self.__wakeup_fd.close()
            self.__wakeup_fd = None
    # Propagate the shutdown to any subordinate servers.
    for server in self.sub_servers:
        server.shutdown()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def auth_set(self, status):
    """
    Enable or disable requiring authentication on all incoming requests.

    :param bool status: Whether to enable or disable requiring authentication.
    """
    if bool(status):
        # An empty credential store enables (but does not yet populate)
        # basic authentication.
        self.__config['basic_auth'] = {}
        self.logger.info('basic authentication has been enabled')
    else:
        self.__config['basic_auth'] = None
        self.logger.info('basic authentication has been disabled')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def auth_delete_creds(self, username=None):
    """
    Delete the credentials for a specific username if specified or all
    stored credentials.

    :param str username: The username of the credentials to delete.
    """
    if username:
        del self.__config['basic_auth'][username]
        return
    self.__config['basic_auth'] = {}
    self.logger.info('basic authentication database has been cleared of all entries')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setattr_context(obj, **kwargs):
    """
    Context manager to temporarily change the values of object attributes
    while executing a function.

    :param obj: the object whose attributes will be temporarily replaced
    :param kwargs: attribute-name/value pairs to apply for the duration

    The previous values are always restored, even if the body raises.
    """
    old_values = {key: getattr(obj, key) for key in kwargs}
    # Explicit loops instead of the original list comprehensions: a
    # comprehension used purely for side effects is an anti-idiom and
    # allocates a throwaway list.
    for key, value in kwargs.items():
        setattr(obj, key, value)
    try:
        yield
    finally:
        for key, value in old_values.items():
            setattr(obj, key, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate_inputs(*arrays, **kwargs):
    """Validate input arrays

    This checks that
    - Arrays are mutually broadcastable
    - Broadcasted arrays are one-dimensional

    Optionally, arrays are sorted according to the ``sort_by`` argument.

    Parameters
    ----------
    *args : ndarrays
        All non-keyword arguments are arrays which will be validated
    sort_by : array
        If specified, sort all inputs by the order given in this array.
    """
    arrays = np.broadcast_arrays(*arrays)
    sort_by = kwargs.pop('sort_by', None)
    if kwargs:
        raise ValueError("unrecognized arguments: {0}".format(kwargs.keys()))
    if arrays[0].ndim != 1:
        raise ValueError("Input arrays should be one-dimensional.")
    if sort_by is not None:
        order = np.argsort(sort_by)
        if order.shape != arrays[0].shape:
            raise ValueError("sort shape must equal array shape.")
        arrays = tuple(a[order] for a in arrays)
    return arrays
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _prep_smooth(t, y, dy, span, t_out, span_out, period):
    """Private function to prepare & check variables for smooth utilities"""
    # If period is provided, sort by phases. Otherwise sort by t
    if period:
        t = t % period
        if t_out is not None:
            t_out = t_out % period
    t, y, dy = validate_inputs(t, y, dy, sort_by=t)
    if span_out is not None:
        # span_out and span are mutually exclusive, and span_out only
        # makes sense when evaluation times are given.
        if t_out is None:
            raise ValueError("Must specify t_out when span_out is given")
        if span is not None:
            raise ValueError("Must specify only one of span, span_out")
        span, t_out = np.broadcast_arrays(span_out, t_out)
        # Map each output time onto the sorted input grid.
        indices = np.searchsorted(t, t_out)
    elif span is None:
        raise ValueError("Must specify either span_out or span")
    else:
        indices = None
    return t, y, dy, span, t_out, span_out, indices
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def moving_average_smooth(t, y, dy, span=None, cv=True, t_out=None, span_out=None, period=None):
    """Perform a moving-average smooth of the data

    Parameters
    ----------
    t, y, dy : array_like
        time, value, and error in value of the input data
    span : array_like
        the integer spans of the data
    cv : boolean (default=True)
        if True, treat the problem as a cross-validation, i.e. don't use
        each point in the evaluation of its own smoothing.
    t_out : array_like (optional)
        the output times for the moving averages
    span_out : array_like (optional)
        the spans associated with the output times t_out
    period : float
        if provided, then consider the inputs periodic with the given period

    Returns
    -------
    y_smooth : array_like
        smoothed y values at each time t (or t_out)
    """
    prep = _prep_smooth(t, y, dy, span, t_out, span_out, period)
    t, y, dy, span, t_out, span_out, indices = prep
    # Inverse-variance weights; the smoothed value is a weighted mean
    # over each window.
    w = 1. / (dy ** 2)
    w, yw = windowed_sum([w, y * w], t=t, span=span, subtract_mid=cv,
                         indices=indices, period=period)
    if t_out is None or span_out is not None:
        return yw / w
    else:
        # Evaluate at t_out by re-using the window of the nearest
        # (right-adjacent) input point.
        i = np.minimum(len(t) - 1, np.searchsorted(t, t_out))
        return yw[i] / w[i]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def linear_smooth(t, y, dy, span=None, cv=True, t_out=None, span_out=None, period=None):
    """Perform a linear smooth of the data

    Parameters
    ----------
    t, y, dy : array_like
        time, value, and error in value of the input data
    span : array_like
        the integer spans of the data
    cv : boolean (default=True)
        if True, treat the problem as a cross-validation, i.e. don't use
        each point in the evaluation of its own smoothing.
    t_out : array_like (optional)
        the output times for the moving averages
    span_out : array_like (optional)
        the spans associated with the output times t_out
    period : float
        if provided, then consider the inputs periodic with the given period

    Returns
    -------
    y_smooth : array_like
        smoothed y values at each time t or t_out
    """
    t_input = t
    prep = _prep_smooth(t, y, dy, span, t_out, span_out, period)
    t, y, dy, span, t_out, span_out, indices = prep
    if period:
        t_input = np.asarray(t_input) % period
    w = 1. / (dy ** 2)
    # Windowed sums of w and y*w against powers of t provide the
    # normal-equation terms of a weighted least-squares line per window.
    w, yw, tw, tyw, ttw = windowed_sum([w, y * w, w, y * w, w], t=t,
                                       tpowers=[0, 0, 1, 1, 2],
                                       span=span, indices=indices,
                                       subtract_mid=cv, period=period)
    denominator = (w * ttw - tw * tw)
    slope = (tyw * w - tw * yw)
    intercept = (ttw * yw - tyw * tw)
    if np.any(denominator == 0):
        raise ValueError("Zero denominator in linear smooth. This usually "
                         "indicates that the input contains duplicate points.")
    if t_out is None:
        # Evaluate the fitted line at the original (unsorted) input times.
        return (slope * t_input + intercept) / denominator
    elif span_out is not None:
        return (slope * t_out + intercept) / denominator
    else:
        i = np.minimum(len(t) - 1, np.searchsorted(t, t_out))
        return (slope[i] * t_out + intercept[i]) / denominator[i]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def multinterp(x, y, xquery, slow=False):
    """Multiple linear interpolations

    Parameters
    ----------
    x : array_like, shape=(N,)
        sorted array of x values
    y : array_like, shape=(N, M)
        array of y values corresponding to each x value
    xquery : array_like, shape=(M,)
        array of query values
    slow : boolean, default=False
        if True, use slow method (used mainly for unit testing)

    Returns
    -------
    yquery : ndarray, shape=(M,)
        The interpolated values corresponding to each x query.
    """
    x, y, xquery = (np.asarray(arr) for arr in (x, y, xquery))
    assert x.ndim == 1
    assert xquery.ndim == 1
    assert y.shape == x.shape + xquery.shape
    # make sure xmin < xquery < xmax in all cases
    xquery = np.clip(xquery, x.min(), x.max())
    if slow:
        from scipy.interpolate import interp1d
        return np.array([interp1d(x, yi)(xq) for xq, yi in zip(xquery, y.T)])
    if len(x) == 3:
        # Most common case: two segments, evaluate both and select.
        lower = y[0] + (xquery - x[0]) * (y[1] - y[0]) / (x[1] - x[0])
        upper = y[1] + (xquery - x[1]) * (y[2] - y[1]) / (x[2] - x[1])
        return np.where(xquery < x[1], lower, upper)
    # General case: locate the segment for each query, then interpolate
    # column j of y along its own segment.
    idx = np.clip(np.searchsorted(x, xquery, side='right') - 1,
                  0, len(x) - 2)
    cols = np.arange(len(xquery))
    return y[idx, cols] + ((xquery - x[idx]) *
                           (y[idx + 1, cols] - y[idx, cols]) / (x[idx + 1] - x[idx]))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _create_session(self, test_connection=False):
    """
    Create a consulate.session object, and query for its leader to
    ensure that the connection is made.

    :param test_connection: call .leader() to ensure that the connection is valid
    :type test_connection: bool
    :return consulate.Session instance
    """
    new_session = consulate.Session(host=self.host, port=self.port)
    if test_connection:
        # Raises if no Consul leader is reachable at host:port.
        new_session.status.leader()
    return new_session
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def apply_remote_config(self, namespace=None):
    """
    Applies all config values defined in consul's kv store to self.app.

    There is no guarantee that these values will not be overwritten
    later elsewhere.

    :param namespace: kv namespace/directory. Defaults to DEFAULT_KV_NAMESPACE
    :return: None
    """
    if namespace is None:
        namespace = "config/{service}/{environment}/".format(
            service=os.environ.get('SERVICE', 'generic_service'),
            environment=os.environ.get('ENVIRONMENT', 'generic_environment')
        )
    # NOTE(review): iteritems() is presumably a py2/py3 compatibility
    # helper (e.g. from six) imported at module level -- confirm.
    for k, v in iteritems(self.session.kv.find(namespace)):
        # Strip the namespace prefix so only the bare key name is used.
        k = k.replace(namespace, '')
        try:
            # Values stored as JSON are decoded; anything else falls
            # back to the raw string.
            self.app.config[k] = json.loads(v)
        except (TypeError, ValueError):
            self.app.logger.warning("Couldn't de-serialize {} to json, using raw value".format(v))
            self.app.config[k] = v
        msg = "Set {k}={v} from consul kv '{ns}'".format(
            k=k,
            v=v,
            ns=namespace,
        )
        self.app.logger.debug(msg)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register_service(self, **kwargs):
    """
    register this service with consul

    kwargs passed to Consul.agent.service.register; the service name
    defaults to the Flask application's name.
    """
    if 'name' not in kwargs:
        kwargs['name'] = self.app.name
    self.session.agent.service.register(**kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _resolve(self):
    """ Query the consul DNS server for the service IP and port """
    endpoints = {}
    r = self.resolver.query(self.service, 'SRV')
    # The additional section carries the A records: target name -> address.
    for rec in r.response.additional:
        name = rec.name.to_text()
        addr = rec.items[0].address
        endpoints[name] = {'addr': addr}
    # The answer section carries the SRV records with the port numbers.
    # NOTE(review): assumes the dnspython response layout; to_text() may
    # include a trailing dot while '.'.join(labels) builds the name from
    # raw labels -- confirm the two name forms actually match.
    for rec in r.response.answer[0].items:
        name = '.'.join(rec.target.labels)
        endpoints[name]['port'] = rec.port
    return [
        'http://{ip}:{port}'.format(
            ip=v['addr'], port=v['port']
        ) for v in endpoints.values()
    ]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def crop(gens, seconds=5, cropper=None):
    '''
    Crop the generator to a finite number of frames

    Return a generator which outputs the provided generator limited
    to enough samples to produce seconds seconds of audio (default 5s)
    at the provided frame rate.
    '''
    # NOTE(review): the "next" attribute check only detects generators on
    # Python 2; Python 3 generators expose "__next__" instead -- confirm
    # the target interpreter before reuse.
    if hasattr(gens, "next"):
        # single generator
        gens = (gens,)
    if cropper == None:
        # Default cropper: take the first seconds * FRAME_RATE samples.
        cropper = lambda gen: itertools.islice(gen, 0, seconds * sampler.FRAME_RATE)
    cropped = [cropper(gen) for gen in gens]
    # A single input yields a single generator rather than a 1-list.
    return cropped[0] if len(cropped) == 1 else cropped
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def crop_at_zero_crossing(gen, seconds=5, error=0.1):
    '''
    Crop the generator, ending at a zero-crossing

    Crop the generator to produce approximately seconds seconds
    (default 5s) of audio at the provided FRAME_RATE, attempting
    to end the clip at a zero crossing point to avoid clicking.
    '''
    source = iter(gen)
    buffer_length = int(2 * error * sampler.FRAME_RATE)
    # split the source into two iterators:
    # - start, which contains the bulk of the sound clip
    # - and end, which contains the final 100ms, plus 100ms past
    #   the desired clip length. We may cut the clip anywhere
    #   within this +/-100ms end buffer.
    start = itertools.islice(source, 0, int((seconds - error) * sampler.FRAME_RATE))
    end = itertools.islice(source, 0, buffer_length)
    for sample in start:
        yield sample
    # pull end buffer generator into memory so we can work with it
    end = list(end)
    # find min by sorting buffer samples, first by abs of sample, then by distance from optimal
    best = sorted(enumerate(end), key=lambda x: (math.fabs(x[1]),abs((buffer_length/2)-x[0])))
    # NOTE(review): the two statements below are leftover Python 2 debug
    # print statements; they emit noise on every call and should likely
    # be removed.
    print best[:10]
    print best[0][0]
    # todo: better logic when we don't have a perfect zero crossing
    #if best[0][1] != 0:
    #    # we don't have a perfect zero crossing, so let's look for best fit?
    #    pass
    # crop samples at index of best zero crossing
    for sample in end[:best[0][0] + 1]:
        yield sample
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def volume(gen, dB=0):
    '''Change the volume of gen by dB decibles'''
    # NOTE(review): the 'next' attribute test detects generators on
    # Python 2 only; on Python 3 generators expose '__next__', so a dB
    # generator would wrongly take the scalar branch -- confirm the
    # target interpreter.
    if not hasattr(dB, 'next'):
        # not a generator
        # Convert a fixed decibel change to a linear amplitude factor.
        scale = 10 ** (dB / 20.)
    else:
        # Time-varying gain: convert each decibel value as it is drawn.
        def scale_gen():
            while True:
                yield 10 ** (next(dB) / 20.)
        scale = scale_gen()
    return envelope(gen, scale)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mixer(inputs, mix=None):
    '''
    Mix `inputs` together based on `mix` tuple

    `inputs` should be a tuple of *n* generators.
    `mix` should be a tuple of *m* tuples, one per desired
    output channel. Each of the *m* tuples should contain
    *n* generators, corresponding to the time-sequence of
    the desired mix levels for each of the *n* input channels.

    That is, to make an ouput channel contain a 50/50 mix of the
    two input channels, the tuple would be:

        (constant(0.5), constant(0.5))

    The mix generators need not be constant, allowing for time-varying
    mix levels:

        # 50% from input 1, pulse input 2 over a two second cycle
        (constant(0.5), tone(0.5))

    The mixer will return a list of *m* generators, each containing
    the data from the inputs mixed as specified.

    If no `mix` tuple is specified, all of the *n* input channels
    will be mixed together into one generator, with the volume of
    each reduced *n*-fold.

    Example:

        # three in, two out;
        # 10Hz binaural beat with white noise across both channels
        mixer(
            (white_noise(), tone(440), tone(450)),
            (
                (constant(.5), constant(1), constant(0)),
                (constant(.5), constant(0), constant(1)),
            )
        )
    '''
    if mix == None:
        # by default, mix all inputs down to one channel
        mix = ([constant(1.0 / len(inputs))] * len(inputs),)
    # Each input is tee'd once per output channel so every channel can
    # consume it independently.
    duped_inputs = zip(*[itertools.tee(i, len(mix)) for i in inputs])
    # second zip is backwards
    # NOTE(review): sum(*[...]) unpacks the per-channel generators as
    # separate positional arguments to sum(); with builtins.sum that is
    # sum(gen, start) at best and a TypeError for 3+ inputs. Presumably a
    # module-local sample-wise sum() generator is intended -- confirm
    # against this module's other helpers.
    return [\
        sum(*[multiply(m,i) for m,i in zip(channel_mix, channel_inputs)])\
        for channel_mix, channel_inputs in zip(mix, duped_inputs) \
    ]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def channelize(gen, channels):
    '''
    Break multi-channel generator into one sub-generator per channel

    Takes a generator producing n-tuples of samples and returns n
    generators, each producing samples for a single channel.

    Since multi-channel generators are the only reasonable way to
    synchronize samples across channels, and the sampler functions only
    take tuples of generators, you must use this function to process
    synchronized streams for output.
    '''
    def select_channel(source, index):
        # Project each frame tuple onto a single channel position.
        for frame in source:
            yield frame[index]
    copies = itertools.tee(gen, channels)
    return [select_channel(copy, index) for index, copy in enumerate(copies)]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def file_is_seekable(f):
    '''
    Returns True if file `f` is seekable, and False if not

    Useful to determine, for example, if `f` is STDOUT to a pipe.

    :param f: an open file-like object backed by a file descriptor
    :raises IOError: re-raised when tell() fails for any reason other than
        the file being a pipe/FIFO (ESPIPE)
    '''
    try:
        f.tell()
        logger.info("File is seekable!")
    # Fixed: `except IOError, e` is Python 2-only syntax and a SyntaxError
    # on Python 3; `as` works on Python 2.6+ and 3.x alike.
    except IOError as e:
        if e.errno == errno.ESPIPE:
            # tell() on a pipe/FIFO fails with ESPIPE: not seekable.
            return False
        else:
            raise
    return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sample(generator, min=-1, max=1, width=SAMPLE_WIDTH):
    '''
    Convert audio waveform generator into packed sample generator.

    :param generator: yields float samples in [min, max]
    :param min: expected minimum input amplitude
    :param max: expected maximum input amplitude
    :param width: sample width in bytes (1, 2, or 4)
    '''
    # Select little-endian signed char, short, or int based on sample width.
    # Fixed: normalize() below maps samples into the SIGNED range
    # [-2**(width*8-1), 2**(width*8-1) - 1], which the previous unsigned
    # '<B' format could not pack for negative 8-bit samples (struct.error).
    fmt = {1: '<b', 2: '<h', 4: '<i'}[width]
    return (struct.pack(fmt, int(sample)) for sample in
            normalize(hard_clip(generator, min, max),
                      min, max, -2 ** (width * 8 - 1), 2 ** (width * 8 - 1) - 1))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sample_all(generators, *args, **kwargs):
    '''Pack every waveform generator in `generators`; see `sample` for options.'''
    return [sample(g, *args, **kwargs) for g in generators]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def buffer(stream, buffer_size=BUFFER_SIZE):
    '''
    Buffer the generator into byte strings of buffer_size samples

    Return a generator that outputs reasonably sized byte strings
    containing buffer_size samples from the generator stream.

    This allows us to output big chunks of the audio stream to
    disk at once for faster writes.
    '''
    samples = iter(stream)

    def next_chunk():
        # Join up to buffer_size packed samples into one string.
        return "".join(itertools.islice(samples, buffer_size))

    # iter() with a sentinel stops as soon as a chunk comes back empty,
    # i.e. when the underlying stream is exhausted.
    return iter(next_chunk, "")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wave_module_patched():
    '''
    True if wave module can write data size of 0xFFFFFFFF, False otherwise.

    Probes the stdlib wave module by writing a header with the maximum
    32-bit data size; an unpatched module overflows in struct packing,
    in which case we fall back to a 0x7FFFFFFF data size.
    '''
    f = StringIO()
    w = wave.open(f, "wb")
    w.setparams((1, 2, 44100, 0, "NONE", "no compression"))
    patched = True
    try:
        # // keeps the frame count integral on Python 2 and 3 alike
        # (plain / would produce a float under Python 3).
        w.setnframes((0xFFFFFFFF - 36) // w.getnchannels() // w.getsampwidth())
        w._ensure_header_written(0)
    except struct.error:
        patched = False
        # Fixed log-message typo: "sata size" -> "data size".
        logger.info("Error setting wave data size to 0xFFFFFFFF; wave module unpatched, setting data size to 0x7FFFFFFF")
        w.setnframes((0x7FFFFFFF - 36) // w.getnchannels() // w.getsampwidth())
        w._ensure_header_written(0)
    return patched
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cache_finite_samples(f):
    '''Decorator to cache audio samples produced by the wrapped generator.'''
    memo = {}

    def wrapper(*args):
        # The cache key includes the global frame rate, since generated
        # samples depend on it as well as on the arguments.
        key = (FRAME_RATE, args)
        if key not in memo:
            memo[key] = list(f(*args))
        # Hand back a fresh iterator so each caller can consume the
        # cached samples independently.
        return iter(memo[key])
    return wrapper
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def play(channels, blocking=True, raw_samples=False):
    '''
    Play the contents of the generator using PyAudio

    Play to the system soundcard using PyAudio. PyAudio, an otherwise
    optional dependency, must be installed for this feature to work.

    :param channels: a single sample generator or a sequence of
        per-channel generators
    :param blocking: when True, write synchronously until the generator is
        exhausted; when False, return a callback-driven PyAudio stream
    :param raw_samples: passed through to wav_samples()
    '''
    if not pyaudio_loaded:
        raise Exception("Soundcard playback requires PyAudio. Install with `pip install pyaudio`.")
    # A bare generator is treated as mono; otherwise one generator per channel.
    # NOTE(review): this checks Python 2's .next attribute -- a Python 3
    # generator exposes __next__ instead; confirm the intended interpreter.
    channel_count = 1 if hasattr(channels, "next") else len(channels)
    wavgen = wav_samples(channels, raw_samples=raw_samples)
    p = pyaudio.PyAudio()
    stream = p.open(
        format=p.get_format_from_width(SAMPLE_WIDTH),
        channels=channel_count,
        rate=FRAME_RATE,
        output=True,
        # Non-blocking mode hands sample production off to a PyAudio callback.
        stream_callback=_pyaudio_callback(wavgen) if not blocking else None
    )
    if blocking:
        try:
            # Push fixed-size chunks to the soundcard until the stream ends.
            for chunk in buffer(wavgen, 1024):
                stream.write(chunk)
        except Exception:
            raise
        finally:
            # Always stop and close the stream, even on error or Ctrl-C.
            if not stream.is_stopped():
                stream.stop_stream()
            try:
                stream.close()
            except Exception:
                pass
    else:
        # Caller is responsible for stopping/closing the returned stream.
        return stream
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _pad_arrays(t, arrays, indices, span, period):
"""Internal routine to pad arrays for periodic models.""" |
N = len(t)
if indices is None:
indices = np.arange(N)
pad_left = max(0, 0 - np.min(indices - span // 2))
pad_right = max(0, np.max(indices + span - span // 2) - (N - 1))
if pad_left + pad_right > 0:
Nright, pad_right = divmod(pad_right, N)
Nleft, pad_left = divmod(pad_left, N)
t = np.concatenate([t[N - pad_left:] - (Nleft + 1) * period]
+ [t + i * period
for i in range(-Nleft, Nright + 1)]
+ [t[:pad_right] + (Nright + 1) * period])
arrays = [np.concatenate([a[N - pad_left:]]
+ (Nleft + Nright + 1) * [a]
+ [a[:pad_right]])
for a in arrays]
pad_left = pad_left % N
Nright = pad_right / N
pad_right = pad_right % N
return (t, arrays, slice(pad_left + Nleft * N,
pad_left + (Nleft + 1) * N))
else:
return (t, arrays, slice(None)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_i2c_bus_numbers(glober=glob.glob):
    """Search all the available I2C devices in the system.

    :param glober: callable listing device nodes (injectable for testing)
    :return: list of bus numbers parsed from /dev/i2c-* entries
    """
    res = []
    for device in glober("/dev/i2c-*"):
        # Fixed: the quantifier must be INSIDE the capture group. The old
        # pattern "([\d]){1,2}" kept only the LAST digit matched, so
        # /dev/i2c-12 was reported as bus 2 instead of bus 12.
        r = re.match(r"/dev/i2c-(\d{1,2})", device)
        res.append(int(r.group(1)))
    return res
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_led_register_from_name(self, name):
    """Parse the name for led number

    :param name: attribute name, like: led_1
    :raises AttributeError: when the name is not led_<0..15>
    :return: the low register address for that LED
    """
    match = re.match('^led_([0-9]{1,2})$', name)
    if match is None:
        raise AttributeError("Unknown attribute: '%s'" % name)
    index = int(match.group(1))
    # Only LEDs 0-15 exist on the controller.
    if not 0 <= index <= 15:
        raise AttributeError("Unknown attribute: '%s'" % name)
    return self.calc_led_register(index)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_pwm(self, led_num, value):
    """Set PWM value for the specified LED

    :param led_num: LED number (0-15)
    :param value: the 12 bit value (0-4095)
    """
    # Validate both inputs before touching any hardware register.
    self.__check_range('led_number', led_num)
    self.__check_range('led_value', value)
    # Each LED uses two consecutive registers: low byte, then high byte.
    low_register = self.calc_led_register(led_num)
    self.write(low_register, value_low(value))
    self.write(low_register + 1, value_high(value))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_pwm(self, led_num):
    """Generic getter for all LED PWM value

    :param led_num: LED number (0-15)
    :return: the LED's current value read from its low register
    """
    self.__check_range('led_number', led_num)
    return self.__get_led_value(self.calc_led_register(led_num))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sleep(self):
    """Send the controller to sleep"""
    logger.debug("Sleep the controller")
    # OR the SLEEP bit into MODE_1; self.mode_1 holds the current mode
    # bits (presumably cached at init -- confirm), so the others survive.
    self.write(Registers.MODE_1, self.mode_1 | (1 << Mode1.SLEEP))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write(self, reg, value):
    """Write raw byte value to the specified register

    :param reg: the register number (0-69, 250-255)
    :param value: byte value
    """
    # TODO: check reg: 0-69, 250-255
    self.__check_range('register_value', value)
    logger.debug("Write '%s' to register '%s'" % (value, reg))
    # Push the byte to the device over the bus (smbus-style API).
    self.__bus.write_byte_data(self.__address, reg, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_pwm_frequency(self, value):
    """Set the frequency for all PWM output

    :param value: the frequency in Hz
    """
    self.__check_range('pwm_frequency', value)
    prescale = self.calc_pre_scale(value)
    logger.debug("Calculated prescale value is %s" % prescale)
    # Preserve the original sleep -> write PRE_SCALE -> wake sequence.
    self.sleep()
    self.write(Registers.PRE_SCALE, prescale)
    self.wake()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_valid_color(color):
    """Check if the color provided by the user is valid.

    If color is invalid the default is returned.

    :param color: a CSS4 color name or the default hex code
    :return: `color` when valid, otherwise the default "#4CB391"
    """
    # Test dict membership directly instead of materializing
    # list(CSS4_COLORS.keys()) + [...] on every call.
    if color in mcolors.CSS4_COLORS or color == "#4CB391":
        logging.info("Nanoplotter: Valid color {}.".format(color))
        return color
    else:
        logging.info("Nanoplotter: Invalid color {}, using default.".format(color))
        sys.stderr.write("Invalid color {}, using default.\n".format(color))
        return "#4CB391"
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_valid_format(figformat):
    """Check if the specified figure format is valid.

    If format is invalid the default is returned.
    Probably installation-dependent

    :param figformat: requested output file format
    :return: `figformat` when supported, otherwise "png"
    """
    fig = plt.figure()
    supported = figformat in fig.canvas.get_supported_filetypes()
    # Fixed: close the probe figure again -- pyplot keeps a global
    # reference to open figures, so every call leaked one before.
    plt.close(fig)
    if supported:
        logging.info("Nanoplotter: valid output format {}".format(figformat))
        return figformat
    else:
        logging.info("Nanoplotter: invalid output format {}".format(figformat))
        sys.stderr.write("Invalid format {}, using default.\n".format(figformat))
        return "png"
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def spatial_heatmap(array, path, title=None, color="Greens", figformat="png"):
    """Taking channel information and creating post run channel activity plots.

    :param array: per-read channel identifiers
    :param path: output path prefix for the image file
    :param title: optional title overriding the default
    :param color: colormap name passed to seaborn
    :param figformat: image format extension
    :return: single-element list holding the Plot object
    """
    logging.info("Nanoplotter: Creating heatmap of reads per channel using {} reads."
                 .format(array.size))
    activity_map = Plot(
        path=path + "." + figformat,
        title="Number of reads generated per channel")
    # make_layout maps channel ids onto the flowcell grid template.
    layout = make_layout(maxval=np.amax(array))
    # Count how many reads each channel produced.
    # NOTE(review): top-level pd.value_counts is deprecated in recent pandas;
    # pd.Series(array).value_counts() is the modern spelling.
    valueCounts = pd.value_counts(pd.Series(array))
    # Fill each grid position with the read count of the channel located there.
    for entry in valueCounts.keys():
        layout.template[np.where(layout.structure == entry)] = valueCounts[entry]
    plt.figure()
    ax = sns.heatmap(
        data=pd.DataFrame(layout.template, index=layout.yticks, columns=layout.xticks),
        xticklabels="auto",
        yticklabels="auto",
        square=True,
        cbar_kws={"orientation": "horizontal"},
        cmap=color,
        linewidths=0.20)
    ax.set_title(title or activity_map.title)
    activity_map.fig = ax.get_figure()
    activity_map.save(format=figformat)
    plt.close("all")
    return [activity_map]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_valid_time_and_sort(df, timescol, days=5, warning=True):
    """Check if the data contains reads created within the same `days` timeframe.

    if not, print warning and only return part of the data which is
    within `days` days

    Resetting the index twice to get also an "index" column for plotting
    the cum_yield_reads plot

    :param df: DataFrame holding the read metrics
    :param timescol: name of the time column
    :param days: maximum accepted timespan in days
    :param warning: when True, emit stderr/log warnings before truncating
    """
    # NOTE(review): the truncation below compares df[timescol] against a
    # timedelta, so timescol presumably holds elapsed times (timedeltas
    # since run start) rather than absolute datetimes -- confirm in caller.
    timediff = (df[timescol].max() - df[timescol].min()).days
    if timediff < days:
        # Within range: just sort; double reset_index adds an "index" column.
        return df.sort_values(timescol).reset_index(drop=True).reset_index()
    else:
        if warning:
            sys.stderr.write(
                "\nWarning: data generated is from more than {} days.\n".format(str(days)))
            sys.stderr.write("Likely this indicates you are combining multiple runs.\n")
            sys.stderr.write(
                "Plots based on time are invalid and therefore truncated to first {} days.\n\n"
                .format(str(days)))
            logging.warning("Time plots truncated to first {} days: invalid timespan: {} days"
                            .format(str(days), str(timediff)))
        # Keep only reads from the first `days` days, then sort and re-index.
        return df[df[timescol] < timedelta(days=days)] \
            .sort_values(timescol) \
            .reset_index(drop=True) \
            .reset_index()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def time_plots(df, path, title=None, color="#4CB391", figformat="png", log_length=False, plot_settings=None):
    """Making plots of time vs read length, time vs quality and cumulative yield."""
    sorted_df = check_valid_time_and_sort(df, "start_time")
    logging.info("Nanoplotter: Creating timeplots using {} reads.".format(len(sorted_df)))
    plots = []
    plots += cumulative_yield(dfs=sorted_df.set_index("start_time"),
                              path=path,
                              figformat=figformat,
                              title=title,
                              color=color)
    plots += plot_over_time(dfs=sorted_df.set_index("start_time"),
                            path=path,
                            figformat=figformat,
                            title=title,
                            color=color)
    plots += violin_plots_over_time(dfs=sorted_df,
                                    path=path,
                                    figformat=figformat,
                                    title=title,
                                    log_length=log_length,
                                    plot_settings=plot_settings)
    return plots
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def violin_or_box_plot(df, y, figformat, path, y_name, title=None, plot="violin", log=False, palette=None):
    """Create a violin or boxplot from the received DataFrame.

    The x-axis should be divided based on the 'dataset' column,
    the y-axis is specified in the arguments

    :param df: DataFrame with a 'dataset' column and measurement column `y`
    :param y: name of the column plotted on the y-axis
    :param plot: one of 'violin', 'box' or 'ridge'; anything else aborts
    :param log: True when `y` holds log10-transformed values (fixes labels)
    :return: single-element list holding the Plot object
    """
    comp = Plot(path=path + "NanoComp_" + y.replace(' ', '_') + '.' + figformat,
                title="Comparing {}".format(y))
    if y == "quals":
        comp.title = "Comparing base call quality scores"
    if plot == 'violin':
        logging.info("Nanoplotter: Creating violin plot for {}.".format(y))
        process_violin_and_box(ax=sns.violinplot(x="dataset",
                                                 y=y,
                                                 data=df,
                                                 inner=None,
                                                 cut=0,
                                                 palette=palette,
                                                 linewidth=0),
                               log=log,
                               plot_obj=comp,
                               title=title,
                               y_name=y_name,
                               figformat=figformat,
                               ymax=np.amax(df[y]))
    elif plot == 'box':
        logging.info("Nanoplotter: Creating box plot for {}.".format(y))
        process_violin_and_box(ax=sns.boxplot(x="dataset",
                                              y=y,
                                              data=df,
                                              palette=palette),
                               log=log,
                               plot_obj=comp,
                               title=title,
                               y_name=y_name,
                               figformat=figformat,
                               ymax=np.amax(df[y]))
    elif plot == 'ridge':
        logging.info("Nanoplotter: Creating ridges plot for {}.".format(y))
        comp.fig, axes = joypy.joyplot(df,
                                       by="dataset",
                                       column=y,
                                       title=title or comp.title,
                                       x_range=[-0.05, np.amax(df[y])])
        if log:
            # Tick positions are log10 values; relabel them as raw values.
            xticks = [float(i.get_text()) for i in axes[-1].get_xticklabels()]
            axes[-1].set_xticklabels([10**i for i in xticks])
        axes[-1].set_xticklabels(axes[-1].get_xticklabels(), rotation=30, ha='center')
        # The ridge branch saves itself; the others save via process_violin_and_box.
        comp.save(format=figformat)
    else:
        logging.error("Unknown comp plot type {}".format(plot))
        sys.exit("Unknown comp plot type {}".format(plot))
    plt.close("all")
    return [comp]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def output_barplot(df, figformat, path, title=None, palette=None):
    """Create barplots based on number of reads and total sum of nucleotides sequenced.

    :param df: DataFrame with a 'dataset' column and 'lengths'
        (optionally 'aligned_lengths') columns
    :return: tuple (read_count Plot, throughput Plot)
    """
    logging.info("Nanoplotter: Creating barplots for number of reads and total throughput.")
    read_count = Plot(path=path + "NanoComp_number_of_reads." + figformat,
                      title="Comparing number of reads")
    ax = sns.countplot(x="dataset",
                       data=df,
                       palette=palette)
    ax.set(ylabel='Number of reads',
           title=title or read_count.title)
    plt.xticks(rotation=30, ha='center')
    read_count.fig = ax.get_figure()
    read_count.save(format=figformat)
    plt.close("all")
    throughput_bases = Plot(path=path + "NanoComp_total_throughput." + figformat,
                            title="Comparing throughput in gigabases")
    # Prefer aligned throughput when alignment information is present.
    if "aligned_lengths" in df:
        throughput = df.groupby('dataset')['aligned_lengths'].sum()
        ylabel = 'Total gigabase aligned'
    else:
        throughput = df.groupby('dataset')['lengths'].sum()
        ylabel = 'Total gigabase sequenced'
    ax = sns.barplot(x=list(throughput.index),
                     y=throughput / 1e9,
                     palette=palette,
                     order=df["dataset"].unique())
    ax.set(ylabel=ylabel,
           title=title or throughput_bases.title)
    plt.xticks(rotation=30, ha='center')
    throughput_bases.fig = ax.get_figure()
    throughput_bases.save(format=figformat)
    plt.close("all")
    return read_count, throughput_bases
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def overlay_histogram(df, path, palette=None):
    """
    Use plotly to create an overlay of length histograms
    Return html code, but also save as png

    Only has 10 colors, which get recycled up to 5 times.
    """
    if palette is None:
        palette = plotly.colors.DEFAULT_PLOTLY_COLORS * 5
    # (filename suffix, plot title, plotting function, histnorm setting)
    specs = [
        ("NanoComp_OverlayHistogram.html",
         "Histogram of read lengths",
         plot_overlay_histogram, ""),
        ("NanoComp_OverlayHistogram_Normalized.html",
         "Normalized histogram of read lengths",
         plot_overlay_histogram, "probability"),
        ("NanoComp_OverlayLogHistogram.html",
         "Histogram of log transformed read lengths",
         plot_log_histogram, ""),
        ("NanoComp_OverlayLogHistogram_Normalized.html",
         "Normalized histogram of log transformed read lengths",
         plot_log_histogram, "probability"),
    ]
    plots = []
    for suffix, plot_title, plot_func, histnorm in specs:
        current = Plot(path=path + suffix, title=plot_title)
        # Only pass histnorm when set, mirroring the original call signatures.
        extra = {"histnorm": histnorm} if histnorm else {}
        current.html, current.fig = plot_func(df, palette, title=current.title, **extra)
        current.save()
        plots.append(current)
    return plots
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def plot_log_histogram(df, palette, title, histnorm=""):
    """
    Plot overlaying histograms with log transformation of length
    Return both html and fig for png
    """
    traces = [go.Histogram(x=np.log10(df.loc[df["dataset"] == d, "lengths"]),
                           opacity=0.4,
                           name=d,
                           histnorm=histnorm,
                           marker=dict(color=c))
              for d, c in zip(df["dataset"].unique(), palette)]
    # Tick marks at powers of ten, up to one decade above the longest read.
    xtickvals = [10 ** exp for exp in range(10) if not 10 ** exp > 10 * np.amax(df["lengths"])]

    def make_layout():
        # A fresh Layout per figure, mirroring the original which built two.
        return go.Layout(barmode='overlay',
                         title=title,
                         xaxis=dict(tickvals=np.log10(xtickvals),
                                    ticktext=xtickvals))

    html = plotly.offline.plot({"data": traces, "layout": make_layout()},
                               output_type="div",
                               show_link=False)
    fig = go.Figure({"data": traces, "layout": make_layout()})
    return html, fig
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_file(db_folder, file_name):
    """Glob for the poor.

    Case-insensitively (and whitespace-insensitively) look up `file_name`
    among the entries of `db_folder`; return the full path of the first
    match, or None when the folder is missing or holds no matching entry.
    """
    if not os.path.isdir(db_folder):
        return None
    wanted = file_name.lower().strip()
    for entry in os.listdir(db_folder):
        if entry.lower().strip() == wanted:
            return os.path.join(db_folder, entry)
    return None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.