text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_model(cls, model_name, **kwargs):
""" Define a grid using the specifications of a given model. Parameters model_name : string Name the model (see :func:`get_supported_models` for available model names). Supports multiple formats (e.g., 'GEOS5', 'GEOS-5' or 'GEOS_5'). **kwargs : string Parameters that override the model or default grid settings (See Other Parameters below). Returns ------- A :class:`CTMGrid` object. Other Parameters resolution : (float, float) Horizontal grid resolution (lon, lat) or (DI, DJ) [degrees] Psurf : float Average surface pressure [hPa] (default: 1013.15) Notes ----- Regridded vertical models may have several valid names (e.g., 'GEOS5_47L' and 'GEOS5_REDUCED' refer to the same model). """ |
settings = _get_model_info(model_name)
model = settings.pop('model_name')
for k, v in list(kwargs.items()):
if k in ('resolution', 'Psurf'):
settings[k] = v
return cls(model, **settings) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def copy_from_model(cls, model_name, reference, **kwargs):
""" Set-up a user-defined grid using specifications of a reference grid model. Parameters model_name : string name of the user-defined grid model. reference : string or :class:`CTMGrid` instance Name of the reference model (see :func:`get_supported_models`), or a :class:`CTMGrid` object from which grid set-up is copied. **kwargs Any set-up parameter which will override the settings of the reference model (see :class:`CTMGrid` parameters). Returns ------- A :class:`CTMGrid` object. """ |
if isinstance(reference, cls):
settings = reference.__dict__.copy()
settings.pop('model')
else:
settings = _get_model_info(reference)
settings.pop('model_name')
settings.update(kwargs)
settings['reference'] = reference
return cls(model_name, **settings) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_layers(self, Psurf=1013.25, Ptop=0.01, **kwargs):
""" Compute scalars or coordinates associated to the vertical layers. Parameters grid_spec : CTMGrid object CTMGrid containing the information necessary to re-construct grid levels for a given model coordinate system. Returns ------- dictionary of vertical grid components, including eta (unitless), sigma (unitless), pressure (hPa), and altitude (km) on both layer centers and edges, ordered from bottom-to-top. Notes ----- For pure sigma grids, sigma coordinates are given by the esig (edges) and csig (centers). For both pure sigma and hybrid grids, pressures at layers edges L are calculated as follows: .. math:: P_e(L) = A_p(L) + B_p(L) * (P_{surf} - C_p) where :math:`P_{surf}`, :math:`P_{top}` Air pressures at the surface and the top of the modeled atmosphere (:attr:`Psurf` and :attr:`Ptop` attributes of the :class:`CTMGrid` instance). :math:`A_p(L)`, :math:`Bp(L)` Specified in the grid set-up (`Ap` and `Bp` attributes) for hybrid grids, or respectively equals :math:`P_{top}` and :attr:`esig` attribute for pure sigma grids. :math:`Cp(L)` equals :math:`P_{top}` for pure sigma grids or equals 0 for hybrid grids. Pressures at grid centers are averages of pressures at grid edges: .. math:: P_c(L) = (P_e(L) + P_e(L+1)) / 2 For hybrid grids, ETA coordinates of grid edges and grid centers are given by; .. math:: ETA_{e}(L) = (P_e(L) - P_{top}) / (P_{surf} - P_{top}) .. math:: ETA_{c}(L) = (P_c(L) - P_{top}) / (P_{surf} - P_{top}) Altitude values are fit using a 5th-degree polynomial; see `gridspec.prof_altitude` for more details. """ |
Psurf = np.asarray(Psurf)
output_ndims = Psurf.ndim + 1
if output_ndims > 3:
raise ValueError("`Psurf` argument must be a float or an array"
" with <= 2 dimensions (or None)")
# Compute all variables: uses little memory, is fast,
# and makes the code easier to read
SIGe = None
SIGc = None
ETAe = None
ETAc = None
if self.hybrid:
try:
Ap = broadcast_1d_array(self.Ap, output_ndims)
Bp = broadcast_1d_array(self.Bp, output_ndims)
except KeyError:
raise ValueError("Impossible to compute vertical levels,"
" data is missing (Ap, Bp)")
Cp = 0.
else:
try:
Bp = SIGe = broadcast_1d_array(self.esig, output_ndims)
SIGc = broadcast_1d_array(self.csig, output_ndims)
except KeyError:
raise ValueError("Impossible to compute vertical levels,"
" data is missing (esig, csig)")
Ap = Cp = Ptop
Pe = Ap + Bp * (Psurf - Cp)
Pc = 0.5 * (Pe[0:-1] + Pe[1:])
if self.hybrid:
ETAe = (Pe - Ptop)/(Psurf - Ptop)
ETAc = (Pc - Ptop)/(Psurf - Ptop)
else:
SIGe = SIGe * np.ones_like(Psurf)
SIGc = SIGc * np.ones_like(Psurf)
Ze = prof_altitude(Pe, **kwargs)
Zc = prof_altitude(Pc, **kwargs)
all_vars = {'eta_edges': ETAe,
'eta_centers': ETAc,
'sigma_edges': SIGe,
'sigma_centers': SIGc,
'pressure_edges': Pe,
'pressure_centers': Pc,
'altitude_edges': Ze,
'altitude_centers': Zc}
return all_vars |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_template_dirs():
"""existing directories where to search for jinja2 templates. The order is important. The first found template from the first found dir wins!""" |
return filter(lambda x: os.path.exists(x), [
# user dir
os.path.join(os.path.expanduser('~'), '.py2pack', 'templates'),
# system wide dir
os.path.join('/', 'usr', 'share', 'py2pack', 'templates'),
# usually inside the site-packages dir
os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates'),
]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _license_from_classifiers(data):
"""try to get a license from the classifiers""" |
classifiers = data.get('classifiers', [])
found_license = None
for c in classifiers:
if c.startswith("License :: OSI Approved :: "):
found_license = c.replace("License :: OSI Approved :: ", "")
return found_license |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _normalize_license(data):
"""try to get SDPX license""" |
license = data.get('license', None)
if not license:
# try to get license from classifiers
license = _license_from_classifiers(data)
if license:
if license in SDPX_LICENSES.keys():
data['license'] = SDPX_LICENSES[license]
else:
data['license'] = "%s (FIXME:No SPDX)" % (license)
else:
data['license'] = "" |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wrap_prompts_class(Klass):
""" Wrap an IPython's Prompt class This is needed in order for Prompt to inject the correct escape sequences at the right positions for shell integrations. """ |
try:
from prompt_toolkit.token import ZeroWidthEscape
except ImportError:
return Klass
class ITerm2IPythonPrompt(Klass):
def in_prompt_tokens(self, cli=None):
return [
(ZeroWidthEscape, last_status(self.shell)+BEFORE_PROMPT),
]+\
super(ITerm2IPythonPrompt, self).in_prompt_tokens(cli)+\
[(ZeroWidthEscape, AFTER_PROMPT)]
return ITerm2IPythonPrompt |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_all_keys(self, start=None):
""" A generator which yields a list of all valid keys starting at the given `start` offset. If `start` is `None`, we will start from the root of the tree. """ |
s = self.stream
if not start:
start = HEADER_SIZE + self.block_size * self.root_block
s.seek(start)
block_type = s.read(2)
if block_type == LEAF:
reader = LeafReader(self)
num_keys = struct.unpack('>i', reader.read(4))[0]
for _ in range(num_keys):
cur_key = reader.read(self.key_size)
# We do a tell/seek here so that the user can read from
# the file while this loop is still being run
cur_pos = s.tell()
yield cur_key
s.seek(cur_pos)
length = sbon.read_varint(reader)
reader.seek(length, 1)
elif block_type == INDEX:
(_, num_keys, first_child) = struct.unpack('>Bii', s.read(9))
children = [first_child]
for _ in range(num_keys):
# Skip the key field.
_ = s.read(self.key_size)
# Read pointer to the child block.
next_child = struct.unpack('>i', s.read(4))[0]
children.append(next_child)
for child_loc in children:
for key in self.get_all_keys(HEADER_SIZE + self.block_size * child_loc):
yield key
elif block_type == FREE:
pass
else:
raise Exception('Unhandled block type: {}'.format(block_type)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def readline(self, fmt=None):
""" Return next unformatted "line". If format is given, unpack content, otherwise return byte string. """ |
prefix_size = self._fix()
if fmt is None:
content = self.read(prefix_size)
else:
fmt = self.endian + fmt
fmt = _replace_star(fmt, prefix_size)
content = struct.unpack(fmt, self.read(prefix_size))
try:
suffix_size = self._fix()
except EOFError:
# when endian is invalid and prefix_size > total file size
suffix_size = -1
if prefix_size != suffix_size:
raise IOError(_FIX_ERROR)
return content |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def skipline(self):
""" Skip the next line and returns position and size of line. Raises IOError if pre- and suffix of line do not match. """ |
position = self.tell()
prefix = self._fix()
self.seek(prefix, 1) # skip content
suffix = self._fix()
if prefix != suffix:
raise IOError(_FIX_ERROR)
return position, prefix |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def writelines(self, lines, fmt):
""" Write `lines` with given `format`. """ |
if isinstance(fmt, basestring):
fmt = [fmt] * len(lines)
for f, line in zip(fmt, lines):
self.writeline(f, line, self.endian) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read_varint(stream):
"""Read while the most significant bit is set, then put the 7 least significant bits of all read bytes together to create a number. """ |
value = 0
while True:
byte = ord(stream.read(1))
if not byte & 0b10000000:
return value << 7 | byte
value = value << 7 | (byte & 0b01111111) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def open_bpchdataset(filename, fields=[], categories=[], tracerinfo_file='tracerinfo.dat', diaginfo_file='diaginfo.dat', endian=">", decode_cf=True, memmap=True, dask=True, return_store=False):
""" Open a GEOS-Chem BPCH file output as an xarray Dataset. Parameters filename : string Path to the output file to read in. {tracerinfo,diaginfo}_file : string, optional Path to the metadata "info" .dat files which are used to decipher the metadata corresponding to each variable in the output dataset. If not provided, will look for them in the current directory or fall back on a generic set. fields : list, optional List of a subset of variable names to return. This can substantially improve read performance. Note that the field here is just the tracer name - not the category, e.g. 'O3' instead of 'IJ-AVG-$_O3'. categories : list, optional List a subset of variable categories to look through. This can substantially improve read performance. endian : {'=', '>', '<'}, optional Endianness of file on disk. By default, "big endian" (">") is assumed. decode_cf : bool Enforce CF conventions for variable names, units, and other metadata default_dtype : numpy.dtype, optional Default datatype for variables encoded in file on disk (single-precision float by default). memmap : bool Flag indicating that data should be memory-mapped from disk instead of eagerly loaded into memory dask : bool Flag indicating that data reading should be deferred (delayed) to construct a task-graph for later execution return_store : bool Also return the underlying DataStore to the user Returns ------- ds : xarray.Dataset Dataset containing the requested fields (or the entire file), with data contained in proxy containers for access later. store : xarray.AbstractDataStore Underlying DataStore which handles the loading and processing of bpch files on disk """ |
store = BPCHDataStore(
filename, fields=fields, categories=categories,
tracerinfo_file=tracerinfo_file,
diaginfo_file=diaginfo_file, endian=endian,
use_mmap=memmap, dask_delayed=dask
)
ds = xr.Dataset.load_store(store)
# Record the file object underlying the store from which we culled this
# Dataset, so that we can clean it up later
ds._file_obj = store._bpch
# Handle CF corrections
if decode_cf:
decoded_vars = OrderedDict()
rename_dict = {}
for v in ds.variables:
cf_name = cf.get_valid_varname(v)
rename_dict[v] = cf_name
new_var = cf.enforce_cf_variable(ds[v])
decoded_vars[cf_name] = new_var
ds = xr.Dataset(decoded_vars, attrs=ds.attrs.copy())
# ds.rename(rename_dict, inplace=True)
# TODO: There's a bug with xr.decode_cf which eagerly loads data.
# Re-enable this once that bug is fixed
# Note that we do not need to decode the times because we explicitly
# kept track of them as we parsed the data.
# ds = xr.decode_cf(ds, decode_times=False)
# Set attributes for CF conventions
ts = get_timestamp()
ds.attrs.update(dict(
Conventions='CF1.6',
source=filename,
tracerinfo=tracerinfo_file,
diaginfo=diaginfo_file,
filetype=store._bpch.filetype,
filetitle=store._bpch.filetitle,
history=(
"{}: Processed/loaded by xbpch-{} from {}"
.format(ts, ver, filename)
),
))
# To immediately load the data from the BPCHDataProxy paylods, need
# to execute ds.data_vars for some reason...
if return_store:
return ds, store
else:
return ds |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def open_mfbpchdataset(paths, concat_dim='time', compat='no_conflicts', preprocess=None, lock=None, **kwargs):
""" Open multiple bpch files as a single dataset. You must have dask installed for this to work, as this greatly simplifies issues relating to multi-file I/O. Also, please note that this is not a very performant routine. I/O is still limited by the fact that we need to manually scan/read through each bpch file so that we can figure out what its contents are, since that metadata isn't saved anywhere. So this routine will actually sequentially load Datasets for each bpch file, then concatenate them along the "time" axis. You may wish to simply process each file individually, coerce to NetCDF, and then ingest through xarray as normal. Parameters paths : list of strs Filenames to load; order doesn't matter as they will be lexicographically sorted before we read in the data concat_dim : str, default='time' Dimension to concatenate Datasets over. We default to "time" since this is how GEOS-Chem splits output files compat : str (optional) String indicating how to compare variables of the same name for potential conflicts when merging: - 'broadcast_equals': all values must be equal when variables are broadcast against each other to ensure common dimensions. - 'equals': all values and dimensions must be the same. - 'identical': all values, dimensions and attributes must be the same. - 'no_conflicts': only values which are not null in both datasets must be equal. The returned dataset then contains the combination of all non-null values. preprocess : callable (optional) A pre-processing function to apply to each Dataset prior to concatenation lock : False, True, or threading.Lock (optional) Passed to :py:func:`dask.array.from_array`. By default, xarray employs a per-variable lock when reading data from NetCDF files, but this model has not yet been extended or implemented for bpch files and so this is not actually used. 
However, it is likely necessary before dask's multi-threaded backend can be used **kwargs : optional Additional arguments to pass to :py:func:`xbpch.open_bpchdataset`. """ |
from xarray.backends.api import _MultiFileCloser
# TODO: Include file locks?
# Check for dask
dask = kwargs.pop('dask', False)
if not dask:
raise ValueError("Reading multiple files without dask is not supported")
kwargs['dask'] = True
# Add th
if isinstance(paths, basestring):
paths = sorted(glob(paths))
if not paths:
raise IOError("No paths to files were passed into open_mfbpchdataset")
datasets = [open_bpchdataset(filename, **kwargs)
for filename in paths]
bpch_objs = [ds._file_obj for ds in datasets]
if preprocess is not None:
datasets = [preprocess(ds) for ds in datasets]
# Concatenate over time
combined = xr.auto_combine(datasets, compat=compat, concat_dim=concat_dim)
combined._file_obj = _MultiFileCloser(bpch_objs)
combined.attrs = datasets[0].attrs
ts = get_timestamp()
fns_str = " ".join(paths)
combined.attrs['history'] = (
"{}: Processed/loaded by xbpch-{} from {}"
.format(ts, ver, fns_str)
)
return combined |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def image_bytes(b, filename=None, inline=1, width='auto', height='auto', preserve_aspect_ratio=None):
""" Return a bytes string that displays image given by bytes b in the terminal If filename=None, the filename defaults to "Unnamed file" width and height are strings, following the format N: N character cells. Npx: N pixels. N%: N percent of the session's width or height. 'auto': The image's inherent size will be used to determine an appropriate dimension. preserve_aspect_ratio sets whether the aspect ratio of the image is preserved. The default (None) is True unless both width and height are set. See https://www.iterm2.com/documentation-images.html """ |
if preserve_aspect_ratio is None:
if width != 'auto' and height != 'auto':
preserve_aspect_ratio = False
else:
preserve_aspect_ratio = True
data = {
'name': base64.b64encode((filename or 'Unnamed file').encode('utf-8')).decode('ascii'),
'inline': inline,
'size': len(b),
'base64_img': base64.b64encode(b).decode('ascii'),
'width': width,
'height': height,
'preserve_aspect_ratio': int(preserve_aspect_ratio),
}
# IMAGE_CODE is a string because bytes doesn't support formatting
return IMAGE_CODE.format(**data).encode('ascii') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def display_image_bytes(b, filename=None, inline=1, width='auto', height='auto', preserve_aspect_ratio=None):
""" Display the image given by the bytes b in the terminal. If filename=None the filename defaults to "Unnamed file". width and height are strings, following the format N: N character cells. Npx: N pixels. N%: N percent of the session's width or height. 'auto': The image's inherent size will be used to determine an appropriate dimension. preserve_aspect_ratio sets whether the aspect ratio of the image is preserved. The default (None) is True unless both width and height are set. See https://www.iterm2.com/documentation-images.html """ |
sys.stdout.buffer.write(image_bytes(b, filename=filename, inline=inline,
width=width, height=height, preserve_aspect_ratio=preserve_aspect_ratio))
sys.stdout.write('\n') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def display_image_file(fn, width='auto', height='auto', preserve_aspect_ratio=None):
""" Display an image in the terminal. A newline is not printed. width and height are strings, following the format N: N character cells. Npx: N pixels. N%: N percent of the session's width or height. 'auto': The image's inherent size will be used to determine an appropriate dimension. preserve_aspect_ratio sets whether the aspect ratio of the image is preserved. The default (None) is True unless both width and height are set. See https://www.iterm2.com/documentation-images.html """ |
with open(os.path.realpath(os.path.expanduser(fn)), 'rb') as f:
sys.stdout.buffer.write(image_bytes(f.read(), filename=fn,
width=width, height=height,
preserve_aspect_ratio=preserve_aspect_ratio)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_entity_uuid_coords(self, uuid):
""" Returns the coordinates of the given entity UUID inside this world, or `None` if the UUID is not found. """ |
if uuid in self._entity_to_region_map:
coords = self._entity_to_region_map[uuid]
entities = self.get_entities(*coords)
for entity in entities:
if 'uniqueId' in entity.data and entity.data['uniqueId'] == uuid:
return tuple(entity.data['tilePosition'])
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_fuzzy_pattern(pattern):
""" Convert a string into a fuzzy regular expression pattern. :param pattern: The input pattern (a string). :returns: A compiled regular expression object. This function works by adding ``.*`` between each of the characters in the input pattern and compiling the resulting expression into a case insensitive regular expression. """ |
return re.compile(".*".join(map(re.escape, pattern)), re.IGNORECASE) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fuzzy_search(self, *filters):
""" Perform a "fuzzy" search that matches the given characters in the given order. :param filters: The pattern(s) to search for. :returns: The matched password names (a list of strings). """ |
matches = []
logger.verbose(
"Performing fuzzy search on %s (%s) ..", pluralize(len(filters), "pattern"), concatenate(map(repr, filters))
)
patterns = list(map(create_fuzzy_pattern, filters))
for entry in self.filtered_entries:
if all(p.search(entry.name) for p in patterns):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using fuzzy search.",
pluralize(len(matches), "password"),
)
return matches |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def select_entry(self, *arguments):
""" Select a password from the available choices. :param arguments: Refer to :func:`smart_search()`. :returns: The name of a password (a string) or :data:`None` (when no password matched the given `arguments`). """ |
matches = self.smart_search(*arguments)
if len(matches) > 1:
logger.info("More than one match, prompting for choice ..")
labels = [entry.name for entry in matches]
return matches[labels.index(prompt_for_choice(labels))]
else:
logger.info("Matched one entry: %s", matches[0].name)
return matches[0] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def simple_search(self, *keywords):
""" Perform a simple search for case insensitive substring matches. :param keywords: The string(s) to search for. :returns: The matched password names (a generator of strings). Only passwords whose names matches *all* of the given keywords are returned. """ |
matches = []
keywords = [kw.lower() for kw in keywords]
logger.verbose(
"Performing simple search on %s (%s) ..",
pluralize(len(keywords), "keyword"),
concatenate(map(repr, keywords)),
)
for entry in self.filtered_entries:
normalized = entry.name.lower()
if all(kw in normalized for kw in keywords):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using simple search.",
pluralize(len(matches), "password"),
)
return matches |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def smart_search(self, *arguments):
""" Perform a smart search on the given keywords or patterns. :param arguments: The keywords or patterns to search for. :returns: The matched password names (a list of strings). :raises: The following exceptions can be raised: - :exc:`.NoMatchingPasswordError` when no matching passwords are found. - :exc:`.EmptyPasswordStoreError` when the password store is empty. This method first tries :func:`simple_search()` and if that doesn't produce any matches it will fall back to :func:`fuzzy_search()`. If no matches are found an exception is raised (see above). """ |
matches = self.simple_search(*arguments)
if not matches:
logger.verbose("Falling back from substring search to fuzzy search ..")
matches = self.fuzzy_search(*arguments)
if not matches:
if len(self.filtered_entries) > 0:
raise NoMatchingPasswordError(
format("No passwords matched the given arguments! (%s)", concatenate(map(repr, arguments)))
)
else:
msg = "You don't have any passwords yet! (no *.gpg files found)"
raise EmptyPasswordStoreError(msg)
return matches |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_diaginfo(diaginfo_file):
""" Read an output's diaginfo.dat file and parse into a DataFrame for use in selecting and parsing categories. Parameters diaginfo_file : str Path to diaginfo.dat Returns ------- DataFrame containing the category information. """ |
widths = [rec.width for rec in diag_recs]
col_names = [rec.name for rec in diag_recs]
dtypes = [rec.type for rec in diag_recs]
usecols = [name for name in col_names if not name.startswith('-')]
diag_df = pd.read_fwf(diaginfo_file, widths=widths, names=col_names,
dtypes=dtypes, comment="#", header=None,
usecols=usecols)
diag_desc = {diag.name: diag.desc for diag in diag_recs
if not diag.name.startswith('-')}
return diag_df, diag_desc |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_tracerinfo(tracerinfo_file):
""" Read an output's tracerinfo.dat file and parse into a DataFrame for use in selecting and parsing categories. Parameters tracerinfo_file : str Path to tracerinfo.dat Returns ------- DataFrame containing the tracer information. """ |
widths = [rec.width for rec in tracer_recs]
col_names = [rec.name for rec in tracer_recs]
dtypes = [rec.type for rec in tracer_recs]
usecols = [name for name in col_names if not name.startswith('-')]
tracer_df = pd.read_fwf(tracerinfo_file, widths=widths, names=col_names,
dtypes=dtypes, comment="#", header=None,
usecols=usecols)
# Check an edge case related to a bug in GEOS-Chem v12.0.3 which
# erroneously dropped short/long tracer names in certain tracerinfo.dat outputs.
# What we do here is figure out which rows were erroneously processed (they'll
# have NaNs in them) and raise a warning if there are any
na_free = tracer_df.dropna(subset=['tracer', 'scale'])
only_na = tracer_df[~tracer_df.index.isin(na_free.index)]
if len(only_na) > 0:
warn("At least one row in {} wasn't decoded correctly; we strongly"
" recommend you manually check that file to see that all"
" tracers are properly recorded."
.format(tracerinfo_file))
tracer_desc = {tracer.name: tracer.desc for tracer in tracer_recs
if not tracer.name.startswith('-')}
# Process some of the information about which variables are hydrocarbons
# and chemical tracers versus other diagnostics.
def _assign_hydrocarbon(row):
if row['C'] != 1:
row['hydrocarbon'] = True
row['molwt'] = C_MOLECULAR_WEIGHT
else:
row['hydrocarbon'] = False
return row
tracer_df = (
tracer_df
.apply(_assign_hydrocarbon, axis=1)
.assign(chemical=lambda x: x['molwt'].astype(bool))
)
return tracer_df, tracer_desc |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read_from_bpch(filename, file_position, shape, dtype, endian, use_mmap=False):
""" Read a chunk of data from a bpch output file. Parameters filename : str Path to file on disk containing the data file_position : int Position (bytes) where desired data chunk begins shape : tuple of ints Resultant (n-dimensional) shape of requested data; the chunk will be read sequentially from disk and then re-shaped dtype : dtype Dtype of data; for best results, pass a dtype which includes an endian indicator, e.g. `dtype = np.dtype('>f4')` endian : str Endianness of data; should be consistent with `dtype` use_mmap : bool Memory map the chunk of data to the file on disk, else read immediately Returns ------- Array with shape `shape` and dtype `dtype` containing the requested chunk of data from `filename`. """ |
offset = file_position + 4
if use_mmap:
d = np.memmap(filename, dtype=dtype, mode='r', shape=shape,
offset=offset, order='F')
else:
with FortranFile(filename, 'rb', endian) as ff:
ff.seek(file_position)
d = np.array(ff.readline('*f'))
d = d.reshape(shape, order='F')
# As a sanity check, *be sure* that the resulting data block has the
# correct shape, and fail early if it doesn't.
if (d.shape != shape):
raise IOError("Data chunk read from {} does not have the right shape,"
" (expected {} but got {})"
.format(filename, shape, d.shape))
return d |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _read(self):
""" Helper function to load the data referenced by this bundle. """ |
if self._dask:
d = da.from_delayed(
delayed(read_from_bpch, )(
self.filename, self.file_position, self.shape,
self.dtype, self.endian, use_mmap=self._mmap
),
self.shape, self.dtype
)
else:
d = read_from_bpch(
self.filename, self.file_position, self.shape,
self.dtype, self.endian, use_mmap=self._mmap
)
return d |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def close(self):
""" Close this bpch file. """ |
if not self.fp.closed:
for v in list(self.var_data):
del self.var_data[v]
self.fp.close() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _read_metadata(self):
""" Read the main metadata packaged within a bpch file, indicating the output filetype and its title. """ |
filetype = self.fp.readline().strip()
filetitle = self.fp.readline().strip()
# Decode to UTF string, if possible
try:
filetype = str(filetype, 'utf-8')
filetitle = str(filetitle, 'utf-8')
except:
# TODO: Handle this edge-case of converting file metadata more elegantly.
pass
self.__setattr__('filetype', filetype)
self.__setattr__('filetitle', filetitle) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _read_var_data(self):
    """Scan every data block in this bpch file and build access handlers.

    Iterate over the blocks of this bpch file and record handlers in the
    form of `BPCHDataBundle`s for access to the data contained therein.
    Populates ``self.var_data`` (full variable name -> list of bundles,
    one per timestep) and ``self.var_attrs`` (full variable name ->
    attribute dict).
    """
    var_bundles = OrderedDict()
    var_attrs = OrderedDict()
    n_vars = 0
    # Walk the file record by record until end-of-file.
    while self.fp.tell() < self.fsize:
        var_attr = OrderedDict()
        # read first and second header lines
        line = self.fp.readline('20sffii')
        modelname, res0, res1, halfpolar, center180 = line
        line = self.fp.readline('40si40sdd40s7i')
        category_name, number, unit, tau0, tau1, reserved = line[:6]
        dim0, dim1, dim2, dim3, dim4, dim5, skip = line[6:]
        var_attr['number'] = number
        # Decode byte-strings to utf-8
        category_name = str(category_name, 'utf-8')
        var_attr['category'] = category_name.strip()
        unit = str(unit, 'utf-8')
        # get additional metadata from tracerinfo / diaginfo
        try:
            cat_df = self.diaginfo_df[
                self.diaginfo_df.name == category_name.strip()
            ]
            # TODO: Safer logic for handling case where more than one
            #       tracer metadata match was made
            # if len(cat_df > 1):
            #     raise ValueError(
            #         "More than one category matching {} found in "
            #         "diaginfo.dat".format(
            #             category_name.strip()
            #         )
            #     )
            # Safe now to select the only row in the DataFrame
            cat = cat_df.T.squeeze()
            # Tracer numbers in the file are relative to the category offset.
            tracer_num = int(cat.offset) + int(number)
            diag_df = self.tracerinfo_df[
                self.tracerinfo_df.tracer == tracer_num
            ]
            # TODO: Safer logic for handling case where more than one
            #       tracer metadata match was made
            # if len(diag_df > 1):
            #     raise ValueError(
            #         "More than one tracer matching {:d} found in "
            #         "tracerinfo.dat".format(tracer_num)
            #     )
            # Safe now to select only row in the DataFrame
            diag = diag_df.T.squeeze()
            diag_attr = diag.to_dict()
            if not unit.strip():  # unit may be empty in bpch
                unit = diag_attr['unit']  # but not in tracerinfo
            var_attr.update(diag_attr)
        except:
            # NOTE(review): bare except silently falls back to minimal
            # metadata whenever the tracerinfo/diaginfo lookup fails —
            # consider narrowing to the expected lookup errors.
            diag = {'name': '', 'scale': 1}
            var_attr.update(diag)
        var_attr['unit'] = unit
        vname = diag['name']
        fullname = category_name.strip() + "_" + vname
        # parse metadata, get data or set a data proxy
        if dim2 == 1:
            data_shape = (dim0, dim1)  # 2D field
        else:
            data_shape = (dim0, dim1, dim2)
        var_attr['original_shape'] = data_shape
        # Add proxy time dimension to shape
        data_shape = tuple([1, ] + list(data_shape))
        origin = (dim3, dim4, dim5)
        var_attr['origin'] = origin
        timelo, timehi = cf.tau2time(tau0), cf.tau2time(tau1)
        pos = self.fp.tell()
        # Note that we don't pass a dtype, and assume everything is
        # single-fp floats with the correct endian, as hard-coded
        var_bundle = BPCHDataBundle(
            data_shape, self.endian, self.filename, pos, [timelo, timehi],
            metadata=var_attr,
            use_mmap=self.use_mmap, dask_delayed=self.dask_delayed
        )
        # Advance past the data payload without reading it.
        self.fp.skipline()
        # Save the data as a "bundle" for concatenating in the final step
        if fullname in var_bundles:
            var_bundles[fullname].append(var_bundle)
        else:
            var_bundles[fullname] = [var_bundle, ]
            var_attrs[fullname] = var_attr
            n_vars += 1
    self.var_data = var_bundles
    self.var_attrs = var_attrs
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_timestamp(time=True, date=True, fmt=None):
    """Return the current timestamp in machine local time.

    Parameters
    ----------
    time, date : bool
        Include the time and/or date component in the output.
    fmt : str, optional
        Explicit ``strftime`` format string; overrides `time`/`date`.

    Raises
    ------
    ValueError
        If `fmt` is not given and both `time` and `date` are False.
    """
    if fmt is None:
        parts = []
        if time:
            parts.append("%H:%M:%S")
        if date:
            parts.append("%m-%d-%Y")
        if not parts:
            raise ValueError("One of `date` or `time` must be True!")
        fmt = " ".join(parts)
    return datetime.now().strftime(fmt)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fix_attr_encoding(ds):
    """Sanitize metadata on a bpch-derived dataset before writing.

    This is a temporary hot-fix to handle the way metadata is encoded
    when we read data directly from bpch files. It removes the
    'scale_factor' and 'units' attributes we encode with the data we
    ingest, converts boolean 'hydrocarbon' and 'chemical' attributes to
    binary integers, and removes the 'units' attribute from the "time"
    dimension since that too is implicitly encoded. Once upstream
    issues in decoding dask-wrapped data are fixed, this will be
    removed.

    Parameters
    ----------
    ds : xarray.Dataset
        Dataset whose variable attributes are cleaned in place.

    Returns
    -------
    The same dataset, with attributes sanitized.
    """
    def _maybe_del_attr(da, attr):
        """ Possibly delete an attribute on a DataArray if it's present """
        if attr in da.attrs:
            del da.attrs[attr]
        return da
    def _maybe_decode_attr(da, attr):
        # TODO: Fix this so that bools get written as attributes just fine
        """ Possibly coerce an attribute on a DataArray to an easier type
        to write to disk. """
        # bool -> int. BUGFIX: the original check was
        # `type(da.attrs[attr] == bool)`, which is always truthy and
        # therefore coerced *any* attribute value through int();
        # `isinstance` restricts the coercion to actual booleans.
        if (attr in da.attrs) and isinstance(da.attrs[attr], bool):
            da.attrs[attr] = int(da.attrs[attr])
        return da
    for v in ds.data_vars:
        da = ds[v]
        da = _maybe_del_attr(da, 'scale_factor')
        da = _maybe_del_attr(da, 'units')
        da = _maybe_decode_attr(da, 'hydrocarbon')
        da = _maybe_decode_attr(da, 'chemical')
    # Also delete attributes on time.
    if hasattr(ds, 'time'):
        times = ds.time
        times = _maybe_del_attr(times, 'units')
    return ds
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def after_output(command_status):
    """Emit the shell sequence that follows the command output.

    The ``command_status`` should be an exit status in the range 0-255.
    """
    if command_status not in range(0, 256):
        raise ValueError("command_status must be an integer in the range 0-255")
    out = sys.stdout
    out.write(AFTER_OUTPUT.format(command_status=command_status))
    # Flushing is important as the command timing feature may be based
    # on AFTER_OUTPUT in the future.
    out.flush()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def enforce_cf_variable(var, mask_and_scale=True):
    """Given a Variable constructed from GEOS-Chem output, enforce
    CF-compliant metadata and formatting.

    Until a bug with lazily-loaded data and masking/scaling is resolved
    in xarray, you have the option to manually mask and scale the data
    here.

    Parameters
    ----------
    var : xarray.Variable
        A variable holding information decoded from GEOS-Chem output.
    mask_and_scale : bool
        Flag to scale and mask the data given the unit conversions
        provided.

    Returns
    -------
    out : xarray.Variable
        The original variable processed to conform to CF standards.

    .. note:: This method borrows heavily from the ideas in
       ``xarray.decode_cf_variable``
    """
    var = as_variable(var)
    data = var._data  # avoid loading by accessing _data instead of data
    dims = var.dims
    attrs = var.attrs.copy()
    encoding = var.encoding.copy()
    # NOTE(review): orig_dtype is currently unused.
    orig_dtype = data.dtype
    # Process masking/scaling coordinates. We only expect a "scale" value
    # for the units with this output.
    if 'scale' in attrs:
        # Rename GEOS-Chem's "scale" attribute to the CF-standard
        # "scale_factor" in both attrs and encoding.
        scale = attrs.pop('scale')
        attrs['scale_factor'] = scale
        encoding['scale_factor'] = scale
        # TODO: Once the xr.decode_cf bug is fixed, we won't need to manually
        #       handle masking/scaling
        if mask_and_scale:
            data = scale*data
    # Process units
    # TODO: How do we want to handle parts-per-* units? These are not part of
    #       the udunits standard, and the CF conventions suggest using units
    #       like 1e-6 for parts-per-million. But we potentially mix mass and
    #       volume/molar mixing ratios in GEOS-Chem output, so we need a way
    #       to handle that edge case.
    if 'unit' in attrs:
        # CF uses the plural "units"; normalize the unit string as well.
        unit = attrs.pop('unit')
        unit = get_cfcompliant_units(unit)
        attrs['units'] = unit
    # TODO: Once the xr.decode_cf bug is fixed, we won't need to manually
    #       handle masking/scaling
    return Variable(dims, data, attrs, encoding=encoding)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def published(self, check_language=True, language=None, kwargs=None, exclude_kwargs=None):
    """Return all published entries.

    Published means the entry's publication date has been reached or is
    unset. With ``check_language`` (the default), only entries whose
    translation matches ``language`` (or the currently active language)
    and which have ``is_published`` set are considered.

    Extra ORM lookups can be supplied via ``kwargs`` (filter) and
    ``exclude_kwargs`` (exclude).
    """
    if check_language:
        # NOTE(review): is_published is only enforced in this branch —
        # confirm that check_language=False should skip it.
        qs = NewsEntry.objects.language(language or get_language()).filter(
            is_published=True)
    else:
        qs = self.get_queryset()
    # Future-dated entries are not yet published; undated entries count
    # as published.
    qs = qs.filter(
        models.Q(pub_date__lte=now()) | models.Q(pub_date__isnull=True)
    )
    if kwargs is not None:
        qs = qs.filter(**kwargs)
    if exclude_kwargs is not None:
        qs = qs.exclude(**exclude_kwargs)
    return qs.distinct().order_by('-pub_date')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def recent(self, check_language=True, language=None, limit=3, exclude=None, kwargs=None, category=None):
    """Return the `limit` most recently published news entries.

    Optionally restricts the result to one `category` and/or drops a
    single `exclude` entry (e.g. the entry currently being displayed).
    """
    if category:
        kwargs = kwargs or {}
        kwargs['categories__in'] = [category]
    entries = self.published(
        check_language=check_language, language=language, kwargs=kwargs)
    if exclude:
        entries = entries.exclude(pk=exclude.pk)
    return entries[:limit]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_newsentry_meta_description(newsentry):
    """Return the meta description for the given entry.

    Falls back to the entry's rendered description, truncated to 160
    characters with a trailing ellipsis, when no explicit meta
    description is set.
    """
    if newsentry.meta_description:
        return newsentry.meta_description
    # No SEO addon data found: derive the text from the placeholders.
    text = newsentry.get_description()
    if len(text) <= 160:
        return text
    return u'{}...'.format(text[:160])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _requirement_filter_by_marker(req):
# type: (pkg_resources.Requirement) -> bool """Check if the requirement is satisfied by the marker. This function checks for a given Requirement whether its environment marker is satisfied on the current platform. Currently only the python version and system platform are checked. """ |
if hasattr(req, 'marker') and req.marker:
marker_env = {
'python_version': '.'.join(map(str, sys.version_info[:2])),
'sys_platform': sys.platform
}
if not req.marker.evaluate(environment=marker_env):
return False
return True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _requirement_find_lowest_possible(req):
    # type: (pkg_resources.Requirement) -> List[str]
    """Compute the lowest version able to satisfy `req`.

    Specs using the ``!=`` operator are skipped, since an explicitly
    excluded version must never become the minimal supported version.

    Returns the requirement name, optionally followed by the comparison
    operator and the version, e.g. ``['foobar', '>=', '1.0']`` or just
    ``['baz']`` when no usable spec exists.
    """
    lowest_version = None  # type: Optional[str]
    lowest_comp = None  # type: Optional[str]
    for comp, raw_version in req.specs:
        # we don't want to have a not supported version as minimal version
        if comp == '!=':
            continue
        # try to use the lowest version available
        # i.e. for ">=0.8.4,>=0.9.7", select "0.8.4"
        if (lowest_version is None or
                pkg_resources.parse_version(raw_version) <
                pkg_resources.parse_version(lowest_version)):
            lowest_version = raw_version
            lowest_comp = comp
    result = [req.unsafe_name]
    if lowest_version is not None:
        result.append(lowest_comp)
        result.append(lowest_version)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _ensure_coroutine_function(func):
"""Return a coroutine function. func: either a coroutine function or a regular function Note a coroutine function is not a coroutine! """ |
if asyncio.iscoroutinefunction(func):
return func
else:
@asyncio.coroutine
def coroutine_function(evt):
func(evt)
yield
return coroutine_function |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def location(self):
    """Return a string uniquely identifying the event.

    Unlike the ``id`` UUID, this "<stream>/<type>-<sequence>" string
    can be used to find the event in the event store UI. The value is
    computed once and cached on the instance.
    """
    if self._location is None:
        self._location = "%s/%s-%s" % (self.stream, self.type, self.sequence)
    return self._location
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
async def find_backwards(self, stream_name, predicate, predicate_label='predicate'):
    """Return first event matching predicate, or None if none exists.

    Pages through the stream starting at the head and following "next"
    links, i.e. from the most recent event towards the oldest.

    Note: 'backwards', both here and in Event Store, means 'towards the
    event emitted furthest in the past'.

    NOTE(review): `stream_name` is unused here — the stream appears to
    be implied by ``self._head_uri``; confirm and remove or use it.

    Raises
    ------
    StreamNotFoundError
        If the head page of the stream cannot be fetched.
    """
    logger = self._logger.getChild(predicate_label)
    logger.info('Fetching first matching event')
    uri = self._head_uri
    try:
        page = await self._fetcher.fetch(uri)
    except HttpNotFoundError as e:
        raise StreamNotFoundError() from e
    while True:
        # First event on this page satisfying the predicate, if any.
        evt = next(page.iter_events_matching(predicate), None)
        if evt is not None:
            return evt
        uri = page.get_link("next")
        if uri is None:
            # Ran out of pages without a match.
            logger.warning("No matching event found")
            return None
        page = await self._fetcher.fetch(uri)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main():
    """Command line interface for the ``qpass`` program.

    Parses the command line options, selects the requested action
    (show, edit or list matching entries) and runs it against the
    configured password store(s). Exits with status 1 on errors and on
    keyboard interrupts.
    """
    # Initialize logging to the terminal.
    coloredlogs.install()
    # Prepare for command line argument parsing.
    action = show_matching_entry  # default action when no -e/-l given
    program_opts = dict(exclude_list=[])
    show_opts = dict(filters=[], use_clipboard=is_clipboard_supported())
    verbosity = 0
    # Parse the command line arguments.
    try:
        options, arguments = getopt.gnu_getopt(
            sys.argv[1:],
            "elnp:f:x:vqh",
            ["edit", "list", "no-clipboard", "password-store=", "filter=", "exclude=", "verbose", "quiet", "help"],
        )
        for option, value in options:
            if option in ("-e", "--edit"):
                action = edit_matching_entry
            elif option in ("-l", "--list"):
                action = list_matching_entries
            elif option in ("-n", "--no-clipboard"):
                show_opts["use_clipboard"] = False
            elif option in ("-p", "--password-store"):
                # Multiple -p options accumulate password stores.
                stores = program_opts.setdefault("stores", [])
                stores.append(PasswordStore(directory=value))
            elif option in ("-f", "--filter"):
                show_opts["filters"].append(value)
            elif option in ("-x", "--exclude"):
                program_opts["exclude_list"].append(value)
            elif option in ("-v", "--verbose"):
                coloredlogs.increase_verbosity()
                verbosity += 1
            elif option in ("-q", "--quiet"):
                coloredlogs.decrease_verbosity()
                verbosity -= 1
            elif option in ("-h", "--help"):
                usage(__doc__)
                return
            else:
                raise Exception("Unhandled option! (programming error)")
        # Without a search pattern only the list action makes sense.
        if not (arguments or action == list_matching_entries):
            usage(__doc__)
            return
    except Exception as e:
        warning("Error: %s", e)
        sys.exit(1)
    # Execute the requested action.
    try:
        show_opts["quiet"] = verbosity < 0
        # Only the "show" action accepts the show-specific options.
        kw = show_opts if action == show_matching_entry else {}
        action(QuickPass(**program_opts), arguments, **kw)
    except PasswordStoreError as e:
        # Known issues don't get a traceback.
        logger.error("%s", e)
        sys.exit(1)
    except KeyboardInterrupt:
        # If the user interrupted an interactive prompt they most likely did so
        # intentionally, so there's no point in generating more output here.
        sys.exit(1)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def edit_matching_entry(program, arguments):
    """Open the single entry matching `arguments` in the pass editor."""
    entry = program.select_entry(*arguments)
    context = entry.context
    context.execute("pass", "edit", entry.name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def SVGdocument():
    "Create a default, empty SVG 1.1 document"
    import xml.dom.minidom
    impl = xml.dom.minidom.getDOMImplementation()
    doctype = impl.createDocumentType(
        "svg",
        "-//W3C//DTD SVG 1.1//EN",
        "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd",
    )
    doc = impl.createDocument(None, "svg", doctype)
    # Declare the SVG namespace on the root element.
    doc.documentElement.setAttribute('xmlns', 'http://www.w3.org/2000/svg')
    return doc
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def polyline(document, coords):
    "polyline through the given flat (x0, y0, x1, y1, ...) coordinates"
    pairs = ["%s,%s" % (coords[i], coords[i + 1])
             for i in range(0, len(coords), 2)]
    return setattribs(
        document.createElement('polyline'),
        points=' '.join(pairs),
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cubic_bezier(document, coords):
    """Return an SVG <path> element tracing a cubic bezier polyline.

    `coords` is a flat sequence (x0, y0, x1, y1, ...): the first point
    is the start, and each following group of three points supplies the
    two control points and the end point of one cubic segment.
    """
    element = document.createElement('path')
    points = [(coords[i], coords[i + 1]) for i in range(0, len(coords), 2)]
    path = ["M%s %s" % points[0]]
    # BUGFIX: Python 2's `xrange` replaced with `range` — the rest of
    # this codebase is Python 3.
    for n in range(1, len(points), 3):
        A, B, C = points[n:n + 3]
        path.append("C%s,%s %s,%s %s,%s" % (A[0], A[1], B[0], B[1], C[0], C[1]))
    element.setAttribute('d', ' '.join(path))
    return element
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def smoothpolygon(document, coords):
    """Return an SVG <path> element for a smoothed, filled polygon.

    `coords` is a flat (x0, y0, x1, y1, ...) sequence. Each vertex is
    rounded with a quadratic curve through the midpoints of its
    adjacent edges (via the module-level `lerp` helper); the path is
    closed with "z".
    """
    element = document.createElement('path')
    path = []
    points = [(coords[i], coords[i+1]) for i in range(0, len(coords), 2)]
    def pt(points):
        # For every vertex b yield (midpoint(a, b), b, midpoint(b, c)),
        # where a and c are the neighbouring vertices (wrapping around).
        p = points
        n = len(points)
        for i in range(0, len(points)):
            a = p[(i-1) % n]
            b = p[i]
            c = p[(i+1) % n]
            yield lerp(a, b, 0.5), b, lerp(b, c, 0.5)
    for i, (A, B, C) in enumerate(pt(points)):
        if i == 0:
            # First segment: move to the leading midpoint, then one
            # explicit quadratic curve through the vertex.
            path.append("M%s,%s Q%s,%s %s,%s" % (A[0], A[1], B[0], B[1], C[0], C[1]))
        else:
            # "T" continues with the reflection of the previous control
            # point, keeping the outline smooth.
            path.append("T%s,%s" % (C[0], C[1]))
    path.append("z")
    element.setAttribute('d', ' '.join(path))
    return element
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def font_actual(tkapp, font):
    "Return the actual font parameters as an option-name -> value dict"
    raw = tkapp.call('font', 'actual', font)
    # `raw` alternates "-option", value, ...; strip the leading dash
    # from each option name.
    return {raw[i][1:]: raw[i + 1] for i in range(0, len(raw), 2)}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_dash(string, width):
    "Parse a Tk dash pattern given as a string of '-', '_', ',', '.' and spaces"
    # DashConvert from {tk-sources}/generic/tkCanvUtil.c
    w = max(1, int(width + 0.5))
    # On/off segment lengths (in units of w) emitted per pattern char.
    segments = {"_": (8, 4), "-": (6, 4), ",": (4, 4), ".": (2, 4)}
    result = []
    for c in string:
        if c == " ":
            # A space stretches the preceding gap, if there is one.
            if result:
                result[-1] += w + 1
        elif c in segments:
            on, off = segments[c]
            result.append(on * w)
            result.append(off * w)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prof_altitude(pressure, p_coef=(-0.028389, -0.0493698, 0.485718, 0.278656, -17.5703, 48.0926)):
    """Return altitude [km] for given pressure [hPa].

    Evaluates the polynomial `p_coef` at log10(pressure). The default
    coefficients are a 5th degree fit to US Standard Atmosphere data
    for 0-100 km (accuracy ~1%, ~0.5% below 30 km); results degrade for
    pressures below about 3e-4 hPa.

    Returns an array with the same shape as `pressure`.

    See Also
    --------
    prof_pressure : Returns pressure for given altitude.
    prof_temperature : Returns air temperature for given altitude.
    """
    pressure = np.asarray(pressure)
    log_p = np.log10(pressure.flatten())
    return np.polyval(p_coef, log_p).reshape(pressure.shape)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prof_pressure(altitude, z_coef=(1.94170e-9, -5.14580e-7, 4.57018e-5, -1.55620e-3, -4.61994e-2, 2.99955)):
    """Return pressure [hPa] for given altitude [km].

    Evaluates the polynomial `z_coef` at the altitude values and raises
    10 to the result. The default coefficients are a 5th degree fit to
    USSA data for 0-100 km (accuracy ~1%, ~0.5% below 30 km); results
    degrade for altitudes above 100 km.

    Returns an array with the same shape as `altitude`.

    See Also
    --------
    prof_altitude : Returns altitude for given pressure.
    prof_temperature : Returns air temperature for given altitude.
    """
    altitude = np.asarray(altitude)
    log_p = np.polyval(z_coef, altitude.flatten())
    return np.power(10, log_p).reshape(altitude.shape)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_model_info(model_name):
    """Get the grid specifications for a given model.

    Parameters
    ----------
    model_name : string
        Name of the model. Supports multiple formats
        (e.g., 'GEOS5', 'GEOS-5' or 'GEOS_5').

    Returns
    -------
    specifications : dict
        Grid specifications as a dictionary.

    Raises
    ------
    ValueError
        If the model is not supported (see `models`) or if the given
        `model_name` corresponds to several entries in the list of
        supported models.
    """
    # trying to get as much as possible a valid model name from the given
    # `model_name`, using regular expressions.
    split_name = re.split(r'[\-_\s]', model_name.strip().upper())
    sep_chars = ('', ' ', '-', '_')
    # Generate every combination of separators between the name
    # fragments and test each candidate against the supported models.
    gen_seps = itertools.combinations_with_replacement(
        sep_chars, len(split_name) - 1
    )
    test_names = ("".join((n for n in itertools.chain(*list(zip(split_name,
                                                                s + ('',))))))
                  for s in gen_seps)
    match_names = list([name for name in test_names if name
                        in _get_supported_models()])
    if not len(match_names):
        raise ValueError("Model '{0}' is not supported".format(model_name))
    elif len(match_names) > 1:
        raise ValueError("Multiple matched models for given model name '{0}'"
                         .format(model_name))
    valid_model_name = match_names[0]
    # Merge the settings of the whole reference chain so that more
    # specific models override values inherited from their family.
    parent_models = _find_references(valid_model_name)
    model_spec = dict()
    for m in parent_models:
        model_spec.update(MODELS[m])
    model_spec.pop('reference')
    model_spec['model_family'] = parent_models[0]
    model_spec['model_name'] = valid_model_name
    return model_spec
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_archive_filelist(filename):
# type: (str) -> List[str] """Extract the list of files from a tar or zip archive. Args: filename: name of the archive Returns: Sorted list of files in the archive, excluding './' Raises: ValueError: when the file is neither a zip nor a tar archive FileNotFoundError: when the provided file does not exist (for Python 3) IOError: when the provided file does not exist (for Python 2) """ |
names = [] # type: List[str]
if tarfile.is_tarfile(filename):
with tarfile.open(filename) as tar_file:
names = sorted(tar_file.getnames())
elif zipfile.is_zipfile(filename):
with zipfile.ZipFile(filename) as zip_file:
names = sorted(zip_file.namelist())
else:
raise ValueError("Can not get filenames from '{!s}'. "
"Not a tar or zip file".format(filename))
if "./" in names:
names.remove("./")
return names |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _augment_book(self, uuid, event):
    """Checks if the newly created object is a book and only has an
    ISBN. If so, tries to fetch the book data off the internet.

    :param uuid: uuid of book to augment
    :param event: originating event; its client is notified of results
        and errors
    """
    try:
        if not isbnmeta:
            self.log(
                "No isbntools found! Install it to get full "
                "functionality!",
                lvl=warn)
            return
        new_book = objectmodels['book'].find_one({'uuid': uuid})
        try:
            # Only attempt a lookup when an ISBN is actually present.
            if len(new_book.isbn) != 0:
                self.log('Got a lookup candidate: ', new_book._fields)
                try:
                    meta = isbnmeta(
                        new_book.isbn,
                        service=self.config.isbnservice
                    )
                    # Translate service-specific metadata keys to our
                    # schema field names; a tuple value in the mapping
                    # carries (field_name, converter).
                    mapping = libraryfieldmapping[
                        self.config.isbnservice
                    ]
                    new_meta = {}
                    for key in meta.keys():
                        if key in mapping:
                            if isinstance(mapping[key], tuple):
                                name, conv = mapping[key]
                                try:
                                    new_meta[name] = conv(meta[key])
                                except ValueError:
                                    # Skip values the converter rejects.
                                    self.log(
                                        'Bad value from lookup:',
                                        name, conv, key
                                    )
                            else:
                                new_meta[mapping[key]] = meta[key]
                    new_book.update(new_meta)
                    new_book.save()
                    self._notify_result(event, new_book)
                    self.log("Book successfully augmented from ",
                             self.config.isbnservice)
                except Exception as e:
                    # Lookup failed; tell the requesting client.
                    self.log("Error during meta lookup: ", e, type(e),
                             new_book.isbn, lvl=error, exc=True)
                    error_response = {
                        'component': 'hfos.alert.manager',
                        'action': 'notify',
                        'data': {
                            'type': 'error',
                            'message': 'Could not look up metadata, sorry:' + str(e)
                        }
                    }
                    self.log(event, event.client, pretty=True)
                    self.fireEvent(send(event.client.uuid, error_response))
        except Exception as e:
            self.log("Error during book update.", e, type(e),
                     exc=True, lvl=error)
    except Exception as e:
        self.log("Book creation notification error: ", uuid, e, type(e),
                 lvl=error, exc=True)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def opened(self, *args):
    """Initiates communication with the remote controlled device.

    Flashes a short "hello", then brings all actuators into a safe
    neutral state (engine off, rudder centered, pump off) and sets the
    device's display message.

    :param args: serial-open callback arguments (only logged)
    """
    self._serial_open = True
    self.log("Opened: ", args, lvl=debug)
    self._send_command(b'l,1')  # Saying hello, shortly
    self.log("Turning off engine, pump and neutralizing rudder")
    self._send_command(b'v')
    self._handle_servo(self._machine_channel, 0)
    # 127 is mid-scale, per the "neutralizing rudder" log above.
    self._handle_servo(self._rudder_channel, 127)
    self._set_digital_pin(self._pump_channel, 0)
    # self._send_command(b'h')
    self._send_command(b'l,0')
    self._send_command(b'm,HFOS Control')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_machinerequest(self, event):
    """Apply a newly requested machine (engine) power setting.

    :param event: remote-control event carrying `controlvalue`
    """
    value = event.controlvalue
    self.log("Updating new machine power: ", value)
    self._handle_servo(self._machine_channel, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_rudderrequest(self, event):
    """Apply a newly requested rudder angle.

    :param event: remote-control event carrying `controlvalue`
    """
    angle = event.controlvalue
    self.log("Updating new rudder angle: ", angle)
    self._handle_servo(self._rudder_channel, angle)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_pumprequest(self, event):
    """Switch the connected pump on or off.

    :param event: remote-control event carrying `controlvalue`
    """
    state = event.controlvalue
    self.log("Updating pump status: ", state)
    self._set_digital_pin(self._pump_channel, state)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def provisionList(items, database_name, overwrite=False, clear=False, skip_user_check=False):
    """Provisions a list of items according to their schema.

    :param items: A list of provisionable items.
    :param database_name: Name of the warmongo object model to
        provision into.
    :param overwrite: Causes existing items to be overwritten
    :param clear: Clears the collection first (Danger!)
    :param skip_user_check: Skips checking if a system user is existing
        already (for user provisioning)
    :return:
    """
    log('Provisioning', items, database_name, lvl=debug)
    system_user = None
    def get_system_user():
        """Retrieves the node local system user"""
        user = objectmodels['user'].find_one({'name': 'System'})
        try:
            log('System user uuid: ', user.uuid, lvl=verbose)
            return user.uuid
        except AttributeError as e:
            # find_one returned None: no system user installed yet.
            log('No system user found:', e, lvl=warn)
            log('Please install the user provision to setup a system user or check your database configuration',
                lvl=error)
            return False
    # TODO: Do not check this on specific objects but on the model (i.e. once)
    def needs_owner(obj):
        """Determines whether a basic object has an ownership field"""
        # NOTE(review): if 'perms' is missing, iterating None raises
        # TypeError here; the caller below catches and logs it.
        for privilege in obj._fields.get('perms', None):
            if 'owner' in obj._fields['perms'][privilege]:
                return True
        return False
    import pymongo
    from hfos.database import objectmodels, dbhost, dbport, dbname
    database_object = objectmodels[database_name]
    log(dbhost, dbname)
    # TODO: Fix this to make use of the dbhost
    client = pymongo.MongoClient(dbhost, dbport)
    db = client[dbname]
    if not skip_user_check:
        system_user = get_system_user()
        if not system_user:
            return
    else:
        # TODO: Evaluate what to do instead of using a hardcoded UUID
        # This is usually only here for provisioning the system user
        # One way to avoid this, is to create (instead of provision)
        # this one upon system installation.
        system_user = '0ba87daa-d315-462e-9f2e-6091d768fd36'
    col_name = database_object.collection_name()
    if clear is True:
        log("Clearing collection for", col_name, lvl=warn)
        db.drop_collection(col_name)
    counter = 0
    for no, item in enumerate(items):
        new_object = None
        item_uuid = item['uuid']
        log("Validating object (%i/%i):" % (no + 1, len(items)), item_uuid, lvl=debug)
        if database_object.count({'uuid': item_uuid}) > 0:
            log('Object already present', lvl=warn)
            if overwrite is False:
                log("Not updating item", item, lvl=warn)
            else:
                log("Overwriting item: ", item_uuid, lvl=warn)
                new_object = database_object.find_one({'uuid': item_uuid})
                new_object._fields.update(item)
        else:
            new_object = database_object(item)
        if new_object is not None:
            try:
                # Attach the system user as owner when the schema has
                # ownership permissions but no owner was provided.
                if needs_owner(new_object):
                    if not hasattr(new_object, 'owner'):
                        log('Adding system owner to object.', lvl=verbose)
                        new_object.owner = system_user
            except Exception as e:
                log('Error during ownership test:', e, type(e),
                    exc=True, lvl=error)
            try:
                new_object.validate()
                new_object.save()
                counter += 1
            except ValidationError as e:
                raise ValidationError(
                    "Could not provision object: " + str(item_uuid), e)
    log("Provisioned %i out of %i items successfully." % (counter, len(items)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DefaultExtension(schema_obj, form_obj, schemata=None):
    """Create a default schema/form extension entry for each schema.

    Parameters
    ----------
    schema_obj : dict
        Schema fragment appended under ``properties/modules``.
    form_obj : dict
        Form fragment inserted under ``modules -> items/``.
    schemata : list of str, optional
        Names of the schemata to extend; defaults to
        ``['systemconfig', 'profile', 'client']``.

    Returns
    -------
    dict
        Mapping of schema name to its extension definition. Each entry
        gets its own dict structure — the previous implementation
        shared one mutable dict between all entries, so mutating one
        entry silently changed the others.
    """
    if schemata is None:
        schemata = ['systemconfig', 'profile', 'client']
    output = {}
    for schema in schemata:
        # Build a fresh structure per schema to avoid shared-state
        # aliasing between entries.
        output[schema] = {
            'schema': {
                "properties/modules": [schema_obj]
            },
            'form': {
                'modules': {
                    'items/': form_obj
                }
            }
        }
    return output
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def copytree(root_src_dir, root_dst_dir, hardlink=True):
    """Copies a whole directory tree.

    Mirrors *root_src_dir* into *root_dst_dir*, creating missing target
    directories. Existing target files are removed first when hardlinking.

    :param root_src_dir: source tree root
    :param root_dst_dir: destination tree root
    :param hardlink: if True, hardlink files instead of copying them
    """
    for src_dir, dirs, files in os.walk(root_src_dir):
        # Map the source path onto the destination tree (first occurrence only).
        dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
        if not os.path.exists(dst_dir):
            os.makedirs(dst_dir)
        for file_ in files:
            src_file = os.path.join(src_dir, file_)
            dst_file = os.path.join(dst_dir, file_)
            try:
                if os.path.exists(dst_file):
                    if hardlink:
                        # os.link fails on existing targets, so remove first.
                        hfoslog('Removing frontend link:', dst_file,
                                emitter='BUILDER', lvl=verbose)
                        os.remove(dst_file)
                    else:
                        # copy() overwrites in place; nothing to remove.
                        hfoslog('Overwriting frontend file:', dst_file,
                                emitter='BUILDER', lvl=verbose)
                hfoslog('Hardlinking ', src_file, dst_dir, emitter='BUILDER',
                        lvl=verbose)
                if hardlink:
                    os.link(src_file, dst_file)
                else:
                    copy(src_file, dst_dir)
            except PermissionError as e:
                hfoslog(
                    " No permission to remove/create target %s for "
                    "frontend:" % ('link' if hardlink else 'copy'),
                    dst_dir, e, emitter='BUILDER', lvl=error)
            except Exception as e:
                # Best effort: log and continue with the remaining files.
                hfoslog("Error during", 'link' if hardlink else 'copy',
                        "creation:", type(e), e, emitter='BUILDER',
                        lvl=error)
    hfoslog('Done linking', root_dst_dir, emitter='BUILDER',
            lvl=verbose)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(ctx, componentname):
    """Delete an existing component configuration.

    This will trigger the creation of its default configuration upon
    next restart.
    """
    col = ctx.obj['col']
    # Names are not guaranteed unique; refuse ambiguous deletions and ask
    # the operator to use a uuid instead.
    if col.count({'name': componentname}) > 1:
        log('More than one component configuration of this name! Try '
            'one of the uuids as argument. Get a list with "config '
            'list"')
        return
    log('Deleting component configuration', componentname,
        emitter='MANAGE')
    # Look up by name first, then fall back to interpreting the argument
    # as a uuid.
    configuration = col.find_one({'name': componentname})
    if configuration is None:
        configuration = col.find_one({'uuid': componentname})
    if configuration is None:
        log('Component configuration not found:', componentname,
            emitter='MANAGE')
        return
    configuration.delete()
    log('Done')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def show(ctx, component):
    """Show the stored, active configuration of a component."""
    col = ctx.obj['col']
    # Names are not guaranteed unique; refuse ambiguous requests.
    if col.count({'name': component}) > 1:
        log('More than one component configuration of this name! Try '
            'one of the uuids as argument. Get a list with "config '
            'list"')
        return
    if component is None:
        # No component given: list all configurations (name + uuid).
        configurations = col.find()
        for configuration in configurations:
            log("%-15s : %s" % (configuration.name,
                                configuration.uuid),
                emitter='MANAGE')
    else:
        # Look up by name first, then fall back to uuid.
        configuration = col.find_one({'name': component})
        if configuration is None:
            configuration = col.find_one({'uuid': component})
        if configuration is None:
            log('No component with that name or uuid found.')
            return
        print(json.dumps(configuration.serializablefields(), indent=4))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def debugrequest(self, event):
    """Handler for client-side debug requests.

    Dispatches on the command string in ``event.data``; unknown commands
    are silently ignored. All failures are caught and logged so a debug
    request can never take the component down.
    """
    try:
        self.log("Event: ", event.__dict__, lvl=critical)

        if event.data == "storejson":
            self.log("Storing received object to /tmp", lvl=critical)
            fp = open('/tmp/hfosdebugger_' + str(
                event.user.useruuid) + "_" + str(uuid4()), "w")
            # NOTE(review): this dumps the command string itself, not an
            # attached payload object — confirm intended behavior.
            json.dump(event.data, fp, indent=True)
            fp.close()
        if event.data == "memdebug":
            self.log("Memory hogs:", lvl=critical)
            objgraph.show_most_common_types(limit=20)
        if event.data == "growth":
            self.log("Memory growth since last call:", lvl=critical)
            objgraph.show_growth()
        if event.data == "graph":
            self._drawgraph()
        if event.data == "exception":
            class TestException(BaseException):
                """Generic exception to test exception monitoring"""

                pass

            raise TestException
        if event.data == "heap":
            self.log("Heap log:", self.heapy.heap(), lvl=critical)
        if event.data == "buildfrontend":
            self.log("Sending frontend build command")
            self.fireEvent(frontendbuildrequest(force=True), "setup")
        if event.data == "logtail":
            self.fireEvent(logtailrequest(event.user, None, None,
                                          event.client), "logger")
        if event.data == "trigger_anchorwatch":
            # Imported lazily to avoid a hard dependency on the anchor module.
            from hfos.anchor.anchorwatcher import cli_trigger_anchorwatch

            self.fireEvent(cli_trigger_anchorwatch())
    except Exception as e:
        # Deliberate catch-all: the raised TestException above derives from
        # BaseException and thus escapes this handler on purpose.
        self.log("Exception during debug handling:", e, type(e),
                 lvl=critical)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register_event(self, event):
    """Register a new command line interface event hook as a command."""
    command = event.cmd
    hook = event.thing
    self.log('Registering event hook:', command, hook,
             pretty=True, lvl=verbose)
    self.hooks[command] = hook
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def populate_user_events():
    """Generate a list of all registered authorized and anonymous events.

    Populates the module-level AuthorizedEvents and AnonymousEvents
    registries by walking the subclass trees of the respective base
    event classes.
    """
    global AuthorizedEvents
    global AnonymousEvents

    def inheritors(klass):
        """Find inheritors of a specified object class.

        Returns a dict keyed by module name, each value mapping class
        names to an event descriptor dict. Only classes whose qualified
        name starts with 'hfos' are recorded.
        """
        subclasses = {}
        subclasses_set = set()
        work = [klass]
        # Breadth-ish traversal of the whole subclass tree.
        while work:
            parent = work.pop()
            for child in parent.__subclasses__():
                if child not in subclasses_set:
                    # pprint(child.__dict__)
                    name = child.__module__ + "." + child.__name__
                    if name.startswith('hfos'):
                        subclasses_set.add(child)
                        event = {
                            'event': child,
                            'name': name,
                            'doc': child.__doc__,
                            'args': []
                        }
                        if child.__module__ in subclasses:
                            subclasses[child.__module__][
                                child.__name__] = event
                        else:
                            subclasses[child.__module__] = {
                                child.__name__: event
                            }
                    # Continue descending even for non-hfos classes, since
                    # their subclasses may still live in an hfos module.
                    work.append(child)
        return subclasses

    # TODO: Change event system again, to catch authorized (i.e. "user") as
    # well as normal events, so they can be processed by Automat
    # NormalEvents = inheritors(Event)
    AuthorizedEvents = inheritors(authorizedevent)
    AnonymousEvents = inheritors(anonymousevent)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def clear(ctx, schema):
    """Clears an entire database collection irrevocably. Use with caution!"""
    # Always confirm interactively before destroying data.
    response = _ask('Are you sure you want to delete the collection "%s"' % (
        schema), default='N', data_type='bool')
    if response is True:
        host, port = ctx.obj['dbhost'].split(':')
        client = pymongo.MongoClient(host=host, port=int(port))
        database = client[ctx.obj['dbname']]

        log("Clearing collection for", schema, lvl=warn,
            emitter='MANAGE')
        # drop_collection returns a status document with an 'ok' field.
        result = database.drop_collection(schema)
        if not result['ok']:
            log("Could not drop collection:", lvl=error)
            log(result, pretty=True, lvl=error)
        else:
            log("Done")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def provision_system_config(items, database_name, overwrite=False, clear=False, skip_user_check=False):
    """Provision a basic system configuration.

    Only provisions when no default system configuration exists yet,
    unless *clear* or *overwrite* force re-provisioning.
    """
    from hfos.provisions.base import provisionList
    from hfos.database import objectmodels

    default_system_config_count = objectmodels['systemconfig'].count({
        'name': 'Default System Configuration'})
    if default_system_config_count == 0 or (clear or overwrite):
        provisionList([SystemConfiguration], 'systemconfig', overwrite, clear, skip_user_check)
        hfoslog('Provisioning: System: Done.', emitter='PROVISIONS')
    else:
        hfoslog('Default system configuration already present.', lvl=warn,
                emitter='PROVISIONS')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def userlogin(self, event):
    """Provide the newly authenticated user with a backlog and general
    channel status information.

    Creates a per-user chat lastlog on first login and caches user state
    for later message routing.
    """
    try:
        user_uuid = event.useruuid
        user = objectmodels['user'].find_one({'uuid': user_uuid})

        if user_uuid not in self.lastlogs:
            # First login of this user on this node: create an empty lastlog.
            self.log('Setting up lastlog for a new user.', lvl=debug)
            lastlog = objectmodels['chatlastlog']({
                'owner': user_uuid,
                'uuid': std_uuid(),
                'channels': {}
            })
            lastlog.save()
            self.lastlogs[user_uuid] = lastlog

        self.users[user_uuid] = user
        # No channel has the user's attention yet.
        self.user_attention[user_uuid] = None

        self._send_status(user_uuid, event.clientuuid)
    except Exception as e:
        self.log('Error during chat setup of user:', e, type(e), exc=True)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def install_docs(instance, clear_target):
    """Builds and installs the complete HFOS documentation.

    Runs a Sphinx HTML build and copies the result into the instance's
    frontend docs directory. Requires root.

    :param instance: instance name used to derive the target path
    :param clear_target: remove an existing docs directory before copying
    """
    _check_root()

    def make_docs():
        """Trigger a Sphinx make command to build the documentation."""
        log("Generating HTML documentation")

        try:
            build = Popen(
                [
                    'make',
                    'html'
                ],
                cwd='docs/'
            )

            build.wait()
        except Exception as e:
            log("Problem during documentation building: ", e, type(e),
                exc=True, lvl=error)
            return False
        return True

    make_docs()

    # If these need changes, make sure they are watertight and don't remove
    # wanted stuff!
    target = os.path.join('/var/lib/hfos', instance, 'frontend/docs')
    source = 'docs/build/html'

    log("Updating documentation directory:", target)

    if not os.path.exists(os.path.join(os.path.curdir, source)):
        log(
            "Documentation not existing yet. Run python setup.py "
            "build_sphinx first.", lvl=error)
        return

    if os.path.exists(target):
        log("Path already exists: " + target)
        if clear_target:
            log("Cleaning up " + target, lvl=warn)
            shutil.rmtree(target)

    log("Copying docs to " + target)
    copy_tree(source, target)
    log("Done: Install Docs")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def install_modules(wip):
    """Install the plugin modules.

    Installs every production module via ``setup.py develop``; when *wip*
    is True, work-in-progress modules are installed as well. Installation
    failures are collected and reported, not raised.
    """

    def install_module(hfos_module):
        """Install a single module via setuptools."""
        try:
            setup = Popen(
                [
                    sys.executable,
                    'setup.py',
                    'develop'
                ],
                cwd='modules/' + hfos_module + "/"
            )

            setup.wait()
        except Exception as e:
            log("Problem during module installation: ", hfos_module, e,
                type(e), exc=True, lvl=error)
            return False
        return True

    # TODO: Sort module dependencies via topological sort or let pip do this in future.
    # # To get the module dependencies:
    # packages = {}
    # for provision_entrypoint in iter_entry_points(group='hfos.provisions',
    #                                               name=None):
    #     log("Found packages: ", provision_entrypoint.dist.project_name, lvl=warn)
    #
    #     _package_name = provision_entrypoint.dist.project_name
    #     _package = pkg_resources.working_set.by_key[_package_name]
    #
    #     print([str(r) for r in _package.requires()])  # retrieve deps from setup.py

    modules_production = [
        # TODO: Poor man's dependency management, as long as the modules are
        # installed from local sources and they're not available on pypi,
        # which would handle real dependency management for us:
        'navdata',

        # Now all the rest:
        'alert',
        'automat',
        'busrepeater',
        'calendar',
        'countables',
        'dash',
        # 'dev',
        'enrol',
        'mail',
        'maps',
        'nmea',
        'nodestate',
        'project',
        'webguides',
        'wiki'
    ]

    modules_wip = [
        'calc',
        'camera',
        'chat',
        'comms',
        'contacts',
        'crew',
        'equipment',
        'filemanager',
        'garden',
        'heroic',
        'ldap',
        'library',
        'logbook',
        'protocols',
        'polls',
        'mesh',
        'robot',
        'switchboard',
        'shareables',
    ]

    installables = modules_production

    if wip:
        installables.extend(modules_wip)

    success = []
    failed = []

    for installable in installables:
        log('Installing module ', installable)
        if install_module(installable):
            success.append(installable)
        else:
            failed.append(installable)

    log('Installed modules: ', success)
    if len(failed) > 0:
        log('Failed modules: ', failed)
    log('Done: Install Modules')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def install_cert(selfsigned):
    """Install a local SSL certificate.

    With *selfsigned* set, generates an insecure self-signed RSA
    certificate/key pair (incrementing the serial of any existing
    certificate) and writes key, certificate and a combined PEM file.
    Requires root.

    NOTE(review): cert_file, key_file and combined_file are module-level
    paths defined outside this function — verify they point into
    /etc/ssl/certs/hfos.
    """
    _check_root()

    if selfsigned:
        log('Generating self signed (insecure) certificate/key '
            'combination')

        try:
            os.mkdir('/etc/ssl/certs/hfos')
        except FileExistsError:
            pass
        except PermissionError:
            log("Need root (e.g. via sudo) to generate ssl certificate")
            sys.exit(1)

        def create_self_signed_cert():
            """Create a simple self signed SSL certificate."""
            # create a key pair
            k = crypto.PKey()
            k.generate_key(crypto.TYPE_RSA, 1024)

            if os.path.exists(cert_file):
                # Re-issue with an incremented serial so clients notice
                # the certificate change.
                try:
                    certificate = open(cert_file, "rb").read()
                    old_cert = crypto.load_certificate(crypto.FILETYPE_PEM,
                                                       certificate)
                    serial = old_cert.get_serial_number() + 1
                except (crypto.Error, OSError) as e:
                    log('Could not read old certificate to increment '
                        'serial:', type(e), e, exc=True, lvl=warn)
                    serial = 1
            else:
                serial = 1

            # create a self-signed certificate
            certificate = crypto.X509()
            certificate.get_subject().C = "DE"
            certificate.get_subject().ST = "Berlin"
            certificate.get_subject().L = "Berlin"
            # noinspection PyPep8
            certificate.get_subject().O = "Hackerfleet"
            certificate.get_subject().OU = "Hackerfleet"
            certificate.get_subject().CN = gethostname()
            certificate.set_serial_number(serial)
            certificate.gmtime_adj_notBefore(0)
            # Valid for ten years.
            certificate.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
            certificate.set_issuer(certificate.get_subject())
            certificate.set_pubkey(k)
            certificate.sign(k, b'sha512')

            open(key_file, "wt").write(str(
                crypto.dump_privatekey(crypto.FILETYPE_PEM, k),
                encoding="ASCII"))

            open(cert_file, "wt").write(str(
                crypto.dump_certificate(crypto.FILETYPE_PEM, certificate),
                encoding="ASCII"))

            # Combined file contains certificate plus key for servers
            # that expect a single PEM.
            open(combined_file, "wt").write(str(
                crypto.dump_certificate(crypto.FILETYPE_PEM, certificate),
                encoding="ASCII") + str(
                crypto.dump_privatekey(crypto.FILETYPE_PEM, k),
                encoding="ASCII"))

        create_self_signed_cert()

        log('Done: Install Cert')
    else:
        # TODO
        log('Not implemented yet. You can build your own certificate and '
            'store it in /etc/ssl/certs/hfos/server-cert.pem - it should '
            'be a certificate with key, as this is used server side and '
            'there is no way to enter a separate key.', lvl=error)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def frontend(ctx, dev, rebuild, no_install, build_type):
    """Build and install frontend."""
    instance_name = ctx.obj['instance']
    install_frontend(
        instance=instance_name,
        forcerebuild=rebuild,
        development=dev,
        install=not no_install,
        build_type=build_type
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def install_all(ctx, clear_all):
    """Default-Install everything installable

    \b
    This includes
    * System user (hfos.hfos)
    * Self signed certificate
    * Variable data locations (/var/lib/hfos and /var/cache/hfos)
    * All the official modules in this repository
    * Default module provisioning data
    * Documentation
    * systemd service descriptor

    It does NOT build and install the HTML5 frontend."""
    _check_root()

    instance = ctx.obj['instance']
    dbhost = ctx.obj['dbhost']
    dbname = ctx.obj['dbname']
    port = ctx.obj['port']

    # Order matters: user and cert before data locations and services.
    install_system_user()
    install_cert(selfsigned=True)

    install_var(instance, clear_target=clear_all, clear_all=clear_all)
    install_modules(wip=False)
    install_provisions(provision=None, clear_provisions=clear_all)
    install_docs(instance, clear_target=clear_all)

    install_service(instance, dbhost, dbname, port)
    install_nginx(instance, dbhost, dbname, port)

    log('Done')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def uninstall():
    """Uninstall data and resource locations.

    Irrevocably removes /var/lib/hfos and /var/cache/hfos after an
    explicit 'YES' confirmation. Requires root.
    """
    _check_root()

    # BUGFIX: the adjacent string literals previously concatenated to
    # "TypeYES to continue:" — restore the missing space.
    response = _ask("This will delete all data of your HFOS installations! Type "
                    "YES to continue:", default="N", show_hint=False)

    if response == 'YES':
        # NOTE(review): raises if the directories do not exist — confirm
        # callers guarantee a prior installation.
        shutil.rmtree('/var/lib/hfos')
        shutil.rmtree('/var/cache/hfos')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update(ctx, no_restart, no_rebuild):
    """Update a HFOS node.

    Pulls the main and frontend git repositories, optionally rebuilds the
    frontend and restarts the instance's systemd service.

    :param no_restart: skip the service restart step
    :param no_rebuild: skip the frontend rebuild step
    """
    # 0. (NOT YET! MAKE A BACKUP OF EVERYTHING)
    # 1. update repository
    # 2. update frontend repository
    # 3. (Not yet: update venv)
    # 4. rebuild frontend
    # 5. restart service

    instance = ctx.obj['instance']

    log('Pulling github updates')
    run_process('.', ['git', 'pull', 'origin', 'master'])
    run_process('./frontend', ['git', 'pull', 'origin', 'master'])

    if not no_rebuild:
        log('Rebuilding frontend')
        install_frontend(instance, forcerebuild=True, install=False, development=True)

    if not no_restart:
        # BUGFIX: corrected log message typo ('Restaring' -> 'Restarting').
        log('Restarting service')
        # Non-default instances run under a prefixed systemd unit name.
        if instance != 'hfos':
            instance = 'hfos-' + instance
        run_process('.', ['sudo', 'systemctl', 'restart', instance])

    log('Done')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _build_model_factories(store):
    """Generate warmongo model factories from the schemata in *store*.

    :param store: schemastore mapping schema names to {'schema': ...} dicts
    :return: dict mapping schema names to warmongo model classes; schemata
             that fail to build are logged and omitted
    """
    result = {}

    for schemaname in store:
        schema = None

        try:
            schema = store[schemaname]['schema']
        except KeyError:
            schemata_log("No schema found for ", schemaname, lvl=critical, exc=True)

        try:
            result[schemaname] = warmongo.model_factory(schema)
        except Exception:
            # IDIOM: drop the unused 'as e' binding — exc=True already
            # attaches the active traceback to the log entry.
            schemata_log("Could not create factory for schema ", schemaname, schema, lvl=critical, exc=True)

    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _build_collections(store):
    """Generate database collections with indices from the schemastore.

    For every schema, fetches (and thereby lazily creates) its MongoDB
    collection and applies any declared indices. Index creation failures
    trigger an optional drop-and-recreate ('reindex') cycle.

    :param store: schemastore mapping names to schema/index definitions
    :return: dict mapping schema names to pymongo collection objects
    """
    result = {}

    client = pymongo.MongoClient(host=dbhost, port=dbport)
    db = client[dbname]

    for schemaname in store:
        schema = None
        indices = None

        try:
            schema = store[schemaname]['schema']
            indices = store[schemaname].get('indices', None)
        except KeyError:
            db_log("No schema found for ", schemaname, lvl=critical)

        try:
            result[schemaname] = db[schemaname]
        except Exception:
            db_log("Could not get collection for schema ", schemaname, schema, lvl=critical, exc=True)

        if indices is not None:
            col = db[schemaname]
            db_log('Adding indices to', schemaname, lvl=debug)
            i = 0
            keys = list(indices.keys())

            # Manual while-loop so a full reindex can restart from i = 0.
            while i < len(indices):
                index_name = keys[i]
                index = indices[index_name]

                index_type = index.get('type', None)
                index_unique = index.get('unique', False)
                index_sparse = index.get('sparse', True)
                index_reindex = index.get('reindex', False)

                # Default to a text index; '2dsphere' selects geospatial.
                if index_type in (None, 'text'):
                    index_type = pymongo.TEXT
                elif index_type == '2dsphere':
                    index_type = pymongo.GEOSPHERE

                def do_index():
                    # Closure over the loop variables of the current
                    # iteration; called before the next rebinding, so the
                    # late-binding pitfall does not apply here.
                    col.ensure_index([(index_name, index_type)],
                                     unique=index_unique,
                                     sparse=index_sparse)

                db_log('Enabling index of type', index_type, 'on', index_name, lvl=debug)
                try:
                    do_index()
                    i += 1
                except pymongo.errors.OperationFailure:
                    db_log(col.list_indexes().__dict__, pretty=True, lvl=verbose)
                    if not index_reindex:
                        db_log('Index was not created!', lvl=warn)
                        i += 1
                    else:
                        try:
                            # Drop the conflicting index and retry once.
                            col.drop_index(index_name)
                            do_index()
                            i += 1
                        except pymongo.errors.OperationFailure as e:
                            # Last resort: wipe all indices and restart
                            # the whole index-building loop.
                            db_log('Index recreation problem:', exc=True, lvl=error)
                            col.drop_indexes()
                            i = 0

            # for index in col.list_indexes():
            #     db_log("Index: ", index)

    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def initialize(address='127.0.0.1:27017', database_name='hfos', instance_name="default", reload=False):
    """Initializes the database connectivity, schemata and finally object models.

    Idempotent unless *reload* is set: a second call without reload is a
    no-op. Exits the process (code 5) if the database is unreachable.

    :param address: 'host[:port]' of the MongoDB server
    :param database_name: database to use
    :param instance_name: logical instance identifier
    :param reload: force re-initialization of all module-level state
    """
    global schemastore
    global l10n_schemastore
    global objectmodels
    global collections
    global dbhost
    global dbport
    global dbname
    global instance
    global initialized

    if initialized and not reload:
        hfoslog('Already initialized and not reloading.', lvl=warn, emitter="DB", frame_ref=2)
        return

    # Port is optional in the address; default to the MongoDB standard port.
    dbhost = address.split(':')[0]
    dbport = int(address.split(":")[1]) if ":" in address else 27017
    dbname = database_name

    db_log("Using database:", dbname, '@', dbhost, ':', dbport)

    try:
        client = pymongo.MongoClient(host=dbhost, port=dbport)
        db = client[dbname]
        # 'buildinfo' doubles as a connectivity/version probe.
        db_log("Database: ", db.command('buildinfo'), lvl=debug)
    except Exception as e:
        db_log("No database available! Check if you have mongodb > 3.0 "
               "installed and running as well as listening on port 27017 "
               "of localhost. (Error: %s) -> EXIT" % e, lvl=critical)
        sys.exit(5)

    warmongo.connect(database_name)

    schemastore = _build_schemastore_new()
    l10n_schemastore = _build_l10n_schemastore(schemastore)
    objectmodels = _build_model_factories(schemastore)
    collections = _build_collections(schemastore)
    instance = instance_name
    initialized = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def profile(schemaname='sensordata', profiletype='pjs'):
    """Profiles object model handling with a very simple benchmarking test.

    Instantiates the model for *schemaname* 100 times using either the
    warmongo or python_jsonschema_objects ('pjs') factory.
    """
    db_log("Profiling ", schemaname)

    schema = schemastore[schemaname]['schema']

    db_log("Schema: ", schema, lvl=debug)

    testclass = None

    if profiletype == 'warmongo':
        db_log("Running Warmongo benchmark")
        testclass = warmongo.model_factory(schema)
    elif profiletype == 'pjs':
        db_log("Running PJS benchmark")
        try:
            # Optional dependency; bail out gracefully when missing.
            import python_jsonschema_objects as pjs
        except ImportError:
            db_log("PJS benchmark selected but not available. Install "
                   "python_jsonschema_objects (PJS)")
            return

        db_log()
        builder = pjs.ObjectBuilder(schema)
        ns = builder.build_classes()
        pprint(ns)
        testclass = ns[schemaname]
        db_log("ns: ", ns, lvl=warn)

    if testclass is not None:
        db_log("Instantiating elements...")
        for i in range(100):
            testclass()
    else:
        db_log("No Profiletype available!")

    db_log("Profiling done")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _check_collections(self):
    """Checks node local collection storage sizes.

    Records per-collection storage sizes (bytes) and their sum on the
    instance, then logs them sorted ascending by size.
    """
    self.collection_sizes = {}
    self.collection_total = 0
    for col in self.db.collection_names(include_system_collections=False):
        self.collection_sizes[col] = self.db.command('collstats', col).get(
            'storageSize', 0)
        self.collection_total += self.collection_sizes[col]

    # Sort by size (the dict value) for readable output.
    sorted_x = sorted(self.collection_sizes.items(),
                      key=operator.itemgetter(1))

    for item in sorted_x:
        self.log("Collection size (%s): %.2f MB" % (
            item[0], item[1] / 1024.0 / 1024),
            lvl=verbose)

    self.log("Total collection sizes: %.2f MB" % (self.collection_total /
                                                  1024.0 / 1024))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _check_free_space(self):
    """Checks used and free filesystem storage sizes.

    For each configured location, logs its recursive on-disk usage and
    warns when the filesystem's free space drops below the configured
    minimum (assumed to be in bytes — TODO confirm against config schema).
    """

    def get_folder_size(path):
        """Aggregates used size of a specified path, recursively."""
        total_size = 0
        for item in walk(path):
            for file in item[2]:
                try:
                    total_size = total_size + getsize(join(item[0], file))
                except (OSError, PermissionError) as e:
                    # Skip unreadable entries; a partial total is fine.
                    self.log("error with file: " + join(item[0], file), e)
        return total_size

    for name, checkpoint in self.config.locations.items():
        try:
            stats = statvfs(checkpoint['location'])
        except (OSError, PermissionError) as e:
            self.log('Location unavailable:', name, e, type(e),
                     lvl=error, exc=True)
            continue
        free_space = stats.f_frsize * stats.f_bavail  # bytes
        used_space = get_folder_size(
            checkpoint['location']
        ) / 1024.0 / 1024

        self.log('Location %s uses %.2f MB' % (name, used_space))

        if free_space < checkpoint['minimum']:
            # BUGFIX: the value was divided by 1024^3 (gibibytes) while the
            # message claimed MB; report MB to match the label and the
            # used_space figure above.
            self.log('Short of free space on %s: %.2f MB left' % (
                name, free_space / 1024.0 / 1024),
                lvl=warn)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send_mail_worker(config, mail, event):
    """Worker task to send out an email, which blocks the process unless
    it is threaded.

    :param config: component config with mail_* connection settings
    :param mail: the message object passed to SMTP.send_message
    :param event: opaque event handed back to the caller with the result
    :return: (success, accumulated log text, event)
    """
    log = ""
    try:
        # Choose implicit-TLS or plain SMTP based on configuration.
        if config.mail_ssl:
            server = SMTP_SSL(config.mail_server, port=config.mail_server_port, timeout=30)
        else:
            server = SMTP(config.mail_server, port=config.mail_server_port, timeout=30)

        if config.mail_tls:
            log += 'Starting TLS\n'
            server.starttls()

        if config.mail_username != '':
            log += 'Logging in with ' + str(config.mail_username) + "\n"
            server.login(config.mail_username, config.mail_password)
        else:
            log += 'No username, trying anonymous access\n'

        log += 'Sending Mail\n'
        response_send = server.send_message(mail)
        # NOTE(review): quit() is not in a finally block, so the connection
        # is not closed on failure paths; SMTP errors other than a socket
        # timeout propagate to the caller — confirm that is intended.
        server.quit()
    except timeout as e:
        log += 'Could not send email to enrollee, mailserver timeout: ' + str(e) + "\n"
        return False, log, event

    log += 'Server response:' + str(response_send)
    return True, log, event
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reload_configuration(self, event):
    """Reload the current configuration and set up everything depending on it.

    Delegates the actual reload to the base class, then re-runs this
    manager's own setup against the fresh configuration.
    """
    super(EnrolManager, self).reload_configuration(event)
    self.log('Reloaded configuration.')
    self._setup()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def change(self, event):
    """An admin user requests a change to an enrolment.

    Validates the requested status, updates the stored enrollment and
    notifies the requesting client. 'Resend' re-sends the invitation
    instead of changing the status; accepting a self-enrolled user also
    creates their account.
    """
    uuid = event.data['uuid']
    status = event.data['status']

    if status not in ['Open', 'Pending', 'Accepted', 'Denied', 'Resend']:
        self.log('Erroneous status for enrollment requested!', lvl=warn)
        return

    self.log('Changing status of an enrollment', uuid, 'to', status)
    enrollment = objectmodels['enrollment'].find_one({'uuid': uuid})

    if enrollment is not None:
        self.log('Enrollment found', lvl=debug)
    else:
        return

    if status == 'Resend':
        # Refresh the timestamp so invitation expiry restarts.
        enrollment.timestamp = std_now()
        enrollment.save()

        self._send_invitation(enrollment, event)
        reply = {True: 'Resent'}
    else:
        enrollment.status = status
        enrollment.save()
        reply = {True: enrollment.serializablefields()}

        if status == 'Accepted' and enrollment.method == 'Enrolled':
            # Self-enrolled users get their account created on acceptance.
            self._create_user(enrollment.name, enrollment.password, enrollment.email, 'Invited', event.client.uuid)
            self._send_acceptance(enrollment, None, event)

    packet = {
        'component': 'hfos.enrol.enrolmanager',
        'action': 'change',
        'data': reply
    }

    self.log('packet:', packet, lvl=verbose)
    self.fireEvent(send(event.client.uuid, packet))
    self.log('Enrollment changed', lvl=debug)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def changepassword(self, event):
    """An enrolled user wants to change their password.

    Verifies the old password hash before storing the new one, and
    reports success or failure back to the requesting client.
    """
    old = event.data['old']
    new = event.data['new']

    uuid = event.user.uuid

    # TODO: Write email to notify user of password change

    user = objectmodels['user'].find_one({'uuid': uuid})
    if std_hash(old, self.salt) == user.passhash:
        user.passhash = std_hash(new, self.salt)
        user.save()

        packet = {
            'component': 'hfos.enrol.enrolmanager',
            'action': 'changepassword',
            'data': True
        }

        self.fireEvent(send(event.client.uuid, packet))
        self.log('Successfully changed password for user', uuid)
    else:
        packet = {
            'component': 'hfos.enrol.enrolmanager',
            'action': 'changepassword',
            'data': False
        }

        self.fireEvent(send(event.client.uuid, packet))
        self.log('User tried to change password without supplying old one', lvl=warn)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def invite(self, event):
    """A new user has been invited to enrol by an admin user."""
    self.log('Inviting new user to enrol')
    payload = event.data
    name, email, method = payload['name'], payload['email'], payload['method']
    self._invite(name, method, email, event.client.uuid, event)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def enrol(self, event):
    """A user tries to self-enrol with the enrolment form.

    Validates captcha, mail address, password and username in turn,
    failing fast with a translated error message on the first problem.
    On success either creates the account directly (no_verify) or sends
    an email invitation.
    """
    if self.config.allow_registration is False:
        self.log('Someone tried to register although enrolment is closed.')
        return

    self.log('Client trying to register a new account:', event, pretty=True)
    # self.log(event.data, pretty=True)
    uuid = event.client.uuid

    # Captcha first, so bots never reach the cheaper checks below.
    if uuid in self.captchas and event.data.get('captcha', None) == self.captchas[uuid]['text']:
        self.log('Captcha solved!')
    else:
        self.log('Captcha failed!')
        self._fail(event, _('You did not solve the captcha correctly.', event))
        # Hand the client a fresh captcha for the retry.
        self._generate_captcha(event)
        return

    mail = event.data.get('mail', None)

    if mail is None:
        self._fail(event, _('You have to supply all required fields.', event))
        return
    elif not validate_email(mail):
        self._fail(event, _('The supplied email address seems invalid', event))
        return

    if objectmodels['user'].count({'mail': mail}) > 0:
        # Deliberately vague message: do not leak which addresses exist.
        self._fail(event, _('Your mail address cannot be used.', event))
        return

    password = event.data.get('password', None)

    if password is None or len(password) < 5:
        self._fail(event, _('Your password is not long enough.', event))
        return

    username = event.data.get('username', None)

    if username is None or len(username) < 1:
        self._fail(event, _('Your username is not long enough.', event))
        return
    elif (objectmodels['user'].count({'name': username}) > 0) or \
            (objectmodels['enrollment'].count({'name': username}) > 0):
        # Name must be free among both users and pending enrollments.
        self._fail(event, _('The username you supplied is not available.', event))
        return

    self.log('Provided data is good to enrol.')
    if self.config.no_verify:
        self._create_user(username, password, mail, 'Enrolled', uuid)
    else:
        self._invite(username, 'Enrolled', mail, uuid, event, password)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def status(self, event):
    """An anonymous client wants to know if we're open for enrollment."""
    self.log('Registration status requested')
    self.fire(send(event.client.uuid, {
        'component': 'hfos.enrol.enrolmanager',
        'action': 'status',
        'data': self.config.allow_registration
    }))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def request_reset(self, event):
    """An anonymous client requests a password reset.

    Looks up the account by mail address and fails the request when the
    address is unknown.

    NOTE(review): the success path currently ends without sending a reset
    mail from here — confirm whether the flow continues elsewhere or is
    unfinished.
    """
    self.log('Password reset request received:', event.__dict__, lvl=hilight)
    user_object = objectmodels['user']

    email = event.data.get('email', None)
    email_user = None

    if email is not None and user_object.count({'mail': email}) > 0:
        email_user = user_object.find_one({'mail': email})

    if email_user is None:
        self._fail(event, msg="Mail address unknown")
        return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def captcha_transmit(self, captcha, uuid):
    """Delayed transmission of a requested captcha."""
    self.log('Transmitting captcha')
    encoded_image = b64encode(captcha['image'].getvalue()).decode('utf-8')
    self.fire(send(uuid, {
        'component': 'hfos.enrol.enrolmanager',
        'action': 'captcha',
        'data': encoded_image
    }))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _invite(self, name, method, email, uuid, event, password=""):
    """Actually invite a given user.

    Stores a new open enrollment, sends the invitation mail and confirms
    to the requesting client.

    :param method: 'Invited' (admin-initiated) or 'Enrolled' (self-enrol)
    :param uuid: client uuid to send the confirmation packet to
    :param password: pre-chosen password for self-enrolled users
    """
    props = {
        'uuid': std_uuid(),
        'status': 'Open',
        'name': name,
        'method': method,
        'email': email,
        'password': password,
        'timestamp': std_now()
    }
    enrollment = objectmodels['enrollment'](props)

    enrollment.save()
    self.log('Enrollment stored', lvl=debug)
    self._send_invitation(enrollment, event)

    packet = {
        'component': 'hfos.enrol.enrolmanager',
        'action': 'invite',
        'data': [True, email]
    }

    self.fireEvent(send(uuid, packet))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _create_user(self, username, password, mail, method, uuid):
    """Create a new user account and its initial profile.

    :param username: login name of the new account
    :param password: plaintext password (hashed with the component salt
        before storage)
    :param mail: mail address of the new account
    :param method: enrollment method ('Invited' or self-enrolled)
    :param uuid: client uuid to notify about the result
    """

    try:
        # Invited users get a different default role set than
        # self-enrolled ones.
        if method == 'Invited':
            config_role = self.config.group_accept_invited
        else:
            config_role = self.config.group_accept_enrolled

        # The configured value may be a single role or a comma separated
        # list; strip surrounding whitespace in either case (the
        # original only stripped in the comma-separated branch).
        roles = [role.strip() for role in config_role.split(',')]

        newuser = objectmodels['user']({
            'name': username,
            'passhash': std_hash(password, self.salt),
            'mail': mail,
            'uuid': std_uuid(),
            'roles': roles,
            'created': std_now()
        })
        if method == 'Invited':
            # Invited users received a generated password, so force a
            # change on first login.
            newuser.needs_password_change = True
        newuser.save()
    except Exception as e:
        self.log("Problem creating new user: ", type(e), e,
                 lvl=error)
        return

    try:
        newprofile = objectmodels['profile']({
            'uuid': std_uuid(),
            'owner': newuser.uuid
        })
        self.log("New profile uuid: ", newprofile.uuid,
                 lvl=verbose)

        newprofile.save()

        # Notify the requesting client that enrollment succeeded.
        packet = {
            'component': 'hfos.enrol.enrolmanager',
            'action': 'enrol',
            'data': [True, mail]
        }
        self.fireEvent(send(uuid, packet))
        # TODO: Notify crew-admins
    except Exception as e:
        self.log("Problem creating new profile: ", type(e),
                 e, lvl=error)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _send_invitation(self, enrollment, event):
    """Send an invitation mail to an open enrolment"""

    self.log('Sending enrollment status mail to user')

    subject = self.config.invitation_subject
    body = self.config.invitation_mail
    self._send_mail(subject, body, enrollment, event)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _send_acceptance(self, enrollment, password, event):
    """Send an acceptance mail to an open enrolment.

    If a (generated) password was supplied, it is appended to the
    configured acceptance mail text as a hint for the user.
    """

    self.log('Sending acceptance status mail to user')

    # Bugfix: the original used `password is not ""` — an identity
    # comparison against a literal, which is implementation defined
    # (and a SyntaxWarning on modern CPython), and which would also
    # crash on a None password. A truthiness check is correct here.
    if password:
        password_hint = '\n\nPS: Your new password is ' + password + ' - please change it after your first login!'
        acceptance_text = self.config.acceptance_mail + password_hint
    else:
        acceptance_text = self.config.acceptance_mail

    self._send_mail(self.config.acceptance_subject, acceptance_text, enrollment, event)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_auth_hook(self, event):
    """Register event hook on reception of add_auth_hook-event"""

    hook_name = event.authenticator_name
    self.log('Adding authentication hook for', hook_name)
    # The hook event is stored keyed by its authenticator name so it
    # can be fired on matching login attempts later.
    self.auth_hooks[hook_name] = event.event
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _fail(self, event, message='Invalid credentials'):
    """Sends a failure message to the requesting client"""

    ip = event.sock.getpeername()[0]
    self.failing_clients[ip] = event

    notification = {
        'component': 'auth',
        'action': 'fail',
        'data': message
    }
    # Notification is delayed by 3 seconds — presumably to slow down
    # repeated login attempts from the same address.
    Timer(3, Event.create('notify_fail', event.clientuuid, notification, ip)).register(self)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _login(self, event, user_account, user_profile, client_config):
    """Send login notification to client"""

    # Record the login time, then scrub the password hash from the
    # in-memory object before it leaves the auth component.
    user_account.lastlogin = std_now()
    user_account.save()
    user_account.passhash = ""

    account_data = (user_account, user_profile, client_config)
    self.fireEvent(
        authentication(user_account.name, account_data,
                       event.clientuuid, user_account.uuid, event.sock),
        "auth")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _handle_autologin(self, event):
    """Automatic logins for client configurations that allow it"""

    self.log("Verifying automatic login request")
    # TODO: Check for a common secret

    # Any lookup problem is treated the same as an unknown client
    # configuration.
    # noinspection PyBroadException
    try:
        config = objectmodels['client'].find_one({'uuid': event.requestedclientuuid})
    except Exception:
        config = None

    if config is None or config.autologin is False:
        self.log("Autologin failed:", event.requestedclientuuid, lvl=error)
        self._fail(event)
        return

    # Resolve the owning account; a missing owner is an auth failure.
    try:
        account = objectmodels['user'].find_one({'uuid': config.owner})
        if account is None:
            raise AuthenticationError
        self.log("Autologin for", account.name, lvl=debug)
    except Exception as e:
        self.log("No user object due to error: ", e, type(e), lvl=error)
        self._fail(event)
        return

    if account.active is False:
        self.log("Account deactivated.")
        self._fail(event, 'Account deactivated.')
        return

    profile = self._get_profile(account)
    self._login(event, account, profile, config)
    self.log("Autologin successful!", lvl=warn)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.