sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def validate_str(s):
    """Validate a string

    Parameters
    ----------
    s: str
        The object to validate

    Returns
    -------
    str
        The validated value as unicode text

    Raises
    ------
    ValueError
        If `s` is not a string"""
    if not isinstance(s, six.string_types):
        # fix grammatical error in the user-facing message
        raise ValueError("Did not find string!")
    return six.text_type(s)
Parameters
----------
s: str
Returns
-------
str
Raises
------
ValueError | entailment |
def validate_stringlist(s):
    """Validate a list of strings

    Parameters
    ----------
    s: str or iterable of strings
        Either a single comma separated string or an iterable of strings

    Returns
    -------
    list
        list of str

    Raises
    ------
    ValueError
        If one of the items is not a string or `s` is not iterable"""
    if isinstance(s, six.string_types):
        # split a comma separated string, skipping empty entries
        return [six.text_type(v.strip()) for v in s.split(',') if v.strip()]
    else:
        try:
            return list(map(validate_str, s))
        except TypeError as e:
            # BUG FIX: ``e.message`` only exists on Python 2 and raises
            # AttributeError on Python 3; str(e) works on both
            raise ValueError(str(e))
Parameters
----------
val: iterable of strings
Returns
-------
list
list of str
Raises
------
ValueError | entailment |
def add_base_str(self, base_str, pattern='.+', pattern_base=None,
                 append=True):
        r"""
        Add further base string to this instance

        Parameters
        ----------
        base_str: str or list of str
            Strings that are used as to look for keys to get and set keys in
            the :attr:`base` dictionary. If a string does not contain
            ``'%(key)s'``, it will be appended at the end. ``'%(key)s'`` will
            be replaced by the specific key for getting and setting an item.
        pattern: str
            Default: ``'.+'``. This is the pattern that is inserted for
            ``%(key)s`` in a base string to look for matches (using the
            :mod:`re` module) in the `base` dictionary. The default `pattern`
            matches everything without white spaces.
        pattern_base: str or list or str
            If None, the whatever is given in the `base_str` is used.
            Those strings will be used for generating the final search
            patterns. You can specify this parameter by yourself to avoid the
            misinterpretation of patterns. For example for a `base_str` like
            ``'my.str'`` it is recommended to additionally provide the
            `pattern_base` keyword with ``'my\.str'``.
            Like for `base_str`, the ``%(key)s`` is appended if not already in
            the string.
        append: bool
            If True, the given `base_str` are appended (i.e. it is first
            looked for them in the :attr:`base` dictionary), otherwise they are
            put at the beginning

        Notes
        -----
        NOTE(review): the `append` parameter is never evaluated in the body
        below -- the new strings and patterns are always prepended (searched
        first); confirm whether distinct ``append=False`` handling was
        intended."""
        base_str = safe_list(base_str)
        pattern_base = safe_list(pattern_base or [])
        # make sure every base string contains the '%(key)s' placeholder
        for i, s in enumerate(base_str):
            if '%(key)s' not in s:
                base_str[i] += '%(key)s'
        if pattern_base:
            for i, s in enumerate(pattern_base):
                if '%(key)s' not in s:
                    pattern_base[i] += '%(key)s'
        else:
            # no explicit patterns given: use the base strings verbatim
            pattern_base = base_str
        # new strings/patterns are prepended so they are matched first
        self.base_str = base_str + self.base_str
        self.patterns = list(map(lambda s: re.compile(s.replace(
            '%(key)s', '(?P<key>%s)' % pattern)), pattern_base)) + \
            self.patterns | Add further base string to this instance
Parameters
----------
base_str: str or list of str
Strings that are used as to look for keys to get and set keys in
the :attr:`base` dictionary. If a string does not contain
``'%(key)s'``, it will be appended at the end. ``'%(key)s'`` will
be replaced by the specific key for getting and setting an item.
pattern: str
Default: ``'.+'``. This is the pattern that is inserted for
``%(key)s`` in a base string to look for matches (using the
:mod:`re` module) in the `base` dictionary. The default `pattern`
matches everything without white spaces.
pattern_base: str or list or str
If None, the whatever is given in the `base_str` is used.
Those strings will be used for generating the final search
patterns. You can specify this parameter by yourself to avoid the
misinterpretation of patterns. For example for a `base_str` like
``'my.str'`` it is recommended to additionally provide the
`pattern_base` keyword with ``'my\.str'``.
Like for `base_str`, the ``%(key)s`` is appended if not already in
the string.
append: bool
If True, the given `base_str` are appended (i.e. it is first
looked for them in the :attr:`base` dictionary), otherwise they are
put at the beginning | entailment |
def iterkeys(self):
        """Unsorted iterator over keys

        Yields first every key of the :attr:`base` dictionary that matches
        one of the :attr:`patterns` (reduced to the ``'key'`` group when
        :attr:`replace` is set), then the keys stored directly on this
        instance that have not been produced yet."""
        seen = set()
        for base_key in six.iterkeys(self.base):
            for pat in self.patterns:
                match = pat.match(base_key)
                if match is None:
                    continue
                candidate = match.group('key') if self.replace else match.group()
                if candidate not in seen:
                    seen.add(candidate)
                    yield candidate
                # only the first matching pattern counts for a base key
                break
        for own_key in DictMethods.iterkeys(self):
            if own_key not in seen:
                yield own_key
def validate(self):
        """Dictionary mapping each non-deprecated key to its validation method"""
        deprecated = self._all_deprecated
        return {key: entry[1]
                for key, entry in six.iteritems(self.defaultParams)
                if key not in deprecated}
def descriptions(self):
        """The description of each keyword in the rcParams dictionary

        Only keys whose defaultParams entry has a third element (the
        description) are included."""
        return dict((key, entry[2])
                    for key, entry in six.iteritems(self.defaultParams)
                    if len(entry) > 2)
def connect(self, key, func):
        """Connect a function to the given formatoption

        Parameters
        ----------
        key: str
            The rcParams key
        func: function
            The function that shall be called when the rcParams key changes.
            It must accept a single value that is the new value of the
            key."""
        # resolve possibly-deprecated key names; the first element may be
        # None, in which case no connection is registered
        key = self._get_depreceated(key)[0]
        if key is not None:
            self._connections[key].append(func) | Connect a function to the given formatoption
Parameters
----------
key: str
The rcParams key
func: function
The function that shall be called when the rcParams key changes.
It must accept a single value that is the new value of the
key. | entailment |
def disconnect(self, key=None, func=None):
        """Remove callbacks that were registered for an rcParams key

        Parameters
        ----------
        key: str
            The rcParams key. If None, the connections of all keys are
            considered
        func: function
            The connected function to remove. If None, every connected
            function of the considered key(s) is removed
        """
        if key is None:
            targets = list(self._connections.values())
        else:
            targets = [self._connections[key]]
        for callbacks in targets:
            # iterate over a copy since we mutate the list while looping
            for callback in list(callbacks):
                if func is None or callback is func:
                    callbacks.remove(callback)
Parameters
----------
key: str
The rcParams key. If None, all keys are used
func: function
The function that is connected. If None, all functions are
connected | entailment |
def update_from_defaultParams(self, defaultParams=None,
                              plotters=True):
        """Update this dictionary from a :attr:`defaultParams`-like mapping

        Parameters
        ----------
        defaultParams: dict
            The :attr:`defaultParams` like dictionary (mapping each key to a
            tuple whose first element is the default value). If None, the
            :attr:`defaultParams` attribute of this instance is used
        plotters: bool
            If True, ``'project.plotters'`` will be updated too"""
        if defaultParams is None:
            defaultParams = self.defaultParams
        new_values = {}
        for key, entry in defaultParams.items():
            if plotters or key != 'project.plotters':
                # the first element of each entry is the default value
                new_values[key] = entry[0]
        self.update(new_values)
Parameters
----------
defaultParams: dict
The :attr:`defaultParams` like dictionary. If None, the
:attr:`defaultParams` attribute will be updated
plotters: bool
If True, ``'project.plotters'`` will be updated too | entailment |
def keys(self):
        """Return the keys of this dictionary as a sorted list."""
        return sorted(dict.keys(self))
def find_all(self, pattern):
        """
        Return a new :class:`RcParams` restricted to the keys matching
        `pattern` (tested with :func:`re.search`).

        Parameters
        ----------
        pattern: str
            pattern as suitable for re.compile

        Returns
        -------
        RcParams
            RcParams instance with entries that match the given `pattern`

        Notes
        -----
        Different from :meth:`find_and_replace`, changes to the returned
        dictionary are *not* propagated to this instance.

        See Also
        --------
        find_and_replace"""
        compiled = re.compile(pattern)
        matching = RcParams()
        matching.defaultParams = self.defaultParams
        matching.update({key: value for key, value in self.items()
                         if compiled.search(key)})
        return matching
using :func:`re.search`, the given ``pattern``.
Parameters
----------
pattern: str
pattern as suitable for re.compile
Returns
-------
RcParams
RcParams instance with entries that match the given `pattern`
Notes
-----
Changes to the returned dictionary are (different from
:meth:`find_and_replace` are *not* propagated to the parent RcParams
dictionary.
See Also
--------
find_and_replace | entailment |
def load_from_file(self, fname=None):
        """Update rcParams from user-defined settings

        This function updates the instance with what is found in `fname`

        Parameters
        ----------
        fname: str
            Path to the yaml configuration file. Possible keys of the
            dictionary are defined by :data:`config.rcsetup.defaultParams`.
            If None, the :func:`config.rcsetup.psyplot_fname` function is used.

        See Also
        --------
        dump_to_file, psyplot_fname"""
        fname = fname or psyplot_fname()
        if fname and os.path.exists(fname):
            with open(fname) as f:
                # NOTE(review): yaml.load without an explicit Loader is
                # deprecated (PyYAML >= 5.1) and can construct arbitrary
                # python objects from the file -- consider yaml.safe_load
                # if custom tags are not required.
                d = yaml.load(f)
            # NOTE(review): if the yaml file is empty, d is None and the
            # following update/get calls will raise -- confirm whether
            # empty config files can occur here.
            self.update(d)
            # user-defined plotters are merged into the plotters mapping
            if (d.get('project.plotters.user') and
                    'project.plotters' in self):
                self['project.plotters'].update(d['project.plotters.user']) | Update rcParams from user-defined settings
This function updates the instance with what is found in `fname`
Parameters
----------
fname: str
Path to the yaml configuration file. Possible keys of the
dictionary are defined by :data:`config.rcsetup.defaultParams`.
If None, the :func:`config.rcsetup.psyplot_fname` function is used.
See Also
--------
dump_to_file, psyplot_fname | entailment |
def dump(self, fname=None, overwrite=True, include_keys=None,
         exclude_keys=['project.plotters'], include_descriptions=True,
         **kwargs):
        """Dump this instance to a yaml file

        Parameters
        ----------
        fname: str or None
            file name to write to. If None, the string that would be written
            to a file is returned
        overwrite: bool
            If True and `fname` already exists, it will be overwritten
        include_keys: None or list of str
            Keys in the dictionary to be included. If None, all keys are
            included
        exclude_keys: list of str
            Keys from the :class:`RcParams` instance to be excluded
        include_descriptions: bool
            If True, the :attr:`HEADER` and the python version are prepended
            as comment lines and the description of each key (see
            :attr:`descriptions`) is inserted as a comment line directly
            above the key in the dumped string

        Other Parameters
        ----------------
        ``**kwargs``
            Any other parameter for the :func:`yaml.dump` function

        Returns
        -------
        str or None
            if fname is ``None``, the string is returned. Otherwise, ``None``
            is returned

        Raises
        ------
        IOError
            If `fname` already exists and `overwrite` is False

        See Also
        --------
        load_from_file"""
        if fname is not None and not overwrite and os.path.exists(fname):
            raise IOError(
                '%s already exists! Set overwrite=True to overwrite it!' % (
                    fname))
        if six.PY2:
            kwargs.setdefault('encoding', 'utf-8')
        # restrict the dumped mapping to the requested keys
        d = {key: val for key, val in six.iteritems(self) if (
            include_keys is None or key in include_keys) and
            key not in exclude_keys}
        kwargs['default_flow_style'] = False
        if include_descriptions:
            s = yaml.dump(d, **kwargs)
            desc = self.descriptions
            # i tracks the insertion position while we walk the lines below
            i = 2
            header = self.HEADER.splitlines() + [
                '', 'Created with python', ''] + sys.version.splitlines() + [
                '', '']
            lines = ['# ' + l for l in header] + s.splitlines()
            # insert the description of every known key as comment lines
            # directly above the corresponding 'key: value' line
            for l in lines[2:]:
                key = l.split(':')[0]
                if key in desc:
                    lines.insert(i, '# ' + '\n# '.join(desc[key].splitlines()))
                    i += 1
                i += 1
            s = '\n'.join(lines)
            if fname is None:
                return s
            else:
                with open(fname, 'w') as f:
                    f.write(s)
        else:
            if fname is None:
                return yaml.dump(d, **kwargs)
            with open(fname, 'w') as f:
                yaml.dump(d, f, **kwargs)
        return None | Dump this instance to a yaml file
Parameters
----------
fname: str or None
file name to write to. If None, the string that would be written
to a file is returned
overwrite: bool
If True and `fname` already exists, it will be overwritten
include_keys: None or list of str
Keys in the dictionary to be included. If None, all keys are
included
exclude_keys: list of str
Keys from the :class:`RcParams` instance to be excluded
Other Parameters
----------------
``**kwargs``
Any other parameter for the :func:`yaml.dump` function
Returns
-------
str or None
if fname is ``None``, the string is returned. Otherwise, ``None``
is returned
Raises
------
IOError
If `fname` already exists and `overwrite` is False
See Also
--------
load_from_file | entailment |
def _load_plugin_entrypoints(self):
        """Load the modules for the psyplot plugins

        The ``PSYPLOT_PLUGINS`` environment variable is interpreted as a
        ``'::'``-separated list of ``'yes:<module>'`` (include) and
        ``'no:<module>'`` (exclude) entries; the single value ``'no'``
        disables all plugins.

        Yields
        ------
        pkg_resources.EntryPoint
            The entry point for the psyplot plugin module"""
        from pkg_resources import iter_entry_points
        def load_plugin(ep):
            # decide whether the entry point `ep` should be loaded based
            # on the PSYPLOT_PLUGINS environment variable (parsed below;
            # fine because nothing runs before the generator is iterated)
            if plugins_env == ['no']:
                return False
            elif ep.module_name in exclude_plugins:
                return False
            elif include_plugins and ep.module_name not in include_plugins:
                return False
            return True
        self._plugins = self._plugins or []
        plugins_env = os.getenv('PSYPLOT_PLUGINS', '').split('::')
        include_plugins = [s[4:] for s in plugins_env if s.startswith('yes:')]
        exclude_plugins = [s[3:] for s in plugins_env if s.startswith('no:')]
        logger = logging.getLogger(__name__)
        for ep in iter_entry_points(group='psyplot', name='plugin'):
            if not load_plugin(ep):
                logger.debug('Skipping entrypoint %s', ep)
                continue
            self._plugins.append(str(ep))
            logger.debug('Loading entrypoint %s', ep)
            yield ep | Load the modules for the psyplot plugins
Yields
------
pkg_resources.EntryPoint
The entry point for the psyplot plugin module | entailment |
def load_plugins(self, raise_error=False):
        """
        Load the plotters and defaultParams from the plugins

        This method loads the `plotters` attribute and `defaultParams`
        attribute from the plugins that use the entry point specified by
        `group`. Entry points must be objects (or modules) that have a
        `defaultParams` and a `plotters` attribute.

        Parameters
        ----------
        raise_error: bool
            If True, an error is raised when multiple plugins define the same
            plotter or rcParams key. Otherwise only a warning is raised"""
        # PSYPLOT_PLOTMETHODS is a '::'-separated list of 'yes:<name>' /
        # 'no:<name>' entries; the single value 'no' disables everything
        pm_env = os.getenv('PSYPLOT_PLOTMETHODS', '').split('::')
        include_pms = [s[4:] for s in pm_env if s.startswith('yes:')]
        exclude_pms = [s[3:] for s in pm_env if s.startswith('no:')]
        logger = logging.getLogger(__name__)
        plotters = self['project.plotters']
        # remember which plugin defined which plotter/key for the duplicate
        # error messages below
        def_plots = {'default': list(plotters)}
        defaultParams = self.defaultParams
        def_keys = {'default': defaultParams}
        def register_pm(ep, name):
            # decide whether the plot method `name` of entry point `ep`
            # should be registered, based on PSYPLOT_PLOTMETHODS
            full_name = '%s:%s' % (ep.module_name, name)
            ret = True
            if pm_env == ['no']:
                ret = False
            elif name in exclude_pms or full_name in exclude_pms:
                ret = False
            elif include_pms and (name not in include_pms and
                                  full_name not in include_pms):
                ret = False
            if not ret:
                logger.debug('Skipping plot method %s', full_name)
            return ret
        for ep in self._load_plugin_entrypoints():
            plugin_mod = ep.load()
            rc = plugin_mod.rcParams
            # load the plotters
            plugin_plotters = {
                key: val for key, val in rc.get('project.plotters', {}).items()
                if register_pm(ep, key)}
            already_defined = set(plotters).intersection(plugin_plotters)
            if already_defined:
                msg = ("Error while loading psyplot plugin %s! The "
                       "following plotters have already been "
                       "defined") % ep
                # NOTE(review): when raise_error is False this renders as
                # "...definedand will be overwritten:" -- a space before
                # 'and' appears to be missing; confirm and fix separately.
                msg += 'and will be overwritten:' if not raise_error else ':'
                msg += '\n' + '\n'.join(chain.from_iterable(
                    (('%s by %s' % (key, plugin)
                      for plugin, keys in def_plots.items() if key in keys)
                     for key in already_defined)))
                if raise_error:
                    raise ImportError(msg)
                else:
                    warn(msg)
            # tag every plotter with the plugin module that provided it
            for d in plugin_plotters.values():
                d['plugin'] = ep.module_name
            plotters.update(plugin_plotters)
            def_plots[ep] = list(plugin_plotters)
            # load the defaultParams keys
            plugin_defaultParams = rc.defaultParams
            already_defined = set(defaultParams).intersection(
                plugin_defaultParams) - {'project.plotters'}
            if already_defined:
                msg = ("Error while loading psyplot plugin %s! The "
                       "following default keys have already been "
                       "defined:") % ep
                msg += '\n' + '\n'.join(chain.from_iterable(
                    (('%s by %s' % (key, plugin)
                    for plugin, keys in def_keys.items() if key in keys)
                    for key in already_defined)))
                if raise_error:
                    raise ImportError(msg)
                else:
                    warn(msg)
            update_keys = set(plugin_defaultParams) - {'project.plotters'}
            def_keys[ep] = update_keys
            self.defaultParams.update(
                {key: plugin_defaultParams[key] for key in update_keys})
            # load the rcParams (without validation)
            super(RcParams, self).update({key: rc[key] for key in update_keys})
            # add the deprecated keys
            self._deprecated_ignore_map.update(rc._deprecated_ignore_map)
            self._deprecated_map.update(rc._deprecated_map) | Load the plotters and defaultParams from the plugins
This method loads the `plotters` attribute and `defaultParams`
attribute from the plugins that use the entry point specified by
`group`. Entry points must be objects (or modules) that have a
`defaultParams` and a `plotters` attribute.
Parameters
----------
raise_error: bool
If True, an error is raised when multiple plugins define the same
plotter or rcParams key. Otherwise only a warning is raised | entailment |
def deploy(project_name):
    """Assemble the middleware pipeline

    The base SDK handler is wrapped, in order, by HeaderControl,
    FaultWrapper and RequestLog, leaving RequestLog as the outermost
    wrapper. `project_name` is accepted for interface compatibility but
    is not used here."""
    application = handler.SdkHandler()
    # currently we have 3 middleware
    for middleware_cls in (HeaderControl,
                           FaultWrapper,
                           requestlog.RequestLog):
        if middleware_cls:
            application = middleware_cls(application)
    return application
def guest_start(self, userid):
        """Power on z/VM instance.

        :param str userid: the user id of the guest to power on"""
        LOG.info("Begin to power on vm %s", userid)
        self._smtclient.guest_start(userid)
        LOG.info("Complete power on vm %s", userid) | Power on z/VM instance. | entailment |
def guest_reboot(self, userid):
        """Reboot a guest vm.

        :param str userid: the user id of the guest to reboot"""
        LOG.info("Begin to reboot vm %s", userid)
        self._smtclient.guest_reboot(userid)
        LOG.info("Complete reboot vm %s", userid) | Reboot a guest vm. | entailment |
def guest_reset(self, userid):
        """Reset z/VM instance.

        :param str userid: the user id of the guest to reset"""
        LOG.info("Begin to reset vm %s", userid)
        self._smtclient.guest_reset(userid)
        LOG.info("Complete reset vm %s", userid) | Reset z/VM instance. | entailment |
def live_migrate_vm(self, userid, destination, parms, action):
        """Move an eligible, running z/VM(R) virtual machine transparently
        from one z/VM system to another within an SSI cluster.

        :param str userid: the user id of the guest to migrate
        :param str destination: the target z/VM system
        :param parms: additional parameters, passed through to the smtclient
        :param str action: 'move' to perform the migration, 'test' to only
                           check the guest's eligibility
        :raises SDKConflictError: if the guest is not powered on
        """
        # Check guest state is 'on': only a running guest can be migrated
        state = self.get_power_state(userid)
        if state != 'on':
            # fixed typo in the logged message ("cann't")
            LOG.error("Failed to live migrate guest %s, error: "
                      "guest is inactive, can't perform live migrate." %
                      userid)
            raise exception.SDKConflictError(modID='guest', rs=1,
                                             userid=userid)
        # Do live migrate; the two actions are mutually exclusive, so use
        # elif instead of two independent if statements
        if action.lower() == 'move':
            LOG.info("Moving the specific vm %s", userid)
            self._smtclient.live_migrate_move(userid, destination, parms)
            LOG.info("Complete move vm %s", userid)
        elif action.lower() == 'test':
            # fixed typo in the logged message ("eligiblity")
            LOG.info("Testing the eligibility of specific vm %s", userid)
            self._smtclient.live_migrate_test(userid, destination)
from one z/VM system to another within an SSI cluster. | entailment |
def create_vm(self, userid, cpu, memory, disk_list,
              user_profile, max_cpu, max_mem, ipl_from,
              ipl_param, ipl_loadparam):
        """Create z/VM userid into user directory for a z/VM instance.

        All parameters are passed through to :meth:`smtclient.create_vm`;
        afterwards the new userid is added to the smapi namelist.

        :param str userid: the user id to be created
        :param cpu: number of virtual CPUs
        :param memory: memory size
        :param disk_list: list of disk definitions
        :param user_profile: profile for the user directory entry
        :param max_cpu: maximum number of virtual CPUs
        :param max_mem: maximum memory size
        :param ipl_from: IPL source -- TODO confirm exact semantics
        :param ipl_param: IPL parameter -- TODO confirm exact semantics
        :param ipl_loadparam: IPL load parameter -- TODO confirm
        :returns: the result of :meth:`smtclient.create_vm`"""
        LOG.info("Creating the user directory for vm %s", userid)
        info = self._smtclient.create_vm(userid, cpu, memory,
                                         disk_list, user_profile,
                                         max_cpu, max_mem, ipl_from,
                                         ipl_param, ipl_loadparam)
        # add userid into smapi namelist
        self._smtclient.namelist_add(self._namelist, userid)
        return info | Create z/VM userid into user directory for a z/VM instance. | entailment |
def delete_vm(self, userid):
        """Delete z/VM userid for the instance.

        :param str userid: the user id of the guest to delete"""
        LOG.info("Begin to delete vm %s", userid)
        self._smtclient.delete_vm(userid)
        # remove userid from smapi namelist
        self._smtclient.namelist_remove(self._namelist, userid)
        LOG.info("Complete delete vm %s", userid) | Delete z/VM userid for the instance. | entailment |
def execute_cmd(self, userid, cmdStr):
        """Execute commands on the guest vm.

        :param str userid: the user id of the guest
        :param str cmdStr: the command to execute inside the guest
        :returns: the result of :meth:`smtclient.execute_cmd`"""
        LOG.debug("executing cmd: %s", cmdStr)
        return self._smtclient.execute_cmd(userid, cmdStr) | Execute commands on the guest vm. | entailment |
def set_hostname(self, userid, hostname, os_version):
        """Punch a script that is used to set the hostname of the guest.

        :param str userid: the user id of the guest
        :param str hostname: the hostname of the guest
        :param str os_version: version of guest operation system
        :raises SDKSMTRequestFailed: if punching the script fails
        """
        tmp_path = self._pathutils.get_guest_temp_path(userid)
        if not os.path.exists(tmp_path):
            os.makedirs(tmp_path)
        # use os.path.join instead of manual '/' concatenation
        tmp_file = os.path.join(tmp_path, 'hostname.sh')
        # let the distribution-specific helper generate the script content
        lnxdist = self._dist_manager.get_linux_dist(os_version)()
        lines = lnxdist.generate_set_hostname_script(hostname)
        with open(tmp_file, 'w') as f:
            f.writelines(lines)
        requestData = "ChangeVM " + userid + " punchfile " + \
            tmp_file + " --class x"
        LOG.debug("Punch script to guest %s to set hostname" % userid)
        try:
            self._smtclient._request(requestData)
        except exception.SDKSMTRequestFailed as err:
            msg = ("Failed to punch set_hostname script to userid '%s'. SMT "
                   "error: %s" % (userid, err.format_message()))
            LOG.error(msg)
            raise exception.SDKSMTRequestFailed(err.results, msg)
        finally:
            # always remove the temporary folder, even on punch failure
            self._pathutils.clean_temp_folder(tmp_path)
:param str guest: the user id of the guest
:param str hostname: the hostname of the guest
:param str os_version: version of guest operation system | entailment |
def cvtToBlocks(rh, diskSize):
    """
    Convert a disk storage value to a number of 512-byte blocks.

    Input:
       Request Handle
       Size of disk, either a plain number of blocks or a number
       suffixed with 'M' (megabytes) or 'G' (gigabytes)

    Output:
       Results structure:
          overallRC - Overall return code for the function:
                      0 - ok, 4 - input validation error
          rc        - Return code causing the return. Same as overallRC.
          rs        - Reason code causing the return.
          errno     - Errno value causing the return. Always zero.
       Converted value in blocks (as a string)
    """
    rh.printSysLog("Enter generalUtils.cvtToBlocks")
    results = {'overallRC': 0, 'rc': 0, 'rs': 0, 'errno': 0}
    blocks = diskSize.strip().upper()
    unit = blocks[-1]
    if unit in ('G', 'M'):
        numericPart = blocks[:-1]
        if not numericPart:
            # The size of the disk is not valid (unit with no number).
            msg = msgs.msg['0200'][1] % (modId, blocks)
            rh.printLn("ES", msg)
            results = msgs.msg['0200'][0]
        else:
            try:
                # Convert megabytes/gigabytes to 512-byte blocks,
                # rounding up to a whole block.
                multiplier = (1024 * 1024 if unit == 'M'
                              else 1024 * 1024 * 1024)
                blockCount = (float(numericPart) * multiplier) / 512
                blocks = str(int(math.ceil(blockCount)))
            except Exception:
                # Failed to convert to a number of blocks.
                msg = msgs.msg['0201'][1] % (modId, numericPart)
                rh.printLn("ES", msg)
                results = msgs.msg['0201'][0]
    elif blocks.strip('1234567890'):
        # Size is not an integer number of blocks.
        msg = msgs.msg['0202'][1] % (modId, blocks)
        rh.printLn("ES", msg)
        results = msgs.msg['0202'][0]
    rh.printSysLog("Exit generalUtils.cvtToBlocks, rc: " +
        str(results['overallRC']))
    return results, blocks
Input:
Request Handle
Size of disk in bytes
Output:
Results structure:
overallRC - Overall return code for the function:
0 - Everything went ok
4 - Input validation error
rc - Return code causing the return. Same as overallRC.
rs - Reason code causing the return.
errno - Errno value causing the return. Always zero.
Converted value in blocks | entailment |
def cvtToMag(rh, size):
    """
    Convert a size value to a number with a magnitude appended.

    Input:
       Request Handle
       Size bytes

    Output:
       Converted value with a magnitude ('G' when larger than 5G,
       otherwise 'M'), formatted with one decimal place
    """
    rh.printSysLog("Enter generalUtils.cvtToMag")
    sizeInMB = size / (1024 * 1024)
    if sizeInMB > 1024 * 5:
        # More than 5G: report in gigabytes.
        mSize = "%.1fG" % (sizeInMB / 1024)
    else:
        # 5G or less: report in megabytes.
        mSize = "%.1fM" % sizeInMB
    rh.printSysLog("Exit generalUtils.cvtToMag, magSize: " + mSize)
    return mSize
Input:
Request Handle
Size bytes
Output:
Converted value with a magnitude | entailment |
def getSizeFromPage(rh, page):
    """
    Convert a size value from page to a number with a magnitude appended.

    Input:
       Request Handle
       Size in page

    Output:
       Converted value with a magnitude (see cvtToMag)
    """
    rh.printSysLog("Enter generalUtils.getSizeFromPage")
    # one page is 4096 bytes; convert pages to bytes before formatting
    bSize = float(page) * 4096
    mSize = cvtToMag(rh, bSize)
    rh.printSysLog("Exit generalUtils.getSizeFromPage, magSize: " + mSize)
    return mSize | Convert a size value from page to a number with a magnitude appended.
Input:
Request Handle
Size in page
Output:
Converted value with a magnitude | entailment |
def parseCmdline(rh, posOpsList, keyOpsList):
    """
    Parse the request command input.

    Input:
       Request Handle
       Positional Operands List. This is a dictionary that contains
          an array for each subfunction. The array contains an entry
          (itself an array) for each positional operand.
          That array contains:
             - Human readable name of the operand,
             - Property in the parms dictionary to hold the value,
             - Is it required (True) or optional (False),
             - Type of data (1: int, 2: string).
       Keyword Operands List. This is a dictionary that contains
          an item for each subfunction. The value for the subfunction is a
          dictionary that contains a key for each recognized operand.
          The value associated with the key is an array that contains
          the following:
             - the related ReqHandle.parms item that stores the value,
             - how many values follow the keyword
               (0: flag only, < 0: all remaining operands), and
             - the type of data for those values (1: int, 2: string)

    Output:
       Request Handle updated with parsed input.
       Return code - 0: ok, non-zero: error
    """
    rh.printSysLog("Enter generalUtils.parseCmdline")
    # Handle any positional operands on the line.
    if rh.results['overallRC'] == 0 and rh.subfunction in posOpsList:
        ops = posOpsList[rh.subfunction]
        currOp = 0
        # While we have operands on the command line AND
        # we have more operands in the positional operand list.
        while rh.argPos < rh.totalParms and currOp < len(ops):
            key = ops[currOp][1]  # key for rh.parms[]
            opType = ops[currOp][3]  # data type
            if opType == 1:
                # Handle an integer data type
                try:
                    rh.parms[key] = int(rh.request[rh.argPos])
                except ValueError:
                    # keyword is not an integer
                    msg = msgs.msg['0001'][1] % (modId, rh.function,
                        rh.subfunction, (currOp + 1),
                        ops[currOp][0], rh.request[rh.argPos])
                    rh.printLn("ES", msg)
                    rh.updateResults(msgs.msg['0001'][0])
                    break
            else:
                rh.parms[key] = rh.request[rh.argPos]
            currOp += 1
            rh.argPos += 1
        if (rh.argPos >= rh.totalParms and currOp < len(ops) and
                ops[currOp][2] is True):
            # Check for missing required operands.
            msg = msgs.msg['0002'][1] % (modId, rh.function,
                rh.subfunction, ops[currOp][0], (currOp + 1))
            rh.printLn("ES", msg)
            rh.updateResults(msgs.msg['0002'][0])
    # Handle any keyword operands on the line.
    if rh.results['overallRC'] == 0 and rh.subfunction in keyOpsList:
        while rh.argPos < rh.totalParms:
            if rh.request[rh.argPos] in keyOpsList[rh.subfunction]:
                keyword = rh.request[rh.argPos]
                rh.argPos += 1
                ops = keyOpsList[rh.subfunction]
                if keyword in ops:
                    key = ops[keyword][0]
                    opCnt = ops[keyword][1]
                    opType = ops[keyword][2]
                    if opCnt == 0:
                        # Keyword has no additional value
                        rh.parms[key] = True
                    else:
                        # Keyword has values following it.
                        storeIntoArray = False  # Assume single word
                        if opCnt < 0:
                            storeIntoArray = True
                            # Property is a list all of the rest of the parms.
                            opCnt = rh.totalParms - rh.argPos
                            if opCnt == 0:
                                # Need at least 1 operand value
                                opCnt = 1
                        elif opCnt > 1:
                            storeIntoArray = True
                        if opCnt + rh.argPos > rh.totalParms:
                            # keyword is missing its related value operand
                            msg = msgs.msg['0003'][1] % (modId, rh.function,
                                rh.subfunction, keyword)
                            rh.printLn("ES", msg)
                            rh.updateResults(msgs.msg['0003'][0])
                            break
                        """
                        Add the expected value to the property.
                        Take into account if there are more than 1.
                        """
                        if storeIntoArray:
                            # Initialize the list.
                            rh.parms[key] = []
                        for i in range(0, opCnt):
                            if opType == 1:
                                # convert from string to int and save it.
                                try:
                                    if not storeIntoArray:
                                        rh.parms[key] = (
                                            int(rh.request[rh.argPos]))
                                    else:
                                        rh.parms[key].append(int(
                                            rh.request[rh.argPos]))
                                except ValueError:
                                    # keyword is not an integer
                                    msg = (msgs.msg['0004'][1] %
                                        (modId, rh.function, rh.subfunction,
                                        keyword, rh.request[rh.argPos]))
                                    rh.printLn("ES", msg)
                                    rh.updateResults(msgs.msg['0004'][0])
                                    break
                            else:
                                # Value is a string, save it.
                                if not storeIntoArray:
                                    rh.parms[key] = rh.request[rh.argPos]
                                else:
                                    rh.parms[key].append(rh.request[rh.argPos])
                            rh.argPos += 1
                        if rh.results['overallRC'] != 0:
                            # Upper loop had an error break from loops.
                            break
                else:
                    # keyword is not in the subfunction's keyword list
                    msg = msgs.msg['0005'][1] % (modId, rh.function,
                        rh.subfunction, keyword)
                    rh.printLn("ES", msg)
                    rh.updateResults(msgs.msg['0005'][0])
                    break
            else:
                # Subfunction does not support keywords
                msg = (msgs.msg['0006'][1] % (modId, rh.function,
                    rh.subfunction, rh.request[rh.argPos]))
                rh.printLn("ES", msg)
                rh.updateResults(msgs.msg['0006'][0])
                break
    rh.printSysLog("Exit generalUtils.parseCmdLine, rc: " +
        str(rh.results['overallRC']))
    return rh.results['overallRC'] | Parse the request command input.
Input:
Request Handle
Positional Operands List. This is a dictionary that contains
an array for each subfunction. The array contains a entry
(itself an array) for each positional operand.
That array contains:
- Human readable name of the operand,
- Property in the parms dictionary to hold the value,
- Is it required (True) or optional (False),
- Type of data (1: int, 2: string).
Keyword Operands List. This is a dictionary that contains
an item for each subfunction. The value for the subfunction is a
dictionary that contains a key for each recognized operand.
The value associated with the key is an array that contains
the following:
- the related ReqHandle.parms item that stores the value,
- how many values follow the keyword, and
- the type of data for those values (1: int, 2: string)
Output:
Request Handle updated with parsed input.
Return code - 0: ok, non-zero: error | entailment |
def default_52xhandler(response, resource, url, params):
    """
    Default 52x handler that loops every second until a non 52x response is received.

    :param response: The response of the last executed api request.
    :param resource: The resource of the last executed api request.
    :param url: The url of the last executed api request sans encoded query parameters.
    :param params: The query params of the last executed api request in dictionary format.
    """
    # wait one second, then re-execute; resource.execute invokes this
    # handler again on another 52x response, so the retry loop has no
    # upper bound on the number of attempts
    time.sleep(1)
    return resource.execute(url, params) | Default 52x handler that loops every second until a non 52x response is received.
:param response: The response of the last executed api request.
:param resource: The resource of the last executed api request.
:param url: The url of the last executed api request sans encoded query parameters.
:param params: The query params of the last executed api request in dictionary format. | entailment |
def validate(self):
        """
        Perform validation check on properties.

        Raises ImproperlyConfigured when the authentication tokens are
        missing or when ``response_type`` is not a supported value.
        """
        if not (self.api_token and self.api_token_secret):
            raise ImproperlyConfigured(
                "'api_token' and 'api_token_secret' are required for "
                "authentication.")
        valid_types = ("json", "pson", "xml", "debug", None)
        if self.response_type not in valid_types:
            raise ImproperlyConfigured(
                "'%s' is an invalid response_type" % self.response_type)
def execute(self, url, params):
        """ Executes a call to the API.

        :param url: The full url for the api call.
        :param params: Query parameters encoded in the request.
        :returns: the parsed json body when ``response_type`` is falsy,
                  otherwise the raw response text
        """
        response = requests.get(url, params=params, **self.config.requests_kwargs)
        # HTTP 52x responses are delegated to the configured handler52x
        # (if any) instead of raising immediately
        if 520 <= response.status_code < 530:
            if self.config.handler52x:
                return self.config.handler52x(response, self, url, params)
        response.raise_for_status()
        # response_type None/'' means "parse as json"; any explicit type
        # returns the raw text body
        if not self.config.response_type:
            return response.json()
        else:
            return response.text | Executes a call to the API.
:param url: The full url for the api call.
:param params: Query parameters encoded in the request. | entailment |
def _create_invokeScript(self, network_file_path, commands,
                         files_map):
        """invokeScript: Configure zLinux os network

        invokeScript is included in the network.doscript, it is used to put
        the network configuration file to the directory where it belongs and
        call znetconfig to configure the network

        :param str network_file_path: folder in which the script is written
        :param str commands: shell commands to prepend to the script body
        :param files_map: iterable of dicts with 'source_file' and
                          'target_path' entries for the files to move
        """
        LOG.debug('Creating invokeScript shell in the folder %s'
                  % network_file_path)
        invokeScript = "invokeScript.sh"
        conf = "#!/bin/bash \n"
        command = commands
        # move every generated config file to its final location
        for file in files_map:
            target_path = file['target_path']
            source_file = file['source_file']
            # potential risk: whether target_path exist
            command += 'mv ' + source_file + ' ' + target_path + '\n'
        command += 'sleep 2\n'
        # run znetconfig to apply the network configuration, then have the
        # script remove itself
        command += '/bin/bash /tmp/znetconfig.sh\n'
        command += 'rm -rf invokeScript.sh\n'
        scriptfile = os.path.join(network_file_path, invokeScript)
        with open(scriptfile, "w") as f:
            f.write(conf)
            f.write(command) | invokeScript: Configure zLinux os network
invokeScript is included in the network.doscript, it is used to put
the network configuration file to the directory where it belongs and
call znetconfig to configure the network | entailment |
def _create_network_doscript(self, network_file_path):
    """doscript: contains a invokeScript.sh which will do the special work

    The network.doscript tarball bundles the network configuration files;
    zvmguestconfigure uses it to configure the zLinux network at start up.
    """
    # Build the tar package that will later be punched to the guest.
    LOG.debug('Creating network doscript in the folder %s'
              % network_file_path)
    doscript_path = os.path.join(network_file_path, 'network.doscript')
    archive = tarfile.open(doscript_path, "w")
    for member in os.listdir(network_file_path):
        archive.add(os.path.join(network_file_path, member), arcname=member)
    archive.close()
    return doscript_path
The network.doscript contains network configuration files and it will
be used by zvmguestconfigure to configure zLinux os network when it
starts up | entailment |
def R(X, destination, a1, a2, b):
    """A single Salsa20 row operation"""
    # Sum two words modulo 2**32, rotate left by b, XOR into the target.
    total = (X[a1] + X[a2]) & 0xffffffff
    rotated = (total << b) | (total >> (32 - b))
    X[destination] ^= rotated
def salsa20_8(B, x, src, s_start, dest, d_start):
    """Salsa20/8 http://en.wikipedia.org/wiki/Salsa20"""
    # B: 16-word running block, updated in place.
    # x: 16-element scratch list for the round computation.
    # src/dest: flat word arrays read/written at the given word offsets.
    # NOTE(review): uses xrange — Python 2 (or a compatibility alias
    # defined elsewhere in this module); confirm before running on py3.
    # Merged blockxor for speed
    for i in xrange(16):
        x[i] = B[i] = B[i] ^ src[s_start + i]
    # This is the actual Salsa 20/8: four identical double rounds
    for i in xrange(4):
        R(x, 4, 0,12, 7);R(x, 8, 4, 0, 9);R(x,12, 8, 4,13);R(x, 0,12, 8,18)
        R(x, 9, 5, 1, 7);R(x,13, 9, 5, 9);R(x, 1,13, 9,13);R(x, 5, 1,13,18)
        R(x,14,10, 6, 7);R(x, 2,14,10, 9);R(x, 6, 2,14,13);R(x,10, 6, 2,18)
        R(x, 3,15,11, 7);R(x, 7, 3,15, 9);R(x,11, 7, 3,13);R(x,15,11, 7,18)
        R(x, 1, 0, 3, 7);R(x, 2, 1, 0, 9);R(x, 3, 2, 1,13);R(x, 0, 3, 2,18)
        R(x, 6, 5, 4, 7);R(x, 7, 6, 5, 9);R(x, 4, 7, 6,13);R(x, 5, 4, 7,18)
        R(x,11,10, 9, 7);R(x, 8,11,10, 9);R(x, 9, 8,11,13);R(x,10, 9, 8,18)
        R(x,12,15,14, 7);R(x,13,12,15, 9);R(x,14,13,12,13);R(x,15,14,13,18)
    # While we are handling the data, write it to the correct dest.
    # The latter half is still part of salsa20
    # The final mask also discards any high bits accumulated by R's
    # unmasked left shifts, so the 32-bit result is correct.
    for i in xrange(16):
        dest[d_start + i] = B[i] = (x[i] + B[i]) & 0xffffffff
def fileChunkIter(file_object, file_chunk_size=65536):
    """
    Return an iterator to a file-like object that yields fixed size chunks

    :param file_object: a file-like object
    :param file_chunk_size: maximum size of chunk
    """
    # Keep reading until the file is exhausted; the final chunk may be
    # shorter than file_chunk_size.
    chunk = file_object.read(file_chunk_size)
    while chunk:
        yield chunk
        chunk = file_object.read(file_chunk_size)
:param file_object: a file-like object
:param file_chunk_size: maximum size of chunk | entailment |
def scrypt(password, salt, N=SCRYPT_N, r=SCRYPT_r, p=SCRYPT_p, olen=64):
    """Returns a key derived using the scrypt key-derivarion function

    N must be a power of two larger than 1 but no larger than 2 ** 63 (insane)
    r and p must be positive numbers such that r * p < 2 ** 30

    The default values are:
    N -- 2**14 (~16k)
    r -- 8
    p -- 1

    Memory usage is proportional to N*r. Defaults require about 16 MiB.
    Time taken is proportional to N*p. Defaults take <100ms of a recent x86.

    The last one differs from libscrypt defaults, but matches the 'interactive'
    work factor from the original paper. For long term storage where runtime of
    key derivation is not a problem, you could use 16 as in libscrypt or better
    yet increase N if memory is plentiful.
    """
    # Validate all parameters up front; check_args raises on bad input.
    check_args(password, salt, N, r, p, olen)

    # Hand the actual derivation off to the libscrypt C library.
    derived = ctypes.create_string_buffer(olen)
    status = _libscrypt_scrypt(password, len(password), salt, len(salt),
                               N, r, p, derived, len(derived))
    if status:
        # Non-zero status from libscrypt indicates failure.
        raise ValueError
    return derived.raw
N must be a power of two larger than 1 but no larger than 2 ** 63 (insane)
r and p must be positive numbers such that r * p < 2 ** 30
The default values are:
N -- 2**14 (~16k)
r -- 8
p -- 1
Memory usage is proportional to N*r. Defaults require about 16 MiB.
Time taken is proportional to N*p. Defaults take <100ms of a recent x86.
The last one differs from libscrypt defaults, but matches the 'interactive'
work factor from the original paper. For long term storage where runtime of
key derivation is not a problem, you could use 16 as in libscrypt or better
yet increase N if memory is plentiful. | entailment |
def scrypt_mcf(password, salt=None, N=SCRYPT_N, r=SCRYPT_r, p=SCRYPT_p,
               prefix=SCRYPT_MCF_PREFIX_DEFAULT):
    """Derives a Modular Crypt Format hash using the scrypt KDF

    Parameter space is smaller than for scrypt():
    N must be a power of two larger than 1 but no larger than 2 ** 31
    r and p must be positive numbers between 1 and 255
    Salt must be a byte string 1-16 bytes long.

    If no salt is given, a random salt of 128+ bits is used. (Recommended.)
    """
    # Prefixes other than $s1$/wildcard are handled by the pure-Python
    # MCF implementation rather than libscrypt.
    if (prefix != SCRYPT_MCF_PREFIX_s1 and prefix != SCRYPT_MCF_PREFIX_ANY):
        return mcf_mod.scrypt_mcf(scrypt, password, salt, N, r, p, prefix)
    # NOTE(review): 'unicode' implies Python 2 (or a py3 alias defined
    # elsewhere in this module).
    if isinstance(password, unicode):
        password = password.encode('utf8')
    elif not isinstance(password, bytes):
        raise TypeError('password must be a unicode or byte string')
    if salt is None:
        salt = os.urandom(16)
    elif not (1 <= len(salt) <= 16):
        raise ValueError('salt must be 1-16 bytes')
    if N > 2**31:
        raise ValueError('N > 2**31 not supported')
    # libscrypt treats the password as a C string, so embedded NULs
    # would be silently truncated — reject them instead.
    if b'\0' in password:
        raise ValueError('scrypt_mcf password must not contain zero bytes')
    hash = scrypt(password, salt, N, r, p)
    h64 = base64.b64encode(hash)
    s64 = base64.b64encode(salt)
    out = ctypes.create_string_buffer(125)
    # _libscrypt_mcf returns non-zero on success (zero is treated as error).
    ret = _libscrypt_mcf(N, r, p, s64, h64, out)
    if not ret:
        raise ValueError
    out = out.raw.strip(b'\0')
    # XXX: Hack to support old libscrypt (like in Ubuntu 14.04)
    if len(out) == 123:
        out = out + b'='
    return out
Parameter space is smaller than for scrypt():
N must be a power of two larger than 1 but no larger than 2 ** 31
r and p must be positive numbers between 1 and 255
Salt must be a byte string 1-16 bytes long.
If no salt is given, a random salt of 128+ bits is used. (Recommended.) | entailment |
def scrypt_mcf_check(mcf, password):
    """Returns True if the password matches the given MCF hash"""
    if not isinstance(mcf, bytes):
        raise TypeError('MCF must be a byte string')
    if isinstance(password, unicode):
        password = password.encode('utf8')
    elif not isinstance(password, bytes):
        raise TypeError('password must be a unicode or byte string')
    # Hashes of a non-standard length, or passwords containing NUL bytes,
    # cannot go through libscrypt; use the pure-Python path for those.
    if len(mcf) != 124 or b'\0' in password:
        return mcf_mod.scrypt_mcf_check(scrypt, mcf, password)
    mcf_buffer = ctypes.create_string_buffer(mcf)
    status = _libscrypt_check(mcf_buffer, password)
    if status < 0:
        # libscrypt rejected the hash format; retry with the fallback.
        return mcf_mod.scrypt_mcf_check(scrypt, mcf, password)
    return bool(status)
def parse_mixed_delim_str(line):
    """Turns .obj face index string line into [verts, texcoords, normals] numeric tuples."""
    # Each space-separated group is 'v', 'v/vt', 'v//vn', or 'v/vt/vn';
    # collect each slash-position into its own column, skipping blanks.
    columns = ([], [], [])
    for group in line.split(' '):
        for position, token in enumerate(group.split('/')):
            if token:
                columns[position].append(int(token))
    return [tuple(column) for column in columns]
def read_objfile(fname):
    """Takes .obj filename and returns dict of object properties for each object in file."""
    # verts maps a vertex prefix ('v', 'vt', 'vn') to its list of rows.
    verts = defaultdict(list)
    obj_props = []
    with open(fname) as f:
        lines = f.read().splitlines()
    for line in lines:
        if line:
            split_line = line.strip().split(' ', 1)
            if len(split_line) < 2:
                continue
            prefix, value = split_line[0], split_line[1]
            if prefix == 'o':
                # Start a new object record; 'f' collects its face indices.
                obj_props.append({})
                obj = obj_props[-1]
                obj['f'] = []
                obj[prefix] = value
            # For files without an 'o' statement
            elif prefix == 'v' and len(obj_props) < 1:
                obj_props.append({})
                obj = obj_props[-1]
                obj['f'] = []
                obj['o'] = fname
            if obj_props:
                if prefix[0] == 'v':
                    verts[prefix].append([float(val) for val in value.split(' ')])
                elif prefix == 'f':
                    obj[prefix].append(parse_mixed_delim_str(value))
                else:
                    obj[prefix] = value
    # Reindex vertices to be in face index order, then remove face indices.
    verts = {key: np.array(value) for key, value in iteritems(verts)}
    for obj in obj_props:
        # NOTE(review): the genexp variable below shadows the outer 'verts'
        # dict; it does not leak out of the generator expression, but the
        # name reuse is confusing — consider renaming.
        obj['f'] = tuple(np.array(verts) if verts[0] else tuple() for verts in zip(*obj['f']))
        for idx, vertname in enumerate(['v' ,'vt', 'vn']):
            if vertname in verts:
                # .obj face indices are 1-based, hence the '- 1'.
                obj[vertname] = verts[vertname][obj['f'][idx].flatten() - 1, :]
            else:
                obj[vertname] = tuple()
        del obj['f']
    geoms = {obj['o']:obj for obj in obj_props}
    return geoms
def read_wavefront(fname_obj):
    """Returns mesh dictionary along with their material dictionary from a wavefront (.obj and/or .mtl) file."""
    geoms = read_objfile(fname_obj)
    # Scan the .obj for an mtllib directive naming the material library.
    mtl_name = ''
    for raw_line in open(fname_obj):
        if not raw_line:
            continue
        parts = raw_line.strip().split(' ', 1)
        if len(parts) < 2:
            continue
        if 'mtllib' in parts[0]:
            mtl_name = parts[1]
            break
    if mtl_name:
        # Resolve the .mtl path relative to the .obj and attach each
        # geometry's material record by its 'usemtl' name.
        materials = read_mtlfile(path.join(path.dirname(fname_obj), mtl_name))
        for geom in geoms.values():
            geom['material'] = materials[geom['usemtl']]
    return geoms
def disableEnableDisk(rh, userid, vaddr, option):
    """
    Disable or enable a disk.

    Input:
       Request Handle:
          owning userid
          virtual address
          option ('-e': enable, '-d': disable)

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - rc from the chccwdev command or IUCV transmission.
          rs        - rs from the chccwdev command or IUCV transmission.
          results   - possible error message from the IUCV transmission.
    """
    rh.printSysLog("Enter vmUtils.disableEnableDisk, userid: " + userid +
        " addr: " + vaddr + " option: " + option)

    results = {
              'overallRC': 0,
              'rc': 0,
              'rs': 0,
              'response': ''
             }

    """
    Can't guarantee the success of online/offline disk, need to wait
    Until it's done because we may detach the disk after -d option
    or use the disk after the -e option
    """
    # Retry with an escalating backoff schedule (~7 minutes total) until
    # chccwdev succeeds on the guest.
    for secs in [0.1, 0.4, 1, 1.5, 3, 7, 15, 32, 30, 30,
                 60, 60, 60, 60, 60]:
        strCmd = "sudo /sbin/chccwdev " + option + " " + vaddr + " 2>&1"
        results = execCmdThruIUCV(rh, userid, strCmd)
        if results['overallRC'] == 0:
            break
        elif (results['overallRC'] == 2 and results['rc'] == 8 and
            results['rs'] == 1 and option == '-d'):
            # Linux does not know about the disk being disabled.
            # Ok, nothing to do.  Treat this as a success.
            results = {'overallRC': 0, 'rc': 0, 'rs': 0, 'response': ''}
            break
        time.sleep(secs)

    rh.printSysLog("Exit vmUtils.disableEnableDisk, rc: " +
                   str(results['overallRC']))
    return results
Input:
Request Handle:
owning userid
virtual address
option ('-e': enable, '-d': disable)
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - rc from the chccwdev command or IUCV transmission.
rs - rs from the chccwdev command or IUCV transmission.
results - possible error message from the IUCV transmission. | entailment |
def execCmdThruIUCV(rh, userid, strCmd, hideInLog=[]):
    """
    Send a command to a virtual machine using IUCV.

    Input:
       Request Handle
       Userid of the target virtual machine
       Command string to send
       (Optional) List of strCmd words (by index) to hide in
          sysLog by replacing the word with "<hidden>".

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, 2: failure
          rc        - RC returned from iucvclnt if overallRC != 0.
          rs        - RS returned from iucvclnt if overallRC != 0.
          errno     - Errno returned from iucvclnt if overallRC != 0.
          response  - Output of the iucvclnt command or this routine.

    Notes:
       1) This routine does not use the Request Handle printLn function.
          This is because an error might be expected and we might desire
          to suppress it.  Instead, any error messages are put in the
          response dictionary element that is returned.
    """
    if len(hideInLog) == 0:
        rh.printSysLog("Enter vmUtils.execCmdThruIUCV, userid: " +
            userid + " cmd: " + strCmd)
    else:
        # Mask sensitive words (e.g. passwords) in the log copy only;
        # splitting creates a new list so the real command is untouched.
        logCmd = strCmd.split(' ')
        for i in hideInLog:
            logCmd[i] = '<hidden>'
        rh.printSysLog("Enter vmUtils.execCmdThruIUCV, userid: " +
            userid + " cmd: " + ' '.join(logCmd))

    iucvpath = '/opt/zthin/bin/IUCV/'
    results = {
        'overallRC': 0,
        'rc': 0,
        'rs': 0,
        'errno': 0,
        'response': [],
    }

    cmd = ['sudo',
           iucvpath + "iucvclnt",
           userid,
           strCmd]
    try:
        results['response'] = subprocess.check_output(
            cmd,
            stderr=subprocess.STDOUT,
            close_fds=True)
        if isinstance(results['response'], bytes):
            results['response'] = bytes.decode(results['response'])
    except CalledProcessError as e:
        msg = []
        results['overallRC'] = 2
        results['rc'] = e.returncode

        output = bytes.decode(e.output)

        # Pull the return code out of the iucvclnt output text.
        match = re.search('Return code (.+?),', output)
        if match:
            try:
                results['rc'] = int(match.group(1))
            except ValueError:
                # Return code in response from IUCVCLNT is not an int.
                msg = msgs.msg['0311'][1] % (modId, userid, strCmd,
                    results['rc'], match.group(1), output)

        if not msg:
            # We got the rc.  Now, get the rs.
            match = re.search('Reason code (.+?)\.', output)
            if match:
                try:
                    results['rs'] = int(match.group(1))
                except ValueError:
                    # Reason code in response from IUCVCLNT is not an int.
                    msg = msgs.msg['0312'][1] % (modId, userid, strCmd,
                        results['rc'], match.group(1), output)

        # Map the iucvclnt return code to the matching error message.
        if msg:
            # Already produced an error message.
            pass
        elif results['rc'] == 1:
            # Command was not authorized or a generic Linux error.
            msg = msgs.msg['0313'][1] % (modId, userid, strCmd,
                results['rc'], results['rs'], output)
        elif results['rc'] == 2:
            # IUCV client parameter error.
            msg = msgs.msg['0314'][1] % (modId, userid, strCmd,
                results['rc'], results['rs'], output)
        elif results['rc'] == 4:
            # IUCV socket error
            msg = msgs.msg['0315'][1] % (modId, userid, strCmd,
                results['rc'], results['rs'], output)
        elif results['rc'] == 8:
            # Executed command failed
            msg = msgs.msg['0316'][1] % (modId, userid, strCmd,
                results['rc'], results['rs'], output)
        elif results['rc'] == 16:
            # File Transport failed
            msg = msgs.msg['0317'][1] % (modId, userid, strCmd,
                results['rc'], results['rs'], output)
        elif results['rc'] == 32:
            # IUCV server file was not found on this system.
            # Bug fix: this branch previously used 'msg +=', which extended
            # the initial empty list with the characters of the message
            # string so 'response' became a list of chars; assign instead,
            # consistent with every other branch.
            msg = msgs.msg['0318'][1] % (modId, userid, strCmd,
                results['rc'], results['rs'], output)
        else:
            # Unrecognized IUCV client error
            msg = msgs.msg['0319'][1] % (modId, userid, strCmd,
                results['rc'], results['rs'], output)
        results['response'] = msg
    except Exception as e:
        # Other exceptions from this system (i.e. not the managed system).
        results = msgs.msg['0421'][0]
        msg = msgs.msg['0421'][1] % (modId, strCmd,
            type(e).__name__, str(e))
        results['response'] = msg

    rh.printSysLog("Exit vmUtils.execCmdThruIUCV, rc: " +
        str(results['rc']))
    return results
Input:
Request Handle
Userid of the target virtual machine
Command string to send
(Optional) List of strCmd words (by index) to hide in
sysLog by replacing the word with "<hidden>".
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, 2: failure
rc - RC returned from iucvclnt if overallRC != 0.
rs - RS returned from iucvclnt if overallRC != 0.
errno - Errno returned from iucvclnt if overallRC != 0.
response - Output of the iucvclnt command or this routine.
Notes:
1) This routine does not use the Request Handle printLn function.
This is because an error might be expected and we might desire
to suppress it. Instead, any error messages are put in the
response dictionary element that is returned. | entailment |
def getPerfInfo(rh, useridlist):
    """
    Get the performance information for a userid

    Input:
       Request Handle
       Userid to query <- may change this to a list later.

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - RC returned from SMCLI if overallRC = 0.
          rs        - RS returned from SMCLI if overallRC = 0.
          errno     - Errno returned from SMCLI if overallRC = 0.
          response  - Stripped and reformatted output of the SMCLI command.
    """
    rh.printSysLog("Enter vmUtils.getPerfInfo, userid: " + useridlist)
    # NOTE(review): the query always targets rh.userid; the 'useridlist'
    # parameter is only used for logging — confirm this is intended.
    parms = ["-T", rh.userid,
             "-c", "1"]
    results = invokeSMCLI(rh, "Image_Performance_Query", parms)
    if results['overallRC'] != 0:
        # SMCLI failed.
        rh.printLn("ES", results['response'])
        rh.printSysLog("Exit vmUtils.getPerfInfo, rc: " +
                       str(results['overallRC']))
        return results

    lines = results['response'].split("\n")
    usedTime = 0
    totalCpu = 0
    totalMem = 0
    usedMem = 0
    try:
        # Scrape the values of interest out of the SMCLI text response.
        for line in lines:
            if "Used CPU time:" in line:
                usedTime = line.split()[3].strip('"')
                # Value is in us, need make it seconds
                usedTime = int(usedTime) / 1000000
            if "Guest CPUs:" in line:
                totalCpu = line.split()[2].strip('"')
            if "Max memory:" in line:
                totalMem = line.split()[2].strip('"')
                # Value is in Kb, need to make it Mb
                totalMem = int(totalMem) / 1024
            if "Used memory:" in line:
                usedMem = line.split()[2].strip('"')
                usedMem = int(usedMem) / 1024
    except Exception as e:
        msg = msgs.msg['0412'][1] % (modId, type(e).__name__,
            str(e), results['response'])
        rh.printLn("ES", msg)
        results['overallRC'] = 4
        results['rc'] = 4
        results['rs'] = 412

    if results['overallRC'] == 0:
        # Reformat the scraped values into a human-readable summary.
        memstr = "Total Memory: %iM\n" % totalMem
        usedmemstr = "Used Memory: %iM\n" % usedMem
        procstr = "Processors: %s\n" % totalCpu
        timestr = "CPU Used Time: %i sec\n" % usedTime
        results['response'] = memstr + usedmemstr + procstr + timestr
    rh.printSysLog("Exit vmUtils.getPerfInfo, rc: " +
                   str(results['rc']))
    return results
Input:
Request Handle
Userid to query <- may change this to a list later.
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from SMCLI if overallRC = 0.
rs - RS returned from SMCLI if overallRC = 0.
errno - Errno returned from SMCLI if overallRC = 0.
response - Stripped and reformatted output of the SMCLI command. | entailment |
def installFS(rh, vaddr, mode, fileSystem, diskType):
    """
    Install a filesystem on a virtual machine's dasd.

    Input:
       Request Handle:
          userid - Userid that owns the disk
       Virtual address as known to the owning system.
       Access mode to use to get the disk.
       Disk Type - 3390 or 9336

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - RC returned from SMCLI if overallRC = 0.
          rs        - RS returned from SMCLI if overallRC = 0.
          errno     - Errno returned from SMCLI if overallRC = 0.
          response  - Output of the SMCLI command.
    """
    rh.printSysLog("Enter vmUtils.installFS, userid: " + rh.userid +
        ", vaddr: " + str(vaddr) + ", mode: " + mode + ", file system: " +
        fileSystem + ", disk type: " + diskType)

    results = {
        'overallRC': 0,
        'rc': 0,
        'rs': 0,
        'errno': 0,
    }

    out = ''
    diskAccessed = False

    # Get access to the disk.
    cmd = ["sudo",
           "/opt/zthin/bin/linkdiskandbringonline",
           rh.userid,
           vaddr,
           mode]
    strCmd = ' '.join(cmd)
    rh.printSysLog("Invoking: " + strCmd)
    try:
        out = subprocess.check_output(cmd, close_fds=True)
        if isinstance(out, bytes):
            out = bytes.decode(out)
        diskAccessed = True
    except CalledProcessError as e:
        rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
            e.returncode, e.output))
        results = msgs.msg['0415'][0]
        results['rs'] = e.returncode
        rh.updateResults(results)
    except Exception as e:
        # All other exceptions.
        results = msgs.msg['0421'][0]
        rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
            type(e).__name__, str(e)))

    if results['overallRC'] == 0:
        """
        sample output:
        linkdiskandbringonline maint start time: 2017-03-03-16:20:48.011
        Success: Userid maint vdev 193 linked at ad35 device name dasdh
        linkdiskandbringonline exit time: 2017-03-03-16:20:52.150
        """
        # Extract the Linux device name (word 10 of the Success line).
        match = re.search('Success:(.+?)\n', out)
        if match:
            parts = match.group(1).split()
            if len(parts) > 9:
                device = "/dev/" + parts[9]
            else:
                strCmd = ' '.join(cmd)
                rh.printLn("ES", msgs.msg['0416'][1] % (modId,
                    'Success:', 10, strCmd, out))
                results = msgs.msg['0416'][0]
                rh.updateResults(results)
        else:
            strCmd = ' '.join(cmd)
            rh.printLn("ES", msgs.msg['0417'][1] % (modId,
                'Success:', strCmd, out))
            results = msgs.msg['0417'][0]
            rh.updateResults(results)

    if results['overallRC'] == 0 and diskType == "3390":
        # dasdfmt the disk
        cmd = ["sudo",
               "/sbin/dasdfmt",
               "-y",
               "-b", "4096",
               "-d", "cdl",
               "-f", device]
        strCmd = ' '.join(cmd)
        rh.printSysLog("Invoking: " + strCmd)
        try:
            out = subprocess.check_output(cmd, close_fds=True)
            if isinstance(out, bytes):
                out = bytes.decode(out)
        except CalledProcessError as e:
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
            rh.updateResults(results)
        except Exception as e:
            # All other exceptions.
            strCmd = " ".join(cmd)
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)

    if results['overallRC'] == 0 and diskType == "3390":
        # Settle the devices so we can do the partition.
        strCmd = ("which udevadm &> /dev/null && " +
            "udevadm settle || udevsettle")

        rh.printSysLog("Invoking: " + strCmd)
        try:
            subprocess.check_output(
                strCmd,
                stderr=subprocess.STDOUT,
                close_fds=True,
                shell=True)
            if isinstance(out, bytes):
                out = bytes.decode(out)
        except CalledProcessError as e:
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
            rh.updateResults(results)
        except Exception as e:
            # All other exceptions.
            # NOTE(review): 'cmd' here is still the dasdfmt list, so this
            # rebuilds the wrong command string for the message — confirm.
            strCmd = " ".join(cmd)
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)

    if results['overallRC'] == 0 and diskType == "3390":
        # Prepare the partition with fdasd
        cmd = ["sudo", "/sbin/fdasd", "-a", device]
        strCmd = ' '.join(cmd)
        rh.printSysLog("Invoking: " + strCmd)
        try:
            out = subprocess.check_output(cmd,
                stderr=subprocess.STDOUT, close_fds=True)
            if isinstance(out, bytes):
                out = bytes.decode(out)
        except CalledProcessError as e:
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
            rh.updateResults(results)
        except Exception as e:
            # All other exceptions.
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)

    if results['overallRC'] == 0 and diskType == "9336":
        # Delete the existing partition in case the disk already
        # has a partition in it.
        cmd = "sudo /sbin/fdisk " + device + " << EOF\nd\nw\nEOF"
        rh.printSysLog("Invoking: /sbin/fdsik " + device +
                       " << EOF\\nd\\nw\\nEOF ")

        try:
            out = subprocess.check_output(cmd,
                stderr=subprocess.STDOUT,
                close_fds=True,
                shell=True)
            if isinstance(out, bytes):
                out = bytes.decode(out)
        except CalledProcessError as e:
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, cmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
            rh.updateResults(results)
        except Exception as e:
            # All other exceptions.
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, cmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)

    if results['overallRC'] == 0 and diskType == "9336":
        # Prepare the partition with fdisk
        cmd = "sudo /sbin/fdisk " + device + " << EOF\nn\np\n1\n\n\nw\nEOF"
        rh.printSysLog("Invoking: sudo /sbin/fdisk " + device +
                       " << EOF\\nn\\np\\n1\\n\\n\\nw\\nEOF")
        try:
            out = subprocess.check_output(cmd,
                stderr=subprocess.STDOUT,
                close_fds=True,
                shell=True)
            if isinstance(out, bytes):
                out = bytes.decode(out)
        except CalledProcessError as e:
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, cmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
            rh.updateResults(results)
        except Exception as e:
            # All other exceptions.
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, cmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)

    if results['overallRC'] == 0:
        # Settle the devices so we can do the partition.
        strCmd = ("which udevadm &> /dev/null && " +
            "udevadm settle || udevsettle")

        rh.printSysLog("Invoking: " + strCmd)
        try:
            subprocess.check_output(
                strCmd,
                stderr=subprocess.STDOUT,
                close_fds=True,
                shell=True)
            if isinstance(out, bytes):
                out = bytes.decode(out)
        except CalledProcessError as e:
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
            rh.updateResults(results)
        except Exception as e:
            # All other exceptions.
            # NOTE(review): if the 9336 path ran, 'cmd' is a shell string,
            # so joining it here interleaves spaces between its characters
            # — confirm this message path.
            strCmd = " ".join(cmd)
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)

    if results['overallRC'] == 0:
        # Install the file system into the disk.
        device = device + "1"       # Point to first partition
        if fileSystem != 'swap':
            if fileSystem == 'xfs':
                cmd = ["sudo", "mkfs.xfs", "-f", device]
            else:
                cmd = ["sudo", "mkfs", "-F", "-t", fileSystem, device]
            strCmd = ' '.join(cmd)
            rh.printSysLog("Invoking: " + strCmd)
            try:
                out = subprocess.check_output(cmd,
                    stderr=subprocess.STDOUT, close_fds=True)
                if isinstance(out, bytes):
                    out = bytes.decode(out)
                rh.printLn("N", "File system: " + fileSystem +
                    " is installed.")
            except CalledProcessError as e:
                rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                    e.returncode, e.output))
                results = msgs.msg['0415'][0]
                results['rs'] = e.returncode
                rh.updateResults(results)
            except Exception as e:
                # All other exceptions.
                rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                    type(e).__name__, str(e)))
                results = msgs.msg['0421'][0]
                rh.updateResults(results)
        else:
            rh.printLn("N", "File system type is swap. No need to install " +
                "a filesystem.")

    if diskAccessed:
        # Give up the disk.
        cmd = ["sudo", "/opt/zthin/bin/offlinediskanddetach",
               rh.userid,
               vaddr]
        strCmd = ' '.join(cmd)
        rh.printSysLog("Invoking: " + strCmd)
        try:
            out = subprocess.check_output(cmd, close_fds=True)
            if isinstance(out, bytes):
                out = bytes.decode(out)
        except CalledProcessError as e:
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
            rh.updateResults(results)
        except Exception as e:
            # All other exceptions.
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)
    rh.printSysLog("Exit vmUtils.installFS, rc: " + str(results['rc']))
    return results
Input:
Request Handle:
userid - Userid that owns the disk
Virtual address as known to the owning system.
Access mode to use to get the disk.
Disk Type - 3390 or 9336
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from SMCLI if overallRC = 0.
rs - RS returned from SMCLI if overallRC = 0.
errno - Errno returned from SMCLI if overallRC = 0.
response - Output of the SMCLI command. | entailment |
def invokeSMCLI(rh, api, parms, hideInLog=[]):
    """
    Invoke SMCLI and parse the results.

    Input:
       Request Handle
       API name,
       SMCLI parms as an array
       (Optional) List of parms (by index) to hide in
          sysLog by replacing the parm with "<hidden>".

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - RC returned from SMCLI if overallRC = 0.
          rs        - RS returned from SMCLI if overallRC = 0.
          errno     - Errno returned from SMCLI if overallRC = 0.
          response  - String output of the SMCLI command.

    Note:
       - If the first three words of the header returned from smcli
         do not do not contain words that represent valid integer
         values or contain too few words then one or more error
         messages are generated. THIS SHOULD NEVER OCCUR !!!!
    """
    if len(hideInLog) == 0:
        rh.printSysLog("Enter vmUtils.invokeSMCLI, userid: " +
                       rh.userid + ", function: " + api +
                       ", parms: " + str(parms))
    else:
        # Bug fix: copy parms before masking.  The previous code aliased
        # the caller's list ('logParms = parms'), so replacing entries with
        # '<hidden>' also clobbered the real parms passed to smcli below.
        logParms = list(parms)
        for i in hideInLog:
            logParms[i] = '<hidden>'
        rh.printSysLog("Enter vmUtils.invokeSMCLI, userid: " +
                       rh.userid + ", function: " + api +
                       ", parms: " + str(logParms))

    goodHeader = False

    results = {
        'overallRC': 0,
        'rc': 0,
        'rs': 0,
        'errno': 0,
        'response': [],
        'strError': '',
    }

    cmd = []
    cmd.append('sudo')
    cmd.append('/opt/zthin/bin/smcli')
    cmd.append(api)
    cmd.append('--addRCheader')

    try:
        smcliResp = subprocess.check_output(cmd + parms,
            close_fds=True)
        if isinstance(smcliResp, bytes):
            smcliResp = bytes.decode(smcliResp, errors='replace')
        # First line is the RC header; the rest is the real response.
        smcliResp = smcliResp.split('\n', 1)
        results['response'] = smcliResp[1]
        results['overallRC'] = 0
        results['rc'] = 0

    except CalledProcessError as e:
        strCmd = " ".join(cmd + parms)

        # Break up the RC header into its component parts.
        if e.output == '':
            smcliResp = ['']
        else:
            smcliResp = bytes.decode(e.output).split('\n', 1)

        # Split the header into its component pieces.
        rcHeader = smcliResp[0].split('(details)', 1)
        if len(rcHeader) == 0:
            rcHeader = ['', '']
        elif len(rcHeader) == 1:
            # No data after the details tag.  Add empty [1] value.
            rcHeader.append('')

        codes = rcHeader[0].split(' ')

        # Validate the rc, rs, and errno.
        if len(codes) < 3:
            # Unexpected number of codes.  Need at least 3.
            results = msgs.msg['0301'][0]
            results['response'] = msgs.msg['0301'][1] % (modId, api,
                strCmd, rcHeader[0], rcHeader[1])
        else:
            goodHeader = True
            # Convert the first word (overall rc from SMAPI) to an int
            # and set the SMT overall rc based on this value.
            orcError = False
            try:
                results['overallRC'] = int(codes[0])
                if results['overallRC'] not in [8, 24, 25]:
                    orcError = True
            except ValueError:
                goodHeader = False
                orcError = True
            if orcError:
                results['overallRC'] = 25      # SMCLI Internal Error
                results = msgs.msg['0302'][0]
                results['response'] = msgs.msg['0302'][1] % (modId,
                    api, codes[0], strCmd, rcHeader[0], rcHeader[1])

            # Convert the second word to an int and save as rc.
            try:
                results['rc'] = int(codes[1])
            except ValueError:
                goodHeader = False
                results = msgs.msg['0303'][0]
                results['response'] = msgs.msg['0303'][1] % (modId,
                    api, codes[1], strCmd, rcHeader[0], rcHeader[1])

            # Convert the second word to an int and save it as either
            # the rs or errno.
            try:
                word3 = int(codes[2])
                if results['overallRC'] == 8:
                    results['rs'] = word3    # Must be an rs
                elif results['overallRC'] == 25:
                    results['errno'] = word3    # Must be the errno
                # We ignore word 3 for everyone else and default to 0.
            except ValueError:
                goodHeader = False
                results = msgs.msg['0304'][0]
                results['response'] = msgs.msg['0304'][1] % (modId,
                    api, codes[1], strCmd, rcHeader[0], rcHeader[1])

        results['strError'] = rcHeader[1].lstrip()

        if goodHeader:
            # Produce a message that provides the error info.
            results['response'] = msgs.msg['0300'][1] % (modId,
                api, results['overallRC'], results['rc'],
                results['rs'], results['errno'],
                strCmd, smcliResp[1])

    except Exception as e:
        # All other exceptions.
        strCmd = " ".join(cmd + parms)
        results = msgs.msg['0305'][0]
        results['response'] = msgs.msg['0305'][1] % (modId, strCmd,
            type(e).__name__, str(e))

    rh.printSysLog("Exit vmUtils.invokeSMCLI, rc: " +
        str(results['overallRC']))
    return results
Input:
Request Handle
API name,
SMCLI parms as an array
(Optional) List of parms (by index) to hide in
sysLog by replacing the parm with "<hidden>".
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from SMCLI if overallRC = 0.
rs - RS returned from SMCLI if overallRC = 0.
errno - Errno returned from SMCLI if overallRC = 0.
response - String output of the SMCLI command.
Note:
- If the first three words of the header returned from smcli
do not do not contain words that represent valid integer
values or contain too few words then one or more error
messages are generated. THIS SHOULD NEVER OCCUR !!!! | entailment |
def isLoggedOn(rh, userid):
    """
    Determine whether a virtual machine is logged on.

    Input:
       Request Handle
       userid being queried

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - 0: if we got status.  Otherwise, it is the
                      error return code from the commands issued.
          rs        - Based on rc value.  For rc==0, rs is:
                      0: if we determined it is logged on.
                      1: if we determined it is logged off.
    """
    rh.printSysLog("Enter vmUtils.isLoggedOn, userid: " + userid)

    results = {
        'overallRC': 0,
        'rc': 0,
        'rs': 0,
    }

    cmd = ["sudo", "/sbin/vmcp", "query", "user", userid]
    strCmd = ' '.join(cmd)
    rh.printSysLog("Invoking: " + strCmd)
    try:
        subprocess.check_output(
            cmd,
            close_fds=True,
            stderr=subprocess.STDOUT)
    except CalledProcessError as e:
        # HCPxxx045E: userid not logged on; HCPxxx361E: userid not defined.
        # Use a raw string so Python does not interpret the \w escapes
        # (a non-raw '\w' is a DeprecationWarning and a future SyntaxError),
        # and encode it because e.output is bytes.
        search_pattern = r'(^HCP\w\w\w045E|^HCP\w\w\w361E)'.encode()
        match = re.search(search_pattern, e.output)
        if match:
            # Not logged on
            results['rs'] = 1
        else:
            # Abnormal failure
            rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                e.returncode, e.output))
            results = msgs.msg['0415'][0]
            results['rs'] = e.returncode
    except Exception as e:
        # All other exceptions.
        results = msgs.msg['0421'][0]
        rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
            type(e).__name__, str(e)))

    rh.printSysLog("Exit vmUtils.isLoggedOn, overallRC: " +
        str(results['overallRC']) + " rc: " + str(results['rc']) +
        " rs: " + str(results['rs']))
    return results
Input:
Request Handle:
userid being queried
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - 0: if we got status. Otherwise, it is the
error return code from the commands issued.
rs - Based on rc value. For rc==0, rs is:
0: if we determined it is logged on.
1: if we determined it is logged off. | entailment |
def _purgeReaderFile(rh, spoolId):
    """Purge a spool file from the current user's reader (best effort).

    Used to clean up the punched file after a failed class change or
    transfer.  Any failure is logged only; it does not change the
    request's return/reason codes.

    Input:
       Request Handle
       spoolId - spool file id (string) to purge
    """
    cmd = ["sudo", "vmcp", "purge", "rdr", spoolId]
    strCmd = " ".join(cmd)
    rh.printSysLog("Invoking: " + strCmd)
    try:
        subprocess.check_output(cmd,
                                close_fds=True,
                                stderr=subprocess.STDOUT)
    except CalledProcessError as e:
        msg = msgs.msg['0403'][1] % (modId, spoolId, e.output)
        rh.printLn("ES", msg)
    except Exception as e:
        # All other exceptions related to purge.
        rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
            type(e).__name__, str(e)))


def punch2reader(rh, userid, fileLoc, spoolClass):
    """
    Punch a file to a virtual reader of the specified virtual machine.

    Input:
       Request Handle - for general use and to hold the results
       userid         - userid of the virtual machine
       fileLoc        - File to send
       spoolClass     - Spool class

    Output:
       Request Handle updated with the results.
       Return code - 0: ok, non-zero: error
    """
    rh.printSysLog("Enter punch2reader.punchFile")
    results = {}

    # Default to the time-out return code; overwritten once vmur responds.
    results['rc'] = 9

    # Punch to the current user initially and then change the spool class.
    cmd = ["sudo", "/usr/sbin/vmur", "punch", "-r", fileLoc]
    strCmd = ' '.join(cmd)
    for secs in [1, 2, 3, 5, 10]:
        rh.printSysLog("Invoking: " + strCmd)
        try:
            results['response'] = subprocess.check_output(cmd,
                                close_fds=True,
                                stderr=subprocess.STDOUT)
            if isinstance(results['response'], bytes):
                results['response'] = bytes.decode(results['response'])
            results['rc'] = 0
            rh.updateResults(results)
            break
        except CalledProcessError as e:
            results['response'] = e.output
            # Check if we have a concurrent instance of vmur active.
            to_find = "A concurrent instance of vmur is already active"
            to_find = to_find.encode()
            if results['response'].find(to_find) == -1:
                # Failure in VMUR punch, update the rc
                results['rc'] = 7
                break
            else:
                # If concurrent vmur is active, try again after some time.
                rh.printSysLog("Punch in use. Retrying after " +
                               str(secs) + " seconds")
                time.sleep(secs)
        except Exception as e:
            # All other exceptions.
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]
            rh.updateResults(results)

    if results['rc'] == 7:
        # Failure while issuing vmur command (For eg: invalid file given)
        msg = msgs.msg['0401'][1] % (modId, fileLoc, userid,
                                     results['response'])
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0401'][0])

    elif results['rc'] == 9:
        # Failure due to vmur timeout
        msg = msgs.msg['0406'][1] % (modId, fileLoc)
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0406'][0])

    if rh.results['overallRC'] == 0:
        # On VMUR success change the class of the spool file.
        spoolId = re.findall(r'\d+', str(results['response']))
        cmd = ["sudo", "vmcp", "change", "rdr", str(spoolId[0]), "class",
               spoolClass]
        strCmd = " ".join(cmd)
        rh.printSysLog("Invoking: " + strCmd)
        try:
            results['response'] = subprocess.check_output(cmd,
                                close_fds=True,
                                stderr=subprocess.STDOUT)
            if isinstance(results['response'], bytes):
                results['response'] = bytes.decode(results['response'])
            rh.updateResults(results)
        except CalledProcessError as e:
            msg = msgs.msg['0404'][1] % (modId,
                                         spoolClass,
                                         e.output)
            rh.printLn("ES", msg)
            rh.updateResults(msgs.msg['0404'][0])
            # Class change failed: delete the punched file (best effort).
            _purgeReaderFile(rh, spoolId[0])
        except Exception as e:
            # All other exceptions related to change rdr.
            results = msgs.msg['0421'][0]
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            rh.updateResults(msgs.msg['0421'][0])

    if rh.results['overallRC'] == 0:
        # Transfer the file from current user to specified user.
        cmd = ["sudo", "vmcp", "transfer", "*", "rdr", str(spoolId[0]), "to",
               userid, "rdr"]
        strCmd = " ".join(cmd)
        rh.printSysLog("Invoking: " + strCmd)
        try:
            results['response'] = subprocess.check_output(cmd,
                                close_fds=True,
                                stderr=subprocess.STDOUT)
            if isinstance(results['response'], bytes):
                results['response'] = bytes.decode(results['response'])
            rh.updateResults(results)
        except CalledProcessError as e:
            msg = msgs.msg['0424'][1] % (modId,
                                         fileLoc,
                                         userid, e.output)
            rh.printLn("ES", msg)
            rh.updateResults(msgs.msg['0424'][0])
            # Transfer failed: delete the punched file (best effort).
            _purgeReaderFile(rh, spoolId[0])
        except Exception as e:
            # All other exceptions related to transfer.
            results = msgs.msg['0421'][0]
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            rh.updateResults(msgs.msg['0421'][0])

    rh.printSysLog("Exit vmUtils.punch2reader, rc: " +
                   str(rh.results['overallRC']))
    return rh.results['overallRC']
Input:
Request Handle - for general use and to hold the results
userid - userid of the virtual machine
fileLoc - File to send
spoolClass - Spool class
Output:
Request Handle updated with the results.
Return code - 0: ok, non-zero: error | entailment |
def waitForOSState(rh, userid, desiredState, maxQueries=90, sleepSecs=5):
    """
    Wait for the virtual OS to go into the indicated state.

    Input:
       Request Handle
       userid whose state is to be monitored
       Desired state, 'up' or 'down', case sensitive
       Maximum attempts to wait for desired state before giving up
       Sleep duration between waits

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - RC returned from execCmdThruIUCV if overallRC = 0.
          rs        - RS returned from execCmdThruIUCV if overallRC = 0.
          errno     - Errno returned from execCmdThruIUCV if overallRC = 0.
          response  - Updated with an error message if wait times out.
    """
    rh.printSysLog("Enter vmUtils.waitForOSState, userid: " + userid +
                           " state: " + desiredState +
                           " maxWait: " + str(maxQueries) +
                           " sleepSecs: " + str(sleepSecs))

    results = {}

    strCmd = "echo 'ping'"
    stateFnd = False

    for i in range(1, maxQueries + 1):
        # Ping the userid being monitored.  The previous code passed
        # rh.userid here, ignoring the userid parameter it was given.
        results = execCmdThruIUCV(rh, userid, strCmd)
        if results['overallRC'] == 0:
            # The OS answered: it is 'up'.
            if desiredState == 'up':
                stateFnd = True
                break
        else:
            # No answer: treat as 'down'.
            if desiredState == 'down':
                stateFnd = True
                break

        if i < maxQueries:
            time.sleep(sleepSecs)

    if stateFnd is True:
        results = {
            'overallRC': 0,
            'rc': 0,
            'rs': 0,
        }
    else:
        maxWait = maxQueries * sleepSecs
        rh.printLn("ES", msgs.msg['0413'][1] % (modId, userid,
            desiredState, maxWait))
        results = msgs.msg['0413'][0]

    rh.printSysLog("Exit vmUtils.waitForOSState, rc: " +
        str(results['overallRC']))
    return results
Input:
Request Handle
userid whose state is to be monitored
Desired state, 'up' or 'down', case sensitive
Maximum attempts to wait for desired state before giving up
Sleep duration between waits
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from execCmdThruIUCV if overallRC = 0.
rs - RS returned from execCmdThruIUCV if overallRC = 0.
errno - Errno returned from execCmdThruIUCV if overallRC = 0.
response - Updated with an error message if wait times out.
Note: | entailment |
def waitForVMState(rh, userid, desiredState, maxQueries=90, sleepSecs=5):
    """
    Wait for the virtual machine to go into the indicated state.

    Input:
       Request Handle
       userid whose state is to be monitored
       Desired state, 'on' or 'off', case sensitive
       Maximum attempts to wait for desired state before giving up
       Sleep duration between waits

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - RC returned from SMCLI if overallRC = 0.
          rs        - RS returned from SMCLI if overallRC = 0.
    """
    rh.printSysLog("Enter vmUtils.waitForVMState, userid: " + userid +
                           " state: " + desiredState +
                           " maxWait: " + str(maxQueries) +
                           " sleepSecs: " + str(sleepSecs))

    results = {}

    cmd = ["sudo", "/sbin/vmcp", "query", "user", userid]
    strCmd = " ".join(cmd)
    stateFnd = False

    for i in range(1, maxQueries + 1):
        rh.printSysLog("Invoking: " + strCmd)
        try:
            out = subprocess.check_output(
                cmd,
                close_fds=True,
                stderr=subprocess.STDOUT)
            if isinstance(out, bytes):
                out = bytes.decode(out)
            # Query succeeded: the machine is logged on.
            if desiredState == 'on':
                stateFnd = True
                break
        except CalledProcessError as e:
            # HCPxxx045E: not logged on; HCPxxx361E: not defined.
            # e.output is bytes, so the pattern must be bytes too: the old
            # str pattern raised TypeError here.  Also use a raw string so
            # Python does not interpret the \w escapes (matches the
            # equivalent code in isLoggedOn).
            match = re.search(r'(^HCP\w\w\w045E|^HCP\w\w\w361E)'.encode(),
                              e.output)
            if match:
                # Logged off
                if desiredState == 'off':
                    stateFnd = True
                    break
            else:
                # Abnormal failure
                out = e.output
                rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
                    e.returncode, out))
                results = msgs.msg['0415'][0]
                results['rs'] = e.returncode
                break
        except Exception as e:
            # All other exceptions.
            rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                type(e).__name__, str(e)))
            results = msgs.msg['0421'][0]

        if i < maxQueries:
            # Sleep a bit before looping.
            time.sleep(sleepSecs)

    if stateFnd is True:
        results = {
            'overallRC': 0,
            'rc': 0,
            'rs': 0,
        }
    else:
        maxWait = maxQueries * sleepSecs
        rh.printLn("ES", msgs.msg['0414'][1] % (modId, userid,
            desiredState, maxWait))
        results = msgs.msg['0414'][0]

    rh.printSysLog("Exit vmUtils.waitForVMState, rc: " +
        str(results['overallRC']))
    return results
Input:
Request Handle
userid whose state is to be monitored
Desired state, 'on' or 'off', case sensitive
Maximum attempts to wait for desired state before giving up
Sleep duration between waits
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from SMCLI if overallRC = 0.
rs - RS returned from SMCLI if overallRC = 0.
Note: | entailment |
def match(self, context, line):
    """Match comment lines: a leading hash ("#") that is not a "#{" text marker."""
    content = line.stripped
    if not content.startswith('#'):
        return False
    return not content.startswith('#{')
def stream(input, encoding=None, errors='strict'):
    """Safely iterate a template generator, skipping ``None`` and empty chunks.

    Optionally encodes the chunks iteratively, which makes the result
    directly usable as a WSGI body.  Used internally by ``cinje.flatten``.
    """
    filtered = (chunk for chunk in input if chunk)
    if not encoding:
        return filtered
    # Encode lazily so large templates are never materialized in memory.
    return iterencode(filtered, encoding, errors=errors)
Used internally by ``cinje.flatten``, this allows for easy use of a template generator as a WSGI body. | entailment |
def flatten(input, file=None, encoding=None, errors='strict'):
    """Collapse a cinje chunk stream into a single string, or write it to a file.

    With no ``file`` argument the chunks are joined and returned: a binary
    string when an ``encoding`` is given, the native unicode representation
    otherwise.  With a ``file`` the chunks are written one at a time through
    repeated ``file.write()`` calls and the amount of data written
    (characters, or bytes when encoding) is returned.  ``errors`` is passed
    through to the codec when encoding.

    The streaming IO containers in the :mod:`io` module (and the classes in
    :mod:`tempfile`) pair well with the file-writing mode.
    """
    chunks = (piece for piece in input if piece)  # Drop None and empty chunks.
    if encoding:
        chunks = iterencode(chunks, encoding, errors=errors)

    if file is None:
        # No target file: join in memory and hand back a single string.
        return b''.join(chunks) if encoding else ''.join(chunks)

    written = 0
    for piece in chunks:
        file.write(piece)
        written += len(piece)

    return written
This has several modes of operation. If no `file` argument is given, output will be returned as a string.
The type of string will be determined by the presence of an `encoding`; if one is given the returned value is a
binary string, otherwise the native unicode representation. If a `file` is present, chunks will be written
iteratively through repeated calls to `file.write()`, and the amount of data (characters or bytes) written
returned. The type of string written will be determined by `encoding`, just as the return value is when not
writing to a file-like object. The `errors` argument is passed through when encoding.
We can highly recommend using the various streaming IO containers available in the
[`io`](https://docs.python.org/3/library/io.html) module, though
[`tempfile`](https://docs.python.org/3/library/tempfile.html) classes are also quite useful. | entailment |
def fragment(string, name="anonymous", **context):
    """Translate a template fragment into a callable function.

    **Note:** Use of this function is discouraged everywhere except tests, as
    no caching is implemented at this time.

    Only one function may be declared, either manually or automatically.  If
    automatic definition is chosen the resulting function takes no arguments.
    Additional keyword arguments are passed through as global variables.
    """
    if isinstance(string, bytes):
        string = string.decode('utf-8')

    if ": def" in string or ":def" in string:
        # The fragment declares its own function; find its name afterwards.
        code = string.encode('utf8').decode('cinje')
        name = None
    else:
        # No explicit declaration: wrap the fragment in a no-argument function.
        wrapped = ": def {name}\n\n{string}".format(name=name, string=string)
        code = wrapped.encode('utf8').decode('cinje')

    namespace = dict(context)
    exec(code, namespace)

    if name is None:  # We need to dig it out of the `__tmpl__` list.
        if __debug__ and not namespace.get('__tmpl__', None):
            raise RuntimeError("Template fragment does not contain a function: " + \
                    repr(namespace.get('__tmpl__', None)) + "\n\n" + code)

        # Super secret sauce: you _can_ define more than one function...
        return namespace[namespace['__tmpl__'][-1]]

    return namespace[name]
**Note:** Use of this function is discouraged everywhere except tests, as no caching is implemented at this time.
Only one function may be declared, either manually, or automatically. If automatic definition is chosen the
resulting function takes no arguments. Additional keyword arguments are passed through as global variables. | entailment |
def iterate(obj):
    """Loop over an iterable and track progress, including first and last state.

    On each iteration yield an Iteration named tuple with the first and last
    flags, current element index, total iterable length (if possible to
    acquire), and value, in that order.

            for iteration in iterate(something):
                    iteration.value  # Do something.

    You can unpack these safely:

            for first, last, index, total, value in iterate(something):
                    pass

    If you want to unpack the values you are iterating across, you can by
    wrapping the nested unpacking in parenthesis:

            for first, last, index, total, (foo, bar, baz) in iterate(something):
                    pass

    Even if the length of the iterable can't be reliably determined this
    function will still capture the "last" state of the final loop iteration.
    (Basically: this works with generators.)

    This process is about 10x slower than simple enumeration on CPython, so
    only use it where you actually need to track state.  Use `enumerate()`
    elsewhere.
    """
    # Bind the frequently used names to locals for faster lookup in the loop.
    # (The previous `global next, Iteration` self-assignment trick mutated the
    # *module* globals as a side effect of iterating the generator; local
    # bindings are both faster and side-effect free.)
    _next = next
    _Iteration = Iteration

    total = len(obj) if isinstance(obj, Sized) else None
    iterator = iter(obj)
    first = True
    last = False
    i = 0

    try:
        value = _next(iterator)
    except StopIteration:
        return

    while True:
        try:
            next_value = _next(iterator)
        except StopIteration:
            last = True

        yield _Iteration(first, last, i, total, value)
        if last:
            return

        value = next_value
        i += 1
        first = False
On each iteration yield an Iteration named tuple with the first and last flags, current element index, total
iterable length (if possible to acquire), and value, in that order.
for iteration in iterate(something):
iteration.value # Do something.
You can unpack these safely:
for first, last, index, total, value in iterate(something):
pass
If you want to unpack the values you are iterating across, you can by wrapping the nested unpacking in parenthesis:
for first, last, index, total, (foo, bar, baz) in iterate(something):
pass
Even if the length of the iterable can't be reliably determined this function will still capture the "last" state
of the final loop iteration. (Basically: this works with generators.)
This process is about 10x slower than simple enumeration on CPython 3.4, so only use it where you actually need to
track state. Use `enumerate()` elsewhere. | entailment |
def chunk(line, mapping={None: 'text', '${': 'escape', '#{': 'bless', '&{': 'args', '%{': 'format', '@{': 'json'}):
    """Chunkify and "tag" a block of text into plain text and code sections.

    The first delimeter is blank to represent text sections, and keep the indexes aligned with the tags.

    Values are yielded in the form (tag, text).
    """
    skipping = 0  # How many closing braces will we need to skip?
    start = None  # Starting position of current match.
    last = 0  # End of the previous match; None while inside a marker.
    i = 0
    text = line.line

    while i < len(text):
        if start is not None:  # Inside a "x{...}" marker: look for its close.
            if text[i] == '{':
                skipping += 1
            elif text[i] == '}':
                if skipping:
                    skipping -= 1
                else:
                    yield line.clone(kind=mapping[text[start - 2:start]], line=text[start:i])
                    start = None
                    last = i = i + 1
                    continue
        elif text[i:i + 2] in mapping:  # Opening marker: flush pending text.
            if last is not None and last != i:
                yield line.clone(kind=mapping[None], line=text[last:i])
                last = None
            start = i = i + 2
            continue
        i += 1

    if start is not None:
        # Unterminated marker.  Previously, if text preceded the marker,
        # `last` was None here and the comparison below raised TypeError.
        # Emit the remainder (including the marker) as plain text instead.
        yield line.clone(kind=mapping[None], line=text[start - 2:])
    elif last < len(text):
        yield line.clone(kind=mapping[None], line=text[last:])
The first delimeter is blank to represent text sections, and keep the indexes aligned with the tags.
Values are yielded in the form (tag, text). | entailment |
def prepare(self):
    """Reset the translation state and build the ordered list of transformers."""
    self.scope = 0
    self.mapping = deque([0])
    ordered = sorted(self.handlers, key=lambda handler_class: handler_class.priority)
    self._handler = [handler_class() for handler_class in ordered]
def stream(self):
    """The workhorse of cinje: transform input lines and emit output lines.

    After constructing an instance with a set of input lines iterate this
    property to generate the template.
    """
    root = 'init' not in self.flag
    if root:
        self.prepare()

    # Track which lines were generated in response to which lines of source
    # code.  One entry is recorded per emitted line, each integer naming the
    # source line that triggered it; lines returned without a number inherit
    # the most recent entry.
    #
    # Fun fact: this list is backwards; we optimize by using a deque and
    # appending to the left edge, which updates the head of a linked list.
    # The whole thing needs to be reversed to make sense.
    mapping = self.mapping

    for line in self.input:
        handler = self.classify(line)

        if line.kind == 'code' and line.stripped == 'end':  # Exit the current child scope.
            return

        assert handler, "Unable to identify handler for line; this should be impossible!"

        self.input.push(line)  # Put it back so it can be consumed by the handler.

        # The handler re-indents the code to match, if missing explicit scope.
        for emitted in handler(self):
            if root:
                mapping.appendleft(emitted.number or mapping[0])  # Track source line number.

            if emitted.scope is None:
                emitted = emitted.clone(scope=self.scope)

            yield emitted
After constructing an instance with a set of input lines iterate this property to generate the template. | entailment |
def classify(self, line):
    """Return the first registered handler whose ``match`` accepts `line`, or None."""
    for candidate in self._handler:
        if candidate.match(self, line):
            return candidate
    return None
def red(numbers):
    """Delta-encode the line numbers, then compress and base64 them to reduce entropy."""
    previous = 0
    encoded = []
    for current in numbers:
        encoded.append(chr(current - previous))  # One latin-1 char per delta.
        previous = current
    raw = ''.join(encoded).encode('latin1')
    return b64encode(compress(raw)).decode('latin1')
def match(self, context, line):
    """Match code lines whose first word is one of this transformer's keywords."""
    if line.kind != 'code':
        return False
    return line.partitioned[0] in self._both
def _get_field_by_name(model_class, field_name):
"""
Compatible with old API of model_class._meta.get_field_by_name(field_name)
"""
field = model_class._meta.get_field(field_name)
return (
field, # field
field.model, # model
not field.auto_created or field.concrete, # direct
field.many_to_many # m2m
) | Compatible with old API of model_class._meta.get_field_by_name(field_name) | entailment |
def _get_remote_field(field):
"""
Compatible with Django 1.8~1.10 ('related' was renamed to 'remote_field')
"""
if hasattr(field, 'remote_field'):
return field.remote_field
elif hasattr(field, 'related'):
return field.related
else:
return None | Compatible with Django 1.8~1.10 ('related' was renamed to 'remote_field') | entailment |
def _get_all_field_names(model):
"""
100% compatible version of the old API of model._meta.get_all_field_names()
From: https://docs.djangoproject.com/en/1.9/ref/models/meta/#migrating-from-the-old-api
"""
return list(set(chain.from_iterable(
(field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
for field in model._meta.get_fields()
# For complete backwards compatibility, you may want to exclude
# GenericForeignKey from the results.
if not (field.many_to_one and field.related_model is None)
))) | 100% compatible version of the old API of model._meta.get_all_field_names()
From: https://docs.djangoproject.com/en/1.9/ref/models/meta/#migrating-from-the-old-api | entailment |
def get_relation_fields_from_model(model_class):
    """ Get related fields (m2m, FK, and reverse FK) """
    relation_fields = []
    all_field_names = _get_all_field_names(model_class)
    for name in all_field_names:
        field, model, direct, m2m = _get_field_by_name(model_class, name)
        # get_all_field_names returns both "foo" and "foo_id" for the same
        # foreign key; ignore the duplicate "_id" variant.
        if name[-3:] == '_id' and name[:-3] in all_field_names:
            continue
        if m2m or not direct or _get_remote_field(field):
            field.field_name_override = name
            relation_fields.append(field)
    return relation_fields
def get_direct_fields_from_model(model_class):
    """ Direct, not m2m, not FK """
    direct_fields = []
    for name in _get_all_field_names(model_class):
        field, model, direct, m2m = _get_field_by_name(model_class, name)
        if direct and not m2m and not _get_remote_field(field):
            direct_fields.append(field)
    return direct_fields
def get_model_from_path_string(root_model, path):
    """ Return a model class for a related model

    root_model is the class of the initial model

    path is like foo__bar where bar is related to foo
    """
    for section in path.split('__'):
        if not section:
            continue
        try:
            field, model, direct, m2m = _get_field_by_name(root_model, section)
        except FieldDoesNotExist:
            return root_model
        if direct:
            remote = _get_remote_field(field)
            if remote:
                try:
                    root_model = remote.parent_model()
                except AttributeError:
                    root_model = remote.model
        else:
            if hasattr(field, 'related_model'):
                root_model = field.related_model
            else:
                root_model = field.model
    return root_model
root_model is the class of the initial model
path is like foo__bar where bar is related to foo | entailment |
def get_fields(model_class, field_name='', path=''):
    """ Get fields and meta data from a model

    :param model_class: A django model class
    :param field_name: The field name to get sub fields from
    :param path: path of our field in format
        field_name__second_field_name__ect__
    :returns: Returns fields and meta data about such fields
        fields: Django model fields
        properties: Any properties the model has
        path: Our new path
    :rtype: dict
    """
    fields = get_direct_fields_from_model(model_class)
    app_label = model_class._meta.app_label

    if field_name != '':
        field, model, direct, m2m = _get_field_by_name(model_class, field_name)
        path = '{0}{1}__'.format(path, field_name)
        if direct:
            # Direct field: follow the remote side of the relation.
            try:
                new_model = _get_remote_field(field).parent_model
            except AttributeError:
                new_model = _get_remote_field(field).model
        else:
            # Indirect related field.
            new_model = field.related_model
        fields = get_direct_fields_from_model(new_model)
        app_label = new_model._meta.app_label

    return {
        'fields': fields,
        'path': path,
        'app_label': app_label,
    }
:param model_class: A django model class
:param field_name: The field name to get sub fields from
:param path: path of our field in format
field_name__second_field_name__ect__
:returns: Returns fields and meta data about such fields
fields: Django model fields
properties: Any properties the model has
path: Our new path
:rtype: dict | entailment |
def get_related_fields(model_class, field_name, path=""):
    """ Get fields for a given model """
    if field_name:
        field, model, direct, m2m = _get_field_by_name(model_class, field_name)
        if direct:
            # Direct field: follow the relation to the remote model.
            try:
                new_model = _get_remote_field(field).parent_model()
            except AttributeError:
                new_model = _get_remote_field(field).model
        else:
            # Indirect related field.
            if hasattr(field, 'related_model'):  # Django>=1.8
                new_model = field.related_model
            else:
                new_model = field.model()
        path = '{0}{1}__'.format(path, field_name)
    else:
        new_model = model_class

    new_fields = get_relation_fields_from_model(new_model)
    model_ct = ContentType.objects.get_for_model(new_model)
    return (new_fields, model_ct, path)
def flush_template(context, declaration=None, reconstruct=True):
    """Emit the code needed to flush the buffer.

    Will only emit the yield and clear if the buffer is known to be dirty.
    """
    if declaration is None:
        declaration = Line(0, '')

    if 'text' in context.flag and 'dirty' in context.flag:
        yield declaration.clone(line='yield "".join(_buffer)')

        context.flag.remove('text')  # This will force a new buffer to be constructed.
        context.flag.remove('dirty')

        if reconstruct:
            for reconstructed in ensure_buffer(context):
                yield reconstructed

    if declaration.stripped == 'yield':
        yield declaration
Will only emit the yield and clear if the buffer is known to be dirty. | entailment |
def _optimize(self, context, argspec):
"""Inject speedup shortcut bindings into the argument specification for a function.
This assigns these labels to the local scope, avoiding a cascade through to globals(), saving time.
This also has some unfortunate side-effects for using these sentinels in argument default values!
"""
argspec = argspec.strip()
optimization = ", ".join(i + "=" + i for i in self.OPTIMIZE)
split = None
prefix = ''
suffix = ''
if argspec:
matches = list(self.STARARGS.finditer(argspec))
if matches:
split = matches[-1].span()[1] # Inject after, a la "*args>_<", as we're positional-only arguments.
if split != len(argspec):
prefix = ', ' if argspec[split] == ',' else ''
suffix = '' if argspec[split] == ',' else ', '
else: # Ok, we can do this a different way…
matches = list(self.STARSTARARGS.finditer(argspec))
prefix = ', *, '
suffix = ', '
if matches:
split = matches[-1].span()[0] # Inject before, a la ">_<**kwargs". We're positional-only arguments.
if split == 0:
prefix = '*, '
else:
suffix = ''
else:
split = len(argspec)
suffix = ''
else:
prefix = '*, '
if split is None:
return prefix + optimization + suffix
return argspec[:split] + prefix + optimization + suffix + argspec[split:] | Inject speedup shortcut bindings into the argument specification for a function.
This assigns these labels to the local scope, avoiding a cascade through to globals(), saving time.
This also has some unfortunate side-effects for using these sentinels in argument default values! | entailment |
def _can_change_or_view(model, user):
""" Return True iff `user` has either change or view permission
for `model`.
"""
model_name = model._meta.model_name
app_label = model._meta.app_label
can_change = user.has_perm(app_label + '.change_' + model_name)
can_view = user.has_perm(app_label + '.view_' + model_name)
return can_change or can_view | Return True iff `user` has either change or view permission
for `model`. | entailment |
def report_to_list(queryset, display_fields, user):
    """ Create list from a report with all data filtering.

    queryset: initial queryset to generate results
    display_fields: list of field references or DisplayField models
    user: requesting user
    Returns list, message in case of issues.
    """
    model_class = queryset.model
    message = ""

    if not _can_change_or_view(model_class, user):
        return [], 'Permission Denied'

    # Convert list of strings to DisplayField objects.
    converted = []
    for reference in display_fields:
        parts = reference.split('__')
        field = parts[-1]
        path = '__'.join(parts[:-1])
        if path:
            path += '__'  # Legacy format to append a __ here.
        converted.append(DisplayField(path, field))
    display_fields = converted

    # Check permission on each related model and collect the value paths.
    display_field_paths = []
    for display_field in display_fields:
        model = get_model_from_path_string(model_class, display_field.path)
        if not model or _can_change_or_view(model, user):
            display_field_paths.append(display_field.path + display_field.field)
        else:
            message += 'Error: Permission denied on access to {0}.'.format(
                display_field.name)

    values_list = queryset.values_list(*display_field_paths)
    return [list(row) for row in values_list], message
queryset: initial queryset to generate results
display_fields: list of field references or DisplayField models
user: requesting user
Returns list, message in case of issues. | entailment |
def list_to_workbook(data, title='report', header=None, widths=None):
    """ Create an openpyxl Workbook from a 2D list, or from a dict
    mapping sheet names to 2D lists. """
    wb = Workbook()
    # Sheet titles may only use word characters and are capped at 30 chars.
    title = re.sub(r'\W+', '', title)[:30]
    if isinstance(data, dict):
        for index, (sheet_name, sheet_data) in enumerate(data.items()):
            if index > 0:
                wb.create_sheet()
            build_sheet(sheet_data, wb.worksheets[index],
                        sheet_name=sheet_name, header=header)
    else:
        build_sheet(data, wb.worksheets[0], header=header, widths=widths)
    return wb
def build_xlsx_response(wb, title="report"):
    """ Take a workbook and return a xlsx file response """
    filename = generate_filename(title, '.xlsx')
    # Serialize the workbook into an in-memory buffer.
    buffer = BytesIO()
    buffer.write(save_virtual_workbook(wb))
    response = HttpResponse(
        buffer.getvalue(),
        content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
    response['Content-Disposition'] = 'attachment; filename=%s' % filename
    response['Content-Length'] = buffer.tell()
    return response
def list_to_xlsx_response(data, title='report', header=None,
                          widths=None):
    """ Make 2D list into a xlsx response for download
    data can be a 2d array or a dict of 2d arrays
    like {'sheet_1': [['A1', 'B1']]}
    """
    # Build the workbook, then hand it off as a downloadable response.
    return build_xlsx_response(
        list_to_workbook(data, title, header, widths), title=title)
def list_to_csv_response(data, title='report', header=None, widths=None):
    """ Make 2D list into a csv response for download data.

    data: 2D iterable of row values
    header: optional iterable prepended as the first CSV row
    widths: unused in this function; kept for signature parity with the
        xlsx helpers
    """
    response = HttpResponse(content_type="text/csv; charset=UTF-8")
    cw = csv.writer(response)
    # NOTE(review): encoding each cell to bytes before handing it to
    # csv.writer is a Python 2 idiom; on Python 3 csv.writer expects text
    # and this would emit b'...' reprs -- confirm the targeted interpreter.
    for row in chain([header] if header else [], data):
        cw.writerow([force_text(s).encode(response.charset) for s in row])
    return response
def wrap(scope, lines, format=BARE_FORMAT):
    """Wrap a stream of lines in armour.

    A lone line gets the ``format.single`` prefix/suffix.  A multi-line run
    opens with ``format.multiple.prefix``, closes with
    ``format.multiple.suffix``, and uses the ``format.intra`` affixes for
    everything in between.  Continuation lines are indented by
    ``format.indent``.
    """
    for line in iterate(lines):
        if line.first and line.last:
            # Single-line run: both affixes come from format.single.
            prefix = format.single.prefix
            suffix = format.single.suffix
        else:
            prefix = (format.multiple if line.first else format.intra).prefix
            suffix = (format.multiple if line.last else format.intra).suffix
        indent = 0 if line.first else format.indent
        yield line.value.clone(
            line=prefix + line.value.stripped + suffix,
            scope=scope + indent)
def gather(input):
    """Collect contiguous lines of text, preserving line numbers.

    Yields a Line for each line of a run of consecutive 'text' lines pulled
    from `input`, dropping leading blank lines and holding back trailing
    blank lines.  The first non-text line encountered is pushed back onto
    `input` for the caller.

    NOTE(review): ``input.next()`` is the Python 2 iterator spelling --
    confirm this stream type deliberately exposes a ``next`` method (it
    also provides ``push``), rather than relying on the iterator protocol.
    """
    try:
        line = input.next()
    except StopIteration:
        return
    lead = True  # True until the first non-blank text line is emitted.
    buffer = []  # Blank lines held until we know they are interior, not trailing.
    # Gather contiguous (uninterrupted) lines of template text.
    while line.kind == 'text':
        # Strip trailing whitespace and any continuation backslash; a
        # continued line gets no newline so it fuses with the next one.
        value = line.line.rstrip().rstrip('\\') + ('' if line.continued else '\n')
        if lead and line.stripped:
            yield Line(line.number, value)
            lead = False
        elif not lead:
            if line.stripped:
                # Non-blank line: buffered blanks are interior, flush them first.
                for buf in buffer:
                    yield buf
                buffer = []
                yield Line(line.number, value)
            else:
                buffer.append(Line(line.number, value))
        try:
            line = input.next()
        except StopIteration:
            line = None
            break
    if line:
        # Return the first non-text line to the stream for the caller.
        input.push(line)
def process(self, context, lines):
    """Chop up individual lines into static and dynamic parts.

    Applies light optimizations, such as empty chunk removal, and calls out
    to other methods to process different chunk types.

    The processor protocol here requires the method to accept values by
    yielding resulting lines while accepting sent chunks. Deferral of
    multiple chunks is possible by yielding None. The processor will be sent
    None to be given a chance to yield a final line and perform any clean-up.
    """
    # ``handler`` is a (kind, coroutine) pair for the chunk kind currently
    # being accumulated, or None before the first chunk arrives.
    handler = None
    for line in lines:
        for chunk in chunk_(line):
            if 'strip' in context.flag:
                chunk.line = chunk.stripped
            if not chunk.line: continue  # Eliminate empty chunks, i.e. trailing text segments, ${}, etc.
            if not handler or handler[0] != chunk.kind:
                # Kind changed: finalize the previous handler so it can emit
                # any accumulated line before we switch processors.
                if handler:
                    try:
                        result = next(handler[1])
                    except StopIteration:
                        result = None
                    if result: yield result
                # Look up a kind-specific processor (process_<kind>), falling
                # back to the generic transformer.
                handler = getattr(self, 'process_' + chunk.kind, self.process_generic)(chunk.kind, context)
                handler = (chunk.kind, handler)
                try:
                    next(handler[1])  # We fast-forward to the first yield.
                except StopIteration:
                    return
            result = handler[1].send(chunk)  # Send the handler the next contiguous chunk.
            if result: yield result
            if __debug__:  # In development mode we skip the contiguous chunk compaction optimization.
                handler = (None, handler[1])
    # Clean up the final iteration.
    if handler:
        try:
            result = next(handler[1])
        except StopIteration:
            return
        if result: yield result
def process_text(self, kind, context):
    """Combine multiple lines of bare text and emit as a Python string literal.

    Coroutine protocol: chunks arrive via ``send``; ``None`` marks the end
    of the contiguous run, at which point the merged text is emitted once
    as a repr()'d string literal.
    """
    result = None
    while True:
        chunk = yield None  # Defer all output until the run completes.
        if chunk is None:
            if result:
                # Emit the accumulated text as a quoted Python literal.
                yield result.clone(line=repr(result.line))
            return
        if not result:
            result = chunk  # First chunk seeds the accumulator.
            continue
        result.line += chunk.line  # Later chunks are concatenated in order.
def process_generic(self, kind, context):
    """Transform otherwise unhandled kinds of chunks by calling an underscore prefixed function by that name.

    Coroutine protocol: each ``send`` of a chunk yields the transformed
    result of the *previous* chunk on the next iteration; ``None``
    terminates the run.
    """
    result = None
    while True:
        chunk = yield result
        if chunk is None:
            return
        # e.g. a 'foo' chunk becomes the runtime call ``_foo(<line>)``.
        result = chunk.clone(line='_' + kind + '(' + chunk.line + ')')
def process_format(self, kind, context):
    """Handle transforming format string + arguments into Python code.

    Coroutine protocol: chunks arrive via ``send``; ``None`` terminates.
    Each chunk holds a format-string expression followed by the argument
    expressions, separated by whitespace.
    """
    result = None
    while True:
        chunk = yield result
        if chunk is None:
            return
        # We need to split the expression defining the format string from the values to pass when formatting.
        # We want to allow any Python expression, so we'll need to piggyback on Python's own parser in order
        # to exploit the currently available syntax. Apologies, this is probably the scariest thing in here.
        split = -1
        line = chunk.line
        try:
            ast.parse(line)
        except SyntaxError as e:  # We expect this, and catch it. It'll have exploded after the first expr.
            # Split at the whitespace just before the parser's error offset:
            # everything before is the format expression, the rest are args.
            split = line.rfind(' ', 0, e.offset)
        # NOTE(review): if the whole line parses cleanly (no argument part),
        # split stays -1 and the slices below chop the last character --
        # confirm callers never send an argument-free format chunk.
        result = chunk.clone(line='_bless(' + line[:split].rstrip() + ').format(' + line[split:].lstrip() + ')')
def find_player(self, username: str = None):
    """Find the :class:`~.Player` with the given properties

    Returns the player whose attributes match the given properties, or
    ``None`` if no match is found.

    :param username: The username of the Player
    """
    # Fix: compare against None with ``is``/``is not`` (identity), not ``!=``.
    if username is None:
        return None
    # Linear scan; player lists are small, so this is fine.
    return next((player for player in self.players if player.name == username),
                None)
def find_team(self, color: str = None):
    """Find the :class:`~.Team` with the given properties

    Returns the team whose attributes match the given properties, or
    ``None`` if no match is found.

    :param color: The :class:`~.Team.Color` of the Team
    """
    # Fix: compare against None with ``is``/``is not`` (identity), not ``!=``.
    if color is None:
        return None
    # Only two teams exist; anything that is not blue is treated as orange.
    return self.blue_team if color is Team.Color.BLUE else self.orange_team
def _geocode(self, pq):
    """
    :arg PlaceQuery pq: PlaceQuery object to use for geocoding
    :returns: list of location Candidates
    """
    #: List of desired output fields
    #: See `ESRI docs <https://developers.arcgis.com/rest/geocode/api-reference/geocoding-geocode-addresses.htm>_` for details
    outFields = ('Loc_name',
                 # 'Shape',
                 'Score',
                 'Match_addr',  # based on address standards for the country
                 # 'Address', # returned by default
                 # 'Country' # 3-digit ISO 3166-1 code for a country. Example: Canada = "CAN"
                 # 'Admin',
                 # 'DepAdmin',
                 # 'SubAdmin',
                 # 'Locality',
                 # 'Postal',
                 # 'PostalExt',
                 'Addr_type',
                 # 'Type',
                 # 'Rank',
                 'AddNum',
                 'StPreDir',
                 'StPreType',
                 'StName',
                 'StType',
                 'StDir',
                 # 'Side',
                 # 'AddNumFrom',
                 # 'AddNumTo',
                 # 'AddBldg',
                 'City',
                 'Subregion',
                 'Region',
                 'Postal',
                 'Country',
                 # 'Ymax',
                 # 'Ymin',
                 # 'Xmin',
                 # 'Xmax',
                 # 'X',
                 # 'Y',
                 'DisplayX',
                 'DisplayY',
                 # 'LangCode',
                 # 'Status',
                 )
    outFields = ','.join(outFields)
    query = dict(f='json',  # default HTML. Other options are JSON and KMZ.
                 outFields=outFields,
                 # outSR=WKID, defaults to 4326
                 maxLocations=20,  # default 1; max is 20
                 )
    # Postal-code only searches work in the single-line but not multipart geocoder
    # Remember that with the default postprocessors, postcode-level results will be eliminated
    if pq.query == pq.address == '' and pq.postal != '':
        pq.query = pq.postal
    if pq.query == '':  # multipart
        query = dict(query,
                     Address=pq.address,  # commonly represents the house number and street name of a complete address
                     Neighborhood=pq.neighborhood,
                     City=pq.city,
                     Subregion=pq.subregion,
                     Region=pq.state,
                     Postal=pq.postal,
                     # PostalExt=
                     CountryCode=pq.country,  # full country name or ISO 3166-1 2- or 3-digit country code
                     )
    else:  # single-line
        magic_key = pq.key if hasattr(pq, 'key') else ''
        query = dict(query,
                     singleLine=pq.query,  # This can be a street address, place name, postal code, or POI.
                     sourceCountry=pq.country,  # full country name or ISO 3166-1 2- or 3-digit country code
                     )
        if magic_key:
            query['magicKey'] = magic_key  # This is a lookup key returned from the suggest endpoint.
    if pq.bounded and pq.viewbox is not None:
        query = dict(query, searchExtent=pq.viewbox.to_esri_wgs_json())
    if self._authenticated:
        # Re-use the cached OAuth token; request a fresh 2-hour token once
        # the old one has expired.
        if self._token is None or self._token_expiration < datetime.utcnow():
            expiration = timedelta(hours=2)
            self._token = self.get_token(expiration)
            self._token_expiration = datetime.utcnow() + expiration
        query['token'] = self._token
    if getattr(pq, 'for_storage', False):
        query['forStorage'] = 'true'
    endpoint = self._endpoint + '/findAddressCandidates'
    response_obj = self._get_json_obj(endpoint, query)
    returned_candidates = []  # this will be the list returned
    try:
        locations = response_obj['candidates']
        for location in locations:
            c = Candidate()
            attributes = location['attributes']
            c.match_addr = attributes['Match_addr']
            c.locator = attributes['Loc_name']
            c.locator_type = attributes['Addr_type']
            c.score = attributes['Score']
            c.x = attributes['DisplayX']  # represents the actual location of the address.
            c.y = attributes['DisplayY']
            c.wkid = response_obj['spatialReference']['wkid']
            c.geoservice = self.__class__.__name__
            # Optional address component fields.
            for in_key, out_key in [('City', 'match_city'), ('Subregion', 'match_subregion'),
                                    ('Region', 'match_region'), ('Postal', 'match_postal'),
                                    ('Country', 'match_country')]:
                setattr(c, out_key, attributes.get(in_key, ''))
            setattr(c, 'match_streetaddr', self._street_addr_from_response(attributes))
            returned_candidates.append(c)
    except KeyError:
        # NOTE(review): a response without 'candidates' (or a candidate
        # missing a required attribute) yields zero results silently --
        # confirm that swallowing KeyError here is intentional.
        pass
    return returned_candidates
def _street_addr_from_response(self, attributes):
"""Construct a street address (no city, region, etc.) from a geocoder response.
:param attributes: A dict of address attributes as returned by the Esri geocoder.
"""
# The exact ordering of the address component fields that should be
# used to reconstruct the full street address is not specified in the
# Esri documentation, but the examples imply that it is this.
ordered_fields = ['AddNum', 'StPreDir', 'StPreType', 'StName', 'StType', 'StDir']
result = []
for field in ordered_fields:
result.append(attributes.get(field, ''))
if any(result):
return ' '.join([s for s in result if s]) # Filter out empty strings.
else:
return '' | Construct a street address (no city, region, etc.) from a geocoder response.
:param attributes: A dict of address attributes as returned by the Esri geocoder. | entailment |
def get_token(self, expires=None):
    """Request an OAuth access token from the ArcGIS online service.

    :param expires: The time until the returned token expires.
        Must be an instance of :class:`datetime.timedelta`.
        If not specified, the token will expire in 2 hours.
    :returns: A token suitable for use with the Esri geocoding API
    :raises TypeError: if `expires` is given but is not a timedelta
    """
    endpoint = 'https://www.arcgis.com/sharing/rest/oauth2/token/'
    query = {'client_id': self._client_id,
             'client_secret': self._client_secret,
             'grant_type': 'client_credentials'}
    if expires is not None:
        if not isinstance(expires, timedelta):
            # TypeError (still an Exception subclass, so existing callers
            # are unaffected) is more precise than the bare Exception
            # raised previously.
            raise TypeError('If expires is provided it must be a timedelta instance')
        # The API expects the token lifetime in whole minutes.
        query['expiration'] = int(expires.total_seconds() / 60)
    response_obj = self._get_json_obj(endpoint, query, is_post=True)
    return response_obj['access_token']
def process(self, pq):
    """
    :arg PlaceQuery pq: PlaceQuery instance
    :returns: PlaceQuery instance with truncated address range / number
    """
    # Apply the same range truncation to the free-form query first,
    # then to the structured address field.
    for attr in ('query', 'address'):
        setattr(pq, attr, self.replace_range(getattr(pq, attr)))
    return pq
def process(self, pq):
    """
    :arg PlaceQuery pq: PlaceQuery instance
    :returns: PlaceQuery instance with :py:attr:`query`
              converted to individual elements
    """
    if pq.query != '':
        postcode = address = city = ''  # define the vars we'll use
        # global regex postcode search, pop off last result
        postcode_matches = self.re_UK_postcode.findall(pq.query)
        if len(postcode_matches) > 0:
            postcode = postcode_matches[-1]
        query_parts = [part.strip() for part in pq.query.split(',')]
        # BUGFIX: compare string contents with ``!=`` rather than identity
        # (``is not``) -- identity against '' only worked via CPython's
        # string interning and raises a SyntaxWarning on modern Pythons.
        if postcode != '' and re.search(postcode, query_parts[0]):
            # if postcode is in the first part of query_parts, there are probably no commas
            # get just the part before the postcode
            part_before_postcode = query_parts[0].split(postcode)[0].strip()
            if self.re_blank.search(part_before_postcode) is None:
                address = part_before_postcode
            else:
                address = query_parts[0]  # perhaps it isn't really a postcode (apt num, etc)
        else:
            address = query_parts[0]  # no postcode to worry about
        for part in query_parts[1:]:
            part = part.strip()
            if postcode != '' and re.search(postcode, part) is not None:
                part = part.replace(postcode, '').strip()  # if postcode is in part, remove it
            if self.re_unit_numbered.search(part) is not None:
                # test to see if part is secondary address, like "Ste 402"
                address = self._comma_join(address, part)
            elif self.re_unit_not_numbered.search(part) is not None:
                # ! might cause problems if 'Lower' or 'Upper' is in the city name
                # test to see if part is secondary address, like "Basement"
                address = self._comma_join(address, part)
            else:
                city = self._comma_join(city, part)  # it's probably a city (or "City, County")
        # set pq parts if they aren't already set (we don't want to overwrite explicit params)
        pq.postal = pq.postal or postcode
        pq.address = pq.address or address
        pq.city = pq.city or city
    return pq
def process(self, pq):
    """
    :arg PlaceQuery pq: PlaceQuery instance
    :returns: modified PlaceQuery, or ``False`` if country is not acceptable.
    """
    country = pq.country
    # Translate through the country map, but never clobber a value that is
    # already acceptable.
    if country not in self.acceptable_countries and country in self.country_map:
        country = self.country_map[country]
        pq.country = country
    # Reject a non-empty country that falls outside a non-empty whitelist.
    if (country != ''
            and self.acceptable_countries != []
            and country not in self.acceptable_countries):
        return False
    return pq
def process(self, pq):
    """
    :arg PlaceQuery pq: PlaceQuery instance
    :returns: One of the three following values:
        * unmodified PlaceQuery instance if pq.country is not empty
        * PlaceQuery instance with pq.country changed to default country.
        * ``False`` if pq.country is empty and self.default_country == ''.
    """
    # A non-blank country passes straight through.
    if pq.country.strip() != '':
        return pq
    # Blank country and no configured default: reject the query.
    if self.default_country == '':
        return False
    pq.country = self.default_country
    return pq
def _street_addr_from_response(self, match):
"""Construct a street address (no city, region, etc.) from a geocoder response.
:param match: The match object returned by the geocoder.
"""
# Same caveat as above regarding the ordering of these fields; the
# documentation is not explicit about the correct ordering for
# reconstructing a full address, but implies that this is the ordering.
ordered_fields = ['preQualifier', 'preDirection', 'preType', 'streetName',
'suffixType', 'suffixDirection', 'suffixQualifier']
result = []
# The address components only contain a from and to address, not the
# actual number of the address that was matched, so we need to cheat a
# bit and extract it from the full address string. This is likely to
# miss some edge cases (hopefully only a few since this is a US-only
# geocoder).
addr_num_re = re.match(r'([0-9]+)', match['matchedAddress'])
if not addr_num_re: # Give up
return ''
result.append(addr_num_re.group(0))
for field in ordered_fields:
result.append(match['addressComponents'].get(field, ''))
if any(result):
return ' '.join([s for s in result if s]) # Filter out empty strings.
else:
return '' | Construct a street address (no city, region, etc.) from a geocoder response.
:param match: The match object returned by the geocoder. | entailment |
def copy(self, *args, **kwargs):
    """
    Copy the request and environment object.

    This only does a shallow copy, except of wsgi.input
    """
    # Rewind-able body is required so both requests can re-read it.
    self.make_body_seekable()
    clone = self.__class__(self.environ.copy(), *args, **kwargs)
    clone.copy_body()
    clone.identity = self.identity
    return clone
def add_source(self, source):
    """
    Add a geocoding service to this instance.
    """
    # ``source`` is a (service name, constructor-kwargs) pair.
    service_class = self._get_service_by_name(source[0])
    self._sources.append(service_class(**source[1]))
def remove_source(self, source):
    """
    Remove a geocoding service from this instance.
    """
    # Rebuild the service from its (name, kwargs) pair and drop the first
    # equal entry from the source list.
    service_class = self._get_service_by_name(source[0])
    self._sources.remove(service_class(**source[1]))
def set_sources(self, sources):
    """
    Creates GeocodeServiceConfigs from each str source

    :param sources: non-empty iterable of (name, kwargs) source definitions
    :raises ValueError: if `sources` is empty
    """
    if len(sources) == 0:
        # ValueError (still an Exception subclass, so existing callers keep
        # working) pinpoints the problem better than the bare Exception
        # raised previously.
        raise ValueError('Must declare at least one source for a geocoder')
    self._sources = []
    for source in sources:  # iterate through a list of sources
        self.add_source(source)
def geocode(self, pq, waterfall=None, force_stats_logging=False):
    """
    :arg PlaceQuery pq: PlaceQuery object (required).
    :arg bool waterfall: Boolean set to True if all geocoders listed should
                         be used to find results, instead of stopping after
                         the first geocoding service with valid candidates
                         (defaults to self.waterfall).
    :arg bool force_stats_logging: Raise exception if stats logging fails (default False).
    :returns: Returns a dictionary including:
              * candidates - list of Candidate objects
              * upstream_response_info - list of UpstreamResponseInfo objects
    """
    waterfall = self.waterfall if waterfall is None else waterfall
    # BUGFIX: the old test was ``type(pq) in (str, str)`` -- a Python 2
    # ``(str, unicode)`` leftover with a duplicated entry. isinstance also
    # covers str subclasses.
    if isinstance(pq, str):
        pq = PlaceQuery(pq)
    processed_pq = copy.copy(pq)
    for p in self._preprocessors:  # apply universal address preprocessing
        processed_pq = p.process(processed_pq)
    upstream_response_info_list = []
    processed_candidates = []
    for gs in self._sources:  # iterate through each GeocodeService
        candidates, upstream_response_info = gs.geocode(processed_pq)
        if upstream_response_info is not None:
            upstream_response_info_list.append(upstream_response_info)
        processed_candidates += candidates  # merge lists
        if waterfall is False and len(processed_candidates) > 0:
            break  # if >= 1 good candidate, don't go to next geocoder
    for p in self._postprocessors:  # apply univ. candidate postprocessing
        if processed_candidates == []:
            break  # avoid post-processing empty list
        processed_candidates = p.process(processed_candidates)
    result = dict(candidates=processed_candidates,
                  upstream_response_info=upstream_response_info_list)
    stats_dict = self.convert_geocode_result_to_nested_dicts(result)
    stats_dict = dict(stats_dict, original_pq=pq.__dict__)
    try:
        stats_logger.info(stats_dict)
    except Exception as exception:
        logger.error('Encountered exception while logging stats %s:\n%s',
                     stats_dict, exception)
        if force_stats_logging:
            raise  # bare re-raise keeps the original traceback intact
    return result
def _apply_param_actions(self, params, schema_params):
    """Traverse a schema and perform the updates it describes to params."""
    for key, value in schema_params.items():
        if key not in params:
            continue
        if isinstance(value, dict):
            # Nested schema: recurse into the matching params subtree.
            self._apply_param_actions(params[key], schema_params[key])
        elif isinstance(value, ResourceId):
            # Callers can provide ints as ids.
            # We normalize them to strings so that actions don't get confused.
            params[key] = str(params[key])
            value.action(params, schema_params, key,
                         value, self.state, self.options)
        else:
            logger.error("Invalid value in schema params: %r. schema_params: %r and params: %r",
                         value, schema_params, params)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.