sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
|---|---|---|
def fetch(self, from_time, until_time=None):
"""
This method fetch data from the database according to the period
given
fetch(path, fromTime, untilTime=None)
fromTime is an datetime
untilTime is also an datetime, but defaults to now.
Returns a tuple of (timeInfo, valueList)
where timeInfo is itself a tuple of (fromTime, untilTime, step)
Returns None if no data can be returned
"""
until_time = until_time or datetime.now()
time_info, values = whisper.fetch(self.path,
from_time.strftime('%s'),
until_time.strftime('%s'))
# build up a list of (timestamp, value)
start_time, end_time, step = time_info
current = start_time
times = []
while current <= end_time:
times.append(current)
current += step
return zip(times, values)
|
This method fetch data from the database according to the period
given
fetch(path, fromTime, untilTime=None)
fromTime is an datetime
untilTime is also an datetime, but defaults to now.
Returns a tuple of (timeInfo, valueList)
where timeInfo is itself a tuple of (fromTime, untilTime, step)
Returns None if no data can be returned
|
entailment
|
def build_list_result(results, xml):
"""
构建带翻页的列表
:param results: 已获取的数据列表
:param xml: 原始页面xml
:return: {'results': list, 'count': int, 'next_start': int|None}
如果count与results长度不同,则有更多
如果next_start不为None,则可以到下一页
"""
xml_count = xml.xpath('//div[@class="paginator"]/span[@class="count"]/text()')
xml_next = xml.xpath('//div[@class="paginator"]/span[@class="next"]/a/@href')
count = int(re.search(r'\d+', xml_count[0]).group()) if xml_count else len(results)
next_start = int(re.search(r'start=(\d+)', xml_next[0]).groups()[0]) if xml_next else None
return {'results': results, 'count': count, 'next_start': next_start}
|
构建带翻页的列表
:param results: 已获取的数据列表
:param xml: 原始页面xml
:return: {'results': list, 'count': int, 'next_start': int|None}
如果count与results长度不同,则有更多
如果next_start不为None,则可以到下一页
|
entailment
|
def ADCS(self, params):
"""
ADCS [Ra,] Rb, Rc
Add Rb and Rc + the carry bit and store the result in Ra
Ra, Rb, and Rc must be low registers
if Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
self.check_arguments(low_registers=(Ra, Rc))
self.match_first_two_parameters(Ra, Rb)
# ADCS Ra, Ra, Rb
def ADCS_func():
# TODO need to rethink the set_NZCV with the C flag
oper_1 = self.register[Ra]
oper_2 = self.register[Rc]
self.register[Ra] = oper_1 + oper_2
self.register[Ra] += 1 if self.is_C_set() else 0
self.set_NZCV_flags(oper_1, oper_2, self.register[Ra], 'add')
return ADCS_func
|
ADCS [Ra,] Rb, Rc
Add Rb and Rc + the carry bit and store the result in Ra
Ra, Rb, and Rc must be low registers
if Ra is omitted, then it is assumed to be Rb
|
entailment
|
def ADD(self, params):
"""
ADD [Rx,] Ry, [Rz, PC]
ADD [Rx,] [SP, PC], #imm10_4
ADD [SP,] SP, #imm9_4
Add Ry and Rz and store the result in Rx
Rx, Ry, and Rz can be any register
If Rx is omitted, then it is assumed to be Ry
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
# TODO can we have ADD SP, #imm9_4?
try:
Rx, Ry, Rz = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Ry, Rz = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Rx = Ry
if self.is_register(Rz):
# ADD Rx, Ry, Rz
self.check_arguments(any_registers=(Rx, Ry, Rz))
if Rx != Ry:
raise iarm.exceptions.RuleError("Second parameter {} does not equal first parameter {}". format(Ry, Rx))
def ADD_func():
self.register[Rx] = self.register[Ry] + self.register[Rz]
else:
if Rx == 'SP':
# ADD SP, SP, #imm9_4
self.check_arguments(imm9_4=(Rz,))
if Rx != Ry:
raise iarm.exceptions.RuleError("Second parameter {} is not SP".format(Ry))
else:
# ADD Rx, [SP, PC], #imm10_4
self.check_arguments(any_registers=(Rx,), imm10_4=(Rz,))
if Ry not in ('SP', 'PC'):
raise iarm.exceptions.RuleError("Second parameter {} is not SP or PC".format(Ry))
def ADD_func():
self.register[Rx] = self.register[Ry] + self.convert_to_integer(Rz[1:])
return ADD_func
|
ADD [Rx,] Ry, [Rz, PC]
ADD [Rx,] [SP, PC], #imm10_4
ADD [SP,] SP, #imm9_4
Add Ry and Rz and store the result in Rx
Rx, Ry, and Rz can be any register
If Rx is omitted, then it is assumed to be Ry
|
entailment
|
def CMN(self, params):
"""
CMN Ra, Rb
Add the two registers and set the NZCV flags
The result is discarded
Ra and Rb must be low registers
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
# CMN Ra, Rb
def CMN_func():
self.set_NZCV_flags(self.register[Ra], self.register[Rb],
self.register[Ra] + self.register[Rb], 'add')
return CMN_func
|
CMN Ra, Rb
Add the two registers and set the NZCV flags
The result is discarded
Ra and Rb must be low registers
|
entailment
|
def CMP(self, params):
"""
CMP Rm, Rn
CMP Rm, #imm8
Subtract Rn or imm8 from Rm, set the NZCV flags, and discard the result
Rm and Rn can be R0-R14
"""
Rm, Rn = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
if self.is_register(Rn):
# CMP Rm, Rn
self.check_arguments(R0_thru_R14=(Rm, Rn))
def CMP_func():
self.set_NZCV_flags(self.register[Rm], self.register[Rn],
self.register[Rm] - self.register[Rn], 'sub')
else:
# CMP Rm, #imm8
self.check_arguments(R0_thru_R14=(Rm,), imm8=(Rn,))
def CMP_func():
tmp = self.convert_to_integer(Rn[1:])
self.set_NZCV_flags(self.register[Rm], tmp,
self.register[Rm] - tmp, 'sub')
return CMP_func
|
CMP Rm, Rn
CMP Rm, #imm8
Subtract Rn or imm8 from Rm, set the NZCV flags, and discard the result
Rm and Rn can be R0-R14
|
entailment
|
def MULS(self, params):
"""
MULS Ra, Rb, Ra
Multiply Rb and Ra together and store the result in Ra.
Set the NZ flags.
Ra and Rb must be low registers
The first and last operand must be the same register
"""
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb, Rc))
if Ra != Rc:
raise iarm.exceptions.RuleError("Third parameter {} is not the same as the first parameter {}".format(Rc, Ra))
# MULS Ra, Rb, Ra
def MULS_func():
self.register[Ra] = self.register[Rb] * self.register[Rc]
self.set_NZ_flags(self.register[Ra])
return MULS_func
|
MULS Ra, Rb, Ra
Multiply Rb and Ra together and store the result in Ra.
Set the NZ flags.
Ra and Rb must be low registers
The first and last operand must be the same register
|
entailment
|
def RSBS(self, params):
"""
RSBS [Ra,] Rb, #0
Subtract Rb from zero (0 - Rb) and store the result in Ra
Set the NZCV flags
Ra and Rb must be low registers
if Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
self.check_arguments(low_registers=(Ra, Rb))
if Rc != '#0':
raise iarm.exceptions.RuleError("Third parameter {} is not #0".format(Rc))
# RSBS Ra, Rb, #0
def RSBS_func():
oper_2 = self.register[Rb]
self.register[Ra] = 0 - self.register[Rb]
self.set_NZCV_flags(0, oper_2, self.register[Ra], 'sub')
return RSBS_func
|
RSBS [Ra,] Rb, #0
Subtract Rb from zero (0 - Rb) and store the result in Ra
Set the NZCV flags
Ra and Rb must be low registers
if Ra is omitted, then it is assumed to be Rb
|
entailment
|
def SUB(self, params):
"""
SUB [SP,] SP, #imm9_4
Subtract an immediate from the Stack Pointer
The first SP is optional
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
self.check_arguments(imm9_4=(Rc,))
if Ra != 'SP':
raise iarm.exceptions.RuleError("First parameter {} is not equal to SP".format(Ra))
if Rb != 'SP':
raise iarm.exceptions.RuleError("Second parameter {} is not equal to SP".format(Rb))
# SUB SP, SP, #imm9_4
def SUB_func():
self.register[Ra] = self.register[Rb] - self.convert_to_integer(Rc[1:])
return SUB_func
|
SUB [SP,] SP, #imm9_4
Subtract an immediate from the Stack Pointer
The first SP is optional
|
entailment
|
def SUBS(self, params):
"""
SUBS [Ra,] Rb, Rc
SUBS [Ra,] Rb, #imm3
SUBS [Ra,] Ra, #imm8
Subtract Rc or an immediate from Rb and store the result in Ra
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
if self.is_register(Rc):
# SUBS Ra, Rb, Rc
self.check_arguments(low_registers=(Ra, Rb, Rc))
def SUBS_func():
oper_1 = self.register[Rb]
oper_2 = self.register[Rc]
self.register[Ra] = self.register[Rb] - self.register[Rc]
self.set_NZCV_flags(oper_1, oper_2, self.register[Ra], 'sub')
else:
if Ra == Rb:
# SUBS Ra, Ra, #imm8
self.check_arguments(low_registers=(Ra,), imm8=(Rc,))
def SUBS_func():
oper_1 = self.register[Ra]
self.register[Ra] = self.register[Ra] - self.convert_to_integer(Rc[1:])
self.set_NZCV_flags(oper_1, self.convert_to_integer(Rc[1:]), self.register[Ra], 'sub')
else:
# SUBS Ra, Rb, #imm3
self.check_arguments(low_registers=(Ra, Rb), imm3=(Rc,))
def SUBS_func():
oper_1 = self.register[Rb]
self.register[Ra] = self.register[Rb] - self.convert_to_integer(Rc[1:])
self.set_NZCV_flags(oper_1, self.convert_to_integer(Rc[1:]), self.register[Ra], 'sub')
return SUBS_func
|
SUBS [Ra,] Rb, Rc
SUBS [Ra,] Rb, #imm3
SUBS [Ra,] Ra, #imm8
Subtract Rc or an immediate from Rb and store the result in Ra
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
|
entailment
|
def initialize(template, service_name, environment='dev'):
"""Adds SERVICE_NAME, SERVICE_ENVIRONMENT, and DEFAULT_TAGS to the template
:param template:
:param service_name:
:param environment:
:return:
"""
template.SERVICE_NAME = os.getenv('SERVICE_NAME', service_name)
template.SERVICE_ENVIRONMENT = os.getenv('ENV', environment).lower()
template.DEFAULT_TAGS = troposphere.Tags(**{
'service-name': template.SERVICE_NAME,
'environment': template.SERVICE_ENVIRONMENT
})
template.add_version("2010-09-09")
template.add_description("Stack for %s microservice" % service_name)
|
Adds SERVICE_NAME, SERVICE_ENVIRONMENT, and DEFAULT_TAGS to the template
:param template:
:param service_name:
:param environment:
:return:
|
entailment
|
def get_dist(dist_name, lookup_dirs=None):
"""Get dist for installed version of dist_name avoiding pkg_resources cache
"""
# note: based on pip/utils/__init__.py, get_installed_version(...)
# Create a requirement that we'll look for inside of setuptools.
req = pkg_resources.Requirement.parse(dist_name)
# We want to avoid having this cached, so we need to construct a new
# working set each time.
if lookup_dirs is None:
working_set = pkg_resources.WorkingSet()
else:
working_set = pkg_resources.WorkingSet(lookup_dirs)
# Get the installed distribution from our working set
return working_set.find(req)
|
Get dist for installed version of dist_name avoiding pkg_resources cache
|
entailment
|
def get_package_versions(package):
"""Get the package version information (=SetuptoolsVersion) which is
comparable.
note: we use the pip list_command implementation for this
:param package: name of the package
:return: installed version, latest available version
"""
list_command = ListCommand()
options, args = list_command.parse_args([])
packages = [get_dist(package)]
dists = list_command.iter_packages_latest_infos(packages, options)
try:
dist = next(dists)
return dist.parsed_version, dist.latest_version
except StopIteration:
return None, None
|
Get the package version information (=SetuptoolsVersion) which is
comparable.
note: we use the pip list_command implementation for this
:param package: name of the package
:return: installed version, latest available version
|
entailment
|
def _load_hooks(path):
"""Load hook module and register signals.
:param path: Absolute or relative path to module.
:return: module
"""
module = imp.load_source(os.path.splitext(os.path.basename(path))[0], path)
if not check_hook_mechanism_is_intact(module):
# no hooks - do nothing
log.debug('No valid hook configuration: \'%s\'. Not using hooks!', path)
else:
if check_register_present(module):
# register the template hooks so they listen to gcdt_signals
module.register()
return module
|
Load hook module and register signals.
:param path: Absolute or relative path to module.
:return: module
|
entailment
|
def lifecycle(awsclient, env, tool, command, arguments):
"""Tool lifecycle which provides hooks into the different stages of the
command execution. See signals for hook details.
"""
log.debug('### init')
load_plugins()
context = get_context(awsclient, env, tool, command, arguments)
# every tool needs a awsclient so we provide this via the context
context['_awsclient'] = awsclient
log.debug('### context:')
log.debug(context)
if 'error' in context:
# no need to send an 'error' signal here
return 1
## initialized
gcdt_signals.initialized.send(context)
log.debug('### initialized')
if 'error' in context:
log.error(context['error'])
return 1
check_gcdt_update()
# config is "assembled" by config_reader NOT here!
config = {}
gcdt_signals.config_read_init.send((context, config))
log.debug('### config_read_init')
gcdt_signals.config_read_finalized.send((context, config))
log.debug('### config_read_finalized')
# TODO we might want to be able to override config via env variables?
# here would be the right place to do this
if 'hookfile' in config:
# load hooks from hookfile
_load_hooks(config['hookfile'])
if 'kumo' in config:
# deprecated: this needs to be removed once all old-style "cloudformation" entries are gone
fix_old_kumo_config(config)
# check_credentials
gcdt_signals.check_credentials_init.send((context, config))
log.debug('### check_credentials_init')
gcdt_signals.check_credentials_finalized.send((context, config))
log.debug('### check_credentials_finalized')
if 'error' in context:
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
## lookup
gcdt_signals.lookup_init.send((context, config))
log.debug('### lookup_init')
gcdt_signals.lookup_finalized.send((context, config))
log.debug('### lookup_finalized')
log.debug('### config after lookup:')
log.debug(config)
## config validation
gcdt_signals.config_validation_init.send((context, config))
log.debug('### config_validation_init')
gcdt_signals.config_validation_finalized.send((context, config))
if context['command'] in \
DEFAULT_CONFIG.get(context['tool'], {}).get('non_config_commands', []):
pass # we do not require a config for this command
elif tool not in config and tool != 'gcdt':
context['error'] = 'Configuration missing for \'%s\'.' % tool
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
log.debug('### config_validation_finalized')
## check credentials are valid (AWS services)
# DEPRECATED, use gcdt-logon plugin instead
if are_credentials_still_valid(awsclient):
context['error'] = \
'Your credentials have expired... Please renew and try again!'
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
## bundle step
gcdt_signals.bundle_pre.send((context, config))
log.debug('### bundle_pre')
gcdt_signals.bundle_init.send((context, config))
log.debug('### bundle_init')
gcdt_signals.bundle_finalized.send((context, config))
log.debug('### bundle_finalized')
if 'error' in context:
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
## dispatch command providing context and config (= tooldata)
gcdt_signals.command_init.send((context, config))
log.debug('### command_init')
try:
if tool == 'gcdt':
conf = config # gcdt works on the whole config
else:
conf = config.get(tool, {})
exit_code = cmd.dispatch(arguments,
context=context,
config=conf)
except GracefulExit:
raise
except Exception as e:
log.debug(traceback.format_exc())
context['error'] = str(e)
log.error(context['error'])
exit_code = 1
if exit_code:
if 'error' not in context or context['error'] == '':
context['error'] = '\'%s\' command failed with exit code 1' % command
gcdt_signals.error.send((context, config))
return 1
gcdt_signals.command_finalized.send((context, config))
log.debug('### command_finalized')
# TODO reporting (in case you want to get a summary / output to the user)
gcdt_signals.finalized.send(context)
log.debug('### finalized')
return 0
|
Tool lifecycle which provides hooks into the different stages of the
command execution. See signals for hook details.
|
entailment
|
def main(doc, tool, dispatch_only=None):
"""gcdt tools parametrized main function to initiate gcdt lifecycle.
:param doc: docopt string
:param tool: gcdt tool (gcdt, kumo, tenkai, ramuda, yugen)
:param dispatch_only: list of commands which do not use gcdt lifecycle
:return: exit_code
"""
# Use signal handler to throw exception which can be caught to allow
# graceful exit.
# here: https://stackoverflow.com/questions/26414704/how-does-a-python-process-exit-gracefully-after-receiving-sigterm-while-waiting
signal.signal(signal.SIGTERM, signal_handler) # Jenkins
signal.signal(signal.SIGINT, signal_handler) # Ctrl-C
try:
arguments = docopt(doc, sys.argv[1:])
command = get_command(arguments)
# DEBUG mode (if requested)
verbose = arguments.pop('--verbose', False)
if verbose:
logging_config['loggers']['gcdt']['level'] = 'DEBUG'
dictConfig(logging_config)
if dispatch_only is None:
dispatch_only = ['version']
assert tool in ['gcdt', 'kumo', 'tenkai', 'ramuda', 'yugen']
if command in dispatch_only:
# handle commands that do not need a lifecycle
# Note: `dispatch_only` commands do not have a check for ENV variable!
check_gcdt_update()
return cmd.dispatch(arguments)
else:
env = get_env()
if not env:
log.error('\'ENV\' environment variable not set!')
return 1
awsclient = AWSClient(botocore.session.get_session())
return lifecycle(awsclient, env, tool, command, arguments)
except GracefulExit as e:
log.info('Received %s signal - exiting command \'%s %s\'',
str(e), tool, command)
return 1
|
gcdt tools parametrized main function to initiate gcdt lifecycle.
:param doc: docopt string
:param tool: gcdt tool (gcdt, kumo, tenkai, ramuda, yugen)
:param dispatch_only: list of commands which do not use gcdt lifecycle
:return: exit_code
|
entailment
|
def MOV(self, params):
"""
MOV Rx, Ry
MOV PC, Ry
Move the value of Ry into Rx or PC
"""
Rx, Ry = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(any_registers=(Rx, Ry))
def MOV_func():
self.register[Rx] = self.register[Ry]
return MOV_func
|
MOV Rx, Ry
MOV PC, Ry
Move the value of Ry into Rx or PC
|
entailment
|
def MOVS(self, params):
"""
MOVS Ra, Rb
MOVS Ra, #imm8
Move the value of Rb or imm8 into Ra
Ra and Rb must be low registers
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
if self.is_immediate(Rb):
self.check_arguments(low_registers=[Ra], imm8=[Rb])
def MOVS_func():
self.register[Ra] = self.convert_to_integer(Rb[1:])
# Set N and Z status flags
self.set_NZ_flags(self.register[Ra])
return MOVS_func
elif self.is_register(Rb):
self.check_arguments(low_registers=(Ra, Rb))
def MOVS_func():
self.register[Ra] = self.register[Rb]
self.set_NZ_flags(self.register[Ra])
return MOVS_func
else:
raise iarm.exceptions.ParsingError("Unknown parameter: {}".format(Rb))
|
MOVS Ra, Rb
MOVS Ra, #imm8
Move the value of Rb or imm8 into Ra
Ra and Rb must be low registers
|
entailment
|
def MRS(self, params):
"""
MRS Rj, Rspecial
Copy the value of Rspecial to Rj
Rspecial can be APSR, IPSR, or EPSR
"""
Rj, Rspecial = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(LR_or_general_purpose_registers=(Rj,), special_registers=(Rspecial,))
def MRS_func():
# TODO add combination registers IEPSR, IAPSR, and EAPSR
# TODO needs to use APSR, IPSR, EPSR, IEPSR, IAPSR, EAPSR, PSR, MSP, PSP, PRIMASK, or CONTROL.
# http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0553a/CHDBIBGJ.html
if Rspecial == 'PSR':
self.register[Rj] = self.register['APSR'] | self.register['IPSR'] | self.register['EPSR']
else:
self.register[Rj] = self.register[Rspecial]
return MRS_func
|
MRS Rj, Rspecial
Copy the value of Rspecial to Rj
Rspecial can be APSR, IPSR, or EPSR
|
entailment
|
def MSR(self, params):
"""
MSR Rspecial, Rj
Copy the value of Rj to Rspecial
Rspecial can be APSR, IPSR, or EPSR
"""
Rspecial, Rj = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(LR_or_general_purpose_registers=(Rj,), special_registers=(Rspecial,))
def MSR_func():
# TODO add combination registers IEPSR, IAPSR, and EAPSR
# http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0553a/CHDBIBGJ.html
# TODO update N Z C V flags
if Rspecial in ('PSR', 'APSR'):
# PSR ignores writes to IPSR and EPSR
self.register['APSR'] = self.register[Rj]
else:
# Do nothing
pass
return MSR_func
|
MSR Rspecial, Rj
Copy the value of Rj to Rspecial
Rspecial can be APSR, IPSR, or EPSR
|
entailment
|
def MVNS(self, params):
"""
MVNS Ra, Rb
Negate the value in Rb and store it in Ra
Ra and Rb must be a low register
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
def MVNS_func():
self.register[Ra] = ~self.register[Rb]
self.set_NZ_flags(self.register[Ra])
return MVNS_func
|
MVNS Ra, Rb
Negate the value in Rb and store it in Ra
Ra and Rb must be a low register
|
entailment
|
def REV(self, params):
"""
REV Ra, Rb
Reverse the byte order in register Rb and store the result in Ra
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
def REV_func():
self.register[Ra] = ((self.register[Rb] & 0xFF000000) >> 24) | \
((self.register[Rb] & 0x00FF0000) >> 8) | \
((self.register[Rb] & 0x0000FF00) << 8) | \
((self.register[Rb] & 0x000000FF) << 24)
return REV_func
|
REV Ra, Rb
Reverse the byte order in register Rb and store the result in Ra
|
entailment
|
def REV16(self, params):
"""
REV16 Ra, Rb
Reverse the byte order of the half words in register Rb and store the result in Ra
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
def REV16_func():
self.register[Ra] = ((self.register[Rb] & 0xFF00FF00) >> 8) | \
((self.register[Rb] & 0x00FF00FF) << 8)
return REV16_func
|
REV16 Ra, Rb
Reverse the byte order of the half words in register Rb and store the result in Ra
|
entailment
|
def REVSH(self, params):
"""
REVSH
Reverse the byte order in the lower half word in Rb and store the result in Ra.
If the result of the result is signed, then sign extend
"""
Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
self.check_arguments(low_registers=(Ra, Rb))
def REVSH_func():
self.register[Ra] = ((self.register[Rb] & 0x0000FF00) >> 8) | \
((self.register[Rb] & 0x000000FF) << 8)
if self.register[Ra] & (1 << 15):
self.register[Ra] |= 0xFFFF0000
return REVSH_func
|
REVSH
Reverse the byte order in the lower half word in Rb and store the result in Ra.
If the result of the result is signed, then sign extend
|
entailment
|
def SXTB(self, params):
"""
STXB Ra, Rb
Sign extend the byte in Rb and store the result in Ra
"""
Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
self.check_arguments(low_registers=(Ra, Rb))
def SXTB_func():
if self.register[Rb] & (1 << 7):
self.register[Ra] = 0xFFFFFF00 + (self.register[Rb] & 0xFF)
else:
self.register[Ra] = (self.register[Rb] & 0xFF)
return SXTB_func
|
STXB Ra, Rb
Sign extend the byte in Rb and store the result in Ra
|
entailment
|
def SXTH(self, params):
"""
STXH Ra, Rb
Sign extend the half word in Rb and store the result in Ra
"""
Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
self.check_arguments(low_registers=(Ra, Rb))
def SXTH_func():
if self.register[Rb] & (1 << 15):
self.register[Ra] = 0xFFFF0000 + (self.register[Rb] & 0xFFFF)
else:
self.register[Ra] = (self.register[Rb] & 0xFFFF)
return SXTH_func
|
STXH Ra, Rb
Sign extend the half word in Rb and store the result in Ra
|
entailment
|
def UXTB(self, params):
"""
UTXB Ra, Rb
Zero extend the byte in Rb and store the result in Ra
"""
Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
self.check_arguments(low_registers=(Ra, Rb))
def UXTB_func():
self.register[Ra] = (self.register[Rb] & 0xFF)
return UXTB_func
|
UTXB Ra, Rb
Zero extend the byte in Rb and store the result in Ra
|
entailment
|
def UXTH(self, params):
"""
UTXH Ra, Rb
Zero extend the half word in Rb and store the result in Ra
"""
Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
self.check_arguments(low_registers=(Ra, Rb))
def UXTH_func():
self.register[Ra] = (self.register[Rb] & 0xFFFF)
return UXTH_func
|
UTXH Ra, Rb
Zero extend the half word in Rb and store the result in Ra
|
entailment
|
def _get_event_type(evt_source):
"""Get type of event e.g. 's3', 'events', 'kinesis',...
:param evt_source:
:return:
"""
if 'schedule' in evt_source:
return 'events'
elif 'pattern' in evt_source:
return 'events'
elif 'log_group_name_prefix' in evt_source:
return 'cloudwatch_logs'
else:
arn = evt_source['arn']
_, _, svc, _ = arn.split(':', 3)
return svc
|
Get type of event e.g. 's3', 'events', 'kinesis',...
:param evt_source:
:return:
|
entailment
|
def _get_event_source_obj(awsclient, evt_source):
"""
Given awsclient, event_source dictionary item
create an event_source object of the appropriate event type
to schedule this event, and return the object.
"""
event_source_map = {
'dynamodb': event_source.dynamodb_stream.DynamoDBStreamEventSource,
'kinesis': event_source.kinesis.KinesisEventSource,
's3': event_source.s3.S3EventSource,
'sns': event_source.sns.SNSEventSource,
'events': event_source.cloudwatch.CloudWatchEventSource,
'cloudfront': event_source.cloudfront.CloudFrontEventSource,
'cloudwatch_logs': event_source.cloudwatch_logs.CloudWatchLogsEventSource,
}
evt_type = _get_event_type(evt_source)
event_source_func = event_source_map.get(evt_type, None)
if not event_source:
raise ValueError('Unknown event source: {0}'.format(
evt_source['arn']))
return event_source_func(awsclient, evt_source)
|
Given awsclient, event_source dictionary item
create an event_source object of the appropriate event type
to schedule this event, and return the object.
|
entailment
|
def _add_event_source(awsclient, evt_source, lambda_arn):
"""
Given an event_source dictionary, create the object and add the event source.
"""
event_source_obj = _get_event_source_obj(awsclient, evt_source)
# (where zappa goes like remove, add)
# we go with update and add like this:
if event_source_obj.exists(lambda_arn):
event_source_obj.update(lambda_arn)
else:
event_source_obj.add(lambda_arn)
|
Given an event_source dictionary, create the object and add the event source.
|
entailment
|
def _remove_event_source(awsclient, evt_source, lambda_arn):
"""
Given an event_source dictionary, create the object and remove the event source.
"""
event_source_obj = _get_event_source_obj(awsclient, evt_source)
if event_source_obj.exists(lambda_arn):
event_source_obj.remove(lambda_arn)
|
Given an event_source dictionary, create the object and remove the event source.
|
entailment
|
def _get_event_source_status(awsclient, evt_source, lambda_arn):
"""
Given an event_source dictionary, create the object and get the event source status.
"""
event_source_obj = _get_event_source_obj(awsclient, evt_source)
return event_source_obj.status(lambda_arn)
|
Given an event_source dictionary, create the object and get the event source status.
|
entailment
|
def unwire(awsclient, events, lambda_name, alias_name=ALIAS_NAME):
"""Unwire a list of event from an AWS Lambda function.
'events' is a list of dictionaries, where the dict must contains the
'schedule' of the event as string, and an optional 'name' and 'description'.
:param awsclient:
:param events: list of events
:param lambda_name:
:param alias_name:
:return: exit_code
"""
if not lambda_exists(awsclient, lambda_name):
log.error(colored.red('The function you try to wire up doesn\'t ' +
'exist... Bailing out...'))
return 1
client_lambda = awsclient.get_client('lambda')
lambda_function = client_lambda.get_function(FunctionName=lambda_name)
lambda_arn = client_lambda.get_alias(FunctionName=lambda_name,
Name=alias_name)['AliasArn']
log.info('UN-wiring lambda_arn %s ' % lambda_arn)
# TODO why load the policies here?
'''
policies = None
try:
result = client_lambda.get_policy(FunctionName=lambda_name,
Qualifier=alias_name)
policies = json.loads(result['Policy'])
except ClientError as e:
if e.response['Error']['Code'] == 'ResourceNotFoundException':
log.warn("Permission policies not found")
else:
raise e
'''
if lambda_function is not None:
#_unschedule_events(awsclient, events, lambda_arn)
for event in events:
evt_source = event['event_source']
_remove_event_source(awsclient, evt_source, lambda_arn)
return 0
|
Unwire a list of event from an AWS Lambda function.
'events' is a list of dictionaries, where the dict must contains the
'schedule' of the event as string, and an optional 'name' and 'description'.
:param awsclient:
:param events: list of events
:param lambda_name:
:param alias_name:
:return: exit_code
|
entailment
|
def wire_deprecated(awsclient, function_name, s3_event_sources=None,
time_event_sources=None,
alias_name=ALIAS_NAME):
"""Deprecated! Please use wire!
:param awsclient:
:param function_name:
:param s3_event_sources: dictionary
:param time_event_sources:
:param alias_name:
:return: exit_code
"""
if not lambda_exists(awsclient, function_name):
log.error(colored.red('The function you try to wire up doesn\'t ' +
'exist... Bailing out...'))
return 1
client_lambda = awsclient.get_client('lambda')
lambda_function = client_lambda.get_function(FunctionName=function_name)
lambda_arn = client_lambda.get_alias(FunctionName=function_name,
Name=alias_name)['AliasArn']
log.info('wiring lambda_arn %s ...' % lambda_arn)
if lambda_function is not None:
s3_events_ensure_exists, s3_events_ensure_absent = filter_events_ensure(
s3_event_sources)
cloudwatch_events_ensure_exists, cloudwatch_events_ensure_absent = \
filter_events_ensure(time_event_sources)
for s3_event_source in s3_events_ensure_absent:
_ensure_s3_event(awsclient, s3_event_source, function_name,
alias_name, lambda_arn, s3_event_source['ensure'])
for s3_event_source in s3_events_ensure_exists:
_ensure_s3_event(awsclient, s3_event_source, function_name,
alias_name, lambda_arn, s3_event_source['ensure'])
for time_event in cloudwatch_events_ensure_absent:
_ensure_cloudwatch_event(awsclient, time_event, function_name,
alias_name, lambda_arn,
time_event['ensure'])
for time_event in cloudwatch_events_ensure_exists:
_ensure_cloudwatch_event(awsclient, time_event, function_name,
alias_name, lambda_arn,
time_event['ensure'])
return 0
|
Deprecated! Please use wire!
:param awsclient:
:param function_name:
:param s3_event_sources: dictionary
:param time_event_sources:
:param alias_name:
:return: exit_code
|
entailment
|
def unwire_deprecated(awsclient, function_name, s3_event_sources=None,
time_event_sources=None, alias_name=ALIAS_NAME):
"""Deprecated! Please use unwire!
:param awsclient:
:param function_name:
:param s3_event_sources: dictionary
:param time_event_sources:
:param alias_name:
:return: exit_code
"""
if not lambda_exists(awsclient, function_name):
log.error(colored.red('The function you try to wire up doesn\'t ' +
'exist... Bailing out...'))
return 1
client_lambda = awsclient.get_client('lambda')
lambda_function = client_lambda.get_function(FunctionName=function_name)
lambda_arn = client_lambda.get_alias(FunctionName=function_name,
Name=alias_name)['AliasArn']
log.info('UN-wiring lambda_arn %s ' % lambda_arn)
policies = None
try:
result = client_lambda.get_policy(FunctionName=function_name,
Qualifier=alias_name)
policies = json.loads(result['Policy'])
except ClientError as e:
if e.response['Error']['Code'] == 'ResourceNotFoundException':
log.warn("Permission policies not found")
else:
raise e
if lambda_function is not None:
#### S3 Events
# for every permission - delete it and corresponding rule (if exists)
if policies:
for statement in policies['Statement']:
if statement['Principal']['Service'] == 's3.amazonaws.com':
source_bucket = get_bucket_from_s3_arn(
statement['Condition']['ArnLike']['AWS:SourceArn'])
log.info('\tRemoving S3 permission {} invoking {}'.format(
source_bucket, lambda_arn))
_remove_permission(awsclient, function_name,
statement['Sid'], alias_name)
log.info('\tRemoving All S3 events {} invoking {}'.format(
source_bucket, lambda_arn))
_remove_events_from_s3_bucket(awsclient, source_bucket,
lambda_arn)
# Case: s3 events without permissions active "safety measure"
for s3_event_source in s3_event_sources:
bucket_name = s3_event_source.get('bucket')
_remove_events_from_s3_bucket(awsclient, bucket_name, lambda_arn)
#### CloudWatch Events
# for every permission - delete it and corresponding rule (if exists)
if policies:
for statement in policies['Statement']:
if statement['Principal']['Service'] == 'events.amazonaws.com':
rule_name = get_rule_name_from_event_arn(
statement['Condition']['ArnLike']['AWS:SourceArn'])
log.info(
'\tRemoving Cloudwatch permission {} invoking {}'.format(
rule_name, lambda_arn))
_remove_permission(awsclient, function_name,
statement['Sid'], alias_name)
log.info('\tRemoving Cloudwatch rule {} invoking {}'.format(
rule_name, lambda_arn))
_remove_cloudwatch_rule_event(awsclient, rule_name,
lambda_arn)
# Case: rules without permissions active, "safety measure"
for time_event in time_event_sources:
rule_name = time_event.get('ruleName')
_remove_cloudwatch_rule_event(awsclient, rule_name, lambda_arn)
return 0
|
Deprecated! Please use unwire!
:param awsclient:
:param function_name:
:param s3_event_sources: dictionary
:param time_event_sources:
:param alias_name:
:return: exit_code
|
entailment
|
def _lambda_add_s3_event_source(awsclient, arn, event, bucket, prefix,
suffix):
"""Use only prefix OR suffix
:param arn:
:param event:
:param bucket:
:param prefix:
:param suffix:
:return:
"""
json_data = {
'LambdaFunctionConfigurations': [{
'LambdaFunctionArn': arn,
'Id': str(uuid.uuid1()),
'Events': [event]
}]
}
filter_rules = build_filter_rules(prefix, suffix)
json_data['LambdaFunctionConfigurations'][0].update({
'Filter': {
'Key': {
'FilterRules': filter_rules
}
}
})
# http://docs.aws.amazon.com/cli/latest/reference/s3api/put-bucket-notification-configuration.html
# http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html
client_s3 = awsclient.get_client('s3')
bucket_configurations = client_s3.get_bucket_notification_configuration(
Bucket=bucket)
bucket_configurations.pop('ResponseMetadata')
if 'LambdaFunctionConfigurations' in bucket_configurations:
bucket_configurations['LambdaFunctionConfigurations'].append(
json_data['LambdaFunctionConfigurations'][0]
)
else:
bucket_configurations['LambdaFunctionConfigurations'] = json_data[
'LambdaFunctionConfigurations']
response = client_s3.put_bucket_notification_configuration(
Bucket=bucket,
NotificationConfiguration=bucket_configurations
)
# TODO don't return a table, but success state
return json2table(response)
|
Use only prefix OR suffix
:param arn:
:param event:
:param bucket:
:param prefix:
:param suffix:
:return:
|
entailment
|
def find_eigen(hint=None):
r'''
Try to find the Eigen library. If successful the include directory is returned.
'''
# search with pkgconfig
# ---------------------
try:
import pkgconfig
if pkgconfig.installed('eigen3','>3.0.0'):
return pkgconfig.parse('eigen3')['include_dirs'][0]
except:
pass
# manual search
# -------------
search_dirs = [] if hint is None else hint
search_dirs += [
"/usr/local/include/eigen3",
"/usr/local/homebrew/include/eigen3",
"/opt/local/var/macports/software/eigen3",
"/opt/local/include/eigen3",
"/usr/include/eigen3",
"/usr/include/local",
"/usr/include",
]
for d in search_dirs:
path = os.path.join(d, "Eigen", "Dense")
if os.path.exists(path):
vf = os.path.join(d, "Eigen", "src", "Core", "util", "Macros.h")
if not os.path.exists(vf):
continue
src = open(vf, "r").read()
v1 = re.findall("#define EIGEN_WORLD_VERSION (.+)", src)
v2 = re.findall("#define EIGEN_MAJOR_VERSION (.+)", src)
v3 = re.findall("#define EIGEN_MINOR_VERSION (.+)", src)
if not len(v1) or not len(v2) or not len(v3):
continue
v = "{0}.{1}.{2}".format(v1[0], v2[0], v3[0])
print("Found Eigen version {0} in: {1}".format(v, d))
return d
return None
|
r'''
Try to find the Eigen library. If successful the include directory is returned.
|
entailment
|
def check_and_format_logs_params(start, end, tail):
"""Helper to read the params for the logs command"""
def _decode_duration_type(duration_type):
durations = {'m': 'minutes', 'h': 'hours', 'd': 'days', 'w': 'weeks'}
return durations[duration_type]
if not start:
if tail:
start_dt = maya.now().subtract(seconds=300).datetime(naive=True)
else:
start_dt = maya.now().subtract(days=1).datetime(naive=True)
elif start and start[-1] in ['m', 'h', 'd', 'w']:
value = int(start[:-1])
start_dt = maya.now().subtract(
**{_decode_duration_type(start[-1]): value}).datetime(naive=True)
elif start:
start_dt = maya.parse(start).datetime(naive=True)
if end and end[-1] in ['m', 'h', 'd', 'w']:
value = int(end[:-1])
end_dt = maya.now().subtract(
**{_decode_duration_type(end[-1]): value}).datetime(naive=True)
elif end:
end_dt = maya.parse(end).datetime(naive=True)
else:
end_dt = None
return start_dt, end_dt
|
Helper to read the params for the logs command
|
entailment
|
def upload_file_to_s3(awsclient, bucket, key, filename):
"""Upload a file to AWS S3 bucket.
:param awsclient:
:param bucket:
:param key:
:param filename:
:return:
"""
client_s3 = awsclient.get_client('s3')
transfer = S3Transfer(client_s3)
# Upload /tmp/myfile to s3://bucket/key and print upload progress.
transfer.upload_file(filename, bucket, key)
response = client_s3.head_object(Bucket=bucket, Key=key)
etag = response.get('ETag')
version_id = response.get('VersionId', None)
return etag, version_id
|
Upload a file to AWS S3 bucket.
:param awsclient:
:param bucket:
:param key:
:param filename:
:return:
|
entailment
|
def remove_file_from_s3(awsclient, bucket, key):
"""Remove a file from an AWS S3 bucket.
:param awsclient:
:param bucket:
:param key:
:return:
"""
client_s3 = awsclient.get_client('s3')
response = client_s3.delete_object(Bucket=bucket, Key=key)
|
Remove a file from an AWS S3 bucket.
:param awsclient:
:param bucket:
:param key:
:return:
|
entailment
|
def ls(awsclient, bucket, prefix=None):
"""List bucket contents
:param awsclient:
:param bucket:
:param prefix:
:return:
"""
# this works until 1000 keys!
params = {'Bucket': bucket}
if prefix:
params['Prefix'] = prefix
client_s3 = awsclient.get_client('s3')
objects = client_s3.list_objects_v2(**params)
if objects['KeyCount'] > 0:
keys = [k['Key'] for k in objects['Contents']]
return keys
|
List bucket contents
:param awsclient:
:param bucket:
:param prefix:
:return:
|
entailment
|
def ORRS(self, params):
"""
ORRS [Ra,] Ra, Rb
OR Ra and Rb together and store the result in Ra
The equivalent of `Ra = Ra | Rc`
Updates NZ flags
Ra and Rb must be low registers
The first register is optional
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
self.check_arguments(low_registers=(Ra, Rc))
self.match_first_two_parameters(Ra, Rb)
# ORRS Ra, Ra, Rb
def ORRS_func():
self.register[Ra] = self.register[Ra] | self.register[Rc]
self.set_NZ_flags(self.register[Ra])
return ORRS_func
|
ORRS [Ra,] Ra, Rb
OR Ra and Rb together and store the result in Ra
The equivalent of `Ra = Ra | Rc`
Updates NZ flags
Ra and Rb must be low registers
The first register is optional
|
entailment
|
def TST(self, params):
"""
TST Ra, Rb
AND Ra and Rb together and update the NZ flag. The result is not set
The equivalent of `Ra & Rc`
Ra and Rb must be low registers
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
def TST_func():
result = self.register[Ra] & self.register[Rb]
self.set_NZ_flags(result)
return TST_func
|
TST Ra, Rb
AND Ra and Rb together and update the NZ flag. The result is not set
The equivalent of `Ra & Rc`
Ra and Rb must be low registers
|
entailment
|
def render_to_mail(template, context, **kwargs):
"""
Renders a mail and returns the resulting ``EmailMultiAlternatives``
instance
* ``template``: The base name of the text and HTML (optional) version of
the mail.
* ``context``: The context used to render the mail. This context instance
should contain everything required.
* Additional keyword arguments are passed to the ``EmailMultiAlternatives``
instantiation. Use those to specify the ``to``, ``headers`` etc.
arguments.
Usage example::
# Render the template myproject/hello_mail.txt (first non-empty line
# contains the subject, third to last the body) and optionally the
# template myproject/hello_mail.html containing the alternative HTML
# representation.
message = render_to_mail('myproject/hello_mail', {}, to=[email])
message.send()
"""
lines = iter(
line.rstrip()
for line in render_to_string("%s.txt" % template, context).splitlines()
)
subject = ""
try:
while True:
line = next(lines)
if line:
subject = line
break
except StopIteration: # if lines is empty
pass
body = "\n".join(lines).strip("\n")
message = EmailMultiAlternatives(subject=subject, body=body, **kwargs)
try:
message.attach_alternative(
render_to_string("%s.html" % template, context), "text/html"
)
except TemplateDoesNotExist:
pass
return message
|
Renders a mail and returns the resulting ``EmailMultiAlternatives``
instance
* ``template``: The base name of the text and HTML (optional) version of
the mail.
* ``context``: The context used to render the mail. This context instance
should contain everything required.
* Additional keyword arguments are passed to the ``EmailMultiAlternatives``
instantiation. Use those to specify the ``to``, ``headers`` etc.
arguments.
Usage example::
# Render the template myproject/hello_mail.txt (first non-empty line
# contains the subject, third to last the body) and optionally the
# template myproject/hello_mail.html containing the alternative HTML
# representation.
message = render_to_mail('myproject/hello_mail', {}, to=[email])
message.send()
|
entailment
|
def get_confirmation_url(email, request, name="email_registration_confirm", **kwargs):
"""
Returns the confirmation URL
"""
return request.build_absolute_uri(
reverse(name, kwargs={"code": get_confirmation_code(email, request, **kwargs)})
)
|
Returns the confirmation URL
|
entailment
|
def send_registration_mail(email, *, request, **kwargs):
"""send_registration_mail(email, *, request, **kwargs)
Sends the registration mail
* ``email``: The email address where the registration link should be
sent to.
* ``request``: A HTTP request instance, used to construct the complete
URL (including protocol and domain) for the registration link.
* Additional keyword arguments for ``get_confirmation_url`` respectively
``get_confirmation_code``.
The mail is rendered using the following two templates:
* ``registration/email_registration_email.txt``: The first line of this
template will be the subject, the third to the last line the body of the
email.
* ``registration/email_registration_email.html``: The body of the HTML
version of the mail. This template is **NOT** available by default and
is not required either.
"""
render_to_mail(
"registration/email_registration_email",
{"url": get_confirmation_url(email, request, **kwargs)},
to=[email],
).send()
|
send_registration_mail(email, *, request, **kwargs)
Sends the registration mail
* ``email``: The email address where the registration link should be
sent to.
* ``request``: A HTTP request instance, used to construct the complete
URL (including protocol and domain) for the registration link.
* Additional keyword arguments for ``get_confirmation_url`` respectively
``get_confirmation_code``.
The mail is rendered using the following two templates:
* ``registration/email_registration_email.txt``: The first line of this
template will be the subject, the third to the last line the body of the
email.
* ``registration/email_registration_email.html``: The body of the HTML
version of the mail. This template is **NOT** available by default and
is not required either.
|
entailment
|
def decode(code, *, max_age):
"""decode(code, *, max_age)
Decodes the code from the registration link and returns a tuple consisting
of the verified email address and the payload which was passed through to
``get_confirmation_code``.
The maximum age in seconds of the link has to be specified as ``max_age``.
This method raises ``ValidationError`` exceptions when anything goes wrong
when verifying the signature or the expiry timeout.
"""
try:
data = get_signer().unsign(code, max_age=max_age)
except signing.SignatureExpired:
raise ValidationError(
_("The link is expired. Please request another registration link."),
code="email_registration_expired",
)
except signing.BadSignature:
raise ValidationError(
_(
"Unable to verify the signature. Please request a new"
" registration link."
),
code="email_registration_signature",
)
return data.split(":", 1)
|
decode(code, *, max_age)
Decodes the code from the registration link and returns a tuple consisting
of the verified email address and the payload which was passed through to
``get_confirmation_code``.
The maximum age in seconds of the link has to be specified as ``max_age``.
This method raises ``ValidationError`` exceptions when anything goes wrong
when verifying the signature or the expiry timeout.
|
entailment
|
def do_some_work(
self,
work_dict):
"""do_some_work
:param work_dict: dictionary for key/values
"""
label = "do_some_work"
log.info(("task - {} - start "
"work_dict={}")
.format(label,
work_dict))
ret_data = {
"job_results": ("some response key={}").format(
str(uuid.uuid4()))
}
log.info(("task - {} - result={} done")
.format(
ret_data,
label))
return ret_data
|
do_some_work
:param work_dict: dictionary for key/values
|
entailment
|
def create_record(awsclient, name_prefix, instance_reference, type="A", host_zone_name=None):
"""
Builds route53 record entries enabling DNS names for services
Note: gcdt.route53 create_record(awsclient, ...)
is used in dataplatform cloudformation.py templates!
:param name_prefix: The sub domain prefix to use
:param instance_reference: The EC2 troposphere reference which's private IP should be linked to
:param type: The type of the record A or CNAME (default: A)
:param host_zone_name: The host zone name to use (like preprod.ds.glomex.cloud. - DO NOT FORGET THE DOT!)
:return: RecordSetType
"""
# Only fetch the host zone from the COPS stack if nessary
if host_zone_name is None:
host_zone_name = _retrieve_stack_host_zone_name(awsclient)
if not (type == "A" or type == "CNAME"):
raise Exception("Record set type is not supported!")
name_of_record = name_prefix \
.replace('.', '') \
.replace('-', '') \
.title() + "HostRecord"
# Reference EC2 instance automatically to their private IP
if isinstance(instance_reference, Instance):
resource_record = troposphere.GetAtt(
instance_reference,
"PrivateIp"
)
else:
resource_record = instance_reference
return RecordSetType(
name_of_record,
HostedZoneName=host_zone_name,
Name=troposphere.Join("", [
name_prefix + ".",
host_zone_name,
]),
Type=type,
TTL=TTL_DEFAULT,
ResourceRecords=[
resource_record
],
)
|
Builds route53 record entries enabling DNS names for services
Note: gcdt.route53 create_record(awsclient, ...)
is used in dataplatform cloudformation.py templates!
:param name_prefix: The sub domain prefix to use
:param instance_reference: The EC2 troposphere reference which's private IP should be linked to
:param type: The type of the record A or CNAME (default: A)
:param host_zone_name: The host zone name to use (like preprod.ds.glomex.cloud. - DO NOT FORGET THE DOT!)
:return: RecordSetType
|
entailment
|
def _retrieve_stack_host_zone_name(awsclient, default_stack_name=None):
"""
Use service discovery to get the host zone name from the default stack
:return: Host zone name as string
"""
global _host_zone_name
if _host_zone_name is not None:
return _host_zone_name
env = get_env()
if env is None:
print("Please set environment...")
# TODO: why is there a sys.exit in library code used by cloudformation!!!
sys.exit()
if default_stack_name is None:
# TODO why 'dp-<env>'? - this should not be hardcoded!
default_stack_name = 'dp-%s' % env
default_stack_output = get_outputs_for_stack(awsclient, default_stack_name)
if HOST_ZONE_NAME__STACK_OUTPUT_NAME not in default_stack_output:
print("Please debug why default stack '{}' does not contain '{}'...".format(
default_stack_name,
HOST_ZONE_NAME__STACK_OUTPUT_NAME,
))
# TODO: why is there a sys.exit in library code used by cloudformation!!!
sys.exit()
_host_zone_name = default_stack_output[HOST_ZONE_NAME__STACK_OUTPUT_NAME] + "."
return _host_zone_name
|
Use service discovery to get the host zone name from the default stack
:return: Host zone name as string
|
entailment
|
def load_plugins(group='gcdt10'):
"""Load and register installed gcdt plugins.
"""
# on using entrypoints:
# http://stackoverflow.com/questions/774824/explain-python-entry-points
# TODO: make sure we do not have conflicting generators installed!
for ep in pkg_resources.iter_entry_points(group, name=None):
plugin = ep.load() # load the plugin
if check_hook_mechanism_is_intact(plugin):
if check_register_present(plugin):
plugin.register() # register the plugin so it listens to gcdt_signals
else:
log.warning('No valid hook configuration: %s. Not using hooks!', plugin)
|
Load and register installed gcdt plugins.
|
entailment
|
def get_plugin_versions(group='gcdt10'):
"""Load and register installed gcdt plugins.
"""
versions = {}
for ep in pkg_resources.iter_entry_points(group, name=None):
versions[ep.dist.project_name] = ep.dist.version
return versions
|
Load and register installed gcdt plugins.
|
entailment
|
def delete_log_group(awsclient, log_group_name):
"""Delete the specified log group
:param log_group_name: log group name
:return:
"""
client_logs = awsclient.get_client('logs')
response = client_logs.delete_log_group(
logGroupName=log_group_name
)
|
Delete the specified log group
:param log_group_name: log group name
:return:
|
entailment
|
def put_retention_policy(awsclient, log_group_name, retention_in_days):
"""Sets the retention of the specified log group
if the log group does not yet exist than it will be created first.
:param log_group_name: log group name
:param retention_in_days: log group name
:return:
"""
try:
# Note: for AWS Lambda the log_group is created once the first
# log event occurs. So if the log_group does not exist we create it
create_log_group(awsclient, log_group_name)
except GracefulExit:
raise
except Exception:
# TODO check that it is really a ResourceAlreadyExistsException
pass
client_logs = awsclient.get_client('logs')
response = client_logs.put_retention_policy(
logGroupName=log_group_name,
retentionInDays=retention_in_days
)
|
Sets the retention of the specified log group
if the log group does not yet exist than it will be created first.
:param log_group_name: log group name
:param retention_in_days: log group name
:return:
|
entailment
|
def filter_log_events(awsclient, log_group_name, start_ts, end_ts=None):
"""
Note: this is used to retrieve logs in ramuda.
:param log_group_name: log group name
:param start_ts: timestamp
:param end_ts: timestamp
:return: list of log entries
"""
client_logs = awsclient.get_client('logs')
# TODO use all_pages instead!
logs = []
next_token = None
while True:
request = {
'logGroupName': log_group_name,
'startTime': start_ts
}
if end_ts:
request['endTime'] = end_ts
if next_token:
request['nextToken'] = next_token
response = client_logs.filter_log_events(**request)
logs.extend(
[{'timestamp': e['timestamp'], 'message': e['message']}
for e in response['events']]
)
if 'nextToken' not in response:
break
next_token = response['nextToken']
return logs
|
Note: this is used to retrieve logs in ramuda.
:param log_group_name: log group name
:param start_ts: timestamp
:param end_ts: timestamp
:return: list of log entries
|
entailment
|
def describe_log_group(awsclient, log_group_name):
"""Get info on the specified log group
:param log_group_name: log group name
:return:
"""
client_logs = awsclient.get_client('logs')
request = {
'logGroupNamePrefix': log_group_name,
'limit': 1
}
response = client_logs.describe_log_groups(**request)
if response['logGroups']:
return response['logGroups'][0]
else:
return
|
Get info on the specified log group
:param log_group_name: log group name
:return:
|
entailment
|
def describe_log_stream(awsclient, log_group_name, log_stream_name):
"""Get info on the specified log stream
:param log_group_name: log group name
:param log_stream_name: log stream
:return:
"""
client_logs = awsclient.get_client('logs')
response = client_logs.describe_log_streams(
logGroupName=log_group_name,
logStreamNamePrefix=log_stream_name,
limit=1
)
if response['logStreams']:
return response['logStreams'][0]
else:
return
|
Get info on the specified log stream
:param log_group_name: log group name
:param log_stream_name: log stream
:return:
|
entailment
|
def create_log_group(awsclient, log_group_name):
"""Creates a log group with the specified name.
:param log_group_name: log group name
:return:
"""
client_logs = awsclient.get_client('logs')
response = client_logs.create_log_group(
logGroupName=log_group_name,
)
|
Creates a log group with the specified name.
:param log_group_name: log group name
:return:
|
entailment
|
def create_log_stream(awsclient, log_group_name, log_stream_name):
"""Creates a log stream for the specified log group.
:param log_group_name: log group name
:param log_stream_name: log stream name
:return:
"""
client_logs = awsclient.get_client('logs')
response = client_logs.create_log_stream(
logGroupName=log_group_name,
logStreamName=log_stream_name
)
|
Creates a log stream for the specified log group.
:param log_group_name: log group name
:param log_stream_name: log stream name
:return:
|
entailment
|
def put_log_events(awsclient, log_group_name, log_stream_name, log_events,
sequence_token=None):
"""Put log events for the specified log group and stream.
:param log_group_name: log group name
:param log_stream_name: log stream name
:param log_events: [{'timestamp': 123, 'message': 'string'}, ...]
:param sequence_token: the sequence token
:return: next_token
"""
client_logs = awsclient.get_client('logs')
request = {
'logGroupName': log_group_name,
'logStreamName': log_stream_name,
'logEvents': log_events
}
if sequence_token:
request['sequenceToken'] = sequence_token
response = client_logs.put_log_events(**request)
if 'rejectedLogEventsInfo' in response:
log.warn(response['rejectedLogEventsInfo'])
if 'nextSequenceToken' in response:
return response['nextSequenceToken']
|
Put log events for the specified log group and stream.
:param log_group_name: log group name
:param log_stream_name: log stream name
:param log_events: [{'timestamp': 123, 'message': 'string'}, ...]
:param sequence_token: the sequence token
:return: next_token
|
entailment
|
def get_log_events(awsclient, log_group_name, log_stream_name, start_ts=None):
"""Get log events for the specified log group and stream.
this is used in tenkai output instance diagnostics
:param log_group_name: log group name
:param log_stream_name: log stream name
:param start_ts: timestamp
:return:
"""
client_logs = awsclient.get_client('logs')
request = {
'logGroupName': log_group_name,
'logStreamName': log_stream_name
}
if start_ts:
request['startTime'] = start_ts
# TODO exhaust the events!
# TODO use all_pages !
response = client_logs.get_log_events(**request)
if 'events' in response and response['events']:
return [{'timestamp': e['timestamp'], 'message': e['message']}
for e in response['events']]
|
Get log events for the specified log group and stream.
this is used in tenkai output instance diagnostics
:param log_group_name: log group name
:param log_stream_name: log stream name
:param start_ts: timestamp
:return:
|
entailment
|
def check_log_stream_exists(awsclient, log_group_name, log_stream_name):
"""Check
:param log_group_name: log group name
:param log_stream_name: log stream name
:return: True / False
"""
lg = describe_log_group(awsclient, log_group_name)
if lg and lg['logGroupName'] == log_group_name:
stream = describe_log_stream(awsclient, log_group_name, log_stream_name)
if stream and stream['logStreamName'] == log_stream_name:
return True
return False
|
Check
:param log_group_name: log group name
:param log_stream_name: log stream name
:return: True / False
|
entailment
|
def decode_format_timestamp(timestamp):
"""Convert unix timestamp (millis) into date & time we use in logs output.
:param timestamp: unix timestamp in millis
:return: date, time in UTC
"""
dt = maya.MayaDT(timestamp / 1000).datetime(naive=True)
return dt.strftime('%Y-%m-%d'), dt.strftime('%H:%M:%S')
|
Convert unix timestamp (millis) into date & time we use in logs output.
:param timestamp: unix timestamp in millis
:return: date, time in UTC
|
entailment
|
def reload(self):
"""Reload the configuration from disk returning True if the
configuration has changed from the previous values.
"""
config = self._default_configuration()
if self._file_path:
config.update(self._load_config_file())
if config != self._values:
self._values = config
return True
return False
|
Reload the configuration from disk returning True if the
configuration has changed from the previous values.
|
entailment
|
def _load_config_file(self):
"""Load the configuration file into memory, returning the content.
"""
LOGGER.info('Loading configuration from %s', self._file_path)
if self._file_path.endswith('json'):
config = self._load_json_config()
else:
config = self._load_yaml_config()
for key, value in [(k, v) for k, v in config.items()]:
if key.title() != key:
config[key.title()] = value
del config[key]
return flatdict.FlatDict(config)
|
Load the configuration file into memory, returning the content.
|
entailment
|
def _load_json_config(self):
"""Load the configuration file in JSON format
:rtype: dict
"""
try:
return json.loads(self._read_config())
except ValueError as error:
raise ValueError(
'Could not read configuration file: {}'.format(error))
|
Load the configuration file in JSON format
:rtype: dict
|
entailment
|
def _load_yaml_config(self):
"""Loads the configuration file from a .yaml or .yml file
:type: dict
"""
try:
config = self._read_config()
except OSError as error:
raise ValueError('Could not read configuration file: %s' % error)
try:
return yaml.safe_load(config)
except yaml.YAMLError as error:
message = '\n'.join([' > %s' % line
for line in str(error).split('\n')])
sys.stderr.write('\n\n Error in the configuration file:\n\n'
'{}\n\n'.format(message))
sys.stderr.write(' Configuration should be a valid YAML file.\n')
sys.stderr.write(' YAML format validation available at '
'http://yamllint.com\n')
raise ValueError(error)
|
Loads the configuration file from a .yaml or .yml file
:type: dict
|
entailment
|
def _normalize_file_path(file_path):
"""Normalize the file path value.
:param str file_path: The file path as passed in
:rtype: str
"""
if not file_path:
return None
elif file_path.startswith('s3://') or \
file_path.startswith('http://') or \
file_path.startswith('https://'):
return file_path
return path.abspath(file_path)
|
Normalize the file path value.
:param str file_path: The file path as passed in
:rtype: str
|
entailment
|
def _read_config(self):
"""Read the configuration from the various places it may be read from.
:rtype: str
:raises: ValueError
"""
if not self._file_path:
return None
elif self._file_path.startswith('s3://'):
return self._read_s3_config()
elif self._file_path.startswith('http://') or \
self._file_path.startswith('https://'):
return self._read_remote_config()
elif not path.exists(self._file_path):
raise ValueError(
'Configuration file not found: {}'.format(self._file_path))
with open(self._file_path, 'r') as handle:
return handle.read()
|
Read the configuration from the various places it may be read from.
:rtype: str
:raises: ValueError
|
entailment
|
def _read_remote_config(self):
"""Read a remote config via URL.
:rtype: str
:raises: ValueError
"""
try:
import requests
except ImportError:
requests = None
if not requests:
raise ValueError(
'Remote config URL specified but requests not installed')
result = requests.get(self._file_path)
if not result.ok:
raise ValueError(
'Failed to retrieve remote config: {}'.format(
result.status_code))
return result.text
|
Read a remote config via URL.
:rtype: str
:raises: ValueError
|
entailment
|
def _read_s3_config(self):
"""Read in the value of the configuration file in Amazon S3.
:rtype: str
:raises: ValueError
"""
try:
import boto3
import botocore.exceptions
except ImportError:
boto3, botocore = None, None
if not boto3:
raise ValueError(
's3 URL specified for configuration but boto3 not installed')
parsed = parse.urlparse(self._file_path)
try:
response = boto3.client(
's3', endpoint_url=os.environ.get('S3_ENDPOINT')).get_object(
Bucket=parsed.netloc, Key=parsed.path.lstrip('/'))
except botocore.exceptions.ClientError as e:
raise ValueError(
'Failed to download configuration from S3: {}'.format(e))
return response['Body'].read().decode('utf-8')
|
Read in the value of the configuration file in Amazon S3.
:rtype: str
:raises: ValueError
|
entailment
|
def update(self, configuration, debug=None):
"""Update the internal configuration values, removing debug_only
handlers if debug is False. Returns True if the configuration has
changed from previous configuration values.
:param dict configuration: The logging configuration
:param bool debug: Toggles use of debug_only loggers
:rtype: bool
"""
if self.config != dict(configuration) and debug != self.debug:
self.config = dict(configuration)
self.debug = debug
self.configure()
return True
return False
|
Update the internal configuration values, removing debug_only
handlers if debug is False. Returns True if the configuration has
changed from previous configuration values.
:param dict configuration: The logging configuration
:param bool debug: Toggles use of debug_only loggers
:rtype: bool
|
entailment
|
def configure(self):
"""Configure the Python stdlib logger"""
if self.debug is not None and not self.debug:
self._remove_debug_handlers()
self._remove_debug_only()
logging.config.dictConfig(self.config)
try:
logging.captureWarnings(True)
except AttributeError:
pass
|
Configure the Python stdlib logger
|
entailment
|
def _remove_debug_handlers(self):
"""Remove any handlers with an attribute of debug_only that is True and
remove the references to said handlers from any loggers that are
referencing them.
"""
remove = list()
for handler in self.config[self.HANDLERS]:
if self.config[self.HANDLERS][handler].get('debug_only'):
remove.append(handler)
for handler in remove:
del self.config[self.HANDLERS][handler]
for logger in self.config[self.LOGGERS].keys():
logger = self.config[self.LOGGERS][logger]
if handler in logger[self.HANDLERS]:
logger[self.HANDLERS].remove(handler)
self._remove_debug_only()
|
Remove any handlers with an attribute of debug_only that is True and
remove the references to said handlers from any loggers that are
referencing them.
|
entailment
|
def _remove_debug_only(self):
"""Iterate through each handler removing the invalid dictConfig key of
debug_only.
"""
LOGGER.debug('Removing debug only from handlers')
for handler in self.config[self.HANDLERS]:
if self.DEBUG_ONLY in self.config[self.HANDLERS][handler]:
del self.config[self.HANDLERS][handler][self.DEBUG_ONLY]
|
Iterate through each handler removing the invalid dictConfig key of
debug_only.
|
entailment
|
def as_dictionary(self):
"""
Convert this object to a dictionary with formatting appropriate for a PIF.
:returns: Dictionary with the content of this object formatted for a PIF.
"""
return {to_camel_case(i): Serializable._convert_to_dictionary(self.__dict__[i])
for i in self.__dict__ if self.__dict__[i] is not None}
|
Convert this object to a dictionary with formatting appropriate for a PIF.
:returns: Dictionary with the content of this object formatted for a PIF.
|
entailment
|
def _convert_to_dictionary(obj):
"""
Convert obj to a dictionary with formatting appropriate for a PIF. This function attempts to treat obj as
a Pio object and otherwise returns obj.
:param obj: Object to convert to a dictionary.
:returns: Input object as a dictionary or the original object.
"""
if isinstance(obj, list):
return [Serializable._convert_to_dictionary(i) for i in obj]
elif hasattr(obj, 'as_dictionary'):
return obj.as_dictionary()
else:
return obj
|
Convert obj to a dictionary with formatting appropriate for a PIF. This function attempts to treat obj as
a Pio object and otherwise returns obj.
:param obj: Object to convert to a dictionary.
:returns: Input object as a dictionary or the original object.
|
entailment
|
def _get_object(class_, obj):
"""
Helper function that returns an object, or if it is a dictionary, initializes it from class_.
:param class_: Class to use to instantiate object.
:param obj: Object to process.
:return: One or more objects.
"""
if isinstance(obj, list):
return [Serializable._get_object(class_, i) for i in obj]
elif isinstance(obj, dict):
return class_(**keys_to_snake_case(obj))
else:
return obj
|
Helper function that returns an object, or if it is a dictionary, initializes it from class_.
:param class_: Class to use to instantiate object.
:param obj: Object to process.
:return: One or more objects.
|
entailment
|
def damping(temp, relhum, freq, pres=101325):
"""
Calculates the damping factor for sound in dB/m
depending on temperature, humidity and sound frequency.
Source: http://www.sengpielaudio.com/LuftdaempfungFormel.htm
temp: Temperature in degrees celsius
relhum: Relative humidity as percentage, e.g. 50
freq: Sound frequency in herz
pres: Atmospheric pressure in kilopascal
"""
temp += 273.15 # convert to kelvin
pres = pres / 101325.0 # convert to relative pressure
c_humid = 4.6151 - 6.8346 * pow((273.15 / temp), 1.261)
hum = relhum * pow(10.0, c_humid) * pres
tempr = temp / 293.15 # convert to relative air temp (re 20 deg C)
frO = pres * (24.0 + 4.04e4 * hum * (0.02 + hum) / (0.391 + hum))
frN = (pres * pow(tempr, -0.5) * (9.0 + 280.0 * hum * math.exp(-4.17 *
(pow(tempr, (-1.0 / 3.0)) - 1.0))))
damp = (8.686 * freq * freq * (
1.84e-11 * (1.0 / pres) * math.sqrt(tempr) +
pow(tempr, -2.5) *
(
0.01275 * (math.exp(-2239.1 / temp) * 1.0 /
(frO + freq * freq / frO)) +
0.1068 * (
math.exp(-3352 / temp) * 1.0 /
(frN + freq * freq / frN)
)
)
)
)
return damp
|
Calculates the damping factor for sound in dB/m
depending on temperature, humidity and sound frequency.
Source: http://www.sengpielaudio.com/LuftdaempfungFormel.htm
temp: Temperature in degrees celsius
relhum: Relative humidity as percentage, e.g. 50
freq: Sound frequency in herz
pres: Atmospheric pressure in kilopascal
|
entailment
|
def total_level(source_levels):
"""
Calculates the total sound pressure level based on multiple source levels
"""
sums = 0.0
for l in source_levels:
if l is None:
continue
if l == 0:
continue
sums += pow(10.0, float(l) / 10.0)
level = 10.0 * math.log10(sums)
return level
|
Calculates the total sound pressure level based on multiple source levels
|
entailment
|
def total_rated_level(octave_frequencies):
"""
Calculates the A-rated total sound pressure level
based on octave band frequencies
"""
sums = 0.0
for band in OCTAVE_BANDS.keys():
if band not in octave_frequencies:
continue
if octave_frequencies[band] is None:
continue
if octave_frequencies[band] == 0:
continue
sums += pow(10.0, ((float(octave_frequencies[band]) + OCTAVE_BANDS[band][1]) / 10.0))
level = 10.0 * math.log10(sums)
return level
|
Calculates the A-rated total sound pressure level
based on octave band frequencies
|
entailment
|
def leq3(levels):
"""
Calculates the energy-equivalent (Leq3) value
given a regular measurement interval.
"""
n = float(len(levels))
sums = 0.0
if sum(levels) == 0.0:
return 0.0
for l in levels:
if l == 0:
continue
sums += pow(10.0, float(l) / 10.0)
leq3 = 10.0 * math.log10((1.0 / n) * sums)
leq3 = max(0.0, leq3)
return leq3
|
Calculates the energy-equivalent (Leq3) value
given a regular measurement interval.
|
entailment
|
def distant_level(reference_level, distance, reference_distance=1.0):
"""
Calculates the sound pressure level
in dependence of a distance
where a perfect ball-shaped source and spread is assumed.
reference_level: Sound pressure level in reference distance in dB
distance: Distance to calculate sound pressure level for, in meters
reference_distance: reference distance in meters (defaults to 1)
"""
rel_dist = float(reference_distance) / float(distance)
level = float(reference_level) + 20.0 * (math.log(rel_dist) / math.log(10))
return level
|
Calculates the sound pressure level
in dependence of a distance
where a perfect ball-shaped source and spread is assumed.
reference_level: Sound pressure level in reference distance in dB
distance: Distance to calculate sound pressure level for, in meters
reference_distance: reference distance in meters (defaults to 1)
|
entailment
|
def distant_total_damped_rated_level(
octave_frequencies,
distance,
temp,
relhum,
reference_distance=1.0):
"""
Calculates the damped, A-rated total sound pressure level
in a given distance, temperature and relative humidity
from octave frequency sound pressure levels in a reference distance
"""
damping_distance = distance - reference_distance
sums = 0.0
for band in OCTAVE_BANDS.keys():
if band not in octave_frequencies:
continue
if octave_frequencies[band] is None:
continue
# distance-adjusted level per band
distant_val = distant_level(
reference_level=float(octave_frequencies[band]),
distance=distance,
reference_distance=reference_distance
)
# damping
damp_per_meter = damping(
temp=temp,
relhum=relhum,
freq=OCTAVE_BANDS[band][0])
distant_val = distant_val - (damping_distance * damp_per_meter)
# applyng A-rating
distant_val += OCTAVE_BANDS[band][1]
sums += pow(10.0, (distant_val / 10.0))
level = 10.0 * math.log10(sums)
return level
|
Calculates the damped, A-rated total sound pressure level
in a given distance, temperature and relative humidity
from octave frequency sound pressure levels in a reference distance
|
entailment
|
def ASRS(self, params):
"""
ASRS [Ra,] Ra, Rc
ASRS [Ra,] Rb, #imm5_counting
Arithmetic shift right Rb by Rc or imm5_counting and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
if self.is_register(Rc):
# ASRS Ra, Ra, Rb
self.check_arguments(low_registers=(Ra, Rc))
self.match_first_two_parameters(Ra, Rb)
def ASRS_func():
# Set the C flag, or the last shifted out bit
if (self.register[Rc] > 0) and (self.register[Rb] & (1 << (self.register[Rc] - 1))):
self.set_APSR_flag_to_value('C', 1)
else:
self.set_APSR_flag_to_value('C', 0)
if self.register[Ra] & (1 << (self._bit_width - 1)):
self.register[Ra] = (self.register[Ra] >> self.register[Rc]) | (
int('1' * self.register[Rc], 2) << (self._bit_width - self.register[Rc]))
else:
self.register[Ra] = self.register[Ra] >> self.register[Rc]
self.set_NZ_flags(self.register[Ra])
else:
# ASRS Ra, Rb, #imm5_counting
self.check_arguments(low_registers=(Ra, Rb), imm5_counting=(Rc,))
shift_amount = self.check_immediate(Rc)
def ASRS_func():
# Set the C flag, or the last shifted out bit
if self.register[Rb] & (1 << (shift_amount - 1)):
self.set_APSR_flag_to_value('C', 1)
else:
self.set_APSR_flag_to_value('C', 0)
if self.register[Ra] & (1 << (self._bit_width - 1)):
self.register[Ra] = (self.register[Ra] >> shift_amount) | (
int('1' * shift_amount, 2) << (self._bit_width - shift_amount))
else:
self.register[Ra] = self.register[Rb] >> shift_amount
self.set_NZ_flags(self.register[Ra])
return ASRS_func
|
ASRS [Ra,] Ra, Rc
ASRS [Ra,] Rb, #imm5_counting
Arithmetic shift right Rb by Rc or imm5_counting and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
|
entailment
|
def LSLS(self, params):
"""
LSLS [Ra,] Ra, Rc
LSLS [Ra,] Rb, #imm5
Logical shift left Rb by Rc or imm5 and store the result in Ra
imm5 is [0, 31]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
if self.is_register(Rc):
# LSLS Ra, Ra, Rb
self.check_arguments(low_registers=(Ra, Rc))
self.match_first_two_parameters(Ra, Rb)
def LSLS_func():
# Set the C flag, or the last shifted out bit
if (self.register[Rc] < self._bit_width) and (self.register[Ra] & (1 << (self._bit_width - self.register[Rc]))):
self.set_APSR_flag_to_value('C', 1)
else:
self.set_APSR_flag_to_value('C', 0)
self.register[Ra] = self.register[Ra] << self.register[Rc]
self.set_NZ_flags(self.register[Ra])
else:
# LSLS Ra, Rb, #imm5
self.check_arguments(low_registers=(Ra, Rb), imm5=(Rc,))
shift_amount = self.check_immediate(Rc)
def LSLS_func():
# Set the C flag, or the last shifted out bit
if (shift_amount < self._bit_width) and (self.register[Rb] & (1 << (self._bit_width - shift_amount))):
self.set_APSR_flag_to_value('C', 1)
else:
self.set_APSR_flag_to_value('C', 0)
self.register[Ra] = self.register[Rb] << shift_amount
self.set_NZ_flags(self.register[Ra])
return LSLS_func
|
LSLS [Ra,] Ra, Rc
LSLS [Ra,] Rb, #imm5
Logical shift left Rb by Rc or imm5 and store the result in Ra
imm5 is [0, 31]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
|
entailment
|
def LSRS(self, params):
"""
LSRS [Ra,] Ra, Rc
LSRS [Ra,] Rb, #imm5_counting
Logical shift right Rb by Rc or imm5 and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
if self.is_register(Rc):
# LSRS Ra, Ra, Rb
self.check_arguments(low_registers=(Ra, Rc))
self.match_first_two_parameters(Ra, Rb)
def LSRS_func():
# Set the C flag, or the last shifted out bit
if (self.register[Rc] > 0) and (self.register[Rb] & (1 << (self.register[Rc] - 1))):
self.set_APSR_flag_to_value('C', 1)
else:
self.set_APSR_flag_to_value('C', 0)
self.register[Ra] = self.register[Ra] >> self.register[Rc]
self.set_NZ_flags(self.register[Ra])
else:
# LSRS Ra, Rb, #imm5_counting
self.check_arguments(low_registers=(Ra, Rb), imm5_counting=(Rc,))
shift_amount = self.check_immediate(Rc)
def LSRS_func():
# Set the C flag, or the last shifted out bit
if self.register[Rb] & (1 << (shift_amount - 1)):
self.set_APSR_flag_to_value('C', 1)
else:
self.set_APSR_flag_to_value('C', 0)
self.register[Ra] = self.register[Rb] >> shift_amount
self.set_NZ_flags(self.register[Ra])
return LSRS_func
|
LSRS [Ra,] Ra, Rc
LSRS [Ra,] Rb, #imm5_counting
Logical shift right Rb by Rc or imm5 and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
|
entailment
|
def RORS(self, params):
"""
RORS [Ra,] Ra, Rc
Rotate shift right Rb by Rc or imm5 and store the result in Ra
The first two operands must be the same register
Ra and Rc must be low registers
The first register is optional
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
# TODO implement this function
# TODO figure out the last shifted bit
# TODO figure out how to wrap bits around
raise iarm.exceptions.NotImplementedError
# RORS Ra, Ra, Rb
self.check_arguments(low_registers=(Ra, Rc))
self.match_first_two_parameters(Ra, Rb)
def RORS_func():
raise NotImplementedError
return RORS_func
|
RORS [Ra,] Ra, Rc
Rotate shift right Rb by Rc or imm5 and store the result in Ra
The first two operands must be the same register
Ra and Rc must be low registers
The first register is optional
|
entailment
|
def _parse_topic_table(self, xml, tds='title,created,comment,group', selector='//table[@class="olt"]//tr'):
"""
解析话题列表
:internal
:param xml: 页面XML
:param tds: 每列的含义,可以是title, created, comment, group, updated, author, time, rec
:param selector: 表在页面中的位置
:return:
"""
xml_results = xml.xpath(selector)
results = []
tds = tds.split(',')
for item in xml_results:
try:
result = {}
index = 0
for td in tds:
index += 1
if td == 'title':
xml_title = item.xpath('.//td[position()=%s]/a' % index)[0]
url = xml_title.get('href')
tid = int(slash_right(url))
title = xml_title.text
result.update({'id': tid, 'url': url, 'title': title})
elif td == 'created':
xml_created = item.xpath('.//td[position()=%s]/a' % index) \
or item.xpath('.//td[position()=%s]' % index)
created_at = xml_created[0].get('title')
result['created_at'] = created_at
elif td == 'comment':
xml_comment = item.xpath('.//td[position()=%s]/span' % index) \
or item.xpath('.//td[position()=%s]' % index)
comment_count = int(re.match(r'\d+', xml_comment[0].text).group())
result['comment_count'] = comment_count
elif td == 'group':
xml_group = item.xpath('.//td[position()=%s]/a' % index)[0]
group_url = xml_group.get('href')
group_alias = slash_right(group_url)
group_name = xml_group.text
result.update({'group_alias': group_alias, 'group_url': group_url, 'group_name': group_name})
elif td == 'author':
xml_author = item.xpath('.//td[position()=%s]/a' % index)[0]
author_url = xml_author.get('href')
author_alias = slash_right(author_url)
author_nickname = xml_author.text
result.update({
'author_url': author_url,
'author_alias': author_alias,
'author_nickname': author_nickname,
})
elif td == 'updated':
result['updated_at'] = item.xpath('.//td[position()=%s]/text()' % index)[0]
elif td == 'time':
result['time'] = item.xpath('.//td[position()=%s]/text()' % index)[0]
elif td == 'rec':
xml_rec = item.xpath('.//td[position()=%s]//a[@class="lnk-remove"]/@href' % (index - 1))[0]
result['rec_id'] = re.search(r'rec_id=(\d+)', xml_rec).groups()[0]
results.append(result)
except Exception as e:
self.api.api.logger.exception('parse topic table exception: %s' % e)
return results
|
解析话题列表
:internal
:param xml: 页面XML
:param tds: 每列的含义,可以是title, created, comment, group, updated, author, time, rec
:param selector: 表在页面中的位置
:return:
|
entailment
|
def search_groups(self, keyword, start=0):
"""
搜索小组
:param keyword: 搜索的关键字
:param start: 翻页
:return: 含总数的列表
"""
xml = self.api.xml(API_GROUP_SEARCH_GROUPS % (start, keyword))
xml_results = xml.xpath('//div[@class="groups"]/div[@class="result"]')
results = []
for item in xml_results:
try:
url = item.xpath('.//h3/a/@href')[0]
info = item.xpath('.//div[@class="content"]/div[@class="info"]/text()')[0].strip(' ')
onclick = item.xpath('.//h3/a/@onclick')[0]
meta = {
'icon': item.xpath('.//img/@src')[0],
'id': re.search(r'sid[^\d]+(\d+)', onclick).groups()[0],
'url': url,
'alias': url.rstrip('/').rsplit('/', 1)[1],
'name': item.xpath('.//h3/a/text()')[0],
'user_count': int(re.match(r'\d+', info).group()),
'user_alias': re.search(r'个(.+)\s*在此', info).groups()[0],
}
results.append(meta)
except Exception as e:
self.api.logger.exception('parse search groups result error: %s' % e)
return build_list_result(results, xml)
|
搜索小组
:param keyword: 搜索的关键字
:param start: 翻页
:return: 含总数的列表
|
entailment
|
def list_joined_groups(self, user_alias=None):
"""
已加入的小组列表
:param user_alias: 用户名,默认为当前用户名
:return: 单页列表
"""
xml = self.api.xml(API_GROUP_LIST_JOINED_GROUPS % (user_alias or self.api.user_alias))
xml_results = xml.xpath('//div[@class="group-list group-cards"]/ul/li')
results = []
for item in xml_results:
try:
icon = item.xpath('.//img/@src')[0]
link = item.xpath('.//div[@class="title"]/a')[0]
url = link.get('href')
name = link.text
alias = url.rstrip('/').rsplit('/', 1)[1]
user_count = int(item.xpath('.//span[@class="num"]/text()')[0][1:-1])
results.append({
'icon': icon,
'alias': alias,
'url': url,
'name': name,
'user_count': user_count,
})
except Exception as e:
self.api.logger.exception('parse joined groups exception: %s' % e)
return build_list_result(results, xml)
|
已加入的小组列表
:param user_alias: 用户名,默认为当前用户名
:return: 单页列表
|
entailment
|
def join_group(self, group_alias, message=None):
"""
加入小组
:param group_alias: 小组ID
:param message: 如果要验证,留言信息
:return: 枚举
- joined: 加入成功
- waiting: 等待审核
- initial: 加入失败
"""
xml = self.api.xml(API_GROUP_GROUP_HOME % group_alias, params={
'action': 'join',
'ck': self.api.ck(),
})
misc = xml.xpath('//div[@class="group-misc"]')[0]
intro = misc.xpath('string(.)') or ''
if intro.find('退出小组') > -1:
return 'joined'
elif intro.find('你已经申请加入小组') > -1:
return 'waiting'
elif intro.find('申请加入小组') > -1:
res = self.api.xml(API_GROUP_GROUP_HOME % group_alias, 'post', data={
'ck': self.api.ck(),
'action': 'request_join',
'message': message,
'send': '发送',
})
misc = res.xpath('//div[@class="group-misc"]')[0]
intro = misc.xpath('string(.)') or ''
if intro.find('你已经申请加入小组') > -1:
return 'waiting'
else:
return 'initial'
else:
return 'initial'
|
加入小组
:param group_alias: 小组ID
:param message: 如果要验证,留言信息
:return: 枚举
- joined: 加入成功
- waiting: 等待审核
- initial: 加入失败
|
entailment
|
def leave_group(self, group_alias):
"""
退出小组
:param group_alias: 小组ID
:return:
"""
return self.api.req(API_GROUP_GROUP_HOME % group_alias, params={
'action': 'quit',
'ck': self.api.ck(),
})
|
退出小组
:param group_alias: 小组ID
:return:
|
entailment
|
def search_topics(self, keyword, sort='relevance', start=0):
"""
搜索话题
:param keyword: 关键字
:param sort: 排序方式 relevance/newest
:param start: 翻页
:return: 带总数的列表
"""
xml = self.api.xml(API_GROUP_SEARCH_TOPICS % (start, sort, keyword))
return build_list_result(self._parse_topic_table(xml), xml)
|
搜索话题
:param keyword: 关键字
:param sort: 排序方式 relevance/newest
:param start: 翻页
:return: 带总数的列表
|
entailment
|
def list_topics(self, group_alias, _type='', start=0):
"""
小组内话题列表
:param group_alias: 小组ID
:param _type: 类型 默认最新,hot:最热
:param start: 翻页
:return: 带下一页的列表
"""
xml = self.api.xml(API_GROUP_LIST_GROUP_TOPICS % group_alias, params={
'start': start,
'type': _type,
})
return build_list_result(self._parse_topic_table(xml, 'title,author,comment,updated'), xml)
|
小组内话题列表
:param group_alias: 小组ID
:param _type: 类型 默认最新,hot:最热
:param start: 翻页
:return: 带下一页的列表
|
entailment
|
def list_joined_topics(self, start=0):
"""
已加入的所有小组的话题列表
:param start: 翻页
:return: 带下一页的列表
"""
xml = self.api.xml(API_GROUP_HOME, params={'start': start})
return build_list_result(self._parse_topic_table(xml, 'title,comment,created,group'), xml)
|
已加入的所有小组的话题列表
:param start: 翻页
:return: 带下一页的列表
|
entailment
|
def list_user_topics(self, start=0):
"""
发表的话题
:param start: 翻页
:return: 带下一页的列表
"""
xml = self.api.xml(API_GROUP_LIST_USER_PUBLISHED_TOPICS % self.api.user_alias, params={'start': start})
return build_list_result(self._parse_topic_table(xml, 'title,comment,created,group'), xml)
|
发表的话题
:param start: 翻页
:return: 带下一页的列表
|
entailment
|
def list_commented_topics(self, start=0):
"""
回复过的话题列表
:param start: 翻页
:return: 带下一页的列表
"""
xml = self.api.xml(API_GROUP_LIST_USER_COMMENTED_TOPICS % self.api.user_alias, params={'start': start})
return build_list_result(self._parse_topic_table(xml, 'title,comment,time,group'), xml)
|
回复过的话题列表
:param start: 翻页
:return: 带下一页的列表
|
entailment
|
def list_liked_topics(self, user_alias=None, start=0):
"""
喜欢过的话题
:param user_alias: 指定用户,默认当前
:param start: 翻页
:return: 带下一页的列表
"""
user_alias = user_alias or self.api.user_alias
xml = self.api.xml(API_GROUP_LIST_USER_LIKED_TOPICS % user_alias, params={'start': start})
return build_list_result(self._parse_topic_table(xml, 'title,comment,time,group'), xml)
|
喜欢过的话题
:param user_alias: 指定用户,默认当前
:param start: 翻页
:return: 带下一页的列表
|
entailment
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.