signature: stringlengths, 8 to 3.44k
body: stringlengths, 0 to 1.41M
docstring: stringlengths, 1 to 122k
id: stringlengths, 5 to 17
def _get_class(self):
class_parts = [<EOL>self._prefix,<EOL>self._known_keys[_InstrumentationKnownStatusKeys.CLASS]<EOL>]<EOL>return '<STR_LIT:.>'.join(filter(None, class_parts))<EOL>
Gets the class name of the test method for the instrumentation method block. Returns: A string containing the class name of the instrumentation test method's test class, or an empty string if no name was parsed. If a prefix was specified, then the prefix will be prepended to the class name.
f7529:c8:m2
def _get_full_name(self):
full_name_parts = [self._get_class(), self._get_name()]<EOL>return '<STR_LIT:#>'.join(filter(None, full_name_parts))<EOL>
Gets the qualified name of the test method corresponding to the instrumentation block. Returns: A string containing the fully qualified name of the instrumentation test method. If parts are missing, the name degrades gracefully to whatever parts are available.
f7529:c8:m3
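Example (illustrative): the two getters above join the prefix, class and method parts with '.' and '#', dropping empty parts; the names below are hypothetical.

class_name = '.'.join(filter(None, ['com.example', 'MyInstrumentationTest']))
full_name = '#'.join(filter(None, [class_name, 'testMethod']))
print(full_name)   # com.example.MyInstrumentationTest#testMethod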
def _get_details(self):
detail_parts = [self._get_full_name(), self._error_message]<EOL>return '<STR_LIT:\n>'.join(filter(None, detail_parts))<EOL>
Gets the output for the detail section of the TestResultRecord. Returns: A string to set for a TestResultRecord's details.
f7529:c8:m4
def _get_extras(self):
<EOL>extra_parts = ['<STR_LIT>']<EOL>for value in self._unknown_keys.values():<EOL><INDENT>extra_parts.append(value)<EOL><DEDENT>extra_parts.append(<EOL>self._known_keys[_InstrumentationKnownStatusKeys.STREAM])<EOL>extra_parts.append(<EOL>self._known_keys[_InstrumentationKnownResultKeys.SHORTMSG])<EOL>extra_parts.append(<EOL>self._known_keys[_InstrumentationKnownResultKeys.LONGMSG])<EOL>extra_parts.append(<EOL>self._known_keys[_InstrumentationKnownStatusKeys.ERROR])<EOL>if self._known_keys[<EOL>_InstrumentationKnownStatusKeys.STACK] not in self._known_keys[<EOL>_InstrumentationKnownStatusKeys.STREAM]:<EOL><INDENT>extra_parts.append(<EOL>self._known_keys[_InstrumentationKnownStatusKeys.STACK])<EOL><DEDENT>return '<STR_LIT:\n>'.join(filter(None, extra_parts))<EOL>
Gets the output for the extras section of the TestResultRecord. Returns: A string to set for a TestResultRecord's extras.
f7529:c8:m5
def _is_failed(self):
if self._status_code in _InstrumentationStatusCodeCategories.FAIL:<EOL><INDENT>return True<EOL><DEDENT>elif (self._known_keys[_InstrumentationKnownStatusKeys.STACK]<EOL>and self._status_code !=<EOL>_InstrumentationStatusCodes.ASSUMPTION_FAILURE):<EOL><INDENT>return True<EOL><DEDENT>elif self._known_keys[_InstrumentationKnownStatusKeys.ERROR]:<EOL><INDENT>return True<EOL><DEDENT>elif self._known_keys[_InstrumentationKnownResultKeys.SHORTMSG]:<EOL><INDENT>return True<EOL><DEDENT>elif self._known_keys[_InstrumentationKnownResultKeys.LONGMSG]:<EOL><INDENT>return True<EOL><DEDENT>else:<EOL><INDENT>return False<EOL><DEDENT>
Determines if the test corresponding to the instrumentation block failed. This method can not be used to tell if a test method passed and should not be used for such a purpose. Returns: A boolean indicating if the test method failed.
f7529:c8:m6
def has_completed_result_block_format(self, error_message):
extras = self._get_extras()<EOL>if _InstrumentationResultSignals.PASS in extras:<EOL><INDENT>return True<EOL><DEDENT>elif _InstrumentationResultSignals.FAIL in extras:<EOL><INDENT>return False<EOL><DEDENT>else:<EOL><INDENT>raise signals.TestError(details=error_message, extras=extras)<EOL><DEDENT>
Checks the instrumentation result block for a signal indicating normal completion. Args: error_message: string, the error message to give if the instrumentation run did not complete successfully. Returns: A boolean indicating whether the instrumentation run passed or failed overall. Raises: signals.TestError: Error raised if the instrumentation run did not complete because of a crash or some other issue.
f7529:c8:m8
def _previous_block_never_completed(self, current_block, previous_block,<EOL>new_state):
if previous_block:<EOL><INDENT>previously_timing_block = (<EOL>previous_block.status_code in<EOL>_InstrumentationStatusCodeCategories.TIMING)<EOL>currently_new_block = (<EOL>current_block.status_code == _InstrumentationStatusCodes.START<EOL>or new_state == _InstrumentationBlockStates.RESULT)<EOL>return all([previously_timing_block, currently_new_block])<EOL><DEDENT>else:<EOL><INDENT>return False<EOL><DEDENT>
Checks if the previous instrumentation method block completed. Args: current_block: _InstrumentationBlock, the current instrumentation block to check for being a different instrumentation test method. previous_block: _InstrumentationBlock, the previous instrumentation block to check for an incomplete status. new_state: _InstrumentationBlockStates, the next state for the parser, used to check for the instrumentation run ending with an incomplete test. Returns: A boolean indicating whether the previous instrumentation block completed executing.
f7529:c9:m0
def _create_formatters(self, instrumentation_block, new_state):
formatters = []<EOL>if self._previous_block_never_completed(<EOL>current_block=instrumentation_block,<EOL>previous_block=instrumentation_block.<EOL>previous_instrumentation_block,<EOL>new_state=new_state):<EOL><INDENT>instrumentation_block.previous_instrumentation_block.set_error_message(<EOL>self.DEFAULT_INSTRUMENTATION_ERROR_MESSAGE)<EOL>formatters.append(<EOL>_InstrumentationBlockFormatter(<EOL>instrumentation_block.previous_instrumentation_block))<EOL><DEDENT>if not instrumentation_block.is_empty:<EOL><INDENT>formatters.append(<EOL>_InstrumentationBlockFormatter(instrumentation_block))<EOL><DEDENT>return formatters<EOL>
Creates the _InstrumentationBlockFormatters for outputting the instrumentation method blocks that have finished parsing. Args: instrumentation_block: _InstrumentationBlock, the current instrumentation method block to create formatters based upon. new_state: _InstrumentationBlockState, the next state that the parser will transition to. Returns: A list of the formatters that need to create and add TestResultRecords to the test results.
f7529:c9:m1
def _transition_instrumentation_block(<EOL>self,<EOL>instrumentation_block,<EOL>new_state=_InstrumentationBlockStates.UNKNOWN):
formatters = self._create_formatters(instrumentation_block, new_state)<EOL>for formatter in formatters:<EOL><INDENT>test_record = formatter.create_test_record(self.TAG)<EOL>if test_record:<EOL><INDENT>self.results.add_record(test_record)<EOL>self.summary_writer.dump(test_record.to_dict(),<EOL>records.TestSummaryEntryType.RECORD)<EOL><DEDENT><DEDENT>return instrumentation_block.transition_state(new_state=new_state)<EOL>
Transitions and finishes the current instrumentation block. Args: instrumentation_block: _InstrumentationBlock, the current instrumentation block to finish. new_state: _InstrumentationBlockState, the next state for the parser to transition to. Returns: The new instrumentation block to use for storing parsed instrumentation output.
f7529:c9:m2
def _parse_method_block_line(self, instrumentation_block, line):
if line.startswith(_InstrumentationStructurePrefixes.STATUS):<EOL><INDENT>instrumentation_block.set_key(<EOL>_InstrumentationStructurePrefixes.STATUS, line)<EOL>return instrumentation_block<EOL><DEDENT>elif line.startswith(_InstrumentationStructurePrefixes.STATUS_CODE):<EOL><INDENT>instrumentation_block.set_status_code(line)<EOL>return self._transition_instrumentation_block(<EOL>instrumentation_block)<EOL><DEDENT>elif line.startswith(_InstrumentationStructurePrefixes.RESULT):<EOL><INDENT>instrumentation_block.set_key(<EOL>_InstrumentationStructurePrefixes.RESULT, line)<EOL>return self._parse_result_line(<EOL>self._transition_instrumentation_block(<EOL>instrumentation_block,<EOL>new_state=_InstrumentationBlockStates.RESULT,<EOL>),<EOL>line,<EOL>)<EOL><DEDENT>else:<EOL><INDENT>instrumentation_block.add_value(line)<EOL>return instrumentation_block<EOL><DEDENT>
Parses the instrumentation method block's line. Args: instrumentation_block: _InstrumentationBlock, the current instrumentation method block. line: string, the raw instrumentation output line to parse. Returns: The next instrumentation block, which should be used to continue parsing instrumentation output.
f7529:c9:m3
def _parse_result_block_line(self, instrumentation_block, line):
instrumentation_block.add_value(line)<EOL>return instrumentation_block<EOL>
Parses the instrumentation result block's line. Args: instrumentation_block: _InstrumentationBlock, the instrumentation result block for the instrumentation run. line: string, the raw instrumentation output to add to the instrumentation result block's _InstrumentationResultBlock object. Returns: The instrumentation result block for the instrumentation run.
f7529:c9:m4
def _parse_unknown_block_line(self, instrumentation_block, line):
if line.startswith(_InstrumentationStructurePrefixes.STATUS):<EOL><INDENT>return self._parse_method_block_line(<EOL>self._transition_instrumentation_block(<EOL>instrumentation_block,<EOL>new_state=_InstrumentationBlockStates.METHOD,<EOL>),<EOL>line,<EOL>)<EOL><DEDENT>elif (line.startswith(_InstrumentationStructurePrefixes.RESULT)<EOL>or _InstrumentationStructurePrefixes.FAILED in line):<EOL><INDENT>return self._parse_result_block_line(<EOL>self._transition_instrumentation_block(<EOL>instrumentation_block,<EOL>new_state=_InstrumentationBlockStates.RESULT,<EOL>),<EOL>line,<EOL>)<EOL><DEDENT>else:<EOL><INDENT>instrumentation_block.add_value(line)<EOL>return instrumentation_block<EOL><DEDENT>
Parses a line from the instrumentation output from the UNKNOWN parser state. Args: instrumentation_block: _InstrumentationBlock, the current instrumentation block, where the correct categorization is not yet known. line: string, the raw instrumentation output line used to determine the correct categorization. Returns: The next instrumentation block to continue parsing with. Usually, this is the same instrumentation block but with the state transitioned appropriately.
f7529:c9:m5
def _parse_line(self, instrumentation_block, line):
if instrumentation_block.state == _InstrumentationBlockStates.METHOD:<EOL><INDENT>return self._parse_method_block_line(instrumentation_block, line)<EOL><DEDENT>elif instrumentation_block.state == _InstrumentationBlockStates.RESULT:<EOL><INDENT>return self._parse_result_block_line(instrumentation_block, line)<EOL><DEDENT>else:<EOL><INDENT>return self._parse_unknown_block_line(instrumentation_block, line)<EOL><DEDENT>
Parses an arbitrary line from the instrumentation output based upon the current parser state. Args: instrumentation_block: _InstrumentationBlock, an instrumentation block with any of the possible parser states. line: string, the raw instrumentation output line to parse appropriately. Returns: The next instrumentation block to continue parsing with.
f7529:c9:m6
def _finish_parsing(self, instrumentation_block):
formatter = _InstrumentationBlockFormatter(instrumentation_block)<EOL>return formatter.has_completed_result_block_format(<EOL>self.DEFAULT_INSTRUMENTATION_ERROR_MESSAGE)<EOL>
Finishes parsing the instrumentation result block for the final instrumentation run status. Args: instrumentation_block: _InstrumentationBlock, the instrumentation result block for the instrumentation run. Potentially, this could actually be a method block if the instrumentation output is malformed. Returns: A boolean indicating whether the instrumentation run completed with all the tests passing. Raises: signals.TestError: Error raised if the instrumentation failed to complete with either a pass or fail status.
f7529:c9:m7
def parse_instrumentation_options(self, parameters=None):
if parameters is None:<EOL><INDENT>return {}<EOL><DEDENT>filtered_parameters = {}<EOL>for parameter_key, parameter_value in parameters.items():<EOL><INDENT>if parameter_key.startswith(<EOL>self.DEFAULT_INSTRUMENTATION_OPTION_PREFIX):<EOL><INDENT>option_key = parameter_key[len(<EOL>self.DEFAULT_INSTRUMENTATION_OPTION_PREFIX):]<EOL>filtered_parameters[option_key] = parameter_value<EOL><DEDENT><DEDENT>return filtered_parameters<EOL>
Returns the options for the instrumentation test from user_params. By default, this method assumes that the correct instrumentation options all start with DEFAULT_INSTRUMENTATION_OPTION_PREFIX. Args: parameters: dict, the key value pairs representing an assortment of parameters including instrumentation options. Usually, this argument will be from self.user_params. Returns: A dictionary of options/parameters for the instrumentation test.
f7529:c9:m8
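A minimal standalone sketch of the prefix filtering described above; the prefix string 'instrumentation_option_' is an assumed example, not the class constant.

def parse_instrumentation_options(parameters=None, prefix='instrumentation_option_'):
    # Keep only keys that start with the prefix and strip the prefix off.
    if parameters is None:
        return {}
    return {key[len(prefix):]: value
            for key, value in parameters.items()
            if key.startswith(prefix)}

user_params = {'instrumentation_option_annotation': 'SmallTest', 'unrelated': 1}
print(parse_instrumentation_options(user_params))  # {'annotation': 'SmallTest'}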
def _parse_logline_timestamp(t):
date, time = t.split('<STR_LIT:U+0020>')<EOL>month, day = date.split('<STR_LIT:->')<EOL>h, m, s = time.split('<STR_LIT::>')<EOL>s, ms = s.split('<STR_LIT:.>')<EOL>return (month, day, h, m, s, ms)<EOL>
Parses a logline timestamp into a tuple. Args: t: Timestamp in logline format. Returns: An iterable of date and time elements in the order of month, day, hour, minute, second, microsecond.
f7530:m0
def logline_timestamp_comparator(t1, t2):
dt1 = _parse_logline_timestamp(t1)<EOL>dt2 = _parse_logline_timestamp(t2)<EOL>for u1, u2 in zip(dt1, dt2):<EOL><INDENT>if u1 < u2:<EOL><INDENT>return -<NUM_LIT:1><EOL><DEDENT>elif u1 > u2:<EOL><INDENT>return <NUM_LIT:1><EOL><DEDENT><DEDENT>return <NUM_LIT:0><EOL>
Comparator for timestamps in logline format. Args: t1: Timestamp in logline format. t2: Timestamp in logline format. Returns: -1 if t1 < t2; 1 if t1 > t2; 0 if t1 == t2.
f7530:m2
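A self-contained sketch of the timestamp parsing and comparison above, assuming the 'MM-DD HH:MM:SS.mmm' layout implied by the split logic.

def parse_logline_timestamp(t):
    # Split 'MM-DD HH:MM:SS.mmm' into its zero-padded string components.
    date, clock = t.split(' ')
    month, day = date.split('-')
    h, m, s = clock.split(':')
    s, ms = s.split('.')
    return (month, day, h, m, s, ms)

def logline_timestamp_comparator(t1, t2):
    # Field-by-field comparison works because all fields are zero-padded strings.
    for u1, u2 in zip(parse_logline_timestamp(t1), parse_logline_timestamp(t2)):
        if u1 < u2:
            return -1
        if u1 > u2:
            return 1
    return 0

print(logline_timestamp_comparator('05-25 10:34:02.123', '05-25 10:35:00.000'))  # -1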
def epoch_to_log_line_timestamp(epoch_time, time_zone=None):
s, ms = divmod(epoch_time, <NUM_LIT:1000>)<EOL>d = datetime.datetime.fromtimestamp(s, tz=time_zone)<EOL>return d.strftime('<STR_LIT>') + str(ms)<EOL>
Converts an epoch timestamp in ms to log line timestamp format, which is readable for humans. Args: epoch_time: integer, an epoch timestamp in ms. time_zone: instance of tzinfo, time zone information. Using pytz rather than python 3.2 time_zone implementation for python 2 compatibility reasons. Returns: A string that is the corresponding timestamp in log line timestamp format.
f7530:m4
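A sketch of the epoch-milliseconds conversion, assuming an '%m-%d %H:%M:%S.' strftime pattern; the actual format literal is elided in the body above.

import datetime

def epoch_to_log_line_timestamp(epoch_time_ms, time_zone=None):
    # Split off the millisecond remainder, then format the whole seconds.
    s, ms = divmod(epoch_time_ms, 1000)
    d = datetime.datetime.fromtimestamp(s, tz=time_zone)
    return d.strftime('%m-%d %H:%M:%S.') + str(ms)

print(epoch_to_log_line_timestamp(1590000000123))  # e.g. '05-20 18:40:00.123' (local time)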
def get_log_line_timestamp(delta=None):
return _get_timestamp('<STR_LIT>', delta)<EOL>
Returns a timestamp in the format used by log lines. Default is current time. If a delta is set, the return value will be the current time offset by delta seconds. Args: delta: Number of seconds to offset from current time; can be negative. Returns: A timestamp in log line format with an offset.
f7530:m5
def get_log_file_timestamp(delta=None):
return _get_timestamp('<STR_LIT>', delta)<EOL>
Returns a timestamp in the format used for log file names. Default is current time. If a delta is set, the return value will be the current time offset by delta seconds. Args: delta: Number of seconds to offset from current time; can be negative. Returns: A timestamp in log file name format with an offset.
f7530:m6
def normalize_log_line_timestamp(log_line_timestamp):
norm_tp = log_line_timestamp.replace('<STR_LIT:U+0020>', '<STR_LIT:_>')<EOL>norm_tp = norm_tp.replace('<STR_LIT::>', '<STR_LIT:->')<EOL>return norm_tp<EOL>
Replace special characters in log line timestamp with normal characters. Args: log_line_timestamp: A string in the log line timestamp format. Obtained with get_log_line_timestamp. Returns: A string representing the same time as input timestamp, but without special characters.
f7530:m11
def values():
result = {}<EOL>try:<EOL><INDENT>span = g.get("<STR_LIT>") if "<STR_LIT>" in g else g<EOL>for header in b3_headers:<EOL><INDENT>result[header] = span.get(header)<EOL><DEDENT><DEDENT>except RuntimeError:<EOL><INDENT>for header in b3_headers:<EOL><INDENT>result[header] = None<EOL><DEDENT><DEDENT>return result<EOL>
Get the full current set of B3 values. :return: A dict containing the keys "X-B3-TraceId", "X-B3-ParentSpanId", "X-B3-SpanId", "X-B3-Sampled" and "X-B3-Flags" for the current span or subspan. NB some of the values are likely to be None, but all keys will be present.
f7534:m0
def start_span(request_headers=None):
global debug<EOL>try:<EOL><INDENT>headers = request_headers if request_headers else request.headers<EOL><DEDENT>except RuntimeError:<EOL><INDENT>headers = {}<EOL><DEDENT>trace_id = headers.get(b3_trace_id)<EOL>parent_span_id = headers.get(b3_parent_span_id)<EOL>span_id = headers.get(b3_span_id)<EOL>sampled = headers.get(b3_sampled)<EOL>flags = headers.get(b3_flags)<EOL>root_span = not trace_id<EOL>setattr(g, b3_trace_id, trace_id or _generate_identifier())<EOL>setattr(g, b3_parent_span_id, parent_span_id)<EOL>setattr(g, b3_span_id, span_id or g.get(b3_trace_id))<EOL>setattr(g, b3_sampled, sampled)<EOL>setattr(g, b3_flags, "<STR_LIT:1>" if debug else flags)<EOL>_info("<STR_LIT>" if trace_id else "<STR_LIT>")<EOL>_log.debug("<STR_LIT>".format(values=values()))<EOL>
Collects incoming B3 headers and sets up values for this request as needed. The collected/computed values are stored on the application context g using the defined http header names as keys. :param request_headers: Incoming request headers can be passed explicitly. If not passed, Flask request.headers will be used. This enables you to pass this function to Flask.before_request().
f7534:m1
def end_span(response=None):
_end_subspan()<EOL>_info("<STR_LIT>")<EOL>return response<EOL>
Logs the end of a span. This function can be passed to Flask.after_request() if you'd like a log message to confirm the end of a span. :param response: If this function is passed to Flask.after_request(), this will be passed by the framework. :return: the response parameter is returned as passed.
f7534:m2
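A minimal wiring sketch for the two hooks above, assuming the functions live in a module importable as b3 (the import path is an assumption).

from flask import Flask
import b3  # hypothetical import path for start_span/end_span

app = Flask(__name__)
app.before_request(b3.start_span)   # collect/propagate incoming B3 headers
app.after_request(b3.end_span)      # log the span end; the response is returned unchanged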
def span(route):
@wraps(route)<EOL>def route_decorator(*args, **kwargs):<EOL><INDENT>start_span()<EOL>try:<EOL><INDENT>return route(*args, **kwargs)<EOL><DEDENT>finally:<EOL><INDENT>end_span()<EOL><DEDENT><DEDENT>return route_decorator<EOL>
Optional decorator for Flask routes. If you don't want to trace all routes using `Flask.before_request()' and 'Flask.after_request()' you can use this decorator as an alternative way to handle incoming B3 headers: @app.route('/instrumented') @span def instrumented(): ... ... ... NB @span needs to come after (not before) @app.route.
f7534:m3
def _start_subspan(headers=None):
b3 = values()<EOL>g.subspan = {<EOL>b3_trace_id: b3[b3_trace_id],<EOL>b3_span_id: _generate_identifier(),<EOL>b3_parent_span_id: b3[b3_span_id],<EOL>b3_sampled: b3[b3_sampled],<EOL>b3_flags: b3[b3_flags],<EOL>}<EOL>result = dict(headers or {})<EOL>result.update({<EOL>b3_trace_id: g.subspan[b3_trace_id],<EOL>b3_span_id: g.subspan[b3_span_id],<EOL>b3_parent_span_id: g.subspan[b3_parent_span_id],<EOL>})<EOL>if g.subspan[b3_sampled]:<EOL><INDENT>result[b3_sampled] = g.subspan[b3_sampled]<EOL><DEDENT>if g.subspan[b3_flags]:<EOL><INDENT>result[b3_flags] = g.subspan[b3_flags]<EOL><DEDENT>_info("<STR_LIT>")<EOL>_log.debug("<STR_LIT>".format(b3_headers=values()))<EOL>_log.debug("<STR_LIT>".format(b3_headers=result))<EOL>return result<EOL>
Sets up a new span to contact a downstream service. This is used when making a downstream service call. It returns a dict containing the required sub-span headers. Each downstream call you make is handled as a new span, so call this every time you need to contact another service. This temporarily updates what's returned by values() to match the sub-span, so it can also be used when calling e.g. a database that doesn't support B3. You'll still be able to record the client side of an interaction, even if the downstream server doesn't use the propagated trace information. You'll need to call end_subspan when you're done. You can do this using the `SubSpan` class: with SubSpan([headers]) as headers_b3: ... log.debug("Client start: calling downstream service") ... requests.get(<downstream service>, headers=headers_b3) ... log.debug("Client receive: downstream service responded") For the specification, see: https://github.com/openzipkin/b3-propagation :param headers: The headers dict. Headers will be added to this as needed. :return: A dict containing header values for a downstream request. This can be passed directly to e.g. requests.get(...).
f7534:m4
def _end_subspan():
try:<EOL><INDENT>if g.get("<STR_LIT>"):<EOL><INDENT>_info("<STR_LIT>")<EOL>g.pop("<STR_LIT>", None)<EOL><DEDENT><DEDENT>except RuntimeError:<EOL><INDENT>pass<EOL><DEDENT>
Removes the headers for a sub-span. You should call this in e.g. a finally block when you have finished making a downstream service call. For the specification, see: https://github.com/openzipkin/b3-propagation
f7534:m5
def _generate_identifier():
bit_length = <NUM_LIT:64><EOL>byte_length = int(bit_length / <NUM_LIT:8>)<EOL>identifier = os.urandom(byte_length)<EOL>return hexlify(identifier).decode('<STR_LIT:ascii>')<EOL>
Generates a new, random identifier in B3 format. :return: A 64-bit random identifier, rendered as a hex String.
f7534:m6
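A standalone sketch of the identifier scheme: 64 random bits rendered as a hex string.

import os
from binascii import hexlify

def generate_b3_id(bit_length=64):
    # 64 bits -> 8 random bytes -> 16 lowercase hex characters.
    return hexlify(os.urandom(bit_length // 8)).decode('ascii')

print(generate_b3_id())  # e.g. 'a1b2c3d4e5f60718'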
def _info(message):
span = values()<EOL>_log.debug(message + "<STR_LIT>".format(<EOL>span=span.get(b3_span_id),<EOL>trace=span.get(b3_trace_id),<EOL>parent=span.get(b3_parent_span_id),<EOL>))<EOL>
Convenience function to log current span values.
f7534:m7
def replace(self, infile):
gf = infile[<NUM_LIT>:]<EOL>same_size_index = []<EOL>while len(same_size_index) <= <NUM_LIT:1>:<EOL><INDENT>index = random.randint(<NUM_LIT:0>,len(gf)-<NUM_LIT:1>)<EOL>index_len = len(gf[index])<EOL>same_size_index = [i for (i,g) in enumerate(gf) if len(g) == index_len]<EOL><DEDENT>else:<EOL><INDENT>same_size_index = random.choice(same_size_index[:])<EOL><DEDENT>gf[index], gf[same_size_index] = gf[same_size_index], gf[index]<EOL>return infile[:<NUM_LIT>] + gf<EOL>
Replace: swaps a byte sequence at an arbitrary position with another arbitrary byte sequence of the same size
f7537:c0:m8
def increase(self, infile):
gf = infile[<NUM_LIT>:]<EOL>index = gf.index(random.choice(gf))<EOL>index_len = len(gf[index])<EOL>large_size_index = random.choice([gf.index(g) for g in gf if len(g) > index_len])<EOL>gf[index], gf[large_size_index] = gf[large_size_index], gf[index]<EOL>return infile[:<NUM_LIT>] + gf<EOL>
Increase: swaps a byte sequence at an arbitrary position with another arbitrary byte sequence of a larger size
f7537:c0:m9
def decrease(self, infile):
gf = infile[<NUM_LIT>:]<EOL>try:<EOL><INDENT>index = random.randint(len(gf)-<NUM_LIT:1>, <NUM_LIT>)<EOL><DEDENT>except ValueError:<EOL><INDENT>return infile<EOL><DEDENT>gf = gf[:index] + gf[index+<NUM_LIT:1>:]<EOL>return infile[:<NUM_LIT>] + gf<EOL>
Decrease: deletes a byte sequence at an arbitrary position
f7537:c0:m10
def swap(self, infile):
gf = infile[<NUM_LIT>:]<EOL>index = gf.index(random.choice(gf))<EOL>another = gf.index(random.choice(gf))<EOL>gf[index], gf[another] = gf[another], gf[index]<EOL>return infile[:<NUM_LIT>] + gf<EOL>
Swap: swaps a byte sequence at an arbitrary position with a byte sequence at another arbitrary position
f7537:c0:m11
def changiling(self, infile):
gf = infile[<NUM_LIT>:]<EOL>baby, fetch = (self.word_toaster() for _ in range(<NUM_LIT:2>))<EOL>gf = [g.replace(baby, fetch) for g in gf]<EOL>return infile[:<NUM_LIT>] + gf<EOL>
Changiling: replaces an arbitrary byte character with another arbitrary byte character
f7537:c0:m12
def main(stack_name, template, mustache_variables):
template_data = _parse_template(template, mustache_variables)<EOL>params = {<EOL>'<STR_LIT>': stack_name,<EOL>'<STR_LIT>': template_data<EOL>}<EOL>try:<EOL><INDENT>if _stack_exists(stack_name):<EOL><INDENT>print('<STR_LIT>'.format(stack_name))<EOL>stack_result = cf.update_stack(<EOL>**params,<EOL>Capabilities=['<STR_LIT>', '<STR_LIT>'])<EOL>waiter = cf.get_waiter('<STR_LIT>')<EOL>waiter.wait(StackName=stack_name)<EOL><DEDENT>else:<EOL><INDENT>print('<STR_LIT>'.format(stack_name))<EOL>stack_result = cf.create_stack(<EOL>**params,<EOL>Capabilities=['<STR_LIT>', '<STR_LIT>'])<EOL>try:<EOL><INDENT>waiter = cf.get_waiter('<STR_LIT>')<EOL>print("<STR_LIT>")<EOL>waiter.wait(StackName=stack_name)<EOL><DEDENT>except Exception as ex:<EOL><INDENT>print(ex)<EOL>print("""<STR_LIT>""")<EOL>exit(<NUM_LIT:1>)<EOL><DEDENT><DEDENT><DEDENT>except botocore.exceptions.ClientError as ex:<EOL><INDENT>error_message = ex.response['<STR_LIT>']['<STR_LIT>']<EOL>if error_message == '<STR_LIT>':<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print(json.dumps(<EOL>cf.describe_stacks(StackName=stack_result['<STR_LIT>']),<EOL>indent=<NUM_LIT:2>,<EOL>default=json_serial<EOL>))<EOL><DEDENT>
Update or create stack
f7543:m0
def json_serial(obj):
if isinstance(obj, datetime):<EOL><INDENT>serial = obj.isoformat()<EOL>return serial<EOL><DEDENT>raise TypeError("<STR_LIT>")<EOL>
JSON serializer for objects not serializable by default json code
f7543:m4
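Usage sketch: letting json.dumps fall back to a serializer like the one above for datetime values (the error message text here is illustrative).

import json
from datetime import datetime

def json_serial(obj):
    # Only datetimes get special handling; everything else is rejected.
    if isinstance(obj, datetime):
        return obj.isoformat()
    raise TypeError('Type not serializable: %r' % type(obj))

print(json.dumps({'created': datetime(2020, 1, 1, 12, 0)}, default=json_serial))
# -> {"created": "2020-01-01T12:00:00"}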
def get_module_parser(mod, modname, parents=[], add_help=True):
return argparse.ArgumentParser(<EOL>usage=configuration.EXECUTABLE_NAME + '<STR_LIT:U+0020>' + modname + '<STR_LIT>',<EOL>description=mod.get_description(), parents=parents,<EOL>add_help=add_help)<EOL>
Returns an argument parser for the sub-command's CLI. :param mod: the sub-command's python module :param modname: the string name of the python module :return: ArgumentParser
f7545:m0
def get_application_parser(commands):
parser = argparse.ArgumentParser(<EOL>description=configuration.APPLICATION_DESCRIPTION,<EOL>usage =configuration.EXECUTABLE_NAME + '<STR_LIT>',<EOL>add_help=False)<EOL>parser.add_argument(<EOL>'<STR_LIT>',<EOL>choices=[name for name in commands],<EOL>nargs="<STR_LIT:?>")<EOL>parser.add_argument("<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>")<EOL>return parser<EOL>
Builds an argument parser for the application's CLI. :param commands: an iterable of the available sub-command names :return: ArgumentParser
f7545:m1
def get_module(name):
return importlib.import_module("<STR_LIT>" + name)<EOL>
Convenience method for importing a module (i.e. sub-command) from a string :param name: module name to import :return: the module object
f7545:m2
def __init__(self, fmt=None, datefmt=None, style='<STR_LIT:%>', colorize=True):
self._colorize = bool(colorize)<EOL>self._color_reset = CL_TXTRST if self._colorize else '<STR_LIT>'<EOL>self._color = {<EOL>True: {<EOL>'<STR_LIT>': CL_DRKWHT,<EOL>'<STR_LIT>': CL_TXTCYN,<EOL>'<STR_LIT>': CL_TXTGRN,<EOL>'<STR_LIT:value>': CL_TXTRST,<EOL>'<STR_LIT>': CL_TXTBLU,<EOL>'<STR_LIT:info>': CL_TXTGRN,<EOL>'<STR_LIT>': CL_TXTYLW,<EOL>'<STR_LIT:error>': CL_TXTRED,<EOL>'<STR_LIT>': CL_DRKRED<EOL>},<EOL>False: {}<EOL>}<EOL>basefmt = fmt or self.__BASE_FORMAT.format(cl_dtm=self._color[self._colorize].get('<STR_LIT>', '<STR_LIT>'),<EOL>cl_rst=self._color_reset)<EOL>if self._colorize:<EOL><INDENT>p = re.compile(r'<STR_LIT>')<EOL>res = p.search(basefmt)<EOL>if res is not None:<EOL><INDENT>ln_color = max([len(i) for i in self._color[True]<EOL>if i in ['<STR_LIT>', '<STR_LIT:info>', '<STR_LIT>', '<STR_LIT:error>', '<STR_LIT>']])<EOL>ln_color += len(self._color_reset) - <NUM_LIT:1><EOL>ln = int(res.group(<NUM_LIT:1>) or <NUM_LIT:0>)<EOL>ln = ln + ln_color if ln > <NUM_LIT:0> else ln - ln_color<EOL>basefmt = p.sub(str(ln), basefmt, re.VERBOSE)<EOL><DEDENT><DEDENT>super(TextFormatter, self).__init__(fmt=basefmt, datefmt=datefmt, style=style)<EOL>
Initialize the formatter with specified format strings. :param fmt: Format of string :type fmt: str :param datefmt: Date format (set as 'Z' to get the Zulu format) :type datefmt: str :param style: Use a style parameter of '%', '{' or '$' to specify that you want to use one of %-formatting, :meth:`str.format` (``{}``) formatting or :class:`string.Template` formatting in your format string. :type style: str :param colorize: If ``True``, output will be colorized :type colorize: bool
f7549:c0:m0
def override_colors(self, colors):
if not isinstance(colors, dict):<EOL><INDENT>return<EOL><DEDENT>for key in self._color[True]:<EOL><INDENT>if key in colors:<EOL><INDENT>self._color[True][key] = colors[key]<EOL><DEDENT><DEDENT>
Override default color of elements. :param colors: New color value for given elements :type colors: dict
f7549:c0:m2
@abc.abstractmethod<EOL><INDENT>def withFields(self, fields=None):<DEDENT>
Add custom fields in log. :param fields: List of custom fields :type fields: dict :return: New instance of logger adapter :rtype: CustomAdapter
f7550:c0:m0
@abc.abstractmethod<EOL><INDENT>def withPrefix(self, prefix=None):<DEDENT>
Add prefix to log message. :param prefix: Prefix of log message :type prefix: str :return: New instance of logger adapter :rtype: CustomAdapter
f7550:c0:m1
def __init__(self, logger, extra=None, prefix=None):
self._logger = logger<EOL>self._extra = self._normalize(extra)<EOL>self._prefix = prefix<EOL>super(CustomAdapter, self).__init__(self._logger, {'<STR_LIT>': self._extra, '<STR_LIT>': self._prefix})<EOL>
Logger modifier. :param logger: Logger instance :type logger: PyLogrus :param extra: Custom fields :type extra: dict | None :param prefix: Prefix of log message :type prefix: str | None
f7550:c2:m0
def formatTime(self, record, datefmt=None):
ct = self.converter(record.created)<EOL>if datefmt:<EOL><INDENT>if datefmt == '<STR_LIT>':<EOL><INDENT>t = time.strftime("<STR_LIT>", ct)<EOL>s = "<STR_LIT>".format(t, record.msecs)<EOL><DEDENT>else:<EOL><INDENT>s = time.strftime(datefmt, ct)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>t = time.strftime(self.default_time_format, ct)<EOL>s = self.default_msec_format % (t, record.msecs)<EOL><DEDENT>return s<EOL>
Return the creation time of the specified LogRecord as formatted text. If ``datefmt`` (a string) is specified, it is used to format the creation time of the record. If ``datefmt`` is 'Z' then creation time of the record will be in Zulu Time Zone. Otherwise, the ISO8601 format is used.
f7550:c3:m1
def override_level_names(self, mapping):
if not isinstance(mapping, dict):<EOL><INDENT>return<EOL><DEDENT>for key, val in mapping.items():<EOL><INDENT>if key in self._level_names:<EOL><INDENT>self._level_names[key] = val<EOL><DEDENT><DEDENT>
Rename level names. :param mapping: Mapping level names to new ones :type mapping: dict
f7550:c3:m2
def __init__(self, datefmt=None, enabled_fields=None, indent=None, sort_keys=False):
super(JsonFormatter, self).__init__(datefmt=datefmt)<EOL>self._indent = indent<EOL>self._sort_keys = sort_keys<EOL>self.__compose_record = partial(self.__prepare_record, enabled_fields=enabled_fields or self.__BASIC_FIELDS)<EOL>
Initialize the formatter with specified fields and date format. :param datefmt: Date format (set as 'Z' to get the Zulu format) :type datefmt: str :param enabled_fields: List of enabled fields. Field should be represented by string (field name) or tuple ((field name, new name)) :type enabled_fields: list :param indent: Format JSON string with the given indent :type indent: int :param sort_keys: Sort keys in log record :type sort_keys: bool :return: Log record as JSON string :rtype: str
f7551:c0:m0
def __prepare_record(self, record, enabled_fields):
message = record.getMessage()<EOL>if hasattr(record, '<STR_LIT>'):<EOL><INDENT>message = "<STR_LIT>".format((str(record.prefix) + '<STR_LIT:U+0020>') if record.prefix else '<STR_LIT>', message)<EOL><DEDENT>obj = {<EOL>'<STR_LIT:name>': record.name,<EOL>'<STR_LIT>': self.formatTime(record, self.datefmt),<EOL>'<STR_LIT>': record.created,<EOL>'<STR_LIT>': record.msecs,<EOL>'<STR_LIT>': record.relativeCreated,<EOL>'<STR_LIT>': record.levelno,<EOL>'<STR_LIT>': self._level_names[record.levelname],<EOL>'<STR_LIT>': record.thread,<EOL>'<STR_LIT>': record.threadName,<EOL>'<STR_LIT>': record.process,<EOL>'<STR_LIT>': record.pathname,<EOL>'<STR_LIT:filename>': record.filename,<EOL>'<STR_LIT>': record.module,<EOL>'<STR_LIT>': record.lineno,<EOL>'<STR_LIT>': record.funcName,<EOL>'<STR_LIT:message>': message,<EOL>'<STR_LIT>': record.exc_info[<NUM_LIT:0>].__name__ if record.exc_info else None,<EOL>'<STR_LIT>': record.exc_text,<EOL>}<EOL>if not isinstance(enabled_fields, list):<EOL><INDENT>enabled_fields = [str(enabled_fields)]<EOL><DEDENT>ef = {}<EOL>for item in enabled_fields:<EOL><INDENT>if not isinstance(item, (str, tuple)):<EOL><INDENT>continue<EOL><DEDENT>if not isinstance(item, tuple):<EOL><INDENT>ef[item] = item<EOL><DEDENT>else:<EOL><INDENT>ef[item[<NUM_LIT:0>]] = item[<NUM_LIT:1>]<EOL><DEDENT><DEDENT>result = {}<EOL>for key, val in obj.items():<EOL><INDENT>if key in ef:<EOL><INDENT>result[ef[key]] = val<EOL><DEDENT><DEDENT>return result<EOL>
Prepare log record with given fields.
f7551:c0:m1
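A standalone sketch of the field-selection/renaming step described above: enabled_fields mixes plain names with (name, new_name) tuples (the record values are illustrative).

def select_fields(record_dict, enabled_fields):
    # Build a rename map, then keep only the enabled keys under their new names.
    mapping = {}
    for item in enabled_fields:
        if isinstance(item, tuple):
            mapping[item[0]] = item[1]
        elif isinstance(item, str):
            mapping[item] = item
    return {mapping[k]: v for k, v in record_dict.items() if k in mapping}

record = {'name': 'app', 'levelname': 'INFO', 'message': 'hello', 'lineno': 42}
print(select_fields(record, ['message', ('levelname', 'level')]))
# -> {'message': 'hello', 'level': 'INFO'}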
def __obj2json(self, obj):
return json.dumps(obj, indent=self._indent, sort_keys=self._sort_keys)<EOL>
Serialize obj to a JSON formatted string. This is useful for pretty printing log records in the console.
f7551:c0:m2
@staticmethod<EOL><INDENT>def _check_classifier(classifier):<DEDENT>
predict = getattr(classifier, "<STR_LIT>", None)<EOL>if not callable(predict):<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>predict_proba = getattr(classifier, "<STR_LIT>", None)<EOL>if not callable(predict_proba):<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>
Check if the classifier implements predict and predict_proba methods.
f7559:c0:m1
@staticmethod<EOL><INDENT>def extract_pixels(X):<DEDENT>
if len(X.shape) != <NUM_LIT:4>:<EOL><INDENT>raise ValueError('<STR_LIT>'<EOL>'<STR_LIT>')<EOL><DEDENT>new_shape = (X.shape[<NUM_LIT:0>] * X.shape[<NUM_LIT:1>] * X.shape[<NUM_LIT:2>], X.shape[<NUM_LIT:3>],)<EOL>pixels = X.reshape(new_shape)<EOL>return pixels<EOL>
Extract pixels from array X :param X: Array of images to be classified. :type X: numpy array, shape = [n_images, n_pixels_y, n_pixels_x, n_bands] :return: Reshaped 2D array :rtype: numpy array, [n_samples*n_pixels_y*n_pixels_x, n_bands] :raises: ValueError if input array has wrong dimensions
f7559:c0:m2
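A numpy sketch of the reshape described above; the shapes are illustrative.

import numpy as np

X = np.random.rand(2, 4, 5, 13)   # n_images, n_pixels_y, n_pixels_x, n_bands
pixels = X.reshape(X.shape[0] * X.shape[1] * X.shape[2], X.shape[3])
print(pixels.shape)               # (40, 13): one row per pixel, one column per band

# Per-pixel predictions can be folded back into the image layout:
labels = np.zeros(pixels.shape[0]).reshape(X.shape[0], X.shape[1], X.shape[2])
print(labels.shape)               # (2, 4, 5)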
def image_predict(self, X):
pixels = self.extract_pixels(X)<EOL>predictions = self.classifier.predict(pixels)<EOL>return predictions.reshape(X.shape[<NUM_LIT:0>], X.shape[<NUM_LIT:1>], X.shape[<NUM_LIT:2>])<EOL>
Predicts class label for the entire image. :param X: Array of images to be classified. :type X: numpy array, shape = [n_images, n_pixels_y, n_pixels_x, n_bands] :return: raster classification map :rtype: numpy array, [n_samples, n_pixels_y, n_pixels_x]
f7559:c0:m3
def image_predict_proba(self, X):
pixels = self.extract_pixels(X)<EOL>probabilities = self.classifier.predict_proba(pixels)<EOL>return probabilities.reshape(X.shape[<NUM_LIT:0>], X.shape[<NUM_LIT:1>], X.shape[<NUM_LIT:2>], probabilities.shape[<NUM_LIT:1>])<EOL>
Predicts class probabilities for the entire image. :param X: Array of images to be classified. :type X: numpy array, shape = [n_images, n_pixels_y, n_pixels_x, n_bands] :return: classification probability map :rtype: numpy array, [n_samples, n_pixels_y, n_pixels_x]
f7559:c0:m4
def _load_classifier(self, filename):
self.classifier = PixelClassifier(joblib.load(filename))<EOL>
Loads the classifier.
f7562:c0:m1
def get_cloud_probability_maps(self, X):
band_num = X.shape[-<NUM_LIT:1>]<EOL>exp_bands = <NUM_LIT> if self.all_bands else len(self.BAND_IDXS)<EOL>if band_num != exp_bands:<EOL><INDENT>raise ValueError("<STR_LIT>"<EOL>"<STR_LIT>".format(self.all_bands, exp_bands, band_num))<EOL><DEDENT>if self.all_bands:<EOL><INDENT>X = X[..., self.BAND_IDXS]<EOL><DEDENT>return self.classifier.image_predict_proba(X)[..., <NUM_LIT:1>]<EOL>
Runs the cloud detection on the input images (dimension n_images x n x m x 10 or n_images x n x m x 13) and returns an array of cloud probability maps (dimension n_images x n x m). Pixel values close to 0 indicate clear-sky-like pixels, while values close to 1 indicate pixels covered with clouds. :param X: input Sentinel-2 image obtained with Sentinel-Hub's WMS/WCS request (see https://github.com/sentinel-hub/sentinelhub-py) :type X: numpy array (shape n_images x n x m x 10 or n x m x 13) :return: cloud probability map :rtype: numpy array (shape n_images x n x m)
f7562:c0:m2
def get_cloud_masks(self, X):
cloud_probs = self.get_cloud_probability_maps(X)<EOL>return self.get_mask_from_prob(cloud_probs)<EOL>
Runs the cloud detection on the input images (dimension n_images x n x m x 10 or n_images x n x m x 13) and returns the raster cloud mask (dimension n_images x n x m). Pixel values equal to 0 indicate pixels classified as clear-sky, while values equal to 1 indicate pixels classified as clouds. :param X: input Sentinel-2 image obtained with Sentinel-Hub's WMS/WCS request (see https://github.com/sentinel-hub/sentinelhub-py) :type X: numpy array (shape n_images x n x m x 10 or n x m x 13) :return: raster cloud mask :rtype: numpy array (shape n_images x n x m)
f7562:c0:m3
def get_mask_from_prob(self, cloud_probs, threshold=None):
threshold = self.threshold if threshold is None else threshold<EOL>if self.average_over:<EOL><INDENT>cloud_masks = np.asarray([convolve(cloud_prob, self.conv_filter) > threshold<EOL>for cloud_prob in cloud_probs], dtype=np.int8)<EOL><DEDENT>else:<EOL><INDENT>cloud_masks = (cloud_probs > threshold).astype(np.int8)<EOL><DEDENT>if self.dilation_size:<EOL><INDENT>cloud_masks = np.asarray([dilation(cloud_mask, self.dilation_filter) for cloud_mask in cloud_masks],<EOL>dtype=np.int8)<EOL><DEDENT>return cloud_masks<EOL>
Returns cloud mask by applying morphological operations -- convolution and dilation -- to input cloud probabilities. :param cloud_probs: cloud probability map :type cloud_probs: numpy array of cloud probabilities (shape n_images x n x m) :param threshold: A float from [0,1] specifying threshold :type threshold: float :return: raster cloud mask :rtype: numpy array (shape n_images x n x m)
f7562:c0:m4
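A minimal sketch of the masking step, assuming scipy.ndimage.uniform_filter as the averaging convolution and skimage.morphology.dilation/disk for the dilation; the upstream helpers and parameter values may differ.

import numpy as np
from scipy.ndimage import uniform_filter
from skimage.morphology import dilation, disk

cloud_probs = np.random.rand(3, 64, 64)             # n_images x n x m probabilities
threshold, average_over, dilation_size = 0.4, 4, 2  # illustrative settings

# Average each probability map, threshold it, then dilate the binary mask.
masks = np.asarray(
    [uniform_filter(p, size=average_over) > threshold for p in cloud_probs],
    dtype=np.int8)
masks = np.asarray([dilation(m, disk(dilation_size)) for m in masks], dtype=np.int8)
print(masks.shape, masks.dtype)                     # (3, 64, 64) int8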
def _prepare_ogc_request_params(self):
self.ogc_request.image_format = MimeType.TIFF_d32f<EOL>if self.ogc_request.custom_url_params is None:<EOL><INDENT>self.ogc_request.custom_url_params = {}<EOL><DEDENT>self.ogc_request.custom_url_params.update({<EOL>CustomUrlParam.SHOWLOGO: False,<EOL>CustomUrlParam.TRANSPARENT: True,<EOL>CustomUrlParam.EVALSCRIPT: S2_BANDS_EVALSCRIPT if self.all_bands else MODEL_EVALSCRIPT,<EOL>CustomUrlParam.ATMFILTER: '<STR_LIT>'<EOL>})<EOL>self.ogc_request.create_request(reset_wfs_iterator=False)<EOL>
Method makes sure that correct parameters will be used for download of S-2 bands.
f7562:c1:m1
def get_dates(self):
return self.ogc_request.get_dates()<EOL>
Get the list of dates from within date range for which data of the bbox is available. :return: A list of dates :rtype: list(datetime.datetime)
f7562:c1:m4
def get_data(self):
if self.bands is None:<EOL><INDENT>self._set_band_and_valid_mask()<EOL><DEDENT>return self.bands<EOL>
Returns downloaded bands :return: numpy array of shape `(times, height, width, bands)` :rtype: numpy.ndarray
f7562:c1:m5
def get_valid_data(self):
if self.valid_data is None:<EOL><INDENT>self._set_band_and_valid_mask()<EOL><DEDENT>return self.valid_data<EOL>
Returns valid data mask. :return: numpy array of shape `(times, height, width)` :rtype: numpy.ndarray
f7562:c1:m6
def _set_band_and_valid_mask(self):
data = np.asarray(self.ogc_request.get_data())<EOL>self.bands = data[..., :-<NUM_LIT:1>]<EOL>self.valid_data = (data[..., -<NUM_LIT:1>] == <NUM_LIT:1.0>).astype(np.bool)<EOL>
Downloads band data and valid mask. Sets parameters self.bands, self.valid_data
f7562:c1:m7
def get_probability_masks(self, non_valid_value=<NUM_LIT:0>):
if self.probability_masks is None:<EOL><INDENT>self.get_data()<EOL>self.probability_masks = self.cloud_detector.get_cloud_probability_maps(self.bands)<EOL><DEDENT>self.probability_masks[~self.valid_data] = non_valid_value<EOL>return self.probability_masks<EOL>
Get probability maps of areas for each available date. The pixels without valid data are assigned non_valid_value. :param non_valid_value: Value to be assigned to non valid data pixels :type non_valid_value: float :return: Probability map of shape `(times, height, width)` and `dtype=numpy.float64` :rtype: numpy.ndarray
f7562:c1:m8
def get_cloud_masks(self, threshold=None, non_valid_value=False):
self.get_probability_masks()<EOL>cloud_masks = self.cloud_detector.get_mask_from_prob(self.probability_masks, threshold)<EOL>cloud_masks[~self.valid_data] = non_valid_value<EOL>return cloud_masks<EOL>
The binary cloud mask is computed on the fly. Be cautious. The pixels without valid data are assigned non_valid_value. :param threshold: A float from [0,1] specifying threshold :type threshold: float :param non_valid_value: Value which will be assigned to pixels without valid data :type non_valid_value: int in range `[-254, 255]` :return: Binary cloud masks of shape `(times, height, width)` and `dtype=numpy.int8` :rtype: numpy.ndarray
f7562:c1:m9
def __call__(self, id=None, format=None, url_override=None):
<EOL>if id is None and format is None and url_override is None:<EOL><INDENT>return self<EOL><DEDENT>kwargs = copy_kwargs(self._store)<EOL>if id is not None:<EOL><INDENT>kwargs["<STR_LIT>"] = url_join(self._store["<STR_LIT>"], id)<EOL><DEDENT>if format is not None:<EOL><INDENT>kwargs["<STR_LIT>"] = format<EOL><DEDENT>if url_override is not None:<EOL><INDENT>kwargs["<STR_LIT>"] = url_override<EOL><DEDENT>kwargs["<STR_LIT>"] = self._store["<STR_LIT>"]<EOL>return self._get_resource(**kwargs)<EOL>
Returns a new instance of self modified by one or more of the available parameters. This allows us to do things like override the format for a specific request, and enables the api.resource(ID).get() syntax to get a specific resource by its ID.
f7569:c1:m1
def url_join(base, *args):
scheme, netloc, path, query, fragment = urlsplit(base)<EOL>path = path if len(path) else "<STR_LIT:/>"<EOL>path = posixpath.join(path, *[('<STR_LIT:%s>' % x) for x in args])<EOL>return urlunsplit([scheme, netloc, path, query, fragment])<EOL>
Helper function to join an arbitrary number of url segments together.
f7570:m0
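A de-tokenized, standalone sketch of the helper above; the example URL is illustrative.

import posixpath
from urllib.parse import urlsplit, urlunsplit

def url_join(base, *args):
    # Join extra segments onto the base URL's path without touching query/fragment.
    scheme, netloc, path, query, fragment = urlsplit(base)
    path = path if len(path) else '/'
    path = posixpath.join(path, *['%s' % x for x in args])
    return urlunsplit([scheme, netloc, path, query, fragment])

print(url_join('https://api.example.com/v1', 'resource', 42))
# -> https://api.example.com/v1/resource/42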
def iterator(d):
try:<EOL><INDENT>return d.iteritems()<EOL><DEDENT>except AttributeError:<EOL><INDENT>return d.items()<EOL><DEDENT>
Helper to get a proper dict iterator with Py2k and Py3k
f7570:m2
def equalizer(self, frequency, q=<NUM_LIT:1.0>, db=-<NUM_LIT>):
self.command.append('<STR_LIT>')<EOL>self.command.append(frequency)<EOL>self.command.append(str(q) + '<STR_LIT:q>')<EOL>self.command.append(db)<EOL>return self<EOL>
equalizer takes three parameters: filter center frequency in Hz, "q" or band-width (default=1.0), and a signed number for gain or attenuation in dB. Beware of clipping when using positive gain.
f7577:c0:m1
def bandpass(self, frequency, q=<NUM_LIT:1.0>):
self.command.append('<STR_LIT>')<EOL>self.command.append(frequency)<EOL>self.command.append(str(q) + '<STR_LIT:q>')<EOL>return self<EOL>
bandpass takes 2 parameters: filter center frequency in Hz and "q" or band-width (default=1.0). It gradually removes frequencies outside the band specified.
f7577:c0:m2
def bandreject(self, frequency, q=<NUM_LIT:1.0>):
self.command.append('<STR_LIT>')<EOL>self.command.append(frequency)<EOL>self.command.append(str(q) + '<STR_LIT:q>')<EOL>return self<EOL>
bandreject takes 2 parameters: filter center frequency in Hz and "q" or band-width (default=1.0). It gradually removes frequencies within the band specified.
f7577:c0:m3
def lowshelf(self, gain=-<NUM_LIT>, frequency=<NUM_LIT:100>, slope=<NUM_LIT:0.5>):
self.command.append('<STR_LIT>')<EOL>self.command.append(gain)<EOL>self.command.append(frequency)<EOL>self.command.append(slope)<EOL>return self<EOL>
lowshelf takes 3 parameters: a signed number for gain or attenuation in dB, filter frequency in Hz and slope (default=0.5, maximum=1.0). Beware of Clipping when using positive gain.
f7577:c0:m4
def highshelf(self, gain=-<NUM_LIT>, frequency=<NUM_LIT>, slope=<NUM_LIT:0.5>):
self.command.append('<STR_LIT>')<EOL>self.command.append(gain)<EOL>self.command.append(frequency)<EOL>self.command.append(slope)<EOL>return self<EOL>
highshelf takes 3 parameters: a signed number for gain or attenuation in dB, filter frequency in Hz and slope (default=0.5). Beware of clipping when using positive gain.
f7577:c0:m5
def highpass(self, frequency, q=<NUM_LIT>):
self.command.append('<STR_LIT>')<EOL>self.command.append(frequency)<EOL>self.command.append(str(q) + '<STR_LIT:q>')<EOL>return self<EOL>
highpass takes 2 parameters: filter frequency in Hz below which frequencies will be attenuated and q (default=0.707). Beware of clipping when using high q values.
f7577:c0:m6
def lowpass(self, frequency, q=<NUM_LIT>):
self.command.append('<STR_LIT>')<EOL>self.command.append(frequency)<EOL>self.command.append(str(q) + '<STR_LIT:q>')<EOL>return self<EOL>
lowpass takes 2 parameters: filter frequency in Hz above which frequencies will be attenuated and q (default=0.707). Beware of clipping when using high q values.
f7577:c0:m7
def limiter(self, gain=<NUM_LIT>):
self.command.append('<STR_LIT>')<EOL>self.command.append('<STR_LIT>')<EOL>self.command.append(gain)<EOL>return self<EOL>
limiter takes one parameter: gain in dB. Beware of adding too much gain, as it can cause audible distortion. See the compand effect for a more capable limiter.
f7577:c0:m8
def normalize(self):
self.command.append('<STR_LIT>')<EOL>self.command.append('<STR_LIT>')<EOL>return self<EOL>
normalize has no parameters. It boosts level so that the loudest part of your file reaches maximum, without clipping.
f7577:c0:m9
def compand(self, attack=<NUM_LIT>, decay=<NUM_LIT:1>, soft_knee=<NUM_LIT>, threshold=-<NUM_LIT:20>, db_from=-<NUM_LIT>, db_to=-<NUM_LIT>):
self.command.append('<STR_LIT>')<EOL>self.command.append(str(attack) + '<STR_LIT:U+002C>' + str(decay))<EOL>self.command.append(str(soft_knee) + '<STR_LIT::>' + str(threshold) + '<STR_LIT:U+002C>' + str(db_from) + '<STR_LIT:U+002C>' + str(db_to))<EOL>return self<EOL>
compand takes 6 parameters: attack (seconds), decay (seconds), soft_knee (ex. 6 results in 6:1 compression ratio), threshold (a negative value in dB), the level below which the signal will NOT be companded (a negative value in dB), the level above which the signal will NOT be companded (a negative value in dB). This effect manipulates dynamic range of the input file.
f7577:c0:m10
def sinc(self,<EOL>high_pass_frequency=None,<EOL>low_pass_frequency=None,<EOL>left_t=None,<EOL>left_n=None,<EOL>right_t=None,<EOL>right_n=None,<EOL>attenuation=None,<EOL>beta=None,<EOL>phase=None,<EOL>M=None,<EOL>I=None,<EOL>L=None):
self.command.append("<STR_LIT>")<EOL>if not mutually_exclusive(attenuation, beta):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if attenuation is not None and beta is None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL>self.command.append(str(attenuation))<EOL><DEDENT>elif attenuation is None and beta is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL>self.command.append(str(beta))<EOL><DEDENT>if not mutually_exclusive(phase, M, I, L):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if phase is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL>self.command.append(str(phase))<EOL><DEDENT>elif M is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>elif I is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>elif L is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>if not mutually_exclusive(left_t, left_t):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if left_t is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL>self.command.append(str(left_t))<EOL><DEDENT>if left_n is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL>self.command.append(str(left_n))<EOL><DEDENT>if high_pass_frequency is not None and low_pass_frequency is None:<EOL><INDENT>self.command.append(str(high_pass_frequency))<EOL><DEDENT>elif high_pass_frequency is not None and low_pass_frequency is not None:<EOL><INDENT>self.command.append(str(high_pass_frequency) + '<STR_LIT:->' + str(low_pass_frequency))<EOL><DEDENT>elif high_pass_frequency is None and low_pass_frequency is not None:<EOL><INDENT>self.command.append(str(low_pass_frequency))<EOL><DEDENT>if not mutually_exclusive(right_t, right_t):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if right_t is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL>self.command.append(str(right_t))<EOL><DEDENT>if right_n is not None:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL>self.command.append(str(right_n))<EOL><DEDENT>return self<EOL>
sinc takes 12 parameters: high_pass_frequency in Hz, low_pass_frequency in Hz, left_t, left_n, right_t, right_n, attenuation in dB, beta, phase, M, I, L This effect creates a steep bandpass or bandreject filter. You may specify as few as the first two parameters. Setting the high-pass parameter to a lower value than the low-pass creates a band-reject filter.
f7577:c0:m11
def bend(self, bends, frame_rate=None, over_sample=None):
self.command.append("<STR_LIT>")<EOL>if frame_rate is not None and isinstance(frame_rate, int):<EOL><INDENT>self.command.append('<STR_LIT>' % frame_rate)<EOL><DEDENT>if over_sample is not None and isinstance(over_sample, int):<EOL><INDENT>self.command.append('<STR_LIT>' % over_sample)<EOL><DEDENT>for bend in bends:<EOL><INDENT>self.command.append('<STR_LIT:U+002C>'.join(bend))<EOL><DEDENT>return self<EOL>
TODO Add docstring.
f7577:c0:m12
def chorus(self, gain_in, gain_out, decays):
self.command.append("<STR_LIT>")<EOL>self.command.append(gain_in)<EOL>self.command.append(gain_out)<EOL>for decay in decays:<EOL><INDENT>modulation = decay.pop()<EOL>numerical = decay<EOL>self.command.append('<STR_LIT:U+0020>'.join(map(str, numerical)) + '<STR_LIT>' + modulation)<EOL><DEDENT>return self<EOL>
TODO Add docstring.
f7577:c0:m13
def delay(self,<EOL>gain_in=<NUM_LIT>,<EOL>gain_out=<NUM_LIT:0.5>,<EOL>delays=list((<NUM_LIT:1000>, <NUM_LIT>)),<EOL>decays=list((<NUM_LIT>, <NUM_LIT>)),<EOL>parallel=False):
self.command.append('<STR_LIT>' + ('<STR_LIT:s>' if parallel else '<STR_LIT>'))<EOL>self.command.append(gain_in)<EOL>self.command.append(gain_out)<EOL>self.command.extend(list(sum(zip(delays, decays), ())))<EOL>return self<EOL>
delay takes 4 parameters: input gain (max 1), output gain and then two lists, delays and decays. Each list is a pair of comma-separated values within parentheses.
f7577:c0:m14
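The delays and decays lists are interleaved into one flat argument list, as in the body above; a quick standalone illustration with made-up values:

delays, decays = [1000, 1800], [0.3, 0.25]
print(list(sum(zip(delays, decays), ())))   # [1000, 0.3, 1800, 0.25]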
def echo(self, **kwargs):
self.delay(**kwargs)<EOL>
TODO Add docstring.
f7577:c0:m15
def fade(self):
raise NotImplementedError()<EOL>
TODO Add docstring.
f7577:c0:m16
def flanger(self, delay=<NUM_LIT:0>, depth=<NUM_LIT:2>, regen=<NUM_LIT:0>, width=<NUM_LIT>, speed=<NUM_LIT:0.5>, shape='<STR_LIT>', phase=<NUM_LIT>, interp='<STR_LIT>'):
raise NotImplementedError()<EOL>
TODO Add docstring.
f7577:c0:m17
def gain(self, db):
self.command.append('<STR_LIT>')<EOL>self.command.append(db)<EOL>return self<EOL>
gain takes one parameter: gain in dB.
f7577:c0:m18
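A usage sketch of the fluent effect-chain style used throughout this class: each method appends sox-style arguments to self.command and returns self, so calls can be chained. The class name and argument values below are illustrative, not the library's API.

class EffectChain:
    def __init__(self):
        self.command = []

    def highpass(self, frequency, q=0.707):
        self.command += ['highpass', frequency, str(q) + 'q']
        return self

    def gain(self, db):
        self.command += ['gain', db]
        return self

chain = EffectChain().highpass(120).gain(-3)
print(chain.command)   # ['highpass', 120, '0.707q', 'gain', -3]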
def mcompand(self):
raise NotImplementedError()<EOL>
TODO Add docstring.
f7577:c0:m19
def noise_reduction(self, amount=<NUM_LIT:0.5>):
<EOL>raise NotImplementedError()<EOL>
TODO Add docstring.
f7577:c0:m20
def oops(self):
raise NotImplementedError()<EOL>
TODO Add docstring.
f7577:c0:m21
def overdrive(self, gain=<NUM_LIT:20>, colour=<NUM_LIT:20>):
self.command.append('<STR_LIT>')<EOL>self.command.append(gain)<EOL>self.command.append(colour)<EOL>return self<EOL>
overdrive takes 2 parameters: gain in dB and colour, which affects the character of the distortion effect. Both have a default value of 20. TODO - changing colour does not seem to have an audible effect
f7577:c0:m22
def phaser(self,<EOL>gain_in=<NUM_LIT>,<EOL>gain_out=<NUM_LIT>,<EOL>delay=<NUM_LIT:1>,<EOL>decay=<NUM_LIT>,<EOL>speed=<NUM_LIT:2>,<EOL>triangular=False):
self.command.append("<STR_LIT>")<EOL>self.command.append(gain_in)<EOL>self.command.append(gain_out)<EOL>self.command.append(delay)<EOL>self.command.append(decay)<EOL>self.command.append(speed)<EOL>if triangular:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>return self<EOL>
phaser takes 6 parameters: input gain (max 1.0), output gain (max 1.0), delay, decay, speed and LFO shape=triangular (which must be set to True or False)
f7577:c0:m23
def pitch(self, shift,<EOL>use_tree=False,<EOL>segment=<NUM_LIT>,<EOL>search=<NUM_LIT>,<EOL>overlap=<NUM_LIT:12>):
self.command.append("<STR_LIT>")<EOL>if use_tree:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>self.command.append(shift)<EOL>self.command.append(segment)<EOL>self.command.append(search)<EOL>self.command.append(overlap)<EOL>return self<EOL>
pitch takes 5 parameters: shift, use_tree (True or False), segment, search and overlap.
f7577:c0:m24
def loop(self):
self.command.append('<STR_LIT>')<EOL>self.command.append('<STR_LIT:->')<EOL>return self<EOL>
TODO Add docstring.
f7577:c0:m25
def reverb(self,<EOL>reverberance=<NUM_LIT:50>,<EOL>hf_damping=<NUM_LIT:50>,<EOL>room_scale=<NUM_LIT:100>,<EOL>stereo_depth=<NUM_LIT:100>,<EOL>pre_delay=<NUM_LIT:20>,<EOL>wet_gain=<NUM_LIT:0>,<EOL>wet_only=False):
self.command.append('<STR_LIT>')<EOL>if wet_only:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>self.command.append(reverberance)<EOL>self.command.append(hf_damping)<EOL>self.command.append(room_scale)<EOL>self.command.append(stereo_depth)<EOL>self.command.append(pre_delay)<EOL>self.command.append(wet_gain)<EOL>return self<EOL>
reverb takes 7 parameters: reverberance, high-frequency damping, room scale, stereo depth, pre-delay, wet gain and wet only (True or False)
f7577:c0:m26
def reverse(self):
self.command.append("<STR_LIT>")<EOL>return self<EOL>
reverse takes no parameters. It plays the input sound backwards.
f7577:c0:m27
def speed(self, factor, use_semitones=False):
self.command.append("<STR_LIT>")<EOL>self.command.append(factor if not use_semitones else str(factor) + "<STR_LIT:c>")<EOL>return self<EOL>
speed takes 2 parameters: factor and use-semitones (True or False). When use-semitones = False, a factor of 2 doubles the speed and raises the pitch an octave. The same result is achieved with factor = 1200 and use semitones = True.
f7577:c0:m28
def tempo(self,<EOL>factor,<EOL>use_tree=False,<EOL>opt_flag=None,<EOL>segment=<NUM_LIT>,<EOL>search=<NUM_LIT>,<EOL>overlap=<NUM_LIT:12>):
self.command.append("<STR_LIT>")<EOL>if use_tree:<EOL><INDENT>self.command.append('<STR_LIT>')<EOL><DEDENT>if opt_flag in ('<STR_LIT:l>', '<STR_LIT:m>', '<STR_LIT:s>'):<EOL><INDENT>self.command.append('<STR_LIT>' % opt_flag)<EOL><DEDENT>self.command.append(factor)<EOL>self.command.append(segment)<EOL>self.command.append(search)<EOL>self.command.append(overlap)<EOL>return self<EOL>
tempo takes 6 parameters: factor, use tree (True or False), option flag, segment, search and overlap. This effect changes the duration of the sound without modifying pitch.
f7577:c0:m30
def tremolo(self, freq, depth=<NUM_LIT>):
self.command.append("<STR_LIT>")<EOL>self.command.append(freq)<EOL>self.command.append(depth)<EOL>return self<EOL>
tremolo takes two parameters: frequency and depth (max 100)
f7577:c0:m31
def trim(self, positions):
self.command.append("<STR_LIT>")<EOL>for position in positions:<EOL><INDENT>self.command.append(position)<EOL><DEDENT>return self<EOL>
TODO Add docstring.
f7577:c0:m32
def upsample(self, factor):
self.command.append("<STR_LIT>")<EOL>self.command.append(factor)<EOL>return self<EOL>
TODO Add docstring.
f7577:c0:m33
def vol(self, gain, type="<STR_LIT>", limiter_gain=None):
self.command.append("<STR_LIT>")<EOL>if type in ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>"]:<EOL><INDENT>self.command.append(type)<EOL><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if limiter_gain is not None:<EOL><INDENT>self.command.append(str(limiter_gain))<EOL><DEDENT>print(self.command)<EOL>return self<EOL>
vol takes three parameters: gain, gain-type (amplitude, power or dB) and limiter gain.
f7577:c0:m35