INSTRUCTION
stringlengths
1
8.43k
RESPONSE
stringlengths
75
104k
Gets the filesize of a remote file
def get_remote_file_size(self, url):
    """Return the size in bytes of a remote file.

    Reads the Content-Length header of the response.  On an HTTP error the
    method re-authenticates with EarthExplorer and retries.
    """
    try:
        req = urllib.request.urlopen(url)
        return int(req.getheader('Content-Length').strip())
    except urllib.error.HTTPError as error:
        logger.error('Error retrieving size of the remote file %s' % error)
        print('Error retrieving size of the remote file %s' % error)
        self.connect_earthexplorer()
        # BUG FIX: the original called the retry without `return`, so the
        # caller always received None on this path.
        return self.get_remote_file_size(url)
Download the remote .tar.bz file.
def download(self, bands=None, download_dir=None, metadata=False):
    """Download the scene's remote archive and extract the requested bands.

    Parameters:
        bands: list of band numbers (1-11) and/or 'BQA'; defaults to all.
        download_dir: destination folder; defaults to DOWNLOAD_DIR.
        metadata: accepted for interface compatibility; unused here.

    Returns a list of [file_path, size] pairs on success, or the download
    info dict when the download failed.
    """
    if not download_dir:
        download_dir = DOWNLOAD_DIR
    if bands is None:
        bands = list(range(1, 12)) + ['BQA']
    else:
        self.validate_bands(bands)
    # BUG FIX: raw string so \s and \. are regex escapes, not (invalid)
    # string escapes.
    pattern = re.compile(r'^[^\s]+_(.+)\.tiff?', re.I)
    band_list = ['B%i' % (i,) if isinstance(i, int) else i for i in bands]
    image_list = []
    # Authenticate with EarthExplorer before requesting the archive.
    self.connect_earthexplorer()
    tgzname = self.sceneInfo.name + '.tgz'
    dest_dir = check_create_folder(join(download_dir, self.sceneInfo.name))
    downloaded = self.download_file(self.url, dest_dir, tgzname)
    logger.debug('Status downloaded %s' % downloaded)
    print('\n Status downloaded %s' % downloaded)
    # NOTE: the 'sucess' spelling is part of download_file's return contract.
    if downloaded['sucess']:
        print('\n Downloaded sucess')
        logger.debug('Downloaded sucess of scene: %s' % self.sceneInfo.name)
        try:
            tar = tarfile.open(downloaded['file_path'], 'r')
            folder_path = join(download_dir, self.sceneInfo.name)
            tar.extractall(folder_path)
            remove(downloaded['file_path'])
            for image_path in listdir(folder_path):
                matched = pattern.match(image_path)
                file_path = join(folder_path, image_path)
                if matched and matched.group(1) in band_list:
                    image_list.append([file_path, getsize(file_path)])
                elif matched:
                    # Extracted band that was not requested: discard it.
                    remove(file_path)
        except tarfile.ReadError as error:
            print('\nError when extracting files. %s' % error)
            logger.error('Error when extracting files. %s' % error)
        return image_list
    logger.debug('Info downloaded: %s' % downloaded)
    print('\n Info downloaded: %s' % downloaded)
    return downloaded
Validate bands parameter.
def validate_bands(bands):
    """Validate the *bands* parameter.

    Raises TypeError when *bands* is not a list, and InvalidBandError when
    any entry is outside bands 1-11 or 'BQA'.
    NOTE(review): defined without *self* although called as
    ``self.validate_bands(...)`` elsewhere -- presumably decorated as a
    staticmethod at its definition site; confirm.
    """
    if not isinstance(bands, list):
        raise TypeError('Parameter bands must be a "list"')
    allowed = list(range(1, 12)) + ['BQA']
    for candidate in bands:
        if candidate not in allowed:
            raise InvalidBandError('%s is not a valid band' % candidate)
Connection to Earth explorer without proxy
def connect_earthexplorer(self):
    """Authenticate with the USGS EarthExplorer service (no proxy).

    Installs a cookie-aware opener, posts the credentials and checks the
    response body for the "must sign in" marker.

    Raises AutenticationUSGSFailed when the credentials are rejected; any
    other error is logged and re-raised.
    """
    logger.info("Establishing connection to Earthexplorer")
    print("\n Establishing connection to Earthexplorer")
    try:
        opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor())
        urllib.request.install_opener(opener)
        params = urllib.parse.urlencode(dict(username=self.user, password=self.password))
        params = params.encode('utf-8')
        f = opener.open("https://ers.cr.usgs.gov/login", params)
        data = f.read().decode('utf-8')
        f.close()
        if data.find('You must sign in as a registered user to download data or place orders for USGS EROS products') > 0:
            print("\n Authentification failed")
            logger.error("Authentification failed")
            raise AutenticationUSGSFailed('Authentification USGS failed')
        print('User %s connected with USGS' % self.user)
        logger.debug('User %s connected with USGS' % self.user)
        return
    except Exception as e:
        print('\nError when trying to connect USGS: %s' % e)
        # BUG FIX: the original raised *before* this log call, making it
        # unreachable; log first, then re-raise.
        logger.error('Error when trying to connect USGS: %s' % e)
        raise
Downloads large files in pieces
def download_file(self, url, download_dir, sceneName):
    """Download a large remote file in 8 MB chunks, with retry logic.

    Returns a dict: {'total_size', 'scene', 'sucess', 'file_path'}
    (the 'sucess' spelling is part of the established interface).

    Raises RemoteFileDoesntExist, SmallLandsatImageError or
    CredentialsUsgsError on the corresponding failures.
    """
    try:
        logger.info('\nStarting download..')
        print('\n Starting download..\n')
        req = urllib.request.urlopen(url)
        try:
            if req.info().get_content_type() == 'text/html':
                logger.error("error : the file format is html")
                # BUG FIX: req.read() returns bytes; decode before searching
                # (bytes.find(str) raises TypeError).
                lines = req.read().decode('utf-8', errors='replace')
                if lines.find('Download Not Found') > 0:
                    raise TypeError('Download USGS not found for scene: %s' % self.sceneInfo.name)
                else:
                    print(lines)
                    print(sys.exit(-1))
        except Exception as e:
            logger.error('Erro in USGS download for scene %s error: %s' % (self.sceneInfo.name, e))
            raise CredentialsUsgsError('User or Password invalid ! ')
        total_size = int(req.getheader('Content-Length').strip())
        if total_size < 50000:
            logger.error("Error: The file is too small to be a Landsat Image for scene %s"
                         % self.sceneInfo.name)
            raise SmallLandsatImageError("Error: The file is too small to be a Landsat Image")
        total_size_fmt = sizeof_fmt(total_size)
        downloaded = 0
        CHUNK = 1024 * 1024 * 8
        with open(download_dir + '/' + sceneName, 'wb') as fp:
            # BUG FIX: time.clock() was removed in Python 3.8; time.time()
            # gives the wall-clock delta the rate display needs.
            start = time.time()
            logger.debug('Downloading {0} ({1}):'.format(self.sceneInfo.name, total_size_fmt))
            print('Downloading {0} ({1}):'.format(self.sceneInfo.name, total_size_fmt))
            while True:
                chunk = req.read(CHUNK)
                downloaded += len(chunk)
                done = int(50 * downloaded / total_size)
                print('\r[{1}{2}]{0:3.0f}% {3}ps'.format(
                    floor((float(downloaded) / total_size) * 100),
                    '-' * done, ' ' * (50 - done),
                    sizeof_fmt((downloaded // (time.time() - start)) / 8)))
                if not chunk:
                    logger.debug('Download {0} completed({1}):'.format(self.sceneInfo.name,
                                                                       total_size_fmt))
                    break
                fp.write(chunk)
    except urllib.error.HTTPError as e:
        if e.code == 500:
            logger.error("File doesn't exist")
            print("\n File doesn't exist: %s " % e)
            raise RemoteFileDoesntExist("File doesn't exist")
        elif e.code == 403:
            # BUG FIX: logger.error was called with print-style varargs,
            # which logging treats as format args; build the message string.
            logger.error("HTTP Error: %s %s" % (e.code, url))
            logger.debug('\n trying to download it again scene: %s' % self.sceneInfo.name)
            print("\n HTTP Error:", e.code, url)
            print('\n trying to download it again scene: %s' % self.sceneInfo.name)
            self.connect_earthexplorer()
            # BUG FIX: return the retry's result; falling through used to hit
            # the percent check below with `downloaded` unbound.
            return self.download_file(url, download_dir, sceneName)
        else:
            logger.error("HTTP Error: %s" % e)
            print("HTTP Error:", e.code, url)
            raise e
    except urllib.error.URLError as e:
        print("URL Error:", e.reason, url)
        logger.error("URL Error: %s in %s" % (e, url))
        raise e
    except ConnectionResetError as e:
        print('Error ConnectionResetError: %s' % e)
        logger.error('Error ConnectionResetError: %s' % e)
        print('\n trying to download it again scene: %s' % self.sceneInfo.name)
        logger.debug('trying to download it again scene: %s' % self.sceneInfo.name)
        return self.download_file(url, download_dir, sceneName)
    # NOTE: the original had a second `except urllib.error.HTTPError` here,
    # unreachable because HTTPError is already handled above; removed.
    except Exception as error:
        logger.error('Error unknown %s in download %s at scene: %s'
                     % (error, url, self.sceneInfo.name))
        # BUG FIX: the original format string contained a bare '%' and
        # raised TypeError inside this handler.
        print('Error unknown %s in download %s at scene: %s'
              % (error, url, self.sceneInfo.name))
        logger.debug('trying to download it again scene: %s' % self.sceneInfo.name)
        return self.download_file(url, download_dir, sceneName)
    percent = floor((float(downloaded) / total_size) * 100) or 0
    if percent != 100:
        logger.debug('trying to download it again scene: %s' % self.sceneInfo.name)
        logger.error('Download interrupted in %s%%, trying to download it again scene: %s'
                     % (percent, self.sceneInfo.name))
        print('\n Download interrupted in %s%%, trying to download it again scene: %s'
              % (percent, self.sceneInfo.name))
        return self.download_file(url, download_dir, sceneName)
    path_item = download_dir + '/' + sceneName
    info = {'total_size': total_size_fmt,
            'scene': self.sceneInfo.name,
            'sucess': verify_sucess(total_size, path_item),
            'file_path': path_item}
    return info
Make a callable returning True for names starting with the given prefix.
def prefixed_by(prefix):
    """Build a predicate that is true for names starting with *prefix*.

    The returned callable accepts the attribute/name and, optionally, its
    corresponding value (which is ignored), making it suitable for use with
    :meth:`ObjectLocator.is_test_module` and
    :meth:`ObjectLocator.is_test_method`\\ .
    """
    def predicate(name, value=None):
        return name.startswith(prefix)
    predicate.__name__ = "prefixed_by_" + prefix
    return predicate
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extract the CLI arguments specific to this command and set the API path."""
    ApiCli.get_arguments(self)
    metric_name = self.args.metric_name
    if metric_name is not None:
        self._metric_name = metric_name
    self.path = "v1/metrics/{0}".format(self._metric_name)
Return a datetime.tzinfo implementation for the given timezone.
def timezone(zone):
    r'''Return a datetime.tzinfo implementation for the given timezone

    >>> from datetime import datetime, timedelta
    >>> utc = timezone('UTC')
    >>> eastern = timezone('US/Eastern')
    >>> eastern.zone
    'US/Eastern'
    >>> timezone(unicode('US/Eastern')) is eastern
    True
    >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
    >>> loc_dt = utc_dt.astimezone(eastern)
    >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
    >>> loc_dt.strftime(fmt)
    '2002-10-27 01:00:00 EST (-0500)'

    Raises UnknownTimeZoneError if passed an unknown zone.

    >>> try:
    ...     timezone('Asia/Shangri-La')
    ... except UnknownTimeZoneError:
    ...     print('Unknown')
    Unknown
    '''
    if zone.upper() == 'UTC':
        return utc
    # NOTE(review): `ascii` here is presumably the pytz-style helper that
    # round-trips through an ASCII encode -- confirm at its definition.
    try:
        zone = ascii(zone)
    except UnicodeEncodeError:
        # All valid timezones are ASCII
        raise UnknownTimeZoneError(zone)
    zone = _unmunge_zone(zone)
    if zone not in _tzinfo_cache:
        if zone not in all_timezones_set:
            raise UnknownTimeZoneError(zone)
        _tzinfo_cache[zone] = build_tzinfo(zone)
    return _tzinfo_cache[zone]
Correct the timezone information on the given datetime
def normalize(self, dt, is_dst=False):
    '''Correct the timezone information on the given datetime.

    *dt* must already be timezone-aware; its tzinfo is replaced by this
    timezone.  Raises ValueError for naive datetimes.
    '''
    if dt.tzinfo is not None:
        return dt.replace(tzinfo=self)
    raise ValueError('Naive time - no tzinfo set')
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extract the CLI arguments specific to this command and set the API path."""
    ApiCli.get_arguments(self)
    args = self.args
    if args.hostGroupId is not None:
        self.hostGroupId = args.hostGroupId
    if args.force is not None:
        self.force = args.force
    # A forced removal is expressed as a query parameter.
    if self.force:
        self.url_parameters = {"forceRemove": True}
    self.path = "v1/hostgroup/{0}".format(str(self.hostGroupId))
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extract the CLI arguments specific to this command.

    Each attribute mirrors the corresponding parsed argument.
    """
    ApiCli.get_arguments(self)
    # IMPROVEMENT: `x if x is not None else None` is identical to `x`;
    # the fourteen redundant conditionals collapse to plain assignments.
    args = self.args
    self._actions = args.actions
    self._alarm_name = args.alarm_name
    self._metric = args.metric
    self._aggregate = args.aggregate
    self._operation = args.operation
    self._threshold = args.threshold
    self._trigger_interval = args.trigger_interval
    self._host_group_id = args.host_group_id
    self._note = args.note
    self._per_host_notify = args.per_host_notify
    self._is_disabled = args.is_disabled
    self._notify_clear = args.notify_clear
    self._notify_set = args.notify_set
    self._timeout_interval = args.timeout_interval
Escape-aware text splitting: split text on a delimiter, recognizing escaped delimiters.
def esc_split(text, delimiter=" ", maxsplit=-1, escape="\\", *, ignore_empty=False):
    """Escape-aware text splitting.

    Split *text* on *delimiter*, treating any character preceded by *escape*
    as literal.  At most *maxsplit* splits are performed (-1 = unlimited);
    with *ignore_empty* set, empty fields between delimiters are dropped.
    Yields the resulting fields lazily.
    """
    pending_escape = False
    splits_done = 0
    current = []
    for ch in text:
        if pending_escape:
            pending_escape = False
            current.append(ch)
            continue
        if ch == escape:
            pending_escape = True
        elif ch in delimiter and splits_done != maxsplit:
            if current or not ignore_empty:
                yield "".join(current)
                splits_done += 1
                current = []
        else:
            current.append(ch)
    yield "".join(current)
Join an iterable by a delimiter replacing instances of delimiter in items with escape + delimiter.
def esc_join(iterable, delimiter=" ", escape="\\"):
    """Join *iterable* with *delimiter*, escaping embedded delimiters.

    Instances of *delimiter* inside items are replaced by *escape* +
    *delimiter* so the result can be split back with esc_split.
    """
    escaped = escape + delimiter
    parts = (item.replace(delimiter, escaped) for item in iterable)
    return delimiter.join(parts)
Returns a list of the positions in the text where all new lines occur. This is used by get_line_and_char to efficiently find coordinates represented by offset positions.
def get_newline_positions(text):
    """Return the offsets of every newline character in *text*.

    Used by get_line_and_char to efficiently map flat offsets to
    (line, char) coordinates.
    """
    return [i for i, ch in enumerate(text) if ch == "\n"]
Given a list of newline positions, and an offset from the start of the source code that newline_positions was pulled from, return a 2-tuple of (line, char) coordinates.
def get_line_and_char(newline_positions, position):
    """Map a flat *position* to 0-based (line, char) coordinates.

    *newline_positions* is the list produced by get_newline_positions for
    the same text.
    """
    if not newline_positions:
        return (0, position)
    for line_no, nl_pos in enumerate(newline_positions):
        if nl_pos >= position:
            if line_no:
                return (line_no, position - newline_positions[line_no - 1] - 1)
            return (line_no, position)
    # Past the final newline: the position is on the last line.
    return (line_no + 1, position - newline_positions[-1] - 1)
Point to a position in source code.
def point_to_source(source, position, fmt=(2, True, "~~~~~", "^")):
    """Render a snippet of *source* with a pointer at *position*.

    position is a (line_number, character_number) 2-tuple.  fmt is a 4-tuple:
    surrounding_lines (context lines above/below), show_line_numbers,
    tail_body (the pointer's tail) and pointer_char.
    """
    surrounding_lines, show_line_numbers, tail_body, pointer_char = fmt
    line_no, char_no = position
    lines = source.split("\n")
    target = lines[line_no]
    # Place the tail before the pointer when there is room, after otherwise.
    if char_no >= len(tail_body):
        tail = " " * (char_no - len(tail_body)) + tail_body + pointer_char
    else:
        tail = " " * char_no + pointer_char + tail_body
    if show_line_numbers:
        width = int(math.ceil(math.log10(max(1, line_no + surrounding_lines))) + 1)
        line_fmt = "{0:" + str(width) + "}: {1}"
    else:
        line_fmt = "{1}"
    pivot = line_no + 1
    output = [(pivot, target), ("", tail)]
    for ofst in range(1, surrounding_lines + 1):
        below = line_no - ofst
        above = line_no + ofst
        if below >= 0:
            output.insert(0, (pivot - ofst, lines[below]))
        if above < len(lines):
            output.append((pivot + ofst, lines[above]))
    return "\n".join(line_fmt.format(n, c) for n, c in output)
Send output in textual format
def _dump_text(self): """ Send output in textual format """ results = self._relay_output['result']; for l in results: dt = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(int(l[1]['ts']))) print("{0} {1} {2} {3}".format(l[0], dt, l[1]['type'], l[1]['msg']))
Call back function to be implemented by the CLI.
def _handle_results(self):
    """Callback invoked with the API result; dispatch to JSON or text dump."""
    # Only process successful (HTTP 200) responses.
    if self._api_result.status_code != requests.codes.ok:
        return
    self._relay_output = json.loads(self._api_result.text)
    if self._raw:
        self._dump_json()
    else:
        self._dump_text()
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extract the CLI arguments specific to this command and set the API path."""
    PluginBase.get_arguments(self)
    args = self.args
    if args.organizationName is not None:
        self.organizationName = args.organizationName
    if args.repositoryName is not None:
        self.repositoryName = args.repositoryName
    self.path = "v1/plugins/private/{0}/{1}/{2}".format(
        self.pluginName, self.organizationName, self.repositoryName)
Extract only the required fields for the create/update API call.
def extract_fields(self, metric):
    """Return only the fields of *metric* required by the create/update API call.

    Unknown keys are dropped; present keys keep their values.
    """
    # IMPROVEMENT: a single whitelist replaces ten copy-pasted `if` blocks,
    # preserving the original field order in the result.
    wanted = ('name', 'description', 'displayName', 'displayNameShort',
              'unit', 'defaultAggregate', 'defaultResolutionMS',
              'isDisabled', 'isBuiltin', 'type')
    return {key: metric[key] for key in wanted if key in metric}
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extract the CLI arguments specific to this command."""
    AlarmModify.get_arguments(self)
    # IMPROVEMENT: `x if x is not None else None` is identical to `x`.
    self._alarm_id = self.args.alarm_id
    self.get_api_parameters()
Apply the criteria to filter out on the output required
def _filter(self): """ Apply the criteria to filter out on the output required """ if self._metrics or self._control or self._plugins: relays = self._relays['result']['relays'] for relay in relays: if self._metrics: del relays[relay]['metrics'] if self._control: del relays[relay]['control'] if self._plugins: if 'plugins' in relays[relay]: del relays[relay]['plugins']
Call back function to be implemented by the CLI.
def _handle_results(self):
    """Callback: filter the relay payload and dump it as JSON."""
    # Only process successful (HTTP 200) responses.
    if self._api_result.status_code == requests.codes.ok:
        self._relays = json.loads(self._api_result.text)
        self._filter()
        self._dump_json()
Initialize based on a list of fortune files
def fromlist(cls, files, equal=False, offensive=False, lang=None):
    """Build an instance from a list of fortune files.

    With *equal*, every file gets the same weight; otherwise files are
    weighted by size.  Unloadable files are logged and skipped.

    Raises ValueError when no file can be loaded.
    """
    self = cls.__new__(cls)
    self.files = fortunes = []
    count = 0
    for file in files:
        fortune = load_fortune(file, offensive=offensive, lang=lang)
        if fortune is None:
            # IMPROVEMENT: logger.warn is a deprecated alias of warning.
            logger.warning("Can't load: %s", file)
            continue
        # Cumulative weights; each entry stores its upper bound.
        count += 1 if equal else fortune.size
        fortunes.append((fortune, count))
    if not fortunes:
        raise ValueError('All fortune files specified are invalid')
    self.count = count
    self.keys = [i[1] for i in self.files]
    return self
Initialize based on a list of fortune files with set chances
def set_chance(cls, files, equal=False, offensive=False, lang=None):
    """Build an instance from (name, chance) fortune-file pairs.

    Files with an explicit chance are weighted directly; the remainder of
    the probability mass is spread over the chance-less files, equally or
    proportionally to their size.
    """
    self = cls.__new__(cls)
    total = 0.
    weighted = []
    leftover = []
    for name, chance in files:
        if total >= 1:
            break
        fortune = load_fortune(name, offensive=offensive, lang=lang)
        if fortune is None or not fortune.size:
            continue
        if chance:
            weighted.append((fortune, chance))
            total += chance
        else:
            leftover.append(fortune)
    if leftover and total < 1:
        left = 1 - total
        if equal:
            perfile = left / len(leftover)
            for fortune in leftover:
                weighted.append((fortune, perfile))
        else:
            entries = sum(map(attrgetter('size'), leftover))
            logger.debug('%d entries left', entries)
            for fortune in leftover:
                weighted.append((fortune, left * fortune.size / entries))
    # Arbitrary limit to calculate the upper bound with; nice round number.
    self.count = count = 65536
    bound = 0
    self.files = fortunes = []
    for fortune, chance in weighted:
        bound += int(chance * count)
        fortunes.append((fortune, bound))
    self.keys = [i[1] for i in self.files]
    return self
virtue discovers and runs tests found in the given objects.
def main(context, **kwargs):
    """
    virtue discovers and runs tests found in the given objects.

    Provide it with one or more tests (packages, modules or objects) to run.
    """
    # Exit non-zero when the run was unsuccessful.
    outcome = run(**kwargs)
    context.exit(not outcome.wasSuccessful())
grammar = {comment} , rule , {comment | rule} ;
def grammar(self, text):
    """grammar = {comment} , rule , {comment | rule} ;"""
    self._attempting(text)
    parser = concatenation([
        zero_or_more(self.comment, ignore_whitespace=True),
        self.rule,
        zero_or_more(
            alternation([self.comment, self.rule]),
            ignore_whitespace=True),
    ], ignore_whitespace=True)
    return parser(text).retyped(TokenType.grammar)
comment = "(*" . {printable - "*" | "*" . printable - ")"} . "*)" ;
def comment(self, text):
    """comment = "(*" . {printable - "*" | "*" . printable - ")"} . "*)" ;"""
    self._attempting(text)
    # Body: any printable except "*", or a "*" not followed by ")".
    body = zero_or_more(
        alternation([
            exclusion(self.printable, "*"),
            concatenation(["*", exclusion(self.printable, ")")],
                          ignore_whitespace=False),
        ]),
        ignore_whitespace=False)
    parser = concatenation(["(*", body, "*)"], ignore_whitespace=False)
    return parser(text).compressed(TokenType.comment)
rule = identifier , "=" , expression , ";" ;
def rule(self, text):
    """rule = identifier , "=" , expression , ";" ;"""
    self._attempting(text)
    parser = concatenation(
        [self.identifier, "=", self.expression, ";"],
        ignore_whitespace=True)
    return parser(text).retyped(TokenType.rule)
special_handling = "?" , identifier , "?" ;
def special_handling(self, text):
    """special_handling = "?" , identifier , "?" ;"""
    self._attempting(text)
    parser = concatenation(["?", self.identifier, "?"], ignore_whitespace=True)
    return parser(text).retyped(TokenType.special_handling)
number = digit - "0" . {digit} ;
def number(self, text):
    """number = digit - "0" . {digit} ;"""
    self._attempting(text)
    parser = concatenation([
        exclusion(self.digit, "0"),
        zero_or_more(self.digit, ignore_whitespace=False),
    ], ignore_whitespace=False)
    return parser(text).compressed(TokenType.number)
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extract the CLI arguments specific to this command and build the payload."""
    ApiCli.get_arguments(self)
    args = self.args
    if args.metricName is not None:
        self.metricName = args.metricName
    if args.measurement is not None:
        self.measurement = args.measurement
    # Default the source to the local hostname when not provided.
    if args.source is not None:
        self.source = args.source
    else:
        self.source = socket.gethostname()
    if args.timestamp is not None:
        self.timestamp = int(args.timestamp)
    payload = {'metric': self.metricName, 'measure': self.measurement}
    if self.source is not None:
        payload['source'] = self.source
    if self.timestamp is not None:
        payload['timestamp'] = int(self.timestamp)
    self._process_properties()
    if self._properties is not None:
        payload['metadata'] = self._properties
    self.data = json.dumps(payload, sort_keys=True)
    self.headers = {'Content-Type': 'application/json', "Accept": "application/json"}
Call back function to be implemented by the CLI.
def _handle_results(self):
    """Callback: pretty-print the JSON payload of a successful API call."""
    # Only process successful (HTTP 200) responses.
    if self._api_result.status_code == requests.codes.ok:
        payload = json.loads(self._api_result.text)
        pretty = json.dumps(payload, sort_keys=True, indent=4, separators=(',', ': '))
        print(self.colorize_json(pretty))
The parse tree generated by the source.
def grammar(self):
    """The parse tree generated by the source (built lazily and cached)."""
    if self._grammar is None:
        self.parser = Parser()
        raw = self.parser.parse(self.input_source)
        self._grammar = raw.trimmed().flattened().flattened(self._flatten)
    return self._grammar
The AST rules.
def rules(self):
    """The AST rules (built lazily from the grammar parse tree)."""
    if self._rules is None:
        collected = []
        for child in self.grammar.children:
            if not child.is_type(TokenType.rule):
                continue
            name, expression = child.children
            collected.append(Rule(name.value,
                                  self._expression_to_asn(expression),
                                  name.position,
                                  child.consumed))
        self._rules = collected
    return self._rules
The AST comments.
def comments(self):
    """The AST comments (built lazily)."""
    if self._comments is None:
        self._comments = [child for child in self.grammar.children
                          if child.is_type(TokenType.comment)]
    return self._comments
The directives parsed from the comments.
def directives(self):
    """The directives parsed from the comments (built lazily)."""
    if self._directives is None:
        found = []
        for comment in self.comments:
            found.extend(self.directives_from_comment(comment))
        self._directives = found
    return self._directives
The python source of the parser generated from the input source.
def output_source(self):
    """The python source of the parser generated from the input source
    (compiled lazily and cached)."""
    if self._output_source is None:
        compiled = self._compile()
        self._output_source = compiled
    return self._output_source
Returns the python source code for the generated parser.
def _compile(self):
    """Return the python source code for the generated parser.

    NOTE(review): line breaks in this template were reconstructed from a
    whitespace-mangled source -- confirm blank-line placement.
    """
    template = self._clean_fmt("""\"\"\"This parser was generated by pyebnf on {date}.\"\"\"
from enum import Enum

from pyebnf import parser_base as PB
from pyebnf.primitive import alternation, concatenation, exclusion, one_or_more
from pyebnf.primitive import option, repeated, repetition, terminal, zero_or_more
{imports}


{token_type_enum}


{class_definition}
""")
    return template.format(date=datetime.utcnow().isoformat(),
                           imports=self._get_imports(),
                           token_type_enum=self._get_token_type_enum(),
                           class_definition=self._get_class_definition())
Reads the directives and generates source code for custom imports.
def _get_imports(self): """Reads the directives and generates source code for custom imports.""" import_directives = [d for d in self.directives if d.name == "import"] if import_directives: return "\n" + "\n".join(d.args["value"] for d in import_directives) else: return ""
Builds the python source code for the Parser TokenType enum.
def _get_token_type_enum(self): """Builds the python source code for the Parser TokenType enum.""" fmt = "class TokenType(Enum):\n" \ "{indent}\"\"\"The token types for parse nodes generated by the Parser.\"\"\"\n" \ "{indent}" + \ "\n{indent}".join("{1} = {0}".format(num + 1, r.name) for num, r in enumerate(self.rules)) return fmt.format(indent=self.indent)
Builds the class definition of the parser.
def _get_class_definition(self): """Builds the class definition of the parser.""" fmt = """class Parser({parser_base}): {indent}\"\"\"This class contains methods for reading source code and generating a parse tree.\"\"\" {indent}entry_point = "{entry_point}" {rule_definitions} """ fmt = self._clean_fmt(fmt) return fmt.format(parser_base=self._get_parser_base(), indent=self.indent, entry_point=self._get_entry_point(), rule_definitions="\n".join(self._get_rule_definitions()))
Gets the entry_point value for the parser.
def _get_entry_point(self): """Gets the entry_point value for the parser.""" ep = self._find_directive("entry_point") if ep: return ep.args["value"] else: return self.rules[0].name
Generates the source code for a rule.
def _get_rule_definition(self, rule):
    """Generate the python source code for a single rule method.

    NOTE(review): line breaks in this template were reconstructed from a
    whitespace-mangled source -- confirm.
    """
    template = self._clean_fmt("""def {rule_fxn_name}(self, text):
{indent}\"\"\"{rule_source}\"\"\"
{indent}self._attempting(text)
{indent}return {rule_definition}(text){transform}
""")
    body = self._indent(self._ast_to_code(rule.expression), skip_first_line=True)
    # All primitives accept a bare string x in place of terminal(x) -- the
    # "terminal shorthand".  A rule that is nothing but a single terminal
    # still needs an explicit terminal(...) call; handle that here.
    if self.use_terminal_shorthand and len(body) == 1 and body[0].startswith(("'", '"')):
        body = ["terminal({})".format(body[0])]
    rule_source = template.format(rule_fxn_name=self._get_rule_fxn_name(rule.name),
                                  indent=self.indent,
                                  rule_source=self._get_rule_source(rule),
                                  rule_definition="\n".join(body),
                                  transform=self._get_rule_transform(rule))
    return self._indent(rule_source, 1)
Gets the variable part of the source code for a rule.
def _get_rule_source(self, rule): """Gets the variable part of the source code for a rule.""" p = len(self.input_source) + rule.position source = self.input_source[p:p + rule.consumed].rstrip() return self._indent(source, depth=self.indent + " ", skip_first_line=True)
The return value for each rule can be either retyped, compressed, or left alone. This method determines which and returns the source code text for accomplishing it.
def _get_rule_transform(self, rule): """The return value for each rule can be either retyped, compressed or left alone. This method determines that and returns the source code text for accomplishing it. """ rd = self._find_directive(lambda d: d.name == "rule" and d.args.get("name") == rule.name) if rd: args = rd.args else: args = {} transform = args.get("transform", "retype") if transform == "retype": new_name = args.get("to_type", "TokenType.{0}".format(rule.name)) return ".retyped({0})".format(new_name) elif transform == "compress": new_name = args.get("to_type", "TokenType.{0}".format(rule.name)) if new_name == "identity": return ".compressed()" else: return ".compressed({0})".format(new_name) elif transform == "identity": return ""
Convert an expression to an Abstract Syntax Tree Node.
def _expression_to_asn(self, expression):
    """Convert an expression parse node to an abstract syntax tree node."""
    operands = [self._node_to_asn(child) for child in expression.children]
    return self._remove_grouping_groups(infix_to_optree(operands))
Convert a parse tree node into an abstract syntax tree node.
def _node_to_asn(self, node):
    """Convert a parse tree node into an abstract syntax tree node."""
    if node.is_type(TokenType.identifier):
        return Identifier(node.svalue)
    if node.is_type(TokenType.terminal):
        return Terminal(node.svalue)
    if node.is_type(TokenType.option_group):
        return OptionGroup(self._expression_to_asn(node.children[0]))
    if node.is_type(TokenType.repetition_group):
        return RepetitionGroup(self._expression_to_asn(node.children[0]))
    if node.is_type(TokenType.grouping_group):
        return GroupingGroup(self._expression_to_asn(node.children[0]))
    if node.is_type(TokenType.special_handling):
        return SpecialHandling(node.children[0])
    if node.is_type(TokenType.number):
        return Number(node.svalue)
    if node.is_type((TokenType.operator, TokenType.op_mult, TokenType.op_add)):
        return OperatorNode(OPERATOR_INDEX[node.svalue], node.position)
    raise Exception("Unhandled parse tree node: {0}".format(node))
Flattens a list of optree operands based on a pred.
def _hoist_operands(self, operands, pred): """Flattens a list of optree operands based on a pred. This is used to convert concatenation([x, concatenation[y, ...]]) (or alternation) to concatenation([x, y, ...]). """ hopper = list(operands) new_operands = [] while hopper: target = hopper.pop(0) if pred(target): hopper = list(target.operands) + hopper else: new_operands.append(target) return new_operands
Grouping groups are implied by optrees; this function hoists grouping-group expressions up to their parent node.
def _remove_grouping_groups(self, optree):
    """Hoist grouping-group expressions up to their parent optree node.

    Grouping is already implied by the optree structure, so explicit
    GroupingGroup wrappers are replaced by their inner expression.
    """
    hoisted = []
    for operand in optree.operands:
        if isinstance(operand, OptreeNode):
            hoisted.append(self._remove_grouping_groups(operand))
        elif isinstance(operand, GroupingGroup):
            hoisted.append(operand.expression)
        else:
            hoisted.append(operand)
    return OptreeNode(optree.opnode, hoisted)
Convert an abstract syntax tree to python source code.
def _ast_to_code(self, node, **kwargs):
    """Convert an abstract syntax tree to python source code.

    Dispatches on the AST node's class to the specialised *_to_code
    helpers; kwargs (e.g. ignore_whitespace) are forwarded unchanged.
    Each helper returns a list of source lines.
    """
    if isinstance(node, OptreeNode):
        return self._ast_optree_node_to_code(node, **kwargs)
    elif isinstance(node, Identifier):
        return self._ast_identifier_to_code(node, **kwargs)
    elif isinstance(node, Terminal):
        return self._ast_terminal_to_code(node, **kwargs)
    elif isinstance(node, OptionGroup):
        return self._ast_option_group_to_code(node, **kwargs)
    elif isinstance(node, RepetitionGroup):
        return self._ast_repetition_group_to_code(node, **kwargs)
    elif isinstance(node, SpecialHandling):
        return self._ast_special_handling_to_code(node, **kwargs)
    elif isinstance(node, Number):
        return self._ast_number_to_code(node, **kwargs)
    else:
        raise Exception("Unhandled ast node: {0}".format(node))
Convert an abstract syntax operator tree to python source code.
def _ast_optree_node_to_code(self, node, **kwargs):
    """Convert an abstract syntax operator tree to python source code.

    An optree with no opnode is an identity wrapper around a single
    operand.  Concatenation comes in two flavours: OP_WS_CONCAT keeps
    whitespace significant, OP_CONCAT ignores it; the flag is threaded to
    nested nodes via kwargs.
    """
    opnode = node.opnode
    if opnode is None:
        # NOTE(review): kwargs are not forwarded through the identity
        # case — confirm whether ignore_whitespace should survive here.
        return self._ast_to_code(node.operands[0])
    else:
        operator = opnode.operator
        if operator is OP_ALTERNATE:
            return self._ast_op_alternate_to_code(node, **kwargs)
        elif operator is OP_WS_CONCAT:
            kwargs["ignore_whitespace"] = False
            return self._ast_op_concat_to_code(node, **kwargs)
        elif operator is OP_CONCAT:
            kwargs["ignore_whitespace"] = True
            return self._ast_op_concat_to_code(node, **kwargs)
        elif operator is OP_EXCLUDE:
            return self._ast_op_exclude_to_code(node, **kwargs)
        elif operator is OP_MULTIPLY:
            return self._ast_op_multiply_to_code(node, **kwargs)
        elif operator is OP_REPEAT:
            return self._ast_op_repeat_to_code(node, **kwargs)
        else:
            raise Exception("Unhandled optree node: {0}".format(node))
Convert an AST terminal to python source code.
def _ast_terminal_to_code(self, terminal, **kwargs):
    """Convert an AST terminal to python source code."""
    value = _replace(terminal.value)
    if not self.use_terminal_shorthand:
        return ["terminal({})".format(value)]
    return [value]
Convert an AST option group to python source code.
def _ast_option_group_to_code(self, option_group, **kwargs):
    """Convert an AST option group to python source code."""
    inner = self._indent(self._ast_to_code(option_group.expression))
    return ["option("] + inner + [")"]
Convert an AST repetition group to python source code.
def _ast_repetition_group_to_code(self, repetition_group, ignore_whitespace=False, **kwargs):
    """Convert an AST repetition group to python source code."""
    body = self._indent(self._ast_to_code(repetition_group.expression))
    body[-1] += ","
    return (["zero_or_more("] + body +
            [self._indent("ignore_whitespace={}".format(bool(ignore_whitespace))), ")"])
Convert an AST special handling to python source code.
def _ast_special_handling_to_code(self, special_handling, **kwargs):
    """Convert an AST special handling node to python source code."""
    ident = special_handling.value.svalue
    # Known special-handling names live on the PB module; everything else
    # is resolved as an attribute of the generated parser instance.
    owner = "PB" if ident in PB_SPECIAL_HANDLING else "self"
    return ["{0}.{1}".format(owner, ident)]
Convert an AST alternate op to python source code.
def _ast_op_alternate_to_code(self, opr, **kwargs):
    """Convert an AST alternate op to python source code."""
    def is_nested_alternation(node):
        return isinstance(node, OptreeNode) and node.opnode.operator is OP_ALTERNATE

    lines = ["alternation(["]
    for operand in self._hoist_operands(opr.operands, is_nested_alternation):
        body = self._indent(self._ast_to_code(operand))
        body[-1] += ","
        lines.extend(body)
    lines.append("])")
    return lines
Convert an AST concatenate op to python source code.
def _ast_op_concat_to_code(self, opr, *, ignore_whitespace, **kwargs):
    """Convert an AST concatenate op to python source code."""
    # Only hoist nested concatenations of the *same* whitespace flavour.
    hoist_target = OP_CONCAT if ignore_whitespace else OP_WS_CONCAT

    def is_same_concat(node):
        return isinstance(node, OptreeNode) and node.opnode.operator is hoist_target

    lines = ["concatenation(["]
    for operand in self._hoist_operands(opr.operands, is_same_concat):
        body = self._indent(self._ast_to_code(operand, ignore_whitespace=ignore_whitespace))
        body[-1] += ","
        lines.extend(body)
    lines.append("], ignore_whitespace={})".format(bool(ignore_whitespace)))
    return lines
Convert an AST exclude op to python source code.
def _ast_op_exclude_to_code(self, opr, **kwargs):
    """Convert an AST exclude op to python source code.

    Fix: the original unpacked the operands into ``opl, opr``, shadowing
    the ``opr`` parameter; distinct local names are used instead.
    """
    left, right = opr.operands
    lines = ["exclusion("]
    lines.extend(self._indent(self._ast_to_code(left)))
    lines[-1] += ","
    lines.extend(self._indent(self._ast_to_code(right)))
    lines.append(")")
    return lines
Convert an AST multiply op to python source code.
def _ast_op_multiply_to_code(self, opr, ignore_whitespace=False, **kwargs):
    """Convert an AST multiply op to python source code.

    One operand is the repeat count (a Number node), the other the
    expression to repeat; they may appear in either order.

    Fix: renamed the unpacked operands (originally ``opl, opr``) so they
    no longer shadow the ``opr`` parameter.
    """
    left, right = opr.operands
    if isinstance(left, Number):
        times = left.value
        subject = self._ast_to_code(right)
    else:
        times = right.value
        subject = self._ast_to_code(left)
    lines = ["repeated("]
    lines.extend(self._indent(subject))
    lines[-1] += ","
    lines.append("{0}times={1},".format(self.indent, times))
    lines.append("{0}ignore_whitespace={1}".format(self.indent, bool(ignore_whitespace)))
    lines.append(")")
    return lines
Convert an AST repeat op to python source code.
def _ast_op_repeat_to_code(self, opr, ignore_whitespace=False, **kwargs):
    """Convert an AST repeat op to python source code."""
    body = self._indent(self._ast_to_code(opr.operands[0]))
    body[-1] += ","
    return (["one_or_more("] + body +
            [self._indent("ignore_whitespace={}".format(bool(ignore_whitespace))), ")"])
Indent text by depth * self.indent.
def _indent(self, text, depth=1, *, skip_first_line=False, suffix=""): """Indent text by depth * self.indent. Text can be either a string, or a list of strings. If it is a string, it will be split on newline to a list of strings. if skip_first_line is true, the first line will not be indented like the others. """ as_list = isinstance(text, list) if as_list: lines = text else: lines = text.split("\n") new_lines = [] if isinstance(depth, int): spacing = self.indent * depth else: spacing = depth for i, line in enumerate(lines): if skip_first_line and i == 0: new_lines.append("{0}{1}".format(line, suffix)) else: new_lines.append("{0}{1}{2}".format(spacing, line, suffix)) if as_list: return new_lines else: return "\n".join(new_lines)
Finds all directives with a certain name or that passes a predicate.
def _find_directives(self, pred): """Finds all directives with a certain name, or that passes a predicate.""" if isinstance(pred, str): return [d for d in self.directives if d.name == pred] else: return [d for d in self.directives if pred(d)]
Custom flattening method for the parse tree.
def _flatten(child, parent):
    """Custom flattening method for the parse tree."""
    # Only expression nodes flatten, and only into a parent of the same type.
    if not parent.is_type(TokenType.expression):
        return False
    return child.node_type == parent.node_type
A directive is a line in a comment that begins with !.
def directives_from_comment(cls, comment):
    """A directive is a line in a comment that begins with '!'."""
    # Strip the two-character comment delimiters, then scan line by line.
    body = comment.value[2:-2].strip()
    for raw_line in body.split("\n"):
        line = raw_line.strip()
        if line.startswith("!"):
            yield cls.parse_directive_def(line[1:].strip())
Turns a directive definition string into a directive object.
def parse_directive_def(cls, directive_def):
    """Turns a directive definition string into a directive object."""
    name, *raw_args = esc_split(directive_def, ignore_empty=True)
    kwargs = dict(esc_split(arg, "=") for arg in raw_args)
    return Directive(name, kwargs)
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extracts the specific arguments of this CLI."""
    ApiCli.get_arguments(self)
    group_name = self.args.hostGroupName
    if group_name is not None:
        self.url_parameters = {"name": group_name}
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extracts the specific arguments of this CLI."""
    ApiCli.get_arguments(self)
    plugin = self.args.plugin_name
    if plugin is not None:
        self.plugin_name = plugin
    self.path = "v1/plugins/{0}".format(self.plugin_name)
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extracts the specific arguments of this CLI."""
    ApiCli.get_arguments(self)
    # `x if x is not None else None` is just x; assign directly.
    self._alarm_id = self.args.alarm_id
Handle the results of the API call
def _handle_results(self):
    """Handle the results of the API call."""
    if self._api_result.status_code == requests.codes.ok:
        return
    # Anything other than HTTP 200 gets its body echoed for diagnosis.
    print(self.colorize_json(self._api_result.text))
Returns a human-readable version of numerical modifiers and key. To make the key suitable for global hotkey usage, supply: mods_table = global_mods, key_table = win32con, key_prefix = 'VK_'.
def key_to_str(modifiers, key, mods_table = mods, key_table = wx, key_prefix = 'WXK_'):
    """
    Returns a human-readable version of numerical modifiers and key.

    To make the key suitable for global hotkey usage, supply:
    mods_table = global_mods, key_table = win32con, key_prefix = 'VK_'
    """
    logger.debug('Converting (%s, %s) to string.', modifiers, key)
    if not key:
        key_str = 'NONE'
    else:
        key_str = None
    res = ''
    # Emit every modifier whose bit is set, e.g. 'CTRL+ALT+'.
    for value, name in mods_table.items():
        if (modifiers & value):
            res += name + '+'
    # Scan the key table for a constant with the expected prefix whose value
    # matches the key code; `converts` maps awkward constant names to nicer ones.
    for x in dir(key_table):
        if x.startswith(key_prefix):
            if getattr(key_table, x) == key:
                key_str = converts.get(x, x[len(key_prefix):])
    if not key_str:
        # No named constant matched; assume the code is a plain character.
        key_str = chr(key)
    res += key_str
    logger.debug('Final result: %s.', res)
    return res
Turns a string like "CTRL+ALT+K" into (3, 75). To get a global hotkey, try passing: key_table = win32con, accel_format = 'MOD_%s', key_format = 'VK_%s', key_transpositions = {'CTRL': 'CONTROL'}.
def str_to_key(value, key_table = wx, accel_format = 'ACCEL_%s', key_format = 'WXK_%s', key_transpositions = None):
    """
    Turns a string like "CTRL+ALT+K" into (modifiers, keycode), e.g. (3, 75).

    To get a global hotkey, try passing:
    key_table = win32con, accel_format = 'MOD_%s', key_format = 'VK_%s',
    key_transpositions = {'CTRL': 'CONTROL'}

    Fix: ``key_transpositions`` previously defaulted to a shared mutable
    dict; it now defaults to None and a fresh dict is created per call.

    Raises ValueError when more than one non-modifier key is specified.
    """
    if key_transpositions is None:
        key_transpositions = {}
    logger.debug('Converting "%s" to integers.', value)
    modifiers = 0
    key = 0
    split = value.split('+')
    for v in split:
        v = v.upper()
        # Each token is tried first as an accelerator (modifier), then as a key.
        a = accel_format % key_transpositions.get(v, v)
        logger.debug('Accelerator format = %s.', a)
        k = key_format % key_transpositions.get(v, v)
        logger.debug('Key format = %s.', k)
        if hasattr(key_table, a):
            logger.debug('Found accelerator on %r.', key_table)
            modifiers = modifiers | getattr(key_table, a)
        elif hasattr(key_table, k):
            logger.debug('Found key on %r.', key_table)
            if key:
                raise ValueError('Multiple keys specified.')
            else:
                key = getattr(key_table, k)
    if not key:
        # No named key constant matched; fall back to the character code.
        logger.debug('No key yet, falling back to ord.')
        key = ord(split[-1])
    logger.debug('modifiers = %d, key = %d.', modifiers, key)
    return (modifiers, key)
Get a new id if the provided one is None.
def get_id(id):
    """Get a new id if the provided one is None.

    Fix: compare to None with identity (`is None`), not equality.
    """
    if id is None:
        id = wx.NewId()
        logger.debug('Generated new ID %s.', id)
    else:
        logger.debug('Using provided id %s.', id)
    return id
Adds a key to the control. control: The control that the accelerator should be added to. key: A string like CTRL + F or CMD + T that specifies the key to use. func: The function that should be called when key is pressed. id: The id to Bind the event to. Defaults to wx. NewId ().
def add_accelerator(control, key, func, id = None):
    """
    Adds a key to the control.

    control: The control that the accelerator should be added to.
    key: A string like "CTRL+F", or "CMD+T" that specifies the key to use.
    func: The function that should be called when key is pressed.
    id: The id to Bind the event to. Defaults to wx.NewId().

    Returns the id the accelerator was bound with.
    """
    logger.debug('Adding key "%s" to control %s to call %s.', key, control, func)
    id = get_id(id)
    control.Bind(wx.EVT_MENU, func, id = id)
    # _tables keeps the accelerator entries per control so the control's
    # accelerator table can be rebuilt by update_accelerators.
    t = _tables.get(control, [])
    modifiers, key_int = str_to_key(key)
    t.append((modifiers, key_int, id))
    _tables[control] = t
    update_accelerators(control)
    return id
Removes an accelerator from control. control: The control to affect. key: The key to remove.
def remove_accelerator(control, key):
    """
    Removes an accelerator from control.

    control: The control to affect.
    key: The key to remove.

    Returns True when a matching accelerator was found and removed.
    """
    target = str_to_key(key)
    entries = _tables.get(control, [])
    for entry in entries:
        if entry[:2] != target:
            continue
        entries.remove(entry)
        if entries:
            _tables[control] = entries
        else:
            del _tables[control]
        update_accelerators(control)
        return True
    return False
Add a global hotkey bound to control via id that should call func. control: The control to bind to. key: The hotkey to use. func: The func to call. id: The new ID to use (defaults to creating a new ID).
def add_hotkey(control, key, func, id = None):
    """
    Add a global hotkey bound to control via id that should call func.

    control: The control to bind to.
    key: The hotkey to use.
    func: The func to call.
    id: The new ID to use (defaults to creating a new ID).

    Returns the result of control.RegisterHotKey.  Requires win32con
    (raises RuntimeError when it is unavailable).
    """
    if win32con is None:
        raise RuntimeError('win32con is not available.')
    logger.debug('Adding hotkey "%s" to control %s to call %s.', key, control, func)
    # Translate the textual hotkey into win32 modifier / virtual-key codes.
    modifiers, keycode = str_to_key(key, key_table = win32con, accel_format = 'MOD_%s', key_format = 'VK_%s', key_transpositions = {'CTRL': 'CONTROL'})
    id = get_id(id)
    control.Bind(wx.EVT_HOTKEY, func, id = id)
    # _hotkeys tracks (key, id) pairs per control so remove_hotkey can undo this.
    l = _hotkeys.get(control, [])
    l.append([key, id])
    _hotkeys[control] = l
    return control.RegisterHotKey(id, modifiers, keycode)
Remove a global hotkey. control - The control to affect key - The key to remove.
def remove_hotkey(control, key):
    """
    Remove a global hotkey.

    control - The control to affect
    key - The key to remove.

    Fix: the original removed entries from the list while iterating over
    it, which can skip elements; iterate over a snapshot instead.
    """
    l = _hotkeys.get(control, [])
    removed_any = False
    for a in list(l):
        key_str, id = a
        if key_str == key:
            control.Unbind(wx.EVT_HOTKEY, id = id)
            control.UnregisterHotKey(id)
            l.remove(a)
            removed_any = True
    if removed_any:
        if l:
            _hotkeys[control] = l
        else:
            del _hotkeys[control]
Configure handling of command line arguments.
def add_arguments(self):
    """
    Configure handling of command line arguments.

    Registers the standard connection options shared by all of the CLIs:
    API endpoint host, account e-mail, API token, and a flag to print the
    equivalent curl invocation instead of calling the API.
    """
    self.add_logging_argument()

    self.parser.add_argument('-a', '--api-host', dest='api_host', action='store', metavar="api_host",
                             help='{0} API host endpoint'.format(self.product_name))
    self.parser.add_argument('-e', '--email', dest='email', action='store', metavar="e_mail",
                             help='e-mail that has access to the {0} account'.format(self.product_name))
    self.parser.add_argument('-t', '--api-token', dest='api_token', required=False, action='store',
                             metavar="api_token",
                             help='API token for given e-mail that has access to the {0} account'.format(
                                 self.product_name))
    self.parser.add_argument('-z', '--curl', dest='curl', required=False, action='store_true', default=False,
                             help='Output the corresponding curl command line and exit')
Configure logging based on command line options
def _configure_logging(self):
    """Configure logging based on command line options."""
    level_name = self.args.logLevel
    if level_name is None:
        return
    logging.basicConfig(level=self.levels[level_name])
    logging.info("Set logging level to {0}".format(level_name))
CLIs get called back so that they can process any command line arguments that are given. This method handles the standard command line arguments for: API Host user password etc.
def get_arguments(self):
    """
    CLIs get called back so that they can process any command line
    arguments that are given. This method handles the standard command
    line arguments for: API Host, user, password, etc.
    """
    # Enable logging first so every subsequent step can emit diagnostics.
    self._configure_logging()

    args = self.args
    if args.api_host is not None:
        self._api_host = args.api_host
    if args.email is not None:
        self._email = args.email
    if args.api_token is not None:
        self._api_token = args.api_token
    self._curl = args.curl

    logging.debug("apihost: {0}".format(self._api_host))
    logging.debug("email: {0}".format(self._email))
    logging.debug("apitoken: {0}".format(self._api_token))
Validates the command line arguments passed to the CLI Derived classes that override need to call this method before validating their arguments
def _validate_arguments(self): """ Validates the command line arguments passed to the CLI Derived classes that override need to call this method before validating their arguments """ if self._email is None: self.set_error_message("E-mail for the account not provided") return False if self._api_token is None: self.set_error_message("API Token for the account not provided") return False return True
Run the steps to execute the CLI
def execute(self): """ Run the steps to execute the CLI """ # Set default arguments from environment variables self._get_environment() # Call our member function to add command line arguments, child classes that override need # to call the ApiCli version first to add standard arguments self.add_arguments() # Parse the command line arguments self._parse_args() # Arguments are parsed call back to the instance so that it can extract the command line # arguments for its use self.get_arguments() self.get_api_parameters() if self._validate_arguments(): if self._curl: self._curl_output() else: self._call_api() self._handle_results() else: print(self._message)
Convert a list of nodes in infix order to a list of nodes in postfix order.
def infix_to_postfix(nodes, *, recurse_types=None):
    """Convert a list of nodes in infix order to a list of nodes in postfix order.

    E.G. with normal algebraic precedence, 3 + 4 * 5 -> 3 4 5 * +

    Shunting-yard style: operators wait on a stack and are emitted once an
    incoming operator of lower precedence (or equal, for left-associative
    operators) arrives.  If recurse_types is given, nodes of those types
    are expanded in place by recursing into their children.
    """
    output = []
    operators = []
    for node in nodes:
        if isinstance(node, OperatorNode):
            # Drain out all operators whose precedence is gte the node's...
            cmp_operator = node.operator
            while operators:
                current_operator = operators[-1].operator
                if current_operator.precedence > cmp_operator.precedence or \
                   current_operator.precedence == cmp_operator.precedence and current_operator.association == Association.left:
                    output.append(operators.pop())
                else:
                    break
            operators.append(node)
        else:
            if recurse_types is not None and node.node_type in recurse_types:
                output.extend(infix_to_postfix(node.children, recurse_types=recurse_types))
            else:
                output.append(node)
    # Remaining stacked operators flush in reverse (highest first).
    return output + list(reversed(operators))
Convert a list of nodes in postfix order to an Optree.
def postfix_to_optree(nodes):
    """Convert a list of nodes in postfix order to an Optree.

    Repeatedly reduces the first operator with its operands until a single
    node remains.  A lone non-operator operand is wrapped in an identity
    OptreeNode (opnode None).

    Raises OperatorError for an empty list or a dangling operator.
    """
    while len(nodes) > 1:
        nodes = _reduce(nodes)
    if len(nodes) == 0:
        raise OperatorError("Empty node list")
    node = nodes[0]
    if isinstance(node, OperatorNode):
        raise OperatorError("Operator without operands")
    if isinstance(node, OptreeNode):
        return node
    return OptreeNode(None, (node, ))
Finds the first operator in the list converts it and its operands to a OptreeNode then returns a new list with the operator and operands replaced by the new OptreeNode.
def _reduce(nodes):
    """Finds the first operator in the list, converts it and its operands to
    a OptreeNode, then returns a new list with the operator and operands
    replaced by the new OptreeNode.
    """
    op_index = next((idx for idx, node in enumerate(nodes)
                     if isinstance(node, OperatorNode)), len(nodes))
    if op_index == len(nodes):
        raise OperatorError("No operator found")

    operator_node = nodes[op_index]
    operator = operator_node.operator
    # The operator consumes the `cardinality` nodes immediately before it.
    lbound = op_index - operator.cardinality
    if lbound < 0:
        raise OperatorError("Insufficient operands for operator {0}".format(operator.symbol))

    reduced = OptreeNode(operator_node, tuple(nodes[lbound:op_index]))
    return nodes[:lbound] + [reduced] + nodes[op_index + 1:]
Pretty print an optree starting at root.
def pprint(root, depth=0, space_unit="  "):
    """Pretty print an optree, starting at root."""
    prefix = space_unit * depth
    if not isinstance(root, OptreeNode):
        # Leaf operand.
        print("{0}• {1}".format(prefix, root))
        return
    opnode = root.opnode
    symbol = opnode.operator.symbol if opnode else "None -> IDENTITY"
    print("{0}Operator ({1})".format(prefix, symbol))
    for operand in root.operands:
        pprint(operand, depth + 1)
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extracts the specific arguments of this CLI."""
    ApiCli.get_arguments(self)
    name = self.args.pluginName
    if name is not None:
        self.pluginName = name
Transforms the command line properties into python dictionary: return:
def _process_properties(self, properties): """ Transforms the command line properties into python dictionary :return: """ if properties is not None: self._properties = {} for p in properties: d = p.split('=') self._properties[d[0]] = d[1]
Add the specific arguments of this CLI
def add_arguments(self):
    """
    Add the specific arguments of this CLI.

    Registers every metric-definition field as a command line option.
    Note that --description is only mandatory on create
    (required=not self.update).
    """
    MetricCommon.add_arguments(self)
    self.parser.add_argument('-n', '--metric-name', dest='metricName', action='store', required=True,
                             metavar='metric_name', help='Metric identifier')
    self.parser.add_argument('-d', '--display-name', dest='displayName', action='store', required=True,
                             metavar='display_name', help='Metric display name')
    self.parser.add_argument('-s', '--display-name-short', dest='displayNameShort', action='store', required=True,
                             metavar='display_short_name', help='Metric short display name')
    self.parser.add_argument('-i', '--description', dest='description', action='store', required=not self.update,
                             metavar='description', help='Metric description')
    self.parser.add_argument('-g', '--aggregate', dest='aggregate', action='store', required=True,
                             choices=['avg', 'max', 'min', 'sum'], help='Metric default aggregate')
    self.parser.add_argument('-u', '--unit', dest='unit', action='store', required=False,
                             choices=['percent', 'number', 'bytecount', 'duration'], help='Metric unit')
    self.parser.add_argument('-r', '--resolution', dest='resolution', action='store', metavar='resolution',
                             required=False, help='Metric default resolution')
    self.parser.add_argument('-y', '--type', dest='type', action='store', default=None, required=False,
                             metavar='type', help='Sets the type metadata field')
    self.parser.add_argument('-x', '--is-disabled', dest='isDisabled', action='store', default=None, required=False,
                             choices=['true', 'false'], help='Enable or disable the metric definition')
Extracts the specific arguments of this CLI
def get_arguments(self):
    """
    Extracts the specific arguments of this CLI.

    Copies the parsed command line values onto the instance, builds the
    JSON payload for the metric create/update API call, and sets the
    request path and headers.

    Fix: ``isDisabled`` was compared against ``'yes'`` although the
    argument parser only accepts ``'true'``/``'false'`` (see
    add_arguments), so the flag could never become True; compare against
    ``'true'`` instead.
    """
    MetricCommon.get_arguments(self)
    if self.args.metricName is not None:
        self.metricName = self.args.metricName
    if self.args.displayName is not None:
        self.displayName = self.args.displayName
    if self.args.displayNameShort is not None:
        self.displayNameShort = self.args.displayNameShort
    if self.args.description is not None:
        self.description = self.args.description
    if self.args.aggregate is not None:
        self.aggregate = self.args.aggregate
    if self.args.unit is not None:
        self.unit = self.args.unit
    if self.args.resolution is not None:
        self.resolution = self.args.resolution
    if self.args.isDisabled is not None:
        self.isDisabled = self.args.isDisabled
    if self.args.type is not None:
        self.type = self.args.type

    # Only include fields that were actually provided in the payload.
    data = {}
    if self.metricName is not None:
        data['name'] = self.metricName
    if self.displayName is not None:
        data['displayName'] = self.displayName
    if self.displayNameShort is not None:
        data['displayNameShort'] = self.displayNameShort
    if self.description is not None:
        data['description'] = self.description
    if self.aggregate is not None:
        data['defaultAggregate'] = self.aggregate
    if self.unit is not None:
        data['unit'] = self.unit
    if self.resolution is not None:
        data['defaultResolutionMS'] = self.resolution
    if self.isDisabled is not None:
        data['isDisabled'] = self.isDisabled == 'true'
    if self.type is not None:
        data['type'] = self.type

    self.path = "v1/metrics/{0}".format(self.metricName)
    self.data = json.dumps(data, sort_keys=True)
    self.headers = {'Content-Type': 'application/json', "Accept": "application/json"}
Extracts the specific arguments of this CLI
def get_arguments(self):
    """Extracts the specific arguments of this CLI."""
    ApiCli.get_arguments(self)
    # `x if x is not None else None` is just x; assign directly.
    self._alarm_name = self.args.alarm_name
Load the metrics file from the given path
def read(self):
    """
    Load the metrics file from the given path.

    Fix: use a context manager so the file handle is always closed, even
    when reading raises.
    """
    with open(self.path, "r") as f:
        self.manifest_json = f.read()
Read the file and parse JSON into dictionary
def load(self):
    """Read the file and parse JSON into dictionary."""
    plugin_manifest = PluginManifest(self.file_path)
    plugin_manifest.get()
    self.manifest = plugin_manifest.get_manifest()
Looks up the metric definition from the definitions from the API call
def getMetricDefinition(self, name):
    """
    Looks up the metric definition from the definitions from the API call.

    Returns the first definition whose 'name' matches, or None.
    """
    return next((m for m in self.metric_definitions if m['name'] == name), None)
Prints out table header based on the size of the data in columns
def printMetricsHeader(self, m, d):
    """
    Prints out table header based on the size of the data in columns.

    Fix: widen the columns to at least the header text (matching
    printFieldsHeader) so narrow data can not produce a malformed header.
    """
    mstr = "Metric Name"
    dstr = "Description"
    m = max(m, len(mstr))
    d = max(d, len(dstr))
    print('|{0}{1}|{2}{3}|'.format(mstr, ' ' * (m - len(mstr)), dstr, ' ' * (d - len(dstr))))
    print('|:{0}|:{1}|'.format('-' * (m - 1), '-' * (d - 1)))
Gets the maximum length of each column in the field table
def getFieldsColumnLengths(self):
    """Gets the maximum length of each column in the field table."""
    name_len = max((len(f['title']) for f in self.fields), default=0)
    desc_len = max((len(f['description']) for f in self.fields), default=0)
    return (name_len, desc_len)
Gets the maximum length of each column
def getMetricsColumnLengths(self):
    """Gets the maximum length of each column."""
    display_len = max((len(m['displayName']) for m in self.metrics), default=0)
    desc_len = max((len(m['description']) for m in self.metrics), default=0)
    return (display_len, desc_len)
Escape underscores so that the markdown is correct
def escapeUnderscores(self):
    """
    Escape underscores so that the markdown is correct.

    Fix: use a raw string for the replacement; "\\_" written as a plain
    literal is an invalid escape sequence that only worked by accident
    (and warns on modern Pythons).
    """
    new_metrics = []
    for m in self.metrics:
        m['name'] = m['name'].replace("_", r"\_")
        new_metrics.append(m)
    self.metrics = new_metrics
Prints out table header based on the size of the data in columns
def printFieldsHeader(self, f, d):
    """
    Prints out table header based on the size of the data in columns.

    Returns the (possibly widened) column widths actually used.
    """
    fstr = "Field Name"
    dstr = "Description"
    # Columns are never narrower than their header text.
    f = max(f, len(fstr))
    d = max(d, len(dstr))
    header = '|{0}{1}|{2}{3}|'.format(fstr, ' ' * (f - len(fstr)), dstr, ' ' * (d - len(dstr)))
    separator = '|:{0}|:{1}|'.format('-' * (f - 1), '-' * (d - 1))
    print(header)
    print(separator)
    return (f, d)
Prints out table rows based on the size of the data in columns
def printMetrics(self, m, d):
    """Prints out table rows based on the size of the data in columns."""
    row_format = "|{0}{1}|{2}{3}|"
    for metric in self.metrics:
        display = metric['displayName']
        description = metric['description']
        print(row_format.format(display, ' ' * (m - len(display)),
                                description, ' ' * (d - len(description))))