sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def format_message(self, evr_hist_data):
    '''Format EVR message with EVR data

    Given a byte array of EVR data, format the EVR's message attribute
    printf format strings and split the byte array into appropriately
    sized chunks.  Supports most format strings containing length and
    type fields.

    Args:
        evr_hist_data: A bytearray of EVR data.  Bytes are expected to
            be in MSB ordering.

            Example formatting::

                # This is the character '!', string 'Foo', and int '4279317316'
                bytearray([0x21, 0x46, 0x6f, 0x6f, 0x00, 0xff, 0x11, 0x33, 0x44])

    Returns:
        The EVR's message string formatted with the EVR data or the
        unformatted EVR message string if there are no valid format
        strings present in it.

    Raises:
        ValueError: When the bytearray cannot be fully processed with
            the specified format strings.  This is usually a result of
            the expected data length and the byte array length not
            matching.
    '''
    # Data size (in bytes) implied by each length/type character.
    size_formatter_info = {
        's' : -1,
        'c' : 1,
        'i' : 4,
        'd' : 4,
        'u' : 4,
        'x' : 4,
        'hh': 1,
        'h' : 2,
        'l' : 4,
        'll': 8,
        'f' : 8,
        'g' : 8,
        'e' : 8,
    }

    # dtype name template for each type character (filled in with bit size).
    type_formatter_info = {
        'c' : 'U{}',
        'i' : 'MSB_I{}',
        'd' : 'MSB_I{}',
        'u' : 'MSB_U{}',
        'f' : 'MSB_D{}',
        'e' : 'MSB_D{}',
        'g' : 'MSB_D{}',
        'x' : 'MSB_U{}',
    }

    # Raw string avoids invalid escape-sequence issues in the pattern.
    formatters = re.findall(r"%(?:\d+\$)?([cdieEfgGosuxXhlL]+)", self._message)
    cur_byte_index = 0
    data_chunks = []

    for f in formatters:
        # If the format string we found is > 1 character we know that a
        # length field is included and we need to adjust our sizing
        # accordingly.
        f_size_char = f_type = f[-1]
        if len(f) > 1:
            f_size_char = f[:-1]

        fsize = size_formatter_info[f_size_char.lower()]

        try:
            if f_type != 's':
                end_index = cur_byte_index + fsize
                fstr = type_formatter_info[f_type.lower()].format(fsize * 8)

                # Type formatting can give us incorrect format strings when
                # a size formatter promotes a smaller data type.  For
                # instance, 'hhu' says we'll promote a char (1 byte) to an
                # unsigned int for display.  Here, the type format string
                # would be incorrectly set to 'MSB_U8' if we didn't correct.
                if fsize == 1 and 'MSB_' in fstr:
                    fstr = fstr[4:]

                d = dtype.PrimitiveType(fstr).decode(
                    evr_hist_data[cur_byte_index:end_index]
                )

            # Some formatters have an undefined data size (such as strings)
            # and require additional processing to determine the length of
            # the data and decode data.
            else:
                end_index = str(evr_hist_data).index('\x00', cur_byte_index)
                d = str(evr_hist_data[cur_byte_index:end_index])

            data_chunks.append(d)
        # Bug fix: was a bare `except:`, which also swallows SystemExit
        # and KeyboardInterrupt; only data/format errors should map to
        # ValueError.
        except Exception:
            msg = "Unable to format EVR Message with data {}".format(evr_hist_data)
            log.error(msg)
            raise ValueError(msg)

        cur_byte_index = end_index

        # If we were formatting a string we need to add another index
        # offset to exclude the null terminator.
        if f == 's':
            cur_byte_index += 1

    # Format and return the EVR message if formatters were present,
    # otherwise just return the EVR message as is.
    if len(formatters) == 0:
        return self._message

    # Python format strings cannot handle size formatter information.
    # So something such as %llu needs to be adjusted to be a valid
    # identifier in python by removing the size formatter.
    msg = self._message
    for f in formatters:
        if len(f) > 1:
            msg = msg.replace('%{}'.format(f), '%{}'.format(f[-1]))

    return msg % tuple(data_chunks)
Format EVR message with EVR data Given a byte array of EVR data, format the EVR's message attribute printf format strings and split the byte array into appropriately sized chunks. Supports most format strings containing length and type fields. Args: evr_hist_data: A bytearray of EVR data. Bytes are expected to be in MSB ordering. Example formatting:: # This is the character '!', string 'Foo', and int '4279317316' bytearray([0x21, 0x46, 0x6f, 0x6f, 0x00, 0xff, 0x11, 0x33, 0x44]) Returns: The EVR's message string formatted with the EVR data or the unformatted EVR message string if there are no valid format strings present in it. Raises: ValueError: When the bytearray cannot be fully processed with the specified format strings. This is usually a result of the expected data length and the byte array length not matching.
entailment
def _parseHeader (self, line, lineno, log):
    """Parses a sequence header line containing 'name: value' pairs.

    Recognized parameters are 'seqid' and 'version'; their values must
    be numeric.  Duplicate parameters are ignored with a warning and
    non-numeric values are reported as errors via the given log.
    """
    if line.startswith('#') and line.find(':') > 0:
        tokens = [ t.strip().lower() for t in line[1:].split(":", 1) ]
        name   = tokens[0]
        pos    = SeqPos(line, lineno)

        if name in self.header:
            msg = 'Ignoring duplicate header parameter: %s'
            log.warning(msg % name, pos)
        else:
            for expected in ['seqid', 'version']:
                if name == expected:
                    value = util.toNumber(tokens[1], None)

                    if value is None:
                        msg = 'Parameter "%s" value "%s" is not a number.'
                        # Bug fix: was `poss` (NameError at runtime).
                        log.error(msg % (name, tokens[1]), pos)
                    else:
                        self.header[name] = value
Parses a sequence header line containing 'name: value' pairs.
entailment
def append (self, cmd, delay=0.000, attrs=None):
    """Adds a new command with a relative time delay to this sequence."""
    entry = SeqCmd(cmd, delay, attrs)
    self.lines.append(entry)
Adds a new command with a relative time delay to this sequence.
entailment
def printText (self, stream=None):
    """Prints a text representation of this sequence to the given stream
    or standard output.
    """
    out = sys.stdout if stream is None else stream

    out.write('# seqid : %u\n' % self.seqid)
    out.write('# version : %u\n' % self.version)
    out.write('# crc32 : 0x%04x\n' % self.crc32)
    out.write('# ncmds : %u\n' % len(self.commands))
    out.write('# duration: %.3fs\n' % self.duration)
    out.write('\n')

    for entry in self.lines:
        out.write(str(entry))
        out.write('\n')
Prints a text representation of this sequence to the given stream or standard output.
entailment
def read (self, filename=None):
    """Reads a command sequence from the given filename (defaults to
    self.pathname).

    The first two bytes distinguish binary sequences (which begin with
    the Seq.Magic marker) from text sequences.
    """
    if filename is None:
        filename = self.pathname

    # Use a context manager so the file is closed even if read() or
    # unpack() raises (the original leaked the handle on error).
    with open(filename, 'rb') as stream:
        magic = struct.unpack('>H', stream.read(2))[0]

    if magic == Seq.Magic:
        self.readBinary(filename)
    else:
        self.readText(filename)
Reads a command sequence from the given filename (defaults to self.pathname).
entailment
def readBinary (self, filename=None):
    """Reads a binary command sequence from the given filename (defaults
    to self.pathname).
    """
    if filename is None:
        filename = self.pathname

    # Bug fix: the original opened the stream and never closed it; a
    # context manager guarantees the file handle is released.
    with open(filename, 'rb') as stream:
        magic        = struct.unpack('>H', stream.read(2))[0]
        self.crc32   = struct.unpack('>I', stream.read(4))[0]
        self.seqid   = struct.unpack('>H', stream.read(2))[0]
        self.version = struct.unpack('>H', stream.read(2))[0]
        ncmds        = struct.unpack('>H', stream.read(2))[0]
        reserved     = stream.read(20)

        # Each encoded command occupies a fixed 110-byte record.
        for n in range(ncmds):
            bytes = stream.read(110)
            self.lines.append( SeqCmd.decode(bytes, self.cmddict) )
Reads a binary command sequence from the given filename (defaults to self.pathname).
entailment
def readText (self, filename=None):
    """Reads a text command sequence from the given filename (defaults to
    self.pathname).
    """
    if filename is None:
        filename = self.pathname

    self.header = { }
    inBody      = False  # True once the first command line is seen

    with open(filename, 'rt') as stream:
        for (lineno, line) in enumerate(stream.readlines()):
            stripped = line.strip()
            if stripped == '':
                continue
            elif stripped.startswith('#'):
                # '#' lines are header parameters only before the body;
                # afterwards they are treated as comments and skipped.
                if not inBody:
                    self._parseHeader(line, lineno, self.log)
            else:
                inBody = True
                self.lines.append(
                    SeqCmd.parse(line, lineno, self.log, self.cmddict) )

    # Fall back to any previously-set seqid/version when the header
    # omits them; a missing seqid is an error, a missing version is not.
    if 'seqid' in self.header:
        self.seqid = self.header['seqid']
    elif self.seqid is None:
        self.log.error('No sequence id present in header.')

    if 'version' in self.header:
        self.version = self.header['version']
    elif self.version is None:
        self.log.warning('No version present in header. Defaulting to zero (0).')
        self.version = 0
Reads a text command sequence from the given filename (defaults to self.pathname).
entailment
def validate (self):
    """Returns True if this Sequence is valid, False otherwise.
    Validation error messages are stored in self.messages.
    """
    if not os.path.isfile(self.pathname):
        # Bug fix: was `self.message` (AttributeError) and the pathname
        # was never interpolated into the format string.
        self.messages.append('Filename "%s" does not exist.' % self.pathname)
    else:
        try:
            with open(self.pathname, 'r') as stream:
                pass
        except IOError:
            self.messages.append('Could not open "%s" for reading.' % self.pathname)

    for line in self.commands:
        messages = [ ]
        if line.cmd and not line.cmd.validate(messages):
            msg = 'error: %s: %s' % (line.cmd.name, " ".join(messages))
            self.log.messages.append(msg)

    return len(self.log.messages) == 0
Returns True if this Sequence is valid, False otherwise. Validation error messages are stored in self.messages.
entailment
def writeBinary (self, filename=None):
    """Writes a binary representation of this sequence to the given
    filename (defaults to self.binpath).
    """
    if filename is None:
        filename = self.binpath

    with open(filename, 'wb') as output:
        # Magic Number
        output.write( struct.pack('>H', Seq.Magic ) )
        # Upload Type
        output.write( struct.pack('B', 9 ) )
        # Version
        output.write( struct.pack('B', self.version ) )
        # Number of Commands
        output.write( struct.pack('>H', len(self.commands) ) )
        # Sequence ID
        output.write( struct.pack('>H', self.seqid ) )
        # CRC Placeholder
        output.write( struct.pack('>I', 0 ) )

        # 20 reserved/padding bytes.
        pad = struct.pack('B', 0)
        for n in range(20):
            output.write(pad)

        for line in self.lines:
            output.write( line.encode() )

    # Compute the CRC over the full file, then patch it in place.
    # NOTE(review): seek(28) does not match the placeholder offset
    # implied by the writes above (2+1+1+2+2 = 8, CRC at bytes 8-11),
    # and readBinary() reads the CRC right after the magic -- confirm
    # the intended on-disk layout.
    self.crc32 = util.crc32File(filename, 0)

    with open(filename, 'r+b') as output:
        output.seek(28)
        output.write( struct.pack('>I', self.crc32) )
Writes a binary representation of this sequence to the given filename (defaults to self.binpath).
entailment
def writeText (self, filename=None):
    """Writes a text representation of this sequence to the given filename
    (defaults to self.txtpath).
    """
    target = self.txtpath if filename is None else filename
    with open(target, 'wt') as output:
        self.printText(output)
Writes a text representation of this sequence to the given filename (defaults to self.txtpath).
entailment
def decode (cls, bytes, cmddict):
    """Decodes a sequence command from an array of bytes, according to the
    given command dictionary, and returns a new SeqCmd.
    """
    # Byte 0: attributes; bytes 1-3: delay; remainder: the command.
    attrs = SeqCmdAttrs.decode(bytes[0:1])
    delay = SeqDelay.decode(bytes[1:4])
    cmd   = cmddict.decode(bytes[4:])
    return cls(cmd, delay, attrs)
Decodes a sequence command from an array of bytes, according to the given command dictionary, and returns a new SeqCmd.
entailment
def encode (self):
    """Encodes this SeqCmd to binary and returns a bytearray."""
    encoded = self.attrs.encode()
    encoded = encoded + self.delay.encode()
    return encoded + self.cmd.encode()
Encodes this SeqCmd to binary and returns a bytearray.
entailment
def parse (cls, line, lineno, log, cmddict):
    """Parses the sequence command from a line of text, according to the
    given command dictionary, and returns a new SeqCmd.
    """
    delay   = SeqDelay.parse(line, lineno, log, cmddict)
    attrs   = SeqCmdAttrs.parse(line, lineno, log, cmddict)
    comment = SeqComment.parse(line, lineno, log, cmddict)
    stop    = len(line)

    # The command name and arguments end where the trailing comment or
    # the attribute block begins, whichever is present.
    if comment:
        stop = comment.pos.col.start - 1

    if attrs and attrs.pos.col.stop != -1:
        stop = attrs.pos.col.start - 1

    tokens = line[:stop].split()
    name   = tokens[1]   # tokens[0] is the relative time delay
    args   = tokens[2:]
    start  = line.find(name)
    pos    = SeqPos(line, lineno, start + 1, stop)

    if name not in cmddict:
        log.error('Unrecognized command "%s".' % name, pos)
    elif cmddict[name].nargs != len(args):
        msg = 'Command argument size mismatch: expected %d, but encountered %d.'
        log.error(msg % (cmddict[name].nargs, len(args)), pos)

    # Arguments that look numeric are converted; others stay strings.
    args = [ util.toNumber(a, a) for a in args ]
    cmd  = cmddict.create(name, *args)

    return cls(cmd, delay, attrs, comment, pos)
Parses the sequence command from a line of text, according to the given command dictionary, and returns a new SeqCmd.
entailment
def default (self):
    """The default sequence command attributes (as an integer)."""
    result = 0
    for entry in SeqCmdAttrs.Table:
        bit, name, value0, value1, default = entry
        if default == value1:
            result = setBit(result, bit, 1)
    return result
The default sequence command attributes (as an integer).
entailment
def decode (cls, bytes, cmddict=None):
    """Decodes sequence command attributes from an array of bytes and
    returns a new SeqCmdAttrs.
    """
    byte   = struct.unpack('B', bytes)[0]
    self   = cls()
    defval = self.default

    for bit, name, value0, value1, default in SeqCmdAttrs.Table:
        mask   = 1 << bit
        bitset = mask & byte
        defset = mask & defval

        # Only record attributes whose bit differs from the default.
        if bitset != defset:
            if bitset:
                self.attrs[name] = value1
            else:
                self.attrs[name] = value0

    return self
Decodes sequence command attributes from an array of bytes and returns a new SeqCmdAttrs.
entailment
def encode (self):
    """Encodes this SeqCmdAttrs to binary and returns a bytearray."""
    result = self.default
    for bit, name, value0, value1, default in SeqCmdAttrs.Table:
        if name not in self.attrs:
            continue
        result = setBit(result, bit, self.attrs[name] == value1)
    return struct.pack('B', result)
Encodes this SeqCmdAttrs to binary and returns a bytearray.
entailment
def parse (cls, line, lineno, log, cmddict=None):
    """Parses a SeqCmdAttrs from a line of text and returns it or None.
    Warning and error messages are logged via the SeqMsgLog log.
    """
    start  = line.find('{')
    stop   = line.find('}')
    pos    = SeqPos(line, lineno, start + 1, stop)
    result = cls(None, pos)

    if start >= 0 and stop >= start:
        attrs = { }
        pairs = line[start + 1:stop].split(',')

        for item in pairs:
            ncolons = item.count(':')
            if ncolons == 0:
                log.error('Missing colon in command attribute "%s".' % item, pos)
            elif ncolons > 1:
                log.error('Too many colons in command attribute "%s".' % item, pos)
            else:
                name, value = (s.strip() for s in item.split(':'))
                attrs[name] = value

        result = cls(attrs, pos)
    elif start != -1 or stop != -1:
        # Only one of '{' / '}' present, or '}' precedes '{'.
        log.error('Incorrect command attribute curly brace placement.', pos)

    return result
Parses a SeqCmdAttrs from a line of text and returns it or None. Warning and error messages are logged via the SeqMsgLog log.
entailment
def decode (cls, bytes, cmddict=None):
    """Decodes a sequence delay from an array of bytes, according to the
    given command dictionary, and returns a new SeqDelay.
    """
    # Two-byte whole seconds followed by a one-byte fractional part
    # scaled by 255.
    whole    = struct.unpack('>H', bytes[0:2])[0]
    fraction = struct.unpack('B', bytes[2:3])[0] / 255.0
    return cls(whole + fraction)
Decodes a sequence delay from an array of bytes, according to the given command dictionary, and returns a new SeqDelay.
entailment
def encode (self):
    """Encodes this SeqDelay to a binary bytearray."""
    whole      = int( math.floor(self.delay) )
    fractional = int( (self.delay - whole) * 255.0 )
    return struct.pack('>H', whole) + struct.pack('B', fractional)
Encodes this SeqDelay to a binary bytearray.
entailment
def parse (cls, line, lineno, log, cmddict=None):
    """Parses the SeqDelay from a line of text.  Warning and error
    messages are logged via the SeqMsgLog log.
    """
    token = line.split()[0]
    start = line.find(token)
    pos   = SeqPos(line, lineno, start + 1, start + len(token))

    try:
        delay = float(token)
    except ValueError:
        delay = -1
        msg   = 'String "%s" could not be interpreted as a numeric time delay.'
        log.error(msg % token, pos)

    return cls(delay, pos)
Parses the SeqDelay from a line of text. Warning and error messages are logged via the SeqMsgLog log.
entailment
def parse (cls, line, lineno, log, cmddict=None):
    """Parses the SeqMetaCmd from a line of text.  Warning and error
    messages are logged via the SeqMsgLog log.
    """
    start = line.find('%')
    pos   = SeqPos(line, lineno, start + 1, len(line))
    return cls(line[start:], pos) if start >= 0 else None
Parses the SeqMetaCmd from a line of text. Warning and error messages are logged via the SeqMsgLog log.
entailment
def error (self, msg, pos=None):
    """Logs an error message pertaining to the given SeqPos."""
    prefix = 'error: ' + self.location(pos)
    self.log(msg, prefix)
Logs an error message pertaining to the given SeqPos.
entailment
def location (self, pos):
    """Formats the location of the given SeqPos as: filename:line:col:"""
    parts = []
    if self.filename:
        parts.append(self.filename + ':')
    if pos:
        parts.append(str(pos))
    return ''.join(parts)
Formats the location of the given SeqPos as: filename:line:col:
entailment
def log (self, msg, prefix=None):
    """Logs a message with an optional prefix."""
    entry = msg
    if prefix:
        # Prefixes that already end in ':' get no extra separator.
        separator = '' if prefix.strip().endswith(':') else ': '
        entry = prefix + separator + msg
    self.messages.append(entry)
Logs a message with an optional prefix.
entailment
def warning (self, msg, pos=None):
    """Logs a warning message pertaining to the given SeqAtom."""
    prefix = 'warning: ' + self.location(pos)
    self.log(msg, prefix)
Logs a warning message pertaining to the given SeqAtom.
entailment
def expandConfigPaths (config, prefix=None, datetime=None, pathvars=None, parameter_key='', *keys):
    """Updates all relative configuration paths in dictionary config, which
    contain a key in keys, by prepending prefix.  If keys is omitted, it
    defaults to 'directory', 'file', 'filename', 'path', 'pathname'.

    See util.expandPath().
    """
    if len(keys) == 0:
        keys = PATH_KEYS

    for name, value in config.items():
        if name in keys and type(name) is str:
            # Expand relative to prefix, then substitute ${variable}
            # markers and strftime directives.
            expanded = util.expandPath(value, prefix)
            cleaned  = replaceVariables(expanded, datetime=datetime, pathvars=pathvars)

            # Warn (but do not fail) about paths that do not exist yet.
            for p in cleaned:
                if not os.path.exists(p):
                    msg = "Config parameter {}.{} specifies nonexistent path {}".format(parameter_key, name, p)
                    log.warn(msg)

            # A single substitution result collapses back to a scalar.
            config[name] = cleaned[0] if len(cleaned) == 1 else cleaned
        elif type(value) is dict:
            # Recurse into nested sections, extending the dotted
            # parameter key used in warning messages.
            param_key = name if parameter_key == '' else parameter_key + '.' + name
            expandConfigPaths(value, prefix, datetime, pathvars, param_key, *keys)
Updates all relative configuration paths in dictionary config, which contain a key in keys, by prepending prefix. If keys is omitted, it defaults to 'directory', 'file', 'filename', 'path', 'pathname'. See util.expandPath().
entailment
def replaceVariables(path, datetime=None, pathvars=None):
    """Return absolute path with path variables replaced as applicable.

    Args:
        path:     A path string, or list of path strings, possibly
                  containing ``${variable}`` markers and/or strftime
                  format directives.
        datetime: A time struct used for strftime substitution
                  (defaults to the current UTC time).
        pathvars: A mapping of variable name to a string, integer, or
                  list of replacement values.

    Returns:
        A list of paths with every combination of variable values
        substituted and strftime directives expanded.

    Raises:
        TypeError: If a path variable refers to a dict.
    """
    if datetime is None:
        datetime = time.gmtime()

    if pathvars is None:
        pathvars = [ ]

    path_list = path if isinstance(path, list) else [ path ]

    # Bug fix: raw string -- the original non-raw '\$\{...\}' pattern
    # relies on invalid escape sequences (a DeprecationWarning, and an
    # error in future Python versions).
    regex = re.compile(r'\$\{(.*?)\}')

    # Paths with variables substituted (all value combinations).
    newpath_list = [ ]

    for p in path_list:
        newpath_list.append(p)

        for k in regex.findall(p):
            if k not in pathvars:
                continue

            value = pathvars[k]

            # A variable may map to a string, integer, or list thereof.
            if type(value) is dict:
                msg = "Path variable must refer to string, integer, or list"
                raise TypeError(msg)

            value_list = value if type(value) is list else [ value ]

            # Expand every path accumulated so far by every possible
            # value so all combinations are produced.
            expanded = [ ]
            for v in value_list:
                for newpath in newpath_list:
                    expanded.append(newpath.replace('${%s}' % k, str(v)))
            newpath_list = expanded

    # Apply strftime directive translation to each resulting path.
    for index, newpath in enumerate(newpath_list):
        newpath_list[index] = time.strftime(newpath, datetime)

    return newpath_list
Return absolute path with path variables replaced as applicable
entailment
def flatten (d, *keys):
    """Flattens the dictionary d by merging keys in order such that later
    keys take precedence over earlier keys.
    """
    result = { }
    for key in keys:
        result = merge(result, d.pop(key, { }))
    return result
Flattens the dictionary d by merging keys in order such that later keys take precedence over earlier keys.
entailment
def loadYAML (filename=None, data=None):
    """Loads either the given YAML configuration file or YAML data.

    Returns None if there was an error reading from the configuration
    file and logs an error message via ait.core.log.error().
    """
    config = None

    try:
        if filename:
            data = open(filename, 'rt')

        # NOTE(review): yaml.load() without an explicit Loader is unsafe
        # on untrusted input; consider yaml.safe_load() if these config
        # files do not rely on custom YAML tags -- confirm.
        config = yaml.load(data)

        # Python 2 only: `file` is the py2 built-in file type; close the
        # stream only if we opened it ourselves above.
        if type(data) is file:
            data.close()
    except IOError, e:
        msg = 'Could not read AIT configuration file "%s": %s'
        log.error(msg, filename, str(e))

    return config
Loads either the given YAML configuration file or YAML data. Returns None if there was an error reading from the configuration file and logs an error message via ait.core.log.error().
entailment
def merge (d, o):
    """Recursively merges keys from o into d and returns d."""
    for key, value in o.items():
        if type(value) is dict and key in d:
            merge(d[key], value)
        else:
            d[key] = value
    return d
Recursively merges keys from o into d and returns d.
entailment
def _getattr_ (self, name): """Internal method. Used by __getattr__() and __getitem__().""" value = self._config.get(name) if type(value) is dict: value = AitConfig(self._filename, config=value) return value
Internal method. Used by __getattr__() and __getitem__().
entailment
def _directory (self): """The directory for this AitConfig.""" if self._filename is None: return os.path.join(self._ROOT_DIR, 'config') else: return os.path.dirname(self._filename)
The directory for this AitConfig.
entailment
def _datapaths(self): """Returns a simple key-value map for easy access to data paths""" paths = { } try: data = self._config['data'] for k in data: paths[k] = data[k]['path'] except KeyError as e: raise AitConfigMissing(e.message) except Exception as e: raise AitConfigError('Error reading data paths: %s' % e) return paths
Returns a simple key-value map for easy access to data paths
entailment
def reload (self, filename=None, data=None):
    """Reloads the AIT configuration.

    The AIT configuration is automatically loaded when the AIT package
    is first imported.  To replace the configuration, call reload()
    (defaults to the current config.filename) or reload(new_filename).
    """
    if data is None and filename is None:
        filename = self._filename

    self._config   = loadYAML(filename, data)
    self._filename = filename

    if self._config is not None:
        # Merge platform- and host-specific sections over 'default'.
        keys         = 'default', self._platform, self._hostname
        self._config = flatten(self._config, *keys)

        # on reload, if pathvars have not been set, we want to start
        # with the defaults, add the platform and hostname, and
        # merge in all of the information provided in the config
        if self._pathvars is None:
            self._pathvars = self.getDefaultPathVariables()

        expandConfigPaths(self._config,
                          self._directory,
                          self._datetime,
                          merge(self._config, self._pathvars))
    else:
        self._config = { }
Reloads the AIT configuration. The AIT configuration is automatically loaded when the AIT package is first imported. To replace the configuration, call reload() (defaults to the current config.filename) or reload(new_filename).
entailment
def get (self, name, default=None):
    """Returns the attribute value *AitConfig.name* or *default* if name
    does not exist.

    The name may be a series of attributes separated by periods.  For
    example, "foo.bar.baz".  In that case, lookups are attempted in the
    following order until one succeeds:

        1.  AitConfig['foo.bar.baz'], and
        2.  AitConfig.foo.bar.baz
        3.  (If both fail, return *default*)
    """
    # Fast path: the full dotted name is itself a key.
    if name in self:
        return self[name]

    config = self
    parts  = name.split('.')
    heads  = parts[:-1]
    tail   = parts[-1]

    # Walk the dotted path through nested AitConfig sections.
    for part in heads:
        if part in config and type(config[part]) is AitConfig:
            config = config[part]
        else:
            return default

    return config[tail] if tail in config else default
Returns the attribute value *AitConfig.name* or *default* if name does not exist. The name may be a series of attributes separated by periods. For example, "foo.bar.baz". In that case, lookups are attempted in the following order until one succeeds: 1. AitConfig['foo.bar.baz'], and 2. AitConfig.foo.bar.baz 3. (If both fail, return *default*)
entailment
def addPathVariables(self, pathvars):
    """ Adds path variables to the pathvars map property"""
    if type(pathvars) is not dict:
        return
    self._pathvars = merge(self._pathvars, pathvars)
Adds path variables to the pathvars map property
entailment
def getPDT(typename):
    """get(typename) -> PrimitiveType

    Returns the PrimitiveType for typename or None.
    """
    unseen = typename not in PrimitiveTypeMap
    if unseen and typename.startswith("S"):
        # Cache string types (e.g. "S16") on first request.
        PrimitiveTypeMap[typename] = PrimitiveType(typename)
    return PrimitiveTypeMap.get(typename, None)
get(typename) -> PrimitiveType Returns the PrimitiveType for typename or None.
entailment
def get(typename):
    """get(typename) -> PrimitiveType or ComplexType

    Returns the PrimitiveType or ComplexType for typename or None.
    """
    result = getPDT(typename) or getCDT(typename)

    if result is None:
        # Fall back to array type names, e.g. "MSB_U16[32]".
        pdt, nelems = ArrayType.parse(typename)
        if pdt and nelems:
            result = ArrayType(pdt, nelems)

    return result
get(typename) -> PrimitiveType or ComplexType Returns the PrimitiveType or ComplexType for typename or None.
entailment
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value

    Decodes the given bytearray according to this PrimitiveType
    definition.

    NOTE: The parameter ``raw`` is present to adhere to the ``decode()``
    interface, but has no effect for PrimitiveType definitions.
    """
    # Python 2 only: buffer() gives struct.unpack a zero-copy view of
    # the bytearray.
    return struct.unpack(self.format, buffer(bytes))[0]
decode(bytearray, raw=False) -> value Decodes the given bytearray according to this PrimitiveType definition. NOTE: The parameter ``raw`` is present to adhere to the ``decode()`` inteface, but has no effect for PrimitiveType definitions.
entailment
def validate(self, value, messages=None, prefix=None):
    """validate(value[, messages[, prefix]]) -> True | False

    Validates the given value according to this PrimitiveType definition.
    Validation error messages are appended to an optional messages array,
    each with the optional message prefix.
    """
    valid = False

    # Appends msg to messages (if given), lower-casing the first word
    # when a prefix is supplied so the combined sentence reads naturally.
    def log(msg):
        if messages is not None:
            if prefix is not None:
                tok = msg.split()
                msg = prefix + ' ' + tok[0].lower() + " " + " ".join(tok[1:])
            messages.append(msg)

    if self.string:
        valid = type(value) is str
    else:
        if type(value) is str:
            log("String '%s' cannot be represented as a number." % value)
        elif type(value) not in (int, long, float):
            # Python 2 only: `long` is the py2 arbitrary-precision int.
            log("Value '%s' is not a primitive type." % str(value))
        elif type(value) is float and not self.float:
            log("Float '%g' cannot be represented as an integer." % value)
        else:
            if value < self.min or value > self.max:
                args = (str(value), self.min, self.max)
                log("Value '%s' out of range [%d, %d]." % args)
            else:
                valid = True

    return valid
validate(value[, messages[, prefix]]) -> True | False Validates the given value according to this PrimitiveType definition. Validation error messages are appended to an optional messages array, each with the optional message prefix.
entailment
def _assertIndex(self, index): """Raise TypeError or IndexError if index is not an integer or out of range for the number of elements in this array, respectively. """ if type(index) is not int: raise TypeError('list indices must be integers') if index < 0 or index >= self.nelems: raise IndexError('list index out of range')
Raise TypeError or IndexError if index is not an integer or out of range for the number of elements in this array, respectively.
entailment
def decode(self, bytes, index=None, raw=False):
    """decode(bytes[[, index], raw=False]) -> value1, ..., valueN

    Decodes the given sequence of bytes according to this Array's
    element type.

    If the optional `index` parameter is an integer or slice, then only
    the element(s) at the specified position(s) will be decoded and
    returned.
    """
    if index is None:
        index = slice(0, self.nelems)

    if type(index) is slice:
        # Bug fix: tolerate open slices like [:n] or [n:], whose None
        # start/stop previously raised a TypeError; also use range()
        # instead of the Python-2-only xrange().
        start = 0 if index.start is None else index.start
        stop  = self.nelems if index.stop is None else index.stop
        step  = 1 if index.step is None else index.step
        return [ self.decodeElem(bytes, n, raw) for n in range(start, stop, step) ]

    return self.decodeElem(bytes, index, raw)
decode(bytes[[, index], raw=False]) -> value1, ..., valueN Decodes the given sequence of bytes according to this Array's element type. If the optional `index` parameter is an integer or slice, then only the element(s) at the specified position(s) will be decoded and returned.
entailment
def decodeElem(self, bytes, index, raw=False):
    """Decodes a single element at array[index] from a sequence bytes that
    contain data for the entire array.
    """
    self._assertIndex(index)

    nbytes = self.type.nbytes
    start  = index * nbytes
    stop   = start + nbytes

    if stop > len(bytes):
        msg = 'Decoding %s[%d] requires %d bytes, '
        msg += 'but the ArrayType.decode() method received only %d bytes.'
        raise IndexError(msg % (self.type.name, index, stop, len(bytes)))

    return self.type.decode( bytes[start:stop], raw )
Decodes a single element at array[index] from a sequence bytes that contain data for the entire array.
entailment
def encode(self, *args):
    """encode(value1[, ...]) -> bytes

    Encodes the given values to a sequence of bytes according to this
    Array's underlying element type
    """
    if len(args) != self.nelems:
        msg = 'ArrayType %s encode() requires %d values, but received %d.'
        raise ValueError(msg % (self.name, self.nelems, len(args)))

    encoded = bytearray()
    for value in args:
        encoded += self.type.encode(value)
    return encoded
encode(value1[, ...]) -> bytes Encodes the given values to a sequence of bytes according to this Array's underlying element type
entailment
def parse (name):
    """parse(name) -> [typename | None, nelems | None]

    Parses an ArrayType name to return the element type name and number
    of elements, e.g.:

        >>> ArrayType.parse('MSB_U16[32]')
        ['MSB_U16', 32]

    If typename cannot be determined, None is returned.  Similarly, if
    nelems is not an integer or less than one (1), None is returned.
    """
    parts = [None, None]
    start = name.find('[')
    stop  = name.find(']', start) if start != -1 else -1

    if start != -1 and stop != -1:
        try:
            parts[0] = name[:start]
            parts[1] = int(name[start + 1:stop])
            if parts[1] <= 0:
                raise ValueError
        except ValueError:
            msg = 'ArrayType specification: "%s" must have an '
            msg += 'integer greater than zero in square brackets.'
            raise ValueError(msg % name)

    return parts
parse(name) -> [typename | None, nelems | None] Parses an ArrayType name to return the element type name and number of elements, e.g.: >>> ArrayType.parse('MSB_U16[32]') ['MSB_U16', 32] If typename cannot be determined, None is returned. Similarly, if nelems is not an integer or less than one (1), None is returned.
entailment
def cmddict(self):
    """PrimitiveType base for the ComplexType"""
    # Lazily load and cache the default command dictionary.
    cached = self._cmddict
    if cached is None:
        cached = cmd.getDefaultDict()
        self._cmddict = cached
    return cached
PrimitiveType base for the ComplexType
entailment
def encode(self, value):
    """encode(value) -> bytearray

    Encodes the given value to a bytearray according to this
    PrimitiveType definition.
    """
    # Look up the named command's opcode and encode it as the
    # underlying primitive.
    defn = self.cmddict[value]
    return super(CmdType, self).encode(defn.opcode)
encode(value) -> bytearray Encodes the given value to a bytearray according to this PrimitiveType definition.
entailment
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value

    Decodes the given bytearray and returns the corresponding Command
    Definition (:class:`CmdDefn`) for the underlying 'MSB_U16' command
    opcode.

    If the optional parameter ``raw`` is ``True``, the command opcode
    itself will be returned instead of the Command Definition
    (:class:`CmdDefn`).
    """
    opcode = super(CmdType, self).decode(bytes)

    if raw:
        return opcode

    if opcode not in self.cmddict.opcodes:
        raise ValueError('Unrecognized command opcode: %d' % opcode)

    return self.cmddict.opcodes[opcode]
decode(bytearray, raw=False) -> value Decodes the given bytearray and returns the corresponding Command Definition (:class:`CmdDefn`) for the underlying 'MSB_U16' command opcode. If the optional parameter ``raw`` is ``True``, the command opcode itself will be returned instead of the Command Definition (:class:`CmdDefn`).
entailment
def evrs(self):
    """Getter EVRs dictionary"""
    # Lazily load and cache the default EVR dictionary.
    cached = self._evrs
    if cached is None:
        # Imported here rather than at module scope (as in the
        # original) to defer loading until first use.
        import ait.core.evr as evr
        cached = evr.getDefaultDict()
        self._evrs = cached
    return cached
Getter EVRs dictionary
entailment
def encode(self, value):
    """encode(value) -> bytearray

    Encodes the given value to a bytearray according to this Complex
    Type definition.
    """
    defn = self.evrs.get(value, None)
    if defn:
        return super(EVRType, self).encode(defn.code)
    log.error(str(value) + " not found as EVR. Cannot encode.")
    return None
encode(value) -> bytearray Encodes the given value to a bytearray according to this Complex Type definition.
entailment
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value

    Decodes the given bytearray into the matching EVR Definition
    (:class:`EVRDefn`) for the underlying 'MSB_U16' EVR code.  With
    ``raw=True`` the numeric code is returned instead.  An unknown
    code is returned as-is, with a warning logged.
    """
    code = super(EVRType, self).decode(bytes)

    if raw:
        return code

    if code in self.evrs.codes:
        return self.evrs.codes[code]

    log.warn('Unrecognized EVR code: %d' % code)
    return code
decode(bytearray, raw=False) -> value Decodes the given bytearray according the corresponding EVR Definition (:class:`EVRDefn`) for the underlying 'MSB_U16' EVR code. If the optional parameter ``raw`` is ``True``, the EVR code itself will be returned instead of the EVR Definition (:class:`EVRDefn`).
entailment
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value

    Decodes the given bytearray and returns the number of (fractional)
    seconds.  With ``raw=True`` the underlying byte (U8) is returned
    unscaled.
    """
    value = super(Time8Type, self).decode(bytes)
    if raw:
        return value
    # One byte of subseconds: scale down to a fraction of a second.
    return value / 256.0
decode(bytearray, raw=False) -> value Decodes the given bytearray and returns the number of (fractional) seconds. If the optional parameter ``raw`` is ``True``, the byte (U8) itself will be returned.
entailment
def encode(self, value):
    """encode(value) -> bytearray

    Encodes *value*, a Python ``datetime``, as elapsed GPS seconds
    according to this ComplexType definition.

    Raises:
        TypeError: if *value* is not exactly a ``datetime.datetime``.
    """
    if type(value) is not datetime.datetime:
        raise TypeError('encode() argument must be a Python datetime')

    seconds = dmc.toGPSSeconds(value)
    return super(Time32Type, self).encode(seconds)
encode(value) -> bytearray Encodes the given value to a bytearray according to this ComplexType definition.
entailment
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value

    Decodes a bytearray holding elapsed seconds since the GPS epoch
    and returns the corresponding Python :class:`datetime`.  With
    ``raw=True`` the integral seconds are returned instead.
    """
    sec = super(Time32Type, self).decode(bytes)
    if raw:
        return sec
    return dmc.toLocalTime(sec)
decode(bytearray, raw=False) -> value Decodes the given bytearray containing the elapsed time in seconds since the GPS epoch and returns the corresponding Python :class:`datetime`. If the optional parameter ``raw`` is ``True``, the integral number of seconds will be returned instead.
entailment
def encode(self, value):
    """encode(value) -> bytearray

    Encodes *value*, a Python ``datetime``, as coarse GPS seconds
    (Time32) followed by fine 1/256 subseconds (Time8).

    Raises:
        TypeError: if *value* is not exactly a ``datetime.datetime``.
    """
    if type(value) is not datetime.datetime:
        raise TypeError('encode() argument must be a Python datetime')

    coarse = Time32Type().encode(value)
    fine = Time8Type().encode(value.microsecond / 1e6)
    return coarse + fine
encode(value) -> bytearray Encodes the given value to a bytearray according to this ComplexType definition.
entailment
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value

    Decodes elapsed seconds plus 1/256 subseconds since the GPS epoch
    into a Python :class:`datetime`.  With ``raw=True`` the result is
    the seconds-plus-subseconds sum as a floating-point number.
    """
    coarse = Time32Type().decode(bytes[:4], raw)
    fine = Time8Type().decode(bytes[4:])

    if raw:
        return coarse + fine

    # Promote the fractional seconds so they can be added to a datetime.
    return coarse + datetime.timedelta(microseconds=fine * 1e6)
decode(bytearray, raw=False) -> value Decodes the given bytearray containing the elapsed time in seconds plus 1/256 subseconds since the GPS epoch returns the corresponding Python :class:`datetime`. If the optional parameter ``raw`` is ``True``, the number of seconds and subseconds will be returned as a floating-point number instead.
entailment
def load(self, ymlfile=None):
    """Load and process the YAML file.

    Args:
        ymlfile: Optional path; when given it replaces ``self.ymlfile``.

    Raises:
        util.YAMLError: when the YAML cannot be parsed.
    """
    if ymlfile is not None:
        self.ymlfile = ymlfile

    try:
        # If yaml should be 'cleaned' of document references
        if self._clean:
            self.data = self.process(self.ymlfile)
        else:
            with open(self.ymlfile, 'rb') as stream:
                for data in yaml.load_all(stream):
                    self.data.append(data)

        self.loaded = True
    # Fixed: 'except ScannerError, e' is Python-2-only syntax.
    except ScannerError as e:
        # Fixed typo in the message ("formattting" -> "formatting").
        msg = "YAML formatting error - '" + self.ymlfile + ": '" + str(e) + "'"
        raise util.YAMLError(msg)
Load and process the YAML file
entailment
def process(self, ymlfile):
    """Cleans out all document tags from the YAML file to make it
    JSON-friendly to work with the JSON Schema.

    Returns the cleaned YAML text and records document start line
    numbers in ``self.doclines`` (used for error reporting).

    Raises:
        util.YAMLError: when the file is empty.
        IOError: when the file cannot be read.
    """
    output = ""

    # Hoisted out of the per-line loop: the patterns never change.
    # Pattern to match document start lines
    doc_pattern = re.compile('(---) (![a-z]+)(.*$)', flags=re.I)
    # Pattern to match sequence start lines
    seq_pattern = re.compile('(\s*)(-+) !([a-z]+)(.*$)', flags=re.I)

    try:
        # Need a list of line numbers where the documents reside,
        # used for finding/displaying errors.
        self.doclines = []
        linenum = None

        with open(ymlfile, 'r') as txt:
            for linenum, line in enumerate(txt):
                # If we find a document, remove the tag
                if doc_pattern.match(line):
                    line = doc_pattern.sub(r"---", line).lower()
                    self.doclines.append(linenum)
                elif seq_pattern.match(line):
                    # Replace the sequence start with key string
                    line = seq_pattern.sub(r"\1\2 \3: line " + str(linenum), line).lower()

                output = output + line

        if linenum is None:
            msg = "Empty YAML file: " + ymlfile
            raise util.YAMLError(msg)
        else:
            # Append one more document to docline for the end
            self.doclines.append(linenum+1)

        return output
    # Fixed: 'except IOError, e' is Python-2-only syntax.
    except IOError as e:
        msg = "Could not process YAML file '" + ymlfile + "': '" + str(e) + "'"
        raise IOError(msg)
Cleans out all document tags from the YAML file to make it JSON-friendly to work with the JSON Schema.
entailment
def load(self, schemafile=None):
    """Load and process the JSON schema file.

    Args:
        schemafile: Optional path; when given it replaces the stored
            schema file path.

    Raises:
        jsonschema.SchemaError: when the file is missing or not JSON.
    """
    if schemafile is not None:
        self._schemafile = schemafile

    try:
        # Use a context manager so the handle is always closed; the
        # original json.load(open(...)) leaked the file object.
        with open(self._schemafile) as stream:
            self.data = json.load(stream)
    # Fixed: 'except (...), e' is Python-2-only syntax.
    except (IOError, ValueError) as e:
        msg = "Could not load schema file '" + self._schemafile + "': '" + str(e) + "'"
        raise jsonschema.SchemaError(msg)

    self.loaded = True
Load and process the schema file
entailment
def pretty(self, start, end, e, messages=None):
    """Pretties up the output error message so it is readable
    and designates where the error came from.

    Walks the YAML source between lines *start* and *end*, follows the
    JSON path of the validation error *e* to locate the offending line,
    and appends human-readable reports to *messages*.
    """

    log.debug("Displaying document from lines '%i' to '%i'", start, end)

    # A jsonschema error may carry sub-errors in .context; otherwise
    # report the top-level error itself.
    errorlist = []
    if len(e.context) > 0:
        errorlist = e.context
    else:
        errorlist.append(e)

    for error in errorlist:
        validator = error.validator
        if validator == "required":
            # Handle required fields
            msg = error.message
            messages.append("Between lines %d - %d. %s" % (start, end, msg))
        elif validator == "additionalProperties":
            # Handle additional properties not allowed
            if len(error.message) > 256:
                msg = error.message[:253] + "..."
            else:
                msg = error.message
            messages.append("Between lines %d - %d. %s" % (start, end, msg))
        elif len(error.relative_path) > 0:
            # Handle other cases where we can loop through the lines
            # get the JSON path to traverse through the file
            jsonpath = error.relative_path

            array_index = 0
            current_start = start
            foundline = 0
            found = False
            context = collections.deque(maxlen=20)
            tag = " <<<<<<<<< Expects: %s <<<<<<<<<\n"""

            for cnt, path in enumerate(error.relative_path):
                # Need to set the key we are looking, and then check the array count
                # if it is an array, we have some interesting checks to do
                if int(cnt) % 2 == 0:
                    # we know we have some array account
                    # array_index keeps track of the array count we are looking for or number
                    # of matches we need to skip over before we get to the one we care about
                    # check if previous array_index > 0. if so, then we know we need to use
                    # that one to track down the specific instance of this nested key.
                    # later on, we utilize this array_index loop through
                    # if array_index == 0:
                    array_index = jsonpath[cnt]
                    match_count = 0
                    continue
                elif int(cnt) % 2 == 1:
                    # we know we have some key name
                    # current_key keeps track of the key we are looking for in the JSON Path
                    current_key = jsonpath[cnt]

                for linenum in range(current_start, end):
                    line = linecache.getline(self.ymlfile, linenum)

                    # Check if line contains the error
                    if ":" in line:
                        l = line.split(':')
                        key = l[0]
                        value = ':'.join(l[1:])

                        # TODO:
                        # Handle maxItems TBD
                        # Handle minItems TBD
                        # Handle in-order (bytes) TBD
                        # Handle uniqueness TBD

                        # Handle cases where key in yml file is hexadecimal
                        try:
                            key = int(key.strip(), 16)
                        except ValueError:
                            key = key.strip()

                        if str(key) == current_key:
                            # check if we are at our match_count and end of the path
                            if match_count == array_index:
                                # check if we are at end of the jsonpath
                                if cnt == len(jsonpath)-1:
                                    # we are at the end of path so let's stop here'
                                    if error.validator == "type":
                                        if value.strip() == str(error.instance):
                                            errormsg = "Value '%s' should be of type '%s'" % (error.instance, str(error.validator_value))
                                            line = line.replace("\n", (tag % errormsg))
                                            foundline = linenum
                                            found = True
                                    elif value.strip() == "" and error.instance is None:
                                        errormsg = "Missing value for %s." % key
                                        line = line.replace("\n", (tag % errormsg))
                                        foundline = linenum
                                        found = True
                                elif not found:
                                    # print "EXTRA FOO"
                                    # print match_count
                                    # print array_index
                                    # print current_key
                                    # print line
                                    # otherwise change the start to the current line
                                    current_start = linenum
                                    break

                            match_count += 1

                    # for the context queue, we want to get the error to appear in
                    # the middle of the error output. to do so, we will only append
                    # to the queue in 2 cases:
                    #
                    # 1. before we find the error (found == False). we can
                    #    just keep pushing on the queue until we find it in the YAML.
                    # 2. once we find the error (found == True), we just want to push
                    #    onto the queue until the the line is in the middle
                    if not found or (found and context.maxlen > (linenum-foundline)*2):
                        context.append(line)
                    elif found and context.maxlen <= (linenum-foundline)*2:
                        break

                # Loop through the queue and generate a readable msg output
                out = ""
                for line in context:
                    out += line

                if foundline:
                    msg = "Error found on line %d in %s:\n\n%s" % (foundline, self.ymlfile, out)
                    messages.append(msg)

                    # reset the line it was found on and the context
                    foundline = 0
                    context.clear()

            linecache.clearcache()
        else:
            messages.append(error.message)
Pretties up the output error message so it is readable and designates where the error came from
entailment
def schema_val(self, messages=None):
    """Perform validation with processed YAML and Schema."""
    self._ymlproc = YAMLProcessor(self._ymlfile)
    self._schemaproc = SchemaProcessor(self._schemafile)
    valid = True

    log.debug("BEGIN: Schema-based validation for YAML '%s' with schema '%s'",
              self._ymlfile, self._schemafile)

    # Make sure the yml and schema have been loaded
    if self._ymlproc.loaded and self._schemaproc.loaded:
        # Load all of the yaml documents. Could be more than one in the same YAML file.
        for docnum, data in enumerate(yaml.load_all(self._ymlproc.data)):
            # Since YAML allows integer keys but JSON does not, we need to first
            # dump the data as a JSON string to encode all of the potential integers
            # as strings, and then read it back out into the YAML format. Kind of
            # a clunky workaround but it works as expected.
            # NOTE(review): yaml.load on untrusted input is unsafe; the
            # input here is a re-serialization of already-parsed data.
            data = yaml.load(json.dumps(data))

            # Now we want to get a validator ready
            v = jsonschema.Draft4Validator(self._schemaproc.data)

            # Loop through the errors (if any) and set valid = False if any are found
            # Display the error message
            for error in sorted(v.iter_errors(data)):
                msg = "Schema-based validation failed for YAML file '" + self._ymlfile + "'"
                self.ehandler.process(docnum, self._ymlproc.doclines, error, messages)
                valid = False

            if not valid:
                log.error(msg)
    elif not self._ymlproc.loaded:
        raise util.YAMLError("YAML must be loaded in order to validate.")
    elif not self._schemaproc.loaded:
        raise jsonschema.SchemaError("Schema must be loaded in order to validate.")

    log.debug("END: Schema-based validation complete for '%s'", self._ymlfile)
    return valid
Perform validation with processed YAML and Schema
entailment
def content_val(self, ymldata=None, messages=None):
    """Validates the Command Dictionary to ensure the contents for each
    of the fields meets specific criteria regarding the expected types,
    byte ranges, etc.
    """
    # Turn off the YAML Processor
    self._ymlproc = YAMLProcessor(self._ymlfile, False)

    log.debug("BEGIN: Content-based validation of Command dictionary")

    if ymldata is not None:
        cmddict = ymldata
    elif ymldata is None and self._ymlproc.loaded:
        cmddict = self._ymlproc.data
    elif not self._ymlproc.loaded:
        raise util.YAMLError("YAML failed to load.")

    try:
        # instantiate the document number. this will increment in order to
        # track the line numbers and section where validation fails
        docnum = 0

        # boolean to hold argument validity
        argsvalid = True

        # list of rules to validate against
        rules = []

        ### set the command rules
        # set uniqueness rule for command names
        rules.append(UniquenessRule('name', "Duplicate command name: %s", messages))

        # set uniqueness rule for opcodes
        rules.append(UniquenessRule('opcode', "Duplicate opcode: %s", messages))

        for cmdcnt, cmddefn in enumerate(cmddict[0]):
            # check the command rules
            for rule in rules:
                rule.check(cmddefn)

            ### set rules for command arguments, parameterized per command
            argrules = []
            argrules.append(UniquenessRule('name', "Duplicate argument name: " + cmddefn.name + ".%s", messages))
            argrules.append(TypeRule('type', "Invalid argument type for argument: " + cmddefn.name + ".%s", messages))
            argrules.append(TypeSizeRule('nbytes', "Invalid argument size for argument: " + cmddefn.name + ".%s", messages))
            argrules.append(EnumRule('enum', "Invalid enum value for argument: " + cmddefn.name + ".%s", messages))
            argrules.append(ByteOrderRule('bytes', "Invalid byte order for argument: " + cmddefn.name + ".%s", messages))

            argdefns = cmddefn.argdefns
            for arg in argdefns:
                # check argument rules
                for rule in argrules:
                    rule.check(arg)

            # check if any argument rule failed, if so set the validity to False
            if not all(r.valid is True for r in argrules):
                argsvalid = False

        log.debug("END: Content-based validation complete for '%s'", self._ymlfile)

        # check validity of all command rules and argument validity
        return all(rule.valid is True for rule in rules) and argsvalid
    # Fixed: 'except X, e' is Python-2-only syntax.
    except util.YAMLValidationError as e:
        # Display the error message
        if messages is not None:
            if len(e.message) < 128:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "': '" + str(e.message) + "'"
            else:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "'"

            log.error(msg)
            self.ehandler.process(docnum, self.ehandler.doclines, e, messages)
        return False
Validates the Command Dictionary to ensure the contents for each of the fields meets specific criteria regarding the expected types, byte ranges, etc.
entailment
def validate(self, ymldata=None, messages=None):
    """Validates the Telemetry Dictionary definitions.

    Runs schema-based validation first.  Content-based validation runs
    only when no messages have accumulated (*messages* is None or
    empty); otherwise only the schema result is reported.
    """
    schema_val = self.schema_val(messages)

    # Bug fix: *messages* defaults to None, so the original
    # 'len(messages) == 0' raised TypeError; and 'content_val' was an
    # unbound local (NameError) whenever messages was non-empty.
    content_val = True
    if not messages:
        content_val = self.content_val(ymldata, messages)

    return schema_val and content_val
Validates the Telemetry Dictionary definitions
entailment
def content_val(self, ymldata=None, messages=None):
    """Validates the Telemetry Dictionary to ensure the contents for each
    of the fields meets specific criteria regarding the expected types,
    byte ranges, etc.
    """
    log.debug("BEGIN: Content-based validation of Telemetry dictionary")

    if ymldata is not None:
        tlmdict = ymldata
    else:
        tlmdict = tlm.TlmDict(self._ymlfile)

    try:
        # instantiate the document number. this will increment in order to
        # track the line numbers and section where validation fails
        docnum = 0

        # boolean to hold field validity
        fldsvalid = True

        # list of rules to validate against
        rules = []

        ### set the packet rules
        # set uniqueness rule for packet names
        rules.append(UniquenessRule('name', "Duplicate packet name: %s", messages))

        # Loop through the keys and check each PacketDefinition
        for key in tlmdict.keys():
            pktdefn = tlmdict[key]

            # check the telemetry packet rules
            for rule in rules:
                rule.check(pktdefn)

            ### set rules for telemetry fields, parameterized per packet
            fldrules = []
            fldrules.append(UniquenessRule('name', "Duplicate field name: " + pktdefn.name + ".%s", messages))
            fldrules.append(TypeRule('type', "Invalid field type for field: " + pktdefn.name + ".%s", messages))
            fldrules.append(TypeSizeRule('nbytes', "Invalid field size for field: " + pktdefn.name + ".%s", messages))
            fldrules.append(EnumRule('enum', "Invalid enum value for field: " + pktdefn.name + ".%s", messages))

            flddefns = pktdefn.fields
            for fld in flddefns:
                # check field rules
                for rule in fldrules:
                    rule.check(fld)

            # check if any field rule failed, if so set the validity to False
            if not all(r.valid is True for r in fldrules):
                fldsvalid = False

        log.debug("END: Content-based validation complete for '%s'", self._ymlfile)

        # check validity of all packet rules and field validity
        return all(rule.valid is True for rule in rules) and fldsvalid
    # Fixed: 'except X, e' is Python-2-only syntax.
    except util.YAMLValidationError as e:
        # Display the error message
        if messages is not None:
            if len(e.message) < 128:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "': '" + str(e.message) + "'"
            else:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "'"

            log.error(msg)
            # NOTE(review): unlike the command validator, docnum is not
            # passed to ehandler.process() here -- confirm the intended
            # signature.
            self.ehandler.process(self.ehandler.doclines, e, messages)
        return False
Validates the Telemetry Dictionary to ensure the contents for each of the fields meets specific criteria regarding the expected types, byte ranges, etc.
entailment
def check(self, defn):
    """Check the definition's attribute value for uniqueness against
    the values already seen by this rule instance.
    """
    val = getattr(defn, self.attr)

    if val is None:
        return

    if val in self.val_list:
        # Duplicate value: record the failure.
        self.messages.append(self.msg % str(val))
        # TODO self.messages.append("TBD location message")
        self.valid = False
    else:
        # First sighting: remember it for later comparisons.
        self.val_list.append(val)
        log.debug(self.val_list)
Performs the uniqueness check against the value list maintained in this rule objects
entailment
def check(self, defn):
    """Verify the definition's data type is a primitive or array type.

    Assumes *defn* has 'type' and 'name' attributes.
    """
    if not isinstance(defn.type, (dtype.PrimitiveType, dtype.ArrayType)):
        self.messages.append(self.msg % str(defn.name))
        # self.messages.append("TBD location message")
        self.valid = False
Performs isinstance check for the definitions data type. Assumes the defn has 'type' and 'name' attributes
entailment
def check(self, defn, msg=None):
    """Compare the byte range from the definition's slice() against the
    size declared by its primitive data type.

    Assumes *defn* has 'type' and 'name' attributes and a slice() method.
    """
    if not isinstance(defn.type, dtype.PrimitiveType):
        return

    # Check the nbytes designated in the YAML match the data type's size.
    expected = defn.type.nbytes
    actual = defn.slice().stop - defn.slice().start

    if expected != actual:
        self.messages.append(self.msg % defn.name)
        self.messages.append("Definition size of (" + str(actual) +
                             " bytes) does not match size of data" +
                             " type " + str(defn.type.name) + " (" +
                             str(expected) + " byte(s))")
        # TODO self.messages.append("TBD location message")
        self.valid = False
Uses the byte range in the object definition to determine the number of bytes and compares to the size defined in the type. Assumes the defn has 'type' and 'name' attributes, and a slice() method
entailment
def check(self, defn, msg=None):
    """Verify byte-order contiguity via the definition's slice() range.

    Each definition must begin exactly where the previous one ended.
    """
    if defn.slice().start != self.prevstop:
        self.messages.append(self.msg % str(defn.name))
        # TODO self.messages.append("TBD location message")
        self.valid = False

    # Remember where this definition ends for the next check.
    self.prevstop = defn.slice().stop
Uses the definitions slice() method to determine its start/stop range.
entailment
def wait (cond, msg=None, _timeout=10, _raiseException=True):
    """Waits either a specified number of seconds, e.g.:

    .. code-block:: python

        wait(1.2)

    or for a given condition to be True.  Conditions may take several
    forms: Python string expression, lambda, or function, e.g.:

    .. code-block:: python

        wait('instrument_mode == "SAFE"')
        wait(lambda: instrument_mode == "SAFE")

        def isSafe(): return instrument_mode == "SAFE"
        wait(isSafe)

    The default ``_timeout`` is 10 seconds.  If the condition is not
    satisfied before the timeout has elapsed, an
    :exception:``APITimeoutError`` exception is raised.

    The :exception:``APITimeoutError`` exception may be suppressed in
    favor of returning ``True`` on success (i.e. condition satisfied)
    and ``False`` on failure (i.e. timeout exceeded) by setting the
    ``_raiseException`` parameter to ``False``.

    The :exception:``FalseWaitError`` will be thrown only if a boolean
    with value "False" is passed as an argument to wait.  The purpose of
    this is to avoid infinite loops and catch conditional arguments that
    are not passed in as strings and therefore evaluated before the wait
    function gets called.

    These parameters are prefixed with an underscore so they may also be
    used to control exception handling when sending commands.  Since
    methods that generate commands take keyword arguments, we did not
    want these parameter names to conflict with command parameter names.
    """
    status = False
    delay = 0.25
    elapsed = 0

    # A bare string condition doubles as its own description.
    if msg is None and type(cond) is str:
        msg = cond

    # A literal bool means the caller's expression was evaluated too
    # early (should have been a string or lambda).
    if type(cond) is bool:
        if cond:
            log.warn('Boolean passed as argument to wait. Make sure argument to wait is surrounded by a lambda or " "')
        else:
            raise FalseWaitError(msg)

    # A numeric condition is a plain sleep.
    if type(cond) in (int, float):
        gevent.sleep(cond)
        status = True
    else:
        while True:
            if _timeout is not None and elapsed >= _timeout:
                if _raiseException:
                    raise APITimeoutError(_timeout, msg)
                else:
                    status = False
                    break

            # String conditions are evaluated in the caller's frame so
            # they can reference the caller's local variables.
            if type(cond) is str:
                caller = inspect.stack()[1][0]
                status = eval(cond, caller.f_globals, caller.f_locals)
            elif callable(cond):
                status = cond()
            else:
                status = cond

            if status:
                break

            gevent.sleep(delay)
            elapsed += delay

    return status
Waits either a specified number of seconds, e.g.: .. code-block:: python wait(1.2) or for a given condition to be True. Conditions may be take several forms: Python string expression, lambda, or function, e.g.: .. code-block:: python wait('instrument_mode == "SAFE"') wait(lambda: instrument_mode == "SAFE") def isSafe(): return instrument_mode == "SAFE" wait(isSafe) The default ``_timeout`` is 10 seconds. If the condition is not satisfied before the timeout has elapsed, an :exception:``APITimeoutError`` exception is raised. The :exception:``APITimeoutError`` exception may be supressed in favor of returning ``True`` on success (i.e. condition satisfied) and ``False`` on failure (i.e. timeout exceeded) by setting the ``_raiseException`` parameter to ``False``. The :exception:``FalseWaitError`` will be thrown only if a boolean with value "False" is passed as an argument to wait. The purpose of this is to avoid infinite loops and catch conditional arguments are not passed in as strings and therefore evaluated before the wait function gets called. These parameters are prefixed with an underscore so they may also be used to control exception handling when sending commands. Since methods that generate commands take keyword arguments, we did not want these parameter names to conflict with command parameter names.
entailment
def send (self, command, *args, **kwargs):
    """Creates, validates, and sends the given command as a UDP packet
    to the destination (host, port) specified when this CmdAPI was
    created.

    Returns True if the command was created, valid, and sent; False
    otherwise.
    """
    status = False
    cmdobj = self._cmddict.create(command, *args, **kwargs)
    errors = []

    if not cmdobj.validate(errors):
        for msg in errors:
            log.error(msg)
        return status

    encoded = cmdobj.encode()

    if self._verbose:
        size = len(cmdobj.name)
        # NOTE(review): size == len(cmdobj.name), so pad is always a
        # single space -- confirm the intended column width.
        pad = (size - len(cmdobj.name) + 1) * ' '
        gds.hexdump(encoded, preamble=cmdobj.name + ':' + pad)

    try:
        log.command('Sending to %s:%d: %s'
                    % (self._host, self._port, str(cmdobj)))
        self._socket.sendto(encoded, (self._host, self._port))
        status = True

        # Record the command in the command-history pcap.
        with pcap.open(self.CMD_HIST_FILE, 'a') as output:
            output.write(str(cmdobj))
    except socket.error as e:
        log.error(e.message)
    except IOError as e:
        log.error(e.message)

    return status
Creates, validates, and sends the given command as a UDP packet to the destination (host, port) specified when this CmdAPI was created. Returns True if the command was created, valid, and sent, False otherwise.
entailment
def _pop(self, block=True, timeout=None, left=False):
    """Remove and return an item from this GeventDeque.

    Internal helper shared by pop() and popleft().  When *block* is
    True, waits (optionally up to *timeout* seconds) for an item.

    Raises:
        IndexError: when the deque is empty (non-blocking) or the
            timeout expires (blocking).
    """
    result = None
    timer = None
    items = self._deque
    underflow = IndexError('pop from an empty deque')

    # Bind the correct removal end once.
    take = items.popleft if left else items.pop

    if block is False:
        if len(self._deque) > 0:
            result = take()
        else:
            raise underflow
    else:
        try:
            if timeout is not None:
                # The timer raises `underflow` in this greenlet on expiry.
                timer = gevent.Timeout(timeout, underflow)
                timer.start()

            while True:
                self.notEmpty.wait()
                if len(items) > 0:
                    result = take()
                    break
        finally:
            if timer is not None:
                timer.cancel()

    if len(items) == 0:
        self.notEmpty.clear()

    return result
Removes and returns the an item from this GeventDeque. This is an internal method, called by the public methods pop() and popleft().
entailment
def append(self, item):
    """Add *item* to the right side of the GeventDeque.

    Never blocks: the deque either grows, or, when bounded at maxlen,
    silently drops the leftmost item.
    """
    self._deque.append(item)
    self.notEmpty.set()
Add item to the right side of the GeventDeque. This method does not block. Either the GeventDeque grows to consume available memory, or if this GeventDeque has and is at maxlen, the leftmost item is removed.
entailment
def appendleft(self, item):
    """Add *item* to the left side of the GeventDeque.

    Never blocks: the deque either grows, or, when bounded at maxlen,
    silently drops the rightmost item.
    """
    self._deque.appendleft(item)
    self.notEmpty.set()
Add item to the left side of the GeventDeque. This method does not block. Either the GeventDeque grows to consume available memory, or if this GeventDeque has and is at maxlen, the rightmost item is removed.
entailment
def extend(self, iterable):
    """Extend the right side of this GeventDeque with the elements of
    *iterable*, signalling waiters only when the deque ends non-empty.
    """
    self._deque.extend(iterable)
    if len(self._deque) > 0:
        self.notEmpty.set()
Extend the right side of this GeventDeque by appending elements from the iterable argument.
entailment
def extendleft(self, iterable):
    """Extend the left side of this GeventDeque with the elements of
    *iterable*.

    Note: the series of left appends reverses the order of the elements
    in *iterable*.  Waiters are signalled only when the deque ends
    non-empty.
    """
    self._deque.extendleft(iterable)
    if len(self._deque) > 0:
        self.notEmpty.set()
Extend the left side of this GeventDeque by appending elements from the iterable argument. Note, the series of left appends results in reversing the order of elements in the iterable argument.
entailment
def popleft(self, block=True, timeout=None):
    """Remove and return an item from the *left* side of the GeventDeque.

    (Docstring fix: this method pops from the left; the previous
    docstring incorrectly said "right side".)

    If no elements are present, raises an IndexError.  If optional arg
    *block* is True and *timeout* is ``None`` (the default), block if
    necessary until an item is available.  If *timeout* is a positive
    number, it blocks at most *timeout* seconds and raises the
    :class:`IndexError` exception if no item was available within that
    time.  Otherwise (*block* is False), return an item if one is
    immediately available, else raise the :class:`IndexError` exception
    (*timeout* is ignored in that case).
    """
    return self._pop(block, timeout, left=True)
Remove and return an item from the right side of the GeventDeque. If no elements are present, raises an IndexError. If optional args *block* is True and *timeout* is ``None`` (the default), block if necessary until an item is available. If *timeout* is a positive number, it blocks at most *timeout* seconds and raises the :class:`IndexError` exception if no item was available within that time. Otherwise (*block* is False), return an item if one is immediately available, else raise the :class:`IndexError` exception (*timeout* is ignored in that case).
entailment
def start (self):
    """Starts this UdpTelemetryServer, logging the listening endpoint."""
    log.info('Listening for %s telemetry on %s:%d (UDP)'
             % (self._defn.name, self.server_host, self.server_port))
    super(UdpTelemetryServer, self).start()
Starts this UdpTelemetryServer.
entailment
def confirm(self, msg, _timeout=-1):
    '''Display a confirm prompt in the GUI.

    Arguments:
        msg (string): The message to display to the user.

        _timeout (int): Optional number of seconds to display the
            prompt before a timeout occurs.  Defaults to -1, meaning
            no time limit.
    '''
    return self.msgBox('confirm', _timeout=_timeout, msg=msg)
Send a confirm prompt to the GUI Arguments: msg (string): The message to display to the user. _timeout (int): The optional amount of time for which the prompt should be displayed to the user before a timeout occurs. Defaults to -1 which indicates there is no timeout limit.
entailment
def msgBox(self, promptType, _timeout=-1, **options):
    '''Send a user prompt request to the GUI.

    Arguments:
        promptType (string): The prompt type to send to the GUI.
            Currently the only supported type is 'confirm'.

        _timeout (int): Optional number of seconds to display the
            prompt before a timeout occurs.  Defaults to -1, meaning
            no time limit.

        options (dict): Keyword arguments forwarded to the requested
            prompt type.  For 'confirm', a `msg` (string) to display
            is required; the handler returns True on 'Confirm' and
            False on 'Deny'.

    Raises:
        ValueError: If the prompt type is not recognized.
        KeyError: If the 'confirm' options lack a `msg` attribute.
        APITimeoutError: If no response arrives before the timeout.
    '''
    if promptType != 'confirm':
        raise ValueError('Unknown prompt type: {}'.format(promptType))

    return self._sendConfirmPrompt(_timeout, options)
Send a user prompt request to the GUI Arguments: promptType (string): The prompt type to send to the GUI. Currently the only type supported is 'confirm'. _timeout (int): The optional amount of time for which the prompt should be displayed to the user before a timeout occurs. Defaults to -1 which indicates there is no timeout limit. options (dict): The keyword arguments that should be passed to the requested prompt type. Check prompt specific sections below for information on what arguments are expected to be present. Raises: ValueError: If the prompt type received is an unexpected value **Confirm Prompt** Display a message to the user and prompt them for a confirm/deny response to the message. Arguments: msg (string): The message to display to the user Returns: True if the user picks 'Confirm', False if the user picks 'Deny' Raises: KeyError: If the options passed to the prompt handler doesn't contain a `msg` attribute. APITimeoutError: If the timeout value is reached without receiving a response.
entailment
def _subscribe_all(self): """ Subscribes all streams to their input. Subscribes all plugins to all their inputs. Subscribes all plugin outputs to the plugin. """ for stream in (self.inbound_streams + self.outbound_streams): for input_ in stream.inputs: if not type(input_) is int and input_ is not None: self._subscribe(stream, input_) for plugin in self.plugins: for input_ in plugin.inputs: self._subscribe(plugin, input_) for output in plugin.outputs: # Find output stream instance subscriber = next((x for x in self.outbound_streams if x.name == output), None) if subscriber is None: log.warn('The outbound stream {} does not ' 'exist so will not receive messages ' 'from {}'.format(output, plugin)) else: self._subscribe(subscriber, plugin.name)
Subscribes all streams to their input. Subscribes all plugins to all their inputs. Subscribes all plugin outputs to the plugin.
entailment
def addLocalHandlers (logger): """Adds logging handlers to logger to log to the following local resources: 1. The terminal 2. localhost:514 (i.e. syslogd) 3. localhost:2514 (i.e. the AIT GUI syslog-like handler) """ termlog = logging.StreamHandler() termlog.setFormatter( LogFormatter() ) logger.addHandler( termlog ) logger.addHandler( SysLogHandler() ) logger.addHandler( SysLogHandler(('localhost', 2514)) )
Adds logging handlers to logger to log to the following local resources: 1. The terminal 2. localhost:514 (i.e. syslogd) 3. localhost:2514 (i.e. the AIT GUI syslog-like handler)
entailment
def addRemoteHandlers (logger): """Adds logging handlers to logger to remotely log to: ait.config.logging.hostname:514 (i.e. syslogd) If not set or hostname cannot be resolved, this method has no effect. """ try: hostname = ait.config.logging.hostname # Do not "remote" log to this host, as that's already covered # by addLocalHandlers(). if socket.getfqdn() != hostname: socket.getaddrinfo(hostname, None) logger.addHandler( SysLogHandler( (hostname, 514) ) ) except AttributeError: pass # No ait.config.logging.hostname except socket.gaierror: pass
Adds logging handlers to logger to remotely log to: ait.config.logging.hostname:514 (i.e. syslogd) If not set or hostname cannot be resolved, this method has no effect.
entailment
def parseSyslog(msg): """Parses Syslog messages (RFC 5424) The `Syslog Message Format (RFC 5424) <https://tools.ietf.org/html/rfc5424#section-6>`_ can be parsed with simple whitespace tokenization:: SYSLOG-MSG = HEADER SP STRUCTURED-DATA [SP MSG] HEADER = PRI VERSION SP TIMESTAMP SP HOSTNAME SP APP-NAME SP PROCID SP MSGID ... NILVALUE = "-" This method does not return STRUCTURED-DATA. It parses NILVALUE ("-") STRUCTURED-DATA or simple STRUCTURED-DATA which does not contain (escaped) ']'. :returns: A dictionary keyed by the constituent parts of the Syslog message. """ tokens = msg.split(' ', 6) result = { } if len(tokens) > 0: pri = tokens[0] start = pri.find('<') stop = pri.find('>') if start != -1 and stop != -1: result['pri'] = pri[start + 1:stop] else: result['pri'] = '' if stop != -1 and len(pri) > stop: result['version'] = pri[stop + 1:] else: result['version'] = '' result[ 'timestamp' ] = tokens[1] if len(tokens) > 1 else '' result[ 'hostname' ] = tokens[2] if len(tokens) > 2 else '' result[ 'appname' ] = tokens[3] if len(tokens) > 3 else '' result[ 'procid' ] = tokens[4] if len(tokens) > 4 else '' result[ 'msgid' ] = tokens[5] if len(tokens) > 5 else '' result[ 'msg' ] = '' if len(tokens) > 6: # The following will work for NILVALUE STRUCTURED-DATA or # simple STRUCTURED-DATA which does not contain ']'. rest = tokens[6] start = rest.find('-') if start == -1: start = rest.find(']') if len(rest) > start: result['msg'] = rest[start + 1:].strip() return result
Parses Syslog messages (RFC 5424) The `Syslog Message Format (RFC 5424) <https://tools.ietf.org/html/rfc5424#section-6>`_ can be parsed with simple whitespace tokenization:: SYSLOG-MSG = HEADER SP STRUCTURED-DATA [SP MSG] HEADER = PRI VERSION SP TIMESTAMP SP HOSTNAME SP APP-NAME SP PROCID SP MSGID ... NILVALUE = "-" This method does not return STRUCTURED-DATA. It parses NILVALUE ("-") STRUCTURED-DATA or simple STRUCTURED-DATA which does not contain (escaped) ']'. :returns: A dictionary keyed by the constituent parts of the Syslog message.
entailment
def formatTime (self, record, datefmt=None): """Return the creation time of the specified LogRecord as formatted text.""" if datefmt is None: datefmt = '%Y-%m-%d %H:%M:%S' ct = self.converter(record.created) t = time.strftime(datefmt, ct) s = '%s.%03d' % (t, record.msecs) return s
Return the creation time of the specified LogRecord as formatted text.
entailment
def format (self, record): """Returns the given LogRecord as formatted text.""" record.hostname = self.hostname return logging.Formatter.format(self, record)
Returns the given LogRecord as formatted text.
entailment
def formatTime (self, record, datefmt=None): """Returns the creation time of the given LogRecord as formatted text. NOTE: The datefmt parameter and self.converter (the time conversion method) are ignored. BSD Syslog Protocol messages always use local time, and by our convention, Syslog Protocol messages use UTC. """ if self.bsd: lt_ts = datetime.datetime.fromtimestamp(record.created) ts = lt_ts.strftime(self.BSD_DATEFMT) if ts[4] == '0': ts = ts[0:4] + ' ' + ts[5:] else: utc_ts = datetime.datetime.utcfromtimestamp(record.created) ts = utc_ts.strftime(self.SYS_DATEFMT) return ts
Returns the creation time of the given LogRecord as formatted text. NOTE: The datefmt parameter and self.converter (the time conversion method) are ignored. BSD Syslog Protocol messages always use local time, and by our convention, Syslog Protocol messages use UTC.
entailment
def open (filename, mode='r', **options): """Returns an instance of a :class:`PCapStream` class which contains the ``read()``, ``write()``, and ``close()`` methods. Binary mode is assumed for this module, so the "b" is not required when calling ``open()``. If the optional ``rollover`` parameter is True, a :class:`PCapRolloverStream` is created instead. In that case ``filename`` is treated as a ``strftime(3)`` format string and ``nbytes``, ``npackets``, ``nseconds``, and ``dryrun`` parameters may also be specified. See :class:``PCapRolloverStream`` for more information. NOTE: :class:`PCapRolloverStream` is always opened in write mode ("wb") and supports only ``write()`` and ``close()``, not ``read()``. """ mode = mode.replace('b', '') + 'b' if options.get('rollover', False): stream = PCapRolloverStream(filename, options.get('nbytes' , None), options.get('npackets', None), options.get('nseconds', None), options.get('dryrun' , False)) else: stream = PCapStream( __builtin__.open(filename, mode), mode ) return stream
Returns an instance of a :class:`PCapStream` class which contains the ``read()``, ``write()``, and ``close()`` methods. Binary mode is assumed for this module, so the "b" is not required when calling ``open()``. If the optional ``rollover`` parameter is True, a :class:`PCapRolloverStream` is created instead. In that case ``filename`` is treated as a ``strftime(3)`` format string and ``nbytes``, ``npackets``, ``nseconds``, and ``dryrun`` parameters may also be specified. See :class:``PCapRolloverStream`` for more information. NOTE: :class:`PCapRolloverStream` is always opened in write mode ("wb") and supports only ``write()`` and ``close()``, not ``read()``.
entailment
def query(starttime, endtime, output=None, *filenames): '''Given a time range and input file, query creates a new file with only that subset of data. If no outfile name is given, the new file name is the old file name with the time range appended. Args: starttime: The datetime of the beginning time range to be extracted from the files. endtime: The datetime of the end of the time range to be extracted from the files. output: Optional: The output file name. Defaults to [first filename in filenames][starttime]-[endtime].pcap filenames: A tuple of one or more file names to extract data from. ''' if not output: output = (filenames[0].replace('.pcap','') + starttime.isoformat() + '-' + endtime.isoformat() + '.pcap') else: output = output with open(output,'w') as outfile: for filename in filenames: log.info("pcap.query: processing %s..." % filename) with open(filename, 'r') as stream: for header, packet in stream: if packet is not None: if header.timestamp >= starttime and header.timestamp <= endtime: outfile.write(packet, header=header)
Given a time range and input file, query creates a new file with only that subset of data. If no outfile name is given, the new file name is the old file name with the time range appended. Args: starttime: The datetime of the beginning time range to be extracted from the files. endtime: The datetime of the end of the time range to be extracted from the files. output: Optional: The output file name. Defaults to [first filename in filenames][starttime]-[endtime].pcap filenames: A tuple of one or more file names to extract data from.
entailment
def segment(filenames, format, **options): """Segment the given pcap file(s) by one or more thresholds (``nbytes``, ``npackets``, ``nseconds``). New segment filenames are determined based on the ``strftime(3)`` ``format`` string and the timestamp of the first packet in the file. :param filenames: Single filename (string) or list of filenames :param format: Output filename in ``strftime(3)`` format :param nbytes: Rollover after writing N bytes :param npackets: Rollover after writing N packets :param nseconds: Rollover after N seconds have elapsed between the first and last packet timestamp in the file. :param dryrun: Simulate file writes and output log messages. """ output = open(format, rollover=True, **options) if isinstance(filenames, str): filenames = [ filenames ] for filename in filenames: with open(filename, 'r') as stream: for header, packet in stream: output.write(packet, header) output.close()
Segment the given pcap file(s) by one or more thresholds (``nbytes``, ``npackets``, ``nseconds``). New segment filenames are determined based on the ``strftime(3)`` ``format`` string and the timestamp of the first packet in the file. :param filenames: Single filename (string) or list of filenames :param format: Output filename in ``strftime(3)`` format :param nbytes: Rollover after writing N bytes :param npackets: Rollover after writing N packets :param nseconds: Rollover after N seconds have elapsed between the first and last packet timestamp in the file. :param dryrun: Simulate file writes and output log messages.
entailment
def times(filenames, tolerance=2): """For the given file(s), return the time ranges available. Tolerance sets the number of seconds between time ranges. Any gaps larger than tolerance seconds will result in a new time range. :param filenames: Single filename (string) or list of filenames :param tolerance: Maximum seconds between contiguous time ranges :returns: A dictionary keyed by filename, with each value a list of (start, stop) time ranges for that file. """ times = { } delta = datetime.timedelta(seconds=tolerance) if isinstance(filenames, str): filenames = [ filenames ] for filename in filenames: with open(filename, 'r') as stream: times[filename] = list() header, packet = stream.read() start , stop = header.timestamp, header.timestamp for header, packet in stream: if header.timestamp - stop > delta: times[filename].append((start, stop)) start = header.timestamp stop = header.timestamp times[filename].append((start, stop)) return times
For the given file(s), return the time ranges available. Tolerance sets the number of seconds between time ranges. Any gaps larger than tolerance seconds will result in a new time range. :param filenames: Single filename (string) or list of filenames :param tolerance: Maximum seconds between contiguous time ranges :returns: A dictionary keyed by filename, with each value a list of (start, stop) time ranges for that file.
entailment
def read (self, stream): """Reads PCapGlobalHeader data from the given stream.""" self._data = stream.read(self._size) if len(self._data) >= self._size: values = struct.unpack(self._format, self._data) else: values = None, None, None, None, None, None, None if values[0] == 0xA1B2C3D4 or values[0] == 0xA1B23C4D: self._swap = '@' elif values[0] == 0xD4C3B2A1 or values[0] == 0x4D3CB2A1: self._swap = EndianSwap if values[0] is not None: values = struct.unpack(self._swap + self._format, self._data) self.magic_number = values[0] self.version_major = values[1] self.version_minor = values[2] self.thiszone = values[3] self.sigfigs = values[4] self.snaplen = values[5] self.network = values[6]
Reads PCapGlobalHeader data from the given stream.
entailment
def read (self, stream): """Reads PCapPacketHeader data from the given stream.""" self._data = stream.read(self._size) if len(self._data) >= self._size: values = struct.unpack(self._swap + self._format, self._data) else: values = None, None, None, None self.ts_sec = values[0] self.ts_usec = values[1] self.incl_len = values[2] self.orig_len = values[3]
Reads PCapPacketHeader data from the given stream.
entailment
def rollover (self): """Indicates whether or not it's time to rollover to a new file.""" rollover = False if not rollover and self._threshold.nbytes is not None: rollover = self._total.nbytes >= self._threshold.nbytes if not rollover and self._threshold.npackets is not None: rollover = self._total.npackets >= self._threshold.npackets if not rollover and self._threshold.nseconds is not None: nseconds = math.ceil(self._total.nseconds) rollover = nseconds >= self._threshold.nseconds return rollover
Indicates whether or not it's time to rollover to a new file.
entailment
def write (self, bytes, header=None): """Writes packet ``bytes`` and the optional pcap packet ``header``. If the pcap packet ``header`` is not specified, one will be generated based on the number of packet ``bytes`` and current time. """ if header is None: header = PCapPacketHeader(orig_len=len(bytes)) if self._stream is None: if self._threshold.nseconds is not None: # Round down to the nearest multiple of nseconds nseconds = self._threshold.nseconds remainder = int( math.floor( header.ts % nseconds ) ) delta = datetime.timedelta(seconds=remainder) timestamp = header.timestamp - delta else: timestamp = header.timestamp self._filename = timestamp.strftime(self._format) self._startTime = calendar.timegm( timestamp.replace(microsecond=0).timetuple() ) if self._dryrun: self._stream = True self._total.nbytes += len(PCapGlobalHeader()) else: self._stream = open(self._filename, 'w') self._total.nbytes += len(self._stream.header) if not self._dryrun: self._stream.write(bytes, header) self._total.nbytes += len(bytes) + len(header) self._total.npackets += 1 self._total.nseconds = header.ts - self._startTime if self.rollover: self.close() return header.incl_len
Writes packet ``bytes`` and the optional pcap packet ``header``. If the pcap packet ``header`` is not specified, one will be generated based on the number of packet ``bytes`` and current time.
entailment
def close (self): """Closes this :class:``PCapStream`` by closing the underlying Python stream.""" if self._stream: values = ( self._total.nbytes, self._total.npackets, int( math.ceil(self._total.nseconds) ), self._filename ) if self._dryrun: msg = 'Would write %d bytes, %d packets, %d seconds to %s.' else: msg = 'Wrote %d bytes, %d packets, %d seconds to %s.' self._stream.close() log.info(msg % values) self._filename = None self._startTime = None self._stream = None self._total = PCapFileStats(0, 0, 0)
Closes this :class:``PCapStream`` by closing the underlying Python stream.
entailment
def next (self): """Returns the next header and packet from this PCapStream. See read(). """ header, packet = self.read() if packet is None: raise StopIteration return header, packet
Returns the next header and packet from this PCapStream. See read().
entailment
def read (self): """Reads a single packet from this pcap stream, returning a tuple (PCapPacketHeader, packet) """ header = PCapPacketHeader(self._stream, self.header._swap) packet = None if not header.incomplete(): packet = self._stream.read(header.incl_len) return (header, packet)
Reads a single packet from this pcap stream, returning a tuple (PCapPacketHeader, packet)
entailment
def write (self, bytes, header=None): """write() is meant to work like the normal file write(). It takes two arguments, a byte array to write to the file as a single PCAP packet, and an optional header if one already exists. The length of the byte array should be less than 65535 bytes. write() returns the number of bytes actually written to the file. """ if type(bytes) is str: bytes = bytearray(bytes) if not isinstance(header, PCapPacketHeader): header = PCapPacketHeader(orig_len=len(bytes)) packet = bytes[0:header.incl_len] self._stream.write( str(header) ) self._stream.write( packet ) self._stream.flush() return header.incl_len
write() is meant to work like the normal file write(). It takes two arguments, a byte array to write to the file as a single PCAP packet, and an optional header if one already exists. The length of the byte array should be less than 65535 bytes. write() returns the number of bytes actually written to the file.
entailment
def hash_file(filename): """"This function returns the SHA-1 hash of the file passed into it""" # make a hash object h = hashlib.sha1() # open file for reading in binary mode with open(filename,'rb') as file: # loop till the end of the file chunk = 0 while chunk != b'': # read only 1024 bytes at a time chunk = file.read(1024) h.update(chunk) # return the hex representation of digest return h.hexdigest()
This function returns the SHA-1 hash of the file passed into it
entailment
def add (self, defn): """Adds the given Command Definition to this Command Dictionary.""" self[defn.name] = defn self.colnames[defn.name] = defn
Adds the given Command Definition to this Command Dictionary.
entailment