sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def dirty(self):
"""True if the cache needs to be updated, False otherwise"""
return not os.path.exists(self.cachename) or \
(os.path.getmtime(self.filename) >
os.path.getmtime(self.cachename)) | True if the cache needs to be updated, False otherwise | entailment |
def cache(self):
"""Caches the result of loader(filename) to cachename."""
msg = 'Saving updates from more recent "%s" to "%s"'
log.info(msg, self.filename, self.cachename)
with open(self.cachename, 'wb') as output:
cPickle.dump(self._dict, output, -1) | Caches the result of loader(filename) to cachename. | entailment |
def load(self):
"""Loads the Python object
Loads the Python object, either via loader(filename) or the
pickled cache file, whichever was modified most recently.
"""
if self._dict is None:
if self.dirty:
self._dict = self._loader(self.filename)
self.cache()
else:
with open(self.cachename, 'rb') as stream:
self._dict = cPickle.load(stream)
return self._dict | Loads the Python object
Loads the Python object, either via loader(filename) or the
pickled cache file, whichever was modified most recently. | entailment |
def process(self, input_data, topic=None, **kwargs):
"""
Splits tuple received from PacketHandler into packet UID and packet message.
Decodes packet and inserts into database backend.
Logs any exceptions raised.
Params:
input_data: message received from inbound stream through PacketHandler
topic: name of inbound stream message received from
**kwargs: any args required for connected to the backend
"""
try:
split = input_data[1:-1].split(',', 1)
uid, pkt = int(split[0]), split[1]
defn = self.packet_dict[uid]
decoded = tlm.Packet(defn, data=bytearray(pkt))
self.dbconn.insert(decoded, **kwargs)
except Exception as e:
log.error('Data archival failed with error: {}.'.format(e)) | Splits tuple received from PacketHandler into packet UID and packet message.
Decodes packet and inserts into database backend.
Logs any exceptions raised.
Params:
input_data: message received from inbound stream through PacketHandler
topic: name of inbound stream message received from
**kwargs: any args required for connected to the backend | entailment |
def getTimestampUTC():
"""getTimestampUTC() -> (ts_sec, ts_usec)
Returns the current UTC time in seconds and microseconds.
"""
utc = datetime.datetime.utcnow()
ts_sec = calendar.timegm( utc.timetuple() )
ts_usec = utc.microsecond
return ts_sec, ts_usec | getTimestampUTC() -> (ts_sec, ts_usec)
Returns the current UTC time in seconds and microseconds. | entailment |
def getUTCDatetimeDOY(days=0, hours=0, minutes=0, seconds=0):
"""getUTCDatetimeDOY -> datetime
Returns the UTC current datetime with the input timedelta arguments (days, hours, minutes, seconds)
added to current date. Returns ISO-8601 datetime format for day of year:
YYYY-DDDTHH:mm:ssZ
"""
return (datetime.datetime.utcnow() +
datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)).strftime(DOY_Format) | getUTCDatetimeDOY -> datetime
Returns the UTC current datetime with the input timedelta arguments (days, hours, minutes, seconds)
added to current date. Returns ISO-8601 datetime format for day of year:
YYYY-DDDTHH:mm:ssZ | entailment |
def toc():
"""toc() -> float | None
Returns the total elapsed seconds since the most recent tic(), or
None if tic() was not called.
Examples:
>>> import time
>>> tic()
>>> time.sleep(1.2)
>>> elapsed = toc()
>>> assert abs(elapsed - 1.2) <= 1e-2
.. note:: The tic() and toc() functions are simplistic and may introduce
significant overhead, especially in tight loops. Their use should
be limited to one-off experiments and rough numbers. The Python
profile package (i.e. 'import profile') should be used for serious
and detailed profiling.
"""
end = datetime.datetime.now()
return totalSeconds( end - TICs.pop() ) if len(TICs) else None | toc() -> float | None
Returns the total elapsed seconds since the most recent tic(), or
None if tic() was not called.
Examples:
>>> import time
>>> tic()
>>> time.sleep(1.2)
>>> elapsed = toc()
>>> assert abs(elapsed - 1.2) <= 1e-2
.. note:: The tic() and toc() functions are simplistic and may introduce
significant overhead, especially in tight loops. Their use should
be limited to one-off experiments and rough numbers. The Python
profile package (i.e. 'import profile') should be used for serious
and detailed profiling. | entailment |
def toGPSWeekAndSecs(timestamp=None):
"""Converts the given UTC timestamp (defaults to the current time) to
a two-tuple, (GPS week number, GPS seconds within the week).
"""
if timestamp is None:
timestamp = datetime.datetime.utcnow()
leap = LeapSeconds.get_GPS_offset_for_date(timestamp)
secsInWeek = 604800
delta = totalSeconds(timestamp - GPS_Epoch) + leap
seconds = delta % secsInWeek
week = int( math.floor(delta / secsInWeek) )
return (week, seconds) | Converts the given UTC timestamp (defaults to the current time) to
a two-tuple, (GPS week number, GPS seconds within the week). | entailment |
def toGMST(dt=None):
"""Converts the given Python datetime or Julian date (float) to
Greenwich Mean Sidereal Time (GMST) (in radians) using the formula
from D.A. Vallado (2004).
See:
D.A. Vallado, Fundamentals of Astrodynamics and Applications, p. 192
http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=192&pg=PA192
"""
if dt is None or type(dt) is datetime.datetime:
jd = toJulian(dt)
else:
jd = dt
tUT1 = (jd - 2451545.0) / 36525.0
gmst = 67310.54841 + (876600 * 3600 + 8640184.812866) * tUT1
gmst += 0.093104 * tUT1**2
gmst -= 6.2e-6 * tUT1**3
# Convert from seconds to degrees, i.e.
# 86400 seconds / 360 degrees = 240 seconds / degree
gmst /= 240.
# Convert to radians
gmst = math.radians(gmst) % TwoPi
if gmst < 0:
gmst += TwoPi
return gmst | Converts the given Python datetime or Julian date (float) to
Greenwich Mean Sidereal Time (GMST) (in radians) using the formula
from D.A. Vallado (2004).
See:
D.A. Vallado, Fundamentals of Astrodynamics and Applications, p. 192
http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=192&pg=PA192 | entailment |
def toJulian(dt=None):
"""Converts a Python datetime to a Julian date, using the formula from
Meesus (1991). This formula is reproduced in D.A. Vallado (2004).
See:
D.A. Vallado, Fundamentals of Astrodynamics and Applications, p. 187
http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=187&pg=PA187
"""
if dt is None:
dt = datetime.datetime.utcnow()
if dt.month < 3:
year = dt.year - 1
month = dt.month + 12
else:
year = dt.year
month = dt.month
A = int(year / 100.0)
B = 2 - A + int(A / 4.0)
C = ( (dt.second / 60.0 + dt.minute) / 60.0 + dt.hour ) / 24.0
jd = int(365.25 * (year + 4716))
jd += int(30.6001 * (month + 1)) + dt.day + B - 1524.5 + C
return jd | Converts a Python datetime to a Julian date, using the formula from
Meesus (1991). This formula is reproduced in D.A. Vallado (2004).
See:
D.A. Vallado, Fundamentals of Astrodynamics and Applications, p. 187
http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=187&pg=PA187 | entailment |
def toLocalTime(seconds, microseconds=0):
"""toLocalTime(seconds, microseconds=0) -> datetime
Converts the given number of seconds since the GPS Epoch (midnight
on January 6th, 1980) to this computer's local time. Returns a
Python datetime object.
Examples:
>>> toLocalTime(0)
datetime.datetime(1980, 1, 6, 0, 0)
>>> toLocalTime(25 * 86400)
datetime.datetime(1980, 1, 31, 0, 0)
"""
delta = datetime.timedelta(seconds=seconds, microseconds=microseconds)
return GPS_Epoch + delta | toLocalTime(seconds, microseconds=0) -> datetime
Converts the given number of seconds since the GPS Epoch (midnight
on January 6th, 1980) to this computer's local time. Returns a
Python datetime object.
Examples:
>>> toLocalTime(0)
datetime.datetime(1980, 1, 6, 0, 0)
>>> toLocalTime(25 * 86400)
datetime.datetime(1980, 1, 31, 0, 0) | entailment |
def totalSeconds(td):
"""totalSeconds(td) -> float
Return the total number of seconds contained in the given Python
datetime.timedelta object. Python 2.6 and earlier do not have
timedelta.total_seconds().
Examples:
>>> totalSeconds( toLocalTime(86400.123) - toLocalTime(0.003) )
86400.12
"""
if hasattr(td, "total_seconds"):
ts = td.total_seconds()
else:
ts = (td.microseconds + (td.seconds + td.days * 24 * 3600.0) * 1e6) / 1e6
return ts | totalSeconds(td) -> float
Return the total number of seconds contained in the given Python
datetime.timedelta object. Python 2.6 and earlier do not have
timedelta.total_seconds().
Examples:
>>> totalSeconds( toLocalTime(86400.123) - toLocalTime(0.003) )
86400.12 | entailment |
def _update_leap_second_data(self):
""" Updates the systems leap second information
Pulls the latest leap second information from
https://www.ietf.org/timezones/data/leap-seconds.list
and updates the leapsecond config file.
Raises:
ValueError: If the connection to IETF does not return 200
IOError: If the path to the leap seconds file is not valid
"""
log.info('Attempting to acquire latest leapsecond data')
ls_file = ait.config.get(
'leapseconds.filename',
os.path.join(ait.config._directory, _DEFAULT_FILE_NAME)
)
url = 'https://www.ietf.org/timezones/data/leap-seconds.list'
r = requests.get(url)
if r.status_code != 200:
msg = 'Unable to locate latest timezone data. Connection to IETF failed'
log.error(msg)
raise ValueError(msg)
text = r.text.split('\n')
lines = [l for l in text if l.startswith('#@') or not l.startswith('#')]
data = {'valid': None, 'leapseconds': []}
data['valid'] = datetime.datetime(1900, 1, 1) + datetime.timedelta(seconds=int(lines[0].split('\t')[1]))
leap = 1
for l in lines[1:-1]:
t = datetime.datetime(1900, 1, 1) + datetime.timedelta(seconds=int(l.split('\t')[0]))
if t < GPS_Epoch:
continue
data['leapseconds'].append((t, leap))
leap += 1
self._data = data
with open(ls_file, 'w') as outfile:
pickle.dump(data, outfile) | Updates the systems leap second information
Pulls the latest leap second information from
https://www.ietf.org/timezones/data/leap-seconds.list
and updates the leapsecond config file.
Raises:
ValueError: If the connection to IETF does not return 200
IOError: If the path to the leap seconds file is not valid | entailment |
def wait(self):
"""
Starts all greenlets for concurrent processing.
Joins over all greenlets that are not servers.
"""
for greenlet in (self.greenlets + self.servers):
log.info("Starting {} greenlet...".format(greenlet))
greenlet.start()
gevent.joinall(self.greenlets) | Starts all greenlets for concurrent processing.
Joins over all greenlets that are not servers. | entailment |
def _load_streams(self):
"""
Reads, parses and creates streams specified in config.yaml.
"""
common_err_msg = 'No valid {} stream configurations found. '
specific_err_msg = {'inbound': 'No data will be received (or displayed).',
'outbound': 'No data will be published.'}
err_msgs = {}
for stream_type in ['inbound', 'outbound']:
err_msgs[stream_type] = common_err_msg.format(stream_type) + specific_err_msg[stream_type]
streams = ait.config.get('server.{}-streams'.format(stream_type))
if streams is None:
log.warn(err_msgs[stream_type])
else:
for index, s in enumerate(streams):
try:
if stream_type == 'inbound':
strm = self._create_inbound_stream(s['stream'])
if type(strm) == PortInputStream:
self.servers.append(strm)
else:
self.inbound_streams.append(strm)
elif stream_type == 'outbound':
strm = self._create_outbound_stream(s['stream'])
self.outbound_streams.append(strm)
log.info('Added {} stream {}'.format(stream_type, strm))
except Exception:
exc_type, value, tb = sys.exc_info()
log.error('{} creating {} stream {}: {}'.format(exc_type,
stream_type,
index,
value))
if not self.inbound_streams and not self.servers:
log.warn(err_msgs['inbound'])
if not self.outbound_streams:
log.warn(err_msgs['outbound']) | Reads, parses and creates streams specified in config.yaml. | entailment |
def _create_inbound_stream(self, config=None):
"""
Creates an inbound stream from its config.
Params:
config: stream configuration as read by ait.config
Returns:
stream: a Stream
Raises:
ValueError: if any of the required config values are missing
"""
if config is None:
raise ValueError('No stream config to create stream from.')
name = self._get_stream_name(config)
stream_handlers = self._get_stream_handlers(config, name)
stream_input = config.get('input', None)
if stream_input is None:
raise(cfg.AitConfigMissing('inbound stream {}\'s input'.format(name)))
if type(stream_input[0]) is int:
return PortInputStream(name,
stream_input,
stream_handlers,
zmq_args={'zmq_context': self.broker.context,
'zmq_proxy_xsub_url': self.broker.XSUB_URL,
'zmq_proxy_xpub_url': self.broker.XPUB_URL})
else:
return ZMQStream(name,
stream_input,
stream_handlers,
zmq_args={'zmq_context': self.broker.context,
'zmq_proxy_xsub_url': self.broker.XSUB_URL,
'zmq_proxy_xpub_url': self.broker.XPUB_URL}) | Creates an inbound stream from its config.
Params:
config: stream configuration as read by ait.config
Returns:
stream: a Stream
Raises:
ValueError: if any of the required config values are missing | entailment |
def _create_outbound_stream(self, config=None):
"""
Creates an outbound stream from its config.
Params:
config: stream configuration as read by ait.config
Returns:
stream: a Stream
Raises:
ValueError: if any of the required config values are missing
"""
if config is None:
raise ValueError('No stream config to create stream from.')
name = self._get_stream_name(config)
stream_handlers = self._get_stream_handlers(config, name)
stream_input = config.get('input', None)
stream_output = config.get('output', None)
if type(stream_output) is int:
return PortOutputStream(name,
stream_input,
stream_output,
stream_handlers,
zmq_args={'zmq_context': self.broker.context,
'zmq_proxy_xsub_url': self.broker.XSUB_URL,
'zmq_proxy_xpub_url': self.broker.XPUB_URL})
else:
if stream_output is not None:
log.warn("Output of stream {} is not an integer port. "
"Stream outputs can only be ports.".format(name))
return ZMQStream(name,
stream_input,
stream_handlers,
zmq_args={'zmq_context': self.broker.context,
'zmq_proxy_xsub_url': self.broker.XSUB_URL,
'zmq_proxy_xpub_url': self.broker.XPUB_URL}) | Creates an outbound stream from its config.
Params:
config: stream configuration as read by ait.config
Returns:
stream: a Stream
Raises:
ValueError: if any of the required config values are missing | entailment |
def _create_handler(self, config):
"""
Creates a handler from its config.
Params:
config: handler config
Returns:
handler instance
"""
if config is None:
raise ValueError('No handler config to create handler from.')
if 'name' not in config:
raise ValueError('Handler name is required.')
handler_name = config['name']
# try to create handler
module_name = handler_name.rsplit('.', 1)[0]
class_name = handler_name.rsplit('.', 1)[-1]
module = import_module(module_name)
handler_class = getattr(module, class_name)
instance = handler_class(**config)
return instance | Creates a handler from its config.
Params:
config: handler config
Returns:
handler instance | entailment |
def _load_plugins(self):
"""
Reads, parses and creates plugins specified in config.yaml.
"""
plugins = ait.config.get('server.plugins')
if plugins is None:
log.warn('No plugins specified in config.')
else:
for index, p in enumerate(plugins):
try:
plugin = self._create_plugin(p['plugin'])
self.plugins.append(plugin)
log.info('Added plugin {}'.format(plugin))
except Exception:
exc_type, value, tb = sys.exc_info()
log.error('{} creating plugin {}: {}'.format(exc_type,
index,
value))
if not self.plugins:
log.warn('No valid plugin configurations found. No plugins will be added.') | Reads, parses and creates plugins specified in config.yaml. | entailment |
def _create_plugin(self, config):
"""
Creates a plugin from its config.
Params:
config: plugin configuration as read by ait.config
Returns:
plugin: a Plugin
Raises:
ValueError: if any of the required config values are missing
"""
if config is None:
raise ValueError('No plugin config to create plugin from.')
name = config.pop('name', None)
if name is None:
raise(cfg.AitConfigMissing('plugin name'))
# TODO I don't think we actually care about this being unique? Left over from
# previous conversations about stuff?
module_name = name.rsplit('.', 1)[0]
class_name = name.rsplit('.', 1)[-1]
if class_name in [x.name for x in (self.outbound_streams +
self.inbound_streams +
self.servers +
self.plugins)]:
raise ValueError(
'Plugin "{}" already loaded. Only one plugin of a given name is allowed'.
format(class_name)
)
plugin_inputs = config.pop('inputs', None)
if plugin_inputs is None:
log.warn('No plugin inputs specified for {}'.format(name))
plugin_inputs = [ ]
subscribers = config.pop('outputs', None)
if subscribers is None:
log.warn('No plugin outputs specified for {}'.format(name))
subscribers = [ ]
# try to create plugin
module = import_module(module_name)
plugin_class = getattr(module, class_name)
instance = plugin_class(plugin_inputs,
subscribers,
zmq_args={'zmq_context': self.broker.context,
'zmq_proxy_xsub_url': self.broker.XSUB_URL,
'zmq_proxy_xpub_url': self.broker.XPUB_URL},
**config
)
return instance | Creates a plugin from its config.
Params:
config: plugin configuration as read by ait.config
Returns:
plugin: a Plugin
Raises:
ValueError: if any of the required config values are missing | entailment |
def slotsToJSON(obj, slots=None):
"""Converts the given Python object to one suitable for Javascript
Object Notation (JSON) serialization via :func:`json.dump` or
:func:`json.dumps`. This function delegates to :func:`toJSON`.
Specifically only attributes in the list of *slots* are converted.
If *slots* is not provided, it defaults to the object's
``__slots__` and any inherited ``__slots__``.
To omit certain slots from serialization, the object may define a
:meth:`__jsonOmit__(key, val)` method. When the method returns
True for any particular slot name (i.e. key) and value
combination, the slot will not serialized.
"""
if slots is None:
slots = list(obj.__slots__) if hasattr(obj, '__slots__') else [ ]
for base in obj.__class__.__bases__:
if hasattr(base, '__slots__'):
slots.extend(base.__slots__)
testOmit = hasattr(obj, '__jsonOmit__') and callable(obj.__jsonOmit__)
result = { }
for slot in slots:
key = slot[1:] if slot.startswith('_') else slot
val = getattr(obj, slot, None)
if testOmit is False or obj.__jsonOmit__(key, val) is False:
result[key] = toJSON(val)
return result | Converts the given Python object to one suitable for Javascript
Object Notation (JSON) serialization via :func:`json.dump` or
:func:`json.dumps`. This function delegates to :func:`toJSON`.
Specifically only attributes in the list of *slots* are converted.
If *slots* is not provided, it defaults to the object's
``__slots__` and any inherited ``__slots__``.
To omit certain slots from serialization, the object may define a
:meth:`__jsonOmit__(key, val)` method. When the method returns
True for any particular slot name (i.e. key) and value
combination, the slot will not serialized. | entailment |
def toJSON (obj):
"""Converts the given Python object to one suitable for Javascript
Object Notation (JSON) serialization via :func:`json.dump` or
:func:`json.dumps`. If the Python object has a :meth:`toJSON`
method, it is always given preference and will be called to peform
the conversion.
Otherwise, plain mapping and sequence types are converted to
Python dictionaries and lists, respectively, by recursively
calling this :func:`toJSON` function on mapping keys and values or
iterable items. Python primitive types handled natively by the
JSON encoder (``int``, ``long``, ``float``, ``str``, ``unicode``,
and ``None``) are returned as-is.
If no other conversion is appropriate, the Python builtin function
:func:`str` is used to convert the object.
"""
if hasattr(obj, 'toJSON') and callable(obj.toJSON):
result = obj.toJSON()
elif isinstance(obj, (int, long, float, str, unicode)) or obj is None:
result = obj
elif isinstance(obj, collections.Mapping):
result = { toJSON(key): toJSON(obj[key]) for key in obj }
elif isinstance(obj, collections.Sequence):
result = [ toJSON(item) for item in obj ]
else:
result = str(obj)
return result | Converts the given Python object to one suitable for Javascript
Object Notation (JSON) serialization via :func:`json.dump` or
:func:`json.dumps`. If the Python object has a :meth:`toJSON`
method, it is always given preference and will be called to peform
the conversion.
Otherwise, plain mapping and sequence types are converted to
Python dictionaries and lists, respectively, by recursively
calling this :func:`toJSON` function on mapping keys and values or
iterable items. Python primitive types handled natively by the
JSON encoder (``int``, ``long``, ``float``, ``str``, ``unicode``,
and ``None``) are returned as-is.
If no other conversion is appropriate, the Python builtin function
:func:`str` is used to convert the object. | entailment |
def createDirStruct(paths, verbose=True):
'''Loops ait.config._datapaths from AIT_CONFIG and creates a directory.
Replaces year and doy with the respective year and day-of-year.
If neither are given as arguments, current UTC day and year are used.
Args:
paths:
[optional] list of directory paths you would like to create.
doy and year will be replaced by the datetime day and year, respectively.
datetime:
UTC Datetime string in ISO 8601 Format YYYY-MM-DDTHH:mm:ssZ
'''
for k, path in paths.items():
p = None
try:
pathlist = path if type(path) is list else [ path ]
for p in pathlist:
os.makedirs(p)
if verbose:
log.info('Creating directory: ' + p)
except OSError, e:
#print path
if e.errno == errno.EEXIST and os.path.isdir(p):
pass
else:
raise
return True | Loops ait.config._datapaths from AIT_CONFIG and creates a directory.
Replaces year and doy with the respective year and day-of-year.
If neither are given as arguments, current UTC day and year are used.
Args:
paths:
[optional] list of directory paths you would like to create.
doy and year will be replaced by the datetime day and year, respectively.
datetime:
UTC Datetime string in ISO 8601 Format YYYY-MM-DDTHH:mm:ssZ | entailment |
def capture_packet(self):
''' Write packet data to the logger's log file. '''
data = self.socket.recv(self._buffer_size)
for h in self.capture_handlers:
h['reads'] += 1
h['data_read'] += len(data)
d = data
if 'pre_write_transforms' in h:
for data_transform in h['pre_write_transforms']:
d = data_transform(d)
h['logger'].write(d) | Write packet data to the logger's log file. | entailment |
def clean_up(self):
''' Clean up the socket and log file handles. '''
self.socket.close()
for h in self.capture_handlers:
h['logger'].close() | Clean up the socket and log file handles. | entailment |
def socket_monitor_loop(self):
''' Monitor the socket and log captured data. '''
try:
while True:
gevent.socket.wait_read(self.socket.fileno())
self._handle_log_rotations()
self.capture_packet()
finally:
self.clean_up() | Monitor the socket and log captured data. | entailment |
def add_handler(self, handler):
''' Add an additional handler
Args:
handler:
A dictionary of handler configuration for the handler
that should be added. See :func:`__init__` for details
on valid parameters.
'''
handler['logger'] = self._get_logger(handler)
handler['reads'] = 0
handler['data_read'] = 0
self.capture_handlers.append(handler) | Add an additional handler
Args:
handler:
A dictionary of handler configuration for the handler
that should be added. See :func:`__init__` for details
on valid parameters. | entailment |
def remove_handler(self, name):
''' Remove a handler given a name
Note, if multiple handlers have the same name the last matching
instance in the handler list will be removed.
Args:
name:
The name of the handler to remove
'''
index = None
for i, h in enumerate(self.capture_handlers):
if h['name'] == name:
index = i
if index is not None:
self.capture_handlers[index]['logger'].close()
del self.capture_handlers[index] | Remove a handler given a name
Note, if multiple handlers have the same name the last matching
instance in the handler list will be removed.
Args:
name:
The name of the handler to remove | entailment |
def dump_handler_config_data(self):
''' Return capture handler configuration data.
Return a dictionary of capture handler configuration data of the form:
.. code-block:: none
[{
'handler': <handler configuration dictionary>,
'log_file_path': <Path to the current log file that the logger
is writing. Note that if rotation is used it\'s possible
this data will be stale eventually.>,
'conn_type': <The string defining the connection type of the
logger.>,
'address': <The list containing address info that the logger is
using for its connection.>
}, ...]
'''
ignored_keys = ['logger', 'log_rot_time', 'reads', 'data_read']
config_data = []
for h in self.capture_handlers:
config_data.append({
'handler': {
k:v for k, v in h.iteritems()
if k not in ignored_keys
},
'log_file_path': h['logger']._stream.name,
'conn_type': self.conn_type,
'address': self.address,
})
return config_data | Return capture handler configuration data.
Return a dictionary of capture handler configuration data of the form:
.. code-block:: none
[{
'handler': <handler configuration dictionary>,
'log_file_path': <Path to the current log file that the logger
is writing. Note that if rotation is used it\'s possible
this data will be stale eventually.>,
'conn_type': <The string defining the connection type of the
logger.>,
'address': <The list containing address info that the logger is
using for its connection.>
}, ...] | entailment |
def dump_all_handler_stats(self):
''' Return handler capture statistics
Return a dictionary of capture handler statistics of the form:
.. code-block:: none
[{
'name': The handler's name,
'reads': The number of packet reads this handler has received
'data_read_length': The total length of the data received
'approx_data_rate': The approximate data rate for this handler
}, ...]
'''
stats = []
for h in self.capture_handlers:
now = calendar.timegm(time.gmtime())
rot_time = calendar.timegm(h['log_rot_time'])
time_delta = now - rot_time
approx_data_rate = '{} bytes/second'.format(h['data_read'] / float(time_delta))
stats.append({
'name': h['name'],
'reads': h['reads'],
'data_read_length': '{} bytes'.format(h['data_read']),
'approx_data_rate': approx_data_rate
})
return stats | Return handler capture statistics
Return a dictionary of capture handler statistics of the form:
.. code-block:: none
[{
'name': The handler's name,
'reads': The number of packet reads this handler has received
'data_read_length': The total length of the data received
'approx_data_rate': The approximate data rate for this handler
}, ...] | entailment |
def _handle_log_rotations(self):
''' Rotate each handler's log file if necessary '''
for h in self.capture_handlers:
if self._should_rotate_log(h):
self._rotate_log(h) | Rotate each handler's log file if necessary | entailment |
def _should_rotate_log(self, handler):
''' Determine if a log file rotation is necessary '''
if handler['rotate_log']:
rotate_time_index = handler.get('rotate_log_index', 'day')
try:
rotate_time_index = self._decode_time_rotation_index(rotate_time_index)
except ValueError:
rotate_time_index = 2
rotate_time_delta = handler.get('rotate_log_delta', 1)
cur_t = time.gmtime()
first_different_index = 9
for i in range(9):
if cur_t[i] != handler['log_rot_time'][i]:
first_different_index = i
break
if first_different_index < rotate_time_index:
# If the time deltas differ by a time step greater than what we
# have set for the rotation (I.e., months instead of days) we will
# automatically rotate.
return True
else:
time_delta = cur_t[rotate_time_index] - handler['log_rot_time'][rotate_time_index]
return time_delta >= rotate_time_delta
return False | Determine if a log file rotation is necessary | entailment |
def _decode_time_rotation_index(self, time_rot_index):
''' Return the time struct index to use for log rotation checks '''
time_index_decode_table = {
'year': 0, 'years': 0, 'tm_year': 0,
'month': 1, 'months': 1, 'tm_mon': 1,
'day': 2, 'days': 2, 'tm_mday': 2,
'hour': 3, 'hours': 3, 'tm_hour': 3,
'minute': 4, 'minutes': 4, 'tm_min': 4,
'second': 5, 'seconds': 5, 'tm_sec': 5,
}
if time_rot_index not in time_index_decode_table.keys():
raise ValueError('Invalid time option specified for log rotation')
return time_index_decode_table[time_rot_index] | Return the time struct index to use for log rotation checks | entailment |
def _get_log_file(self, handler):
''' Generate log file path for a given handler
Args:
handler:
The handler configuration dictionary for which a log file
path should be generated.
'''
if 'file_name_pattern' not in handler:
filename = '%Y-%m-%d-%H-%M-%S-{name}.pcap'
else:
filename = handler['file_name_pattern']
log_file = handler['log_dir']
if 'path' in handler:
log_file = os.path.join(log_file, handler['path'], filename)
else:
log_file = os.path.join(log_file, filename)
log_file = time.strftime(log_file, time.gmtime())
log_file = log_file.format(**handler)
return log_file | Generate log file path for a given handler
Args:
handler:
The handler configuration dictionary for which a log file
path should be generated. | entailment |
def _get_logger(self, handler):
''' Initialize a PCAP stream for logging data '''
log_file = self._get_log_file(handler)
if not os.path.isdir(os.path.dirname(log_file)):
os.makedirs(os.path.dirname(log_file))
handler['log_rot_time'] = time.gmtime()
return pcap.open(log_file, mode='a') | Initialize a PCAP stream for logging data | entailment |
def add_logger(self, name, address, conn_type, log_dir_path=None, **kwargs):
''' Add a new stream capturer to the manager.
Add a new stream capturer to the manager with the provided configuration
details. If an existing capturer is monitoring the same address the
new handler will be added to it.
Args:
name:
A string defining the new capturer's name.
address:
A tuple containing address data for the capturer. Check the
:class:`SocketStreamCapturer` documentation for what is
required.
conn_type:
A string defining the connection type. Check the
:class:`SocketStreamCapturer` documentation for a list of valid
options.
log_dir_path:
An optional path defining the directory where the
capturer should write its files. If this isn't provided the root
log directory from the manager configuration is used.
'''
capture_handler_conf = kwargs
if not log_dir_path:
log_dir_path = self._mngr_conf['root_log_directory']
log_dir_path = os.path.normpath(os.path.expanduser(log_dir_path))
capture_handler_conf['log_dir'] = log_dir_path
capture_handler_conf['name'] = name
if 'rotate_log' not in capture_handler_conf:
capture_handler_conf['rotate_log'] = True
transforms = []
if 'pre_write_transforms' in capture_handler_conf:
for transform in capture_handler_conf['pre_write_transforms']:
if isinstance(transform, str):
if globals().has_key(transform):
transforms.append(globals().get(transform))
else:
msg = (
'Unable to load data transformation '
'"{}" for handler "{}"'
).format(
transform,
capture_handler_conf['name']
)
log.warn(msg)
elif hasattr(transform, '__call__'):
transforms.append(transform)
else:
msg = (
'Unable to determine how to load data transform "{}"'
).format(transform)
log.warn(msg)
capture_handler_conf['pre_write_transforms'] = transforms
address_key = str(address)
if address_key in self._stream_capturers:
capturer = self._stream_capturers[address_key][0]
capturer.add_handler(capture_handler_conf)
return
socket_logger = SocketStreamCapturer(capture_handler_conf,
address,
conn_type)
greenlet = gevent.spawn(socket_logger.socket_monitor_loop)
self._stream_capturers[address_key] = (
socket_logger,
greenlet
)
self._pool.add(greenlet) | Add a new stream capturer to the manager.
Add a new stream capturer to the manager with the provided configuration
details. If an existing capturer is monitoring the same address the
new handler will be added to it.
Args:
name:
A string defining the new capturer's name.
address:
A tuple containing address data for the capturer. Check the
:class:`SocketStreamCapturer` documentation for what is
required.
conn_type:
A string defining the connection type. Check the
:class:`SocketStreamCapturer` documentation for a list of valid
options.
log_dir_path:
An optional path defining the directory where the
capturer should write its files. If this isn't provided the root
log directory from the manager configuration is used. | entailment |
def stop_capture_handler(self, name):
''' Remove all handlers with a given name
Args:
name:
The name of the handler(s) to remove.
'''
empty_capturers_indeces = []
for k, sc in self._stream_capturers.iteritems():
stream_capturer = sc[0]
stream_capturer.remove_handler(name)
if stream_capturer.handler_count == 0:
self._pool.killone(sc[1])
empty_capturers_indeces.append(k)
for i in empty_capturers_indeces:
del self._stream_capturers[i] | Remove all handlers with a given name
Args:
name:
The name of the handler(s) to remove. | entailment |
def stop_stream_capturer(self, address):
''' Stop a capturer that the manager controls.
Args:
address:
An address array of the form ['host', 'port'] or similar
depending on the connection type of the stream capturer being
terminated. The capturer for the address will be terminated
along with all handlers for that capturer if the address is
that of a managed capturer.
Raises:
ValueError:
The provided address doesn't match a capturer that is
currently managed.
'''
address = str(address)
if address not in self._stream_capturers:
raise ValueError('Capturer address does not match a managed capturer')
stream_cap = self._stream_capturers[address]
self._pool.killone(stream_cap[1])
del self._stream_capturers[address] | Stop a capturer that the manager controls.
Args:
address:
An address array of the form ['host', 'port'] or similar
depending on the connection type of the stream capturer being
terminated. The capturer for the address will be terminated
along with all handlers for that capturer if the address is
that of a managed capturer.
Raises:
ValueError:
The provided address doesn't match a capturer that is
currently managed. | entailment |
def rotate_capture_handler_log(self, name):
''' Force a rotation of a handler's log file
Args:
name:
The name of the handler who's log file should be rotated.
'''
for sc_key, sc in self._stream_capturers.iteritems():
for h in sc[0].capture_handlers:
if h['name'] == name:
sc[0]._rotate_log(h) | Force a rotation of a handler's log file
Args:
name:
The name of the handler who's log file should be rotated. | entailment |
def get_logger_data(self):
''' Return data on managed loggers.
Returns a dictionary of managed logger configuration data. The format
is primarily controlled by the
:func:`SocketStreamCapturer.dump_handler_config_data` function::
{
<capture address>: <list of handler config for data capturers>
}
'''
return {
address : stream_capturer[0].dump_handler_config_data()
for address, stream_capturer in self._stream_capturers.iteritems()
} | Return data on managed loggers.
Returns a dictionary of managed logger configuration data. The format
is primarily controlled by the
:func:`SocketStreamCapturer.dump_handler_config_data` function::
{
<capture address>: <list of handler config for data capturers>
} | entailment |
def get_handler_stats(self):
''' Return handler read statistics
Returns a dictionary of managed handler data read statistics. The
format is primarily controlled by the
:func:`SocketStreamCapturer.dump_all_handler_stats` function::
{
<capture address>: <list of handler capture statistics>
}
'''
return {
address : stream_capturer[0].dump_all_handler_stats()
for address, stream_capturer in self._stream_capturers.iteritems()
} | Return handler read statistics
Returns a dictionary of managed handler data read statistics. The
format is primarily controlled by the
:func:`SocketStreamCapturer.dump_all_handler_stats` function::
{
<capture address>: <list of handler capture statistics>
} | entailment |
def get_capture_handler_config_by_name(self, name):
''' Return data for handlers of a given name.
Args:
name:
Name of the capture handler(s) to return config data for.
Returns:
Dictionary dump from the named capture handler as given by
the :func:`SocketStreamCapturer.dump_handler_config_data` method.
'''
handler_confs = []
for address, stream_capturer in self._stream_capturers.iteritems():
handler_data = stream_capturer[0].dump_handler_config_data()
for h in handler_data:
if h['handler']['name'] == name:
handler_confs.append(h)
return handler_confs | Return data for handlers of a given name.
Args:
name:
Name of the capture handler(s) to return config data for.
Returns:
Dictionary dump from the named capture handler as given by
the :func:`SocketStreamCapturer.dump_handler_config_data` method. | entailment |
def run_socket_event_loop(self):
''' Start monitoring managed loggers. '''
try:
while True:
self._pool.join()
# If we have no loggers we'll sleep briefly to ensure that we
# allow other processes (I.e., the webserver) to do their work.
if len(self._logger_data.keys()) == 0:
time.sleep(0.5)
except KeyboardInterrupt:
pass
finally:
self._pool.kill() | Start monitoring managed loggers. | entailment |
def start(self):
''' Starts the server. '''
self._app.run(host=self._host, port=self._port) | Starts the server. | entailment |
def _route(self):
''' Handles server route instantiation. '''
self._app.route('/',
method='GET',
callback=self._get_logger_list)
self._app.route('/stats',
method='GET',
callback=self._fetch_handler_stats)
self._app.route('/<name>/start',
method='POST',
callback=self._add_logger_by_name)
self._app.route('/<name>/stop',
method='DELETE',
callback=self._stop_logger_by_name)
self._app.route('/<name>/config',
method='GET',
callback=self._get_logger_conf)
self._app.route('/<name>/rotate',
method='POST',
callback=self._rotate_capturer_log) | Handles server route instantiation. | entailment |
def _add_logger_by_name(self, name):
''' Handles POST requests for adding a new logger.
Expects logger configuration to be passed in the request's query string.
The logger name is included in the URL and the address components and
connection type should be included as well. The loc attribute is
defaulted to "localhost" when making the socket connection if not
defined.
loc = IP / interface
port = port / protocol
conn_type = udp or ethernet
Raises:
ValueError:
if the port or connection type are not supplied.
'''
data = dict(request.forms)
loc = data.pop('loc', '')
port = data.pop('port', None)
conn_type = data.pop('conn_type', None)
if not port or not conn_type:
e = 'Port and/or conn_type not set'
raise ValueError(e)
address = [loc, int(port)]
if 'rotate_log' in data:
data['rotate_log'] = True if data == 'true' else False
if 'rotate_log_delta' in data:
data['rotate_log_delta'] = int(data['rotate_log_delta'])
self._logger_manager.add_logger(name, address, conn_type, **data) | Handles POST requests for adding a new logger.
Expects logger configuration to be passed in the request's query string.
The logger name is included in the URL and the address components and
connection type should be included as well. The loc attribute is
defaulted to "localhost" when making the socket connection if not
defined.
loc = IP / interface
port = port / protocol
conn_type = udp or ethernet
Raises:
ValueError:
if the port or connection type are not supplied. | entailment |
def handle_includes(defns):
'''Recursive handling of includes for any input list of defns.
The assumption here is that when an include is handled by the
pyyaml reader, it adds them as a list, which is stands apart from the rest
of the expected YAML definitions.
'''
newdefns = []
for d in defns:
if isinstance(d,list):
newdefns.extend(handle_includes(d))
else:
newdefns.append(d)
return newdefns | Recursive handling of includes for any input list of defns.
The assumption here is that when an include is handled by the
pyyaml reader, it adds them as a list, which is stands apart from the rest
of the expected YAML definitions. | entailment |
def eval(self, packet):
"""Returns the result of evaluating this DNToEUConversion in the
context of the given Packet.
"""
result = None
terms = None
if self._when is None or self._when.eval(packet):
result = self._equation.eval(packet)
return result | Returns the result of evaluating this DNToEUConversion in the
context of the given Packet. | entailment |
def validate(self, value, messages=None):
"""Returns True if the given field value is valid, False otherwise.
Validation error messages are appended to an optional messages
array.
"""
valid = True
primitive = value
def log(msg):
if messages is not None:
messages.append(msg)
if self.enum:
if value not in self.enum.values():
valid = False
flds = (self.name, str(value))
log("%s value '%s' not in allowed enumerated values." % flds)
else:
primitive = int(self.enum.keys()[self.enum.values().index(value)])
if self.type:
if self.type.validate(primitive, messages, self.name) is False:
valid = False
return valid | Returns True if the given field value is valid, False otherwise.
Validation error messages are appended to an optional messages
array. | entailment |
def decode(self, bytes, raw=False, index=None):
"""Decodes the given bytes according to this Field Definition.
If raw is True, no enumeration substitutions will be applied
to the data returned.
If index is an integer or slice (and the type of this
FieldDefinition is an ArrayType), then only the element(s) at
the specified position(s) will be decoded.
"""
if index is not None and isinstance(self.type, dtype.ArrayType):
value = self.type.decode( bytes[self.slice()], index, raw )
else:
value = self.type.decode( bytes[self.slice()], raw )
# Apply bit mask if needed
if self.mask is not None:
value &= self.mask
if self.shift > 0:
value >>= self.shift
if not raw and self.enum is not None:
value = self.enum.get(value, value)
return value | Decodes the given bytes according to this Field Definition.
If raw is True, no enumeration substitutions will be applied
to the data returned.
If index is an integer or slice (and the type of this
FieldDefinition is an ArrayType), then only the element(s) at
the specified position(s) will be decoded. | entailment |
def encode(self, value):
"""Encodes the given value according to this FieldDefinition."""
if type(value) == str and self.enum and value in self.enum:
value = self.enum[value]
if type(value) == int:
if self.shift > 0:
value <<= self.shift
if self.mask is not None:
value &= self.mask
return self.type.encode(value) if self.type else bytearray() | Encodes the given value according to this FieldDefinition. | entailment |
def slice(self, offset=0):
"""Returns a Python slice object (e.g. for array indexing) indicating
the start and stop byte position of this Telemetry field. The
start and stop positions may be translated by the optional
byte offset.
"""
if self.bytes is None:
start = 0
stop = start + self.nbytes
elif type(self.bytes) is int:
start = self.bytes
stop = start + self.nbytes
else:
start = self.bytes[0]
stop = self.bytes[1] + 1
return slice(start + offset, stop + offset) | Returns a Python slice object (e.g. for array indexing) indicating
the start and stop byte position of this Telemetry field. The
start and stop positions may be translated by the optional
byte offset. | entailment |
def _assertField(self, fieldname):
"""Raise AttributeError when Packet has no field with the given
name."""
if not self._hasattr(fieldname):
values = self._defn.name, fieldname
raise AttributeError("Packet '%s' has no field '%s'" % values) | Raise AttributeError when Packet has no field with the given
name. | entailment |
def _getattr (self, fieldname, raw=False, index=None):
"""Returns the value of the given packet field name.
If raw is True, the field value is only decoded. That is no
enumeration substituions or DN to EU conversions are applied.
"""
self._assertField(fieldname)
value = None
if fieldname == 'raw':
value = createRawPacket(self)
elif fieldname == 'history':
value = self._defn.history
else:
if fieldname in self._defn.derivationmap:
defn = self._defn.derivationmap[fieldname]
else:
defn = self._defn.fieldmap[fieldname]
if isinstance(defn.type, dtype.ArrayType) and index is None:
return createFieldList(self, defn, raw)
if defn.when is None or defn.when.eval(self):
if isinstance(defn, DerivationDefinition):
value = defn.equation.eval(self)
elif raw or (defn.dntoeu is None and defn.expr is None):
value = defn.decode(self._data, raw, index)
elif defn.dntoeu is not None:
value = defn.dntoeu.eval(self)
elif defn.expr is not None:
value = defn.expr.eval(self)
return value | Returns the value of the given packet field name.
If raw is True, the field value is only decoded. That is no
enumeration substituions or DN to EU conversions are applied. | entailment |
def _hasattr(self, fieldname):
"""Returns True if this packet contains fieldname, False otherwise."""
special = 'history', 'raw'
return (fieldname in special or
fieldname in self._defn.fieldmap or
fieldname in self._defn.derivationmap) | Returns True if this packet contains fieldname, False otherwise. | entailment |
def _update_bytes(self, defns, start=0):
"""Updates the 'bytes' field in all FieldDefinition.
Any FieldDefinition.bytes which is undefined (None) or '@prev'
will have its bytes field computed based on its data type size
and where the previous FieldDefinition ended (or the start
parameter in the case of very first FieldDefinition). If
bytes is set to '@prev', this has the effect of *starting* the
FieldDefinition at the same place as the *previous*
FieldDefinition. This reads well in YAML, e.g.:
bytes: '@prev'
Returns the end of the very last FieldDefinition in Python
slice notation, i.e. [start, stop). This would correspond to
the *start* of the next FieldDefinition, if it existed.
"""
pos = slice(start, start)
for fd in defns:
if fd.bytes == '@prev' or fd.bytes is None:
if fd.bytes == '@prev':
fd.bytes = None
pos = fd.slice(pos.start)
elif fd.bytes is None:
pos = fd.slice(pos.stop)
if pos.start == pos.stop - 1:
fd.bytes = pos.start
else:
fd.bytes = [ pos.start, pos.stop - 1 ]
pos = fd.slice()
return pos.stop | Updates the 'bytes' field in all FieldDefinition.
Any FieldDefinition.bytes which is undefined (None) or '@prev'
will have its bytes field computed based on its data type size
and where the previous FieldDefinition ended (or the start
parameter in the case of very first FieldDefinition). If
bytes is set to '@prev', this has the effect of *starting* the
FieldDefinition at the same place as the *previous*
FieldDefinition. This reads well in YAML, e.g.:
bytes: '@prev'
Returns the end of the very last FieldDefinition in Python
slice notation, i.e. [start, stop). This would correspond to
the *start* of the next FieldDefinition, if it existed. | entailment |
def nbytes(self):
"""The number of bytes for this telemetry packet"""
max_byte = -1
for defn in self.fields:
byte = defn.bytes if type(defn.bytes) is int else max(defn.bytes)
max_byte = max(max_byte, byte)
return max_byte + 1 | The number of bytes for this telemetry packet | entailment |
def validate(self, pkt, messages=None):
"""Returns True if the given Packet is valid, False otherwise.
Validation error messages are appended to an optional messages
array.
"""
valid = True
for f in self.fields:
try:
value = getattr(pkt, f.name)
except AttributeError:
valid = False
if messages is not None:
msg = "Telemetry field mismatch for packet '%s'. "
msg += "Unable to retrieve value for %s in Packet."
values = self.name, f.name
messages.append(msg % values)
break
if f.validate(value, messages) is False:
valid = False
return valid | Returns True if the given Packet is valid, False otherwise.
Validation error messages are appended to an optional messages
array. | entailment |
def eval(self, packet):
"""Returns the result of evaluating this PacketExpression in the
context of the given Packet.
"""
try:
context = createPacketContext(packet)
result = eval(self._code, packet._defn.globals, context)
except ZeroDivisionError:
result = None
return result | Returns the result of evaluating this PacketExpression in the
context of the given Packet. | entailment |
def _assertField(self, name):
"""Raise AttributeError when PacketHistory has no field with the given
name.
"""
if name not in self._names:
msg = 'PacketHistory "%s" has no field "%s"'
values = self._defn.name, name
raise AttributeError(msg % values) | Raise AttributeError when PacketHistory has no field with the given
name. | entailment |
def add(self, packet):
"""Add the given Packet to this PacketHistory."""
for name in self._names:
value = getattr(packet, name)
if value is not None:
self._dict[name] = value | Add the given Packet to this PacketHistory. | entailment |
def add(self, defn):
"""Adds the given Packet Definition to this Telemetry Dictionary."""
if defn.name not in self:
self[defn.name] = defn
else:
msg = "Duplicate packet name '%s'" % defn.name
log.error(msg)
raise util.YAMLError(msg) | Adds the given Packet Definition to this Telemetry Dictionary. | entailment |
def create(self, name, data=None):
"""Creates a new packet with the given definition and raw data.
"""
return createPacket(self[name], data) if name in self else None | Creates a new packet with the given definition and raw data. | entailment |
def load(self, content):
"""Loads Packet Definitions from the given YAML content into this
Telemetry Dictionary. Content may be either a filename
containing YAML content or a YAML string.
Load has no effect if this Command Dictionary was already
instantiated with a filename or YAML content.
"""
if self.filename is None:
if os.path.isfile(content):
self.filename = content
stream = open(self.filename, 'rb')
else:
stream = content
pkts = yaml.load(stream)
pkts = handle_includes(pkts)
for pkt in pkts:
self.add(pkt)
if type(stream) is file:
stream.close() | Loads Packet Definitions from the given YAML content into this
Telemetry Dictionary. Content may be either a filename
containing YAML content or a YAML string.
Load has no effect if this Command Dictionary was already
instantiated with a filename or YAML content. | entailment |
def writeToCSV(self, output_path=None):
'''writeToCSV - write the telemetry dictionary to csv
'''
header = ['Name', 'First Byte', 'Last Byte', 'Bit Mask', 'Endian',
'Type', 'Description', 'Values']
if output_path is None:
output_path = ait.config._directory
for pkt_name in self.tlmdict:
filename = os.path.join(output_path, pkt_name + '.csv')
with open(filename, 'wb') as output:
csvwriter = csv.writer(output, quoting=csv.QUOTE_ALL)
csvwriter.writerow(header)
for fld in self.tlmdict[pkt_name].fields:
# Pre-process some fields
# Description
desc = fld.desc.replace('\n', ' ') if fld.desc is not None else ""
# Mask
mask = hex(fld.mask) if fld.mask is not None else ""
# Enumerations
enums = '\n'.join("%s: %s" % (k, fld.enum[k])
for k in fld.enum) if fld.enum is not None else ""
# Set row
row = [fld.name, fld.slice().start, fld.slice().stop,
mask, fld.type.endian, fld.type.name, desc, enums]
csvwriter.writerow(row) | writeToCSV - write the telemetry dictionary to csv | entailment |
def decode(self, bytes):
"""Decodes the given bytes according to this AIT Argument
Definition.
"""
value = self.type.decode(bytes)
if self._enum is not None:
for name, val in self._enum.items():
if value == val:
value = name
break
return value | Decodes the given bytes according to this AIT Argument
Definition. | entailment |
def encode(self, value):
"""Encodes the given value according to this AIT Argument
Definition.
"""
if type(value) == str and self.enum and value in self.enum:
value = self.enum[value]
return self.type.encode(value) if self.type else bytearray() | Encodes the given value according to this AIT Argument
Definition. | entailment |
def validate(self, value, messages=None):
"""Returns True if the given Argument value is valid, False otherwise.
Validation error messages are appended to an optional messages
array.
"""
valid = True
primitive = value
def log(msg):
if messages is not None:
messages.append(msg)
if self.enum:
if value not in self.enum.keys():
valid = False
args = (self.name, str(value))
log("%s value '%s' not in allowed enumerated values." % args)
else:
primitive = int(self.enum[value])
if self.type:
if self.type.validate(primitive, messages, self.name) is False:
valid = False
if self.range:
if primitive < self.range[0] or primitive > self.range[1]:
valid = False
args = (self.name, str(primitive), self.range[0], self.range[1])
log("%s value '%s' out of range [%d, %d]." % args)
return valid | Returns True if the given Argument value is valid, False otherwise.
Validation error messages are appended to an optional messages
array. | entailment |
def encode(self, pad=106):
"""Encodes this AIT command to binary.
If pad is specified, it indicates the maximum size of the encoded
command in bytes. If the encoded command is less than pad, the
remaining bytes are set to zero.
Commands sent to ISS payloads over 1553 are limited to 64 words
(128 bytes) with 11 words (22 bytes) of CCSDS overhead (SSP
52050J, Section 3.2.3.4). This leaves 53 words (106 bytes) for
the command itself.
"""
opcode = struct.pack('>H', self.defn.opcode)
offset = len(opcode)
size = max(offset + self.defn.argsize, pad)
encoded = bytearray(size)
encoded[0:offset] = opcode
encoded[offset] = self.defn.argsize
offset += 1
index = 0
for defn in self.defn.argdefns:
if defn.fixed:
value = defn.value
else:
value = self.args[index]
index += 1
encoded[defn.slice(offset)] = defn.encode(value)
return encoded | Encodes this AIT command to binary.
If pad is specified, it indicates the maximum size of the encoded
command in bytes. If the encoded command is less than pad, the
remaining bytes are set to zero.
Commands sent to ISS payloads over 1553 are limited to 64 words
(128 bytes) with 11 words (22 bytes) of CCSDS overhead (SSP
52050J, Section 3.2.3.4). This leaves 53 words (106 bytes) for
the command itself. | entailment |
def nbytes(self):
"""The number of bytes required to encode this command.
Encoded commands are comprised of a two byte opcode, followed by a
one byte size, and then the command argument bytes. The size
indicates the number of bytes required to represent command
arguments.
"""
return len(self.opcode) + 1 + sum(arg.nbytes for arg in self.argdefns) | The number of bytes required to encode this command.
Encoded commands are comprised of a two byte opcode, followed by a
one byte size, and then the command argument bytes. The size
indicates the number of bytes required to represent command
arguments. | entailment |
def argsize(self):
"""The total size in bytes of all the command arguments."""
argsize = sum(arg.nbytes for arg in self.argdefns)
return argsize if len(self.argdefns) > 0 else 0 | The total size in bytes of all the command arguments. | entailment |
def validate(self, cmd, messages=None):
"""Returns True if the given Command is valid, False otherwise.
Validation error messages are appended to an optional messages
array.
"""
valid = True
args = [ arg for arg in cmd.args if arg is not None ]
if self.nargs != len(args):
valid = False
if messages is not None:
msg = 'Expected %d arguments, but received %d.'
messages.append(msg % (self.nargs, len(args)))
for defn, value in zip(self.args, cmd.args):
if value is None:
valid = False
if messages is not None:
messages.append('Argument "%s" is missing.' % defn.name)
elif defn.validate(value, messages) is False:
valid = False
if len(cmd._unrecognized) > 0:
valid = False
if messages is not None:
for name in cmd.unrecognized:
messages.append('Argument "%s" is unrecognized.' % name)
return valid | Returns True if the given Command is valid, False otherwise.
Validation error messages are appended to an optional messages
array. | entailment |
def add(self, defn):
"""Adds the given Command Definition to this Command Dictionary."""
self[defn.name] = defn
self.opcodes[defn._opcode] = defn | Adds the given Command Definition to this Command Dictionary. | entailment |
def create(self, name, *args, **kwargs):
"""Creates a new AIT command with the given arguments."""
tokens = name.split()
if len(tokens) > 1 and (len(args) > 0 or len(kwargs) > 0):
msg = 'A Cmd may be created with either positional arguments '
msg += '(passed as a string or a Python list) or keyword '
msg += 'arguments, but not both.'
raise TypeError(msg)
if len(tokens) > 1:
name = tokens[0]
args = [ util.toNumber(t, t) for t in tokens[1:] ]
defn = self.get(name, None)
if defn is None:
raise TypeError('Unrecognized command: %s' % name)
return createCmd(defn, *args, **kwargs) | Creates a new AIT command with the given arguments. | entailment |
def decode(self, bytes):
"""Decodes the given bytes according to this AIT Command
Definition.
"""
opcode = struct.unpack(">H", bytes[0:2])[0]
nbytes = struct.unpack("B", bytes[2:3])[0]
name = None
args = []
if opcode in self.opcodes:
defn = self.opcodes[opcode]
name = defn.name
stop = 3
for arg in defn.argdefns:
start = stop
stop = start + arg.nbytes
if arg.fixed:
pass # FIXME: Confirm fixed bytes are as expected?
else:
args.append(arg.decode(bytes[start:stop]))
return self.create(name, *args) | Decodes the given bytes according to this AIT Command
Definition. | entailment |
def load(self, content):
"""Loads Command Definitions from the given YAML content into
into this Command Dictionary. Content may be either a
filename containing YAML content or a YAML string.
Load has no effect if this Command Dictionary was already
instantiated with a filename or YAML content.
"""
if self.filename is None:
if os.path.isfile(content):
self.filename = content
stream = open(self.filename, 'rb')
else:
stream = content
for cmd in yaml.load(stream):
self.add(cmd)
if type(stream) is file:
stream.close() | Loads Command Definitions from the given YAML content into
into this Command Dictionary. Content may be either a
filename containing YAML content or a YAML string.
Load has no effect if this Command Dictionary was already
instantiated with a filename or YAML content. | entailment |
def cbrt (x):
"""Returns the cube root of x."""
if x >= 0:
return math.pow(x , 1.0 / 3.0)
else:
return - math.pow(abs(x), 1.0 / 3.0) | Returns the cube root of x. | entailment |
def eci2ecef (x, y, z, gmst=None):
"""Converts the given ECI coordinates to ECEF at the given Greenwich
Mean Sidereal Time (GMST) (defaults to now).
This code was adapted from
`shashwatak/satellite-js <https://github.com/shashwatak/satellite-js/blob/master/src/coordinate-transforms.js>`_
and http://ccar.colorado.edu/ASEN5070/handouts/coordsys.doc
"""
if gmst is None:
gmst = dmc.toGMST()
X = (x * math.cos(gmst)) + (y * math.sin(gmst))
Y = (x * (-math.sin(gmst))) + (y * math.cos(gmst))
Z = z
return X, Y, Z | Converts the given ECI coordinates to ECEF at the given Greenwich
Mean Sidereal Time (GMST) (defaults to now).
This code was adapted from
`shashwatak/satellite-js <https://github.com/shashwatak/satellite-js/blob/master/src/coordinate-transforms.js>`_
and http://ccar.colorado.edu/ASEN5070/handouts/coordsys.doc | entailment |
def eci2geodetic (x, y, z, gmst=None, ellipsoid=None):
"""Converts the given ECI coordinates to Geodetic coordinates at the
given Greenwich Mean Sidereal Time (GMST) (defaults to now) and with
the given ellipsoid (defaults to WGS84).
This code was adapted from
`shashwatak/satellite-js <https://github.com/shashwatak/satellite-js/blob/master/src/coordinate-transforms.js>`_
and http://www.celestrak.com/columns/v02n03/
"""
if gmst is None:
gmst = dmc.toGMST()
if ellipsoid is None:
ellipsoid = WGS84
a = WGS84.a
b = WGS84.b
f = WGS84.f
r = math.sqrt((x * x) + (y * y))
e2 = (2 * f) - (f * f)
lon = math.atan2(y, x) - gmst
k = 0
kmax = 20
lat = math.atan2(z, r)
while (k < kmax):
slat = math.sin(lat)
C = 1 / math.sqrt( 1 - e2 * (slat * slat) )
lat = math.atan2(z + (a * C * e2 * slat), r)
k += 1
z = (r / math.cos(lat)) - (a * C)
return lat, lon, z | Converts the given ECI coordinates to Geodetic coordinates at the
given Greenwich Mean Sidereal Time (GMST) (defaults to now) and with
the given ellipsoid (defaults to WGS84).
This code was adapted from
`shashwatak/satellite-js <https://github.com/shashwatak/satellite-js/blob/master/src/coordinate-transforms.js>`_
and http://www.celestrak.com/columns/v02n03/ | entailment |
def process(self, input_data, topic=None):
"""
Invokes each handler in sequence.
Publishes final output data.
Params:
input_data: message received by stream
topic: name of plugin or stream message received from,
if applicable
"""
for handler in self.handlers:
output = handler.handle(input_data)
input_data = output
self.publish(input_data) | Invokes each handler in sequence.
Publishes final output data.
Params:
input_data: message received by stream
topic: name of plugin or stream message received from,
if applicable | entailment |
def valid_workflow(self):
"""
Return true if each handler's output type is the same as
the next handler's input type. Return False if not.
Returns: boolean - True if workflow is valid, False if not
"""
for ix, handler in enumerate(self.handlers[:-1]):
next_input_type = self.handlers[ix + 1].input_type
if (handler.output_type is not None and
next_input_type is not None):
if handler.output_type != next_input_type:
return False
return True | Return true if each handler's output type is the same as
the next handler's input type. Return False if not.
Returns: boolean - True if workflow is valid, False if not | entailment |
def compress (input_filename, output_filename=None, verbose=False):
"""compress(input_filename, output_filename=None, verbose=False) -> integer
Uses zlib to compress input_filename and store the result in
output_filename. The size of output_filename is returned on
success; zero is returned on failure.
The input file is compressed in one fell swoop. The output_filename
defaults to input_filename + ".ait-zlib".
If verbose is True, compress() will use ait.core.log.info() to
report compression statistics.
"""
input_size = 0
output_size = 0
if output_filename is None:
output_filename = input_fillename + '.ait-zlib'
try:
stream = open(input_filename , 'rb')
output = open(output_filename, 'wb')
bytes = stream.read()
input_size = len(bytes)
if verbose:
log.info("Compressing %s (%d bytes).", input_filename, input_size)
compressed = zlib.compress(bytes, 3)
output_size = len(compressed)
output.write(compressed)
stream.close()
output.close()
percent = (1.0 - (output_size / float(input_size) )) * 100
if verbose:
log.info("Wrote %s (%d bytes).", output_filename, output_size)
log.info("Compressed %6.2f percent", percent)
except (IOError, OSError), e:
log.error(str(e) + ".")
return output_size | compress(input_filename, output_filename=None, verbose=False) -> integer
Uses zlib to compress input_filename and store the result in
output_filename. The size of output_filename is returned on
success; zero is returned on failure.
The input file is compressed in one fell swoop. The output_filename
defaults to input_filename + ".ait-zlib".
If verbose is True, compress() will use ait.core.log.info() to
report compression statistics. | entailment |
def hexdump (bytes, addr=None, preamble=None, printfunc=None, stepsize=16):
"""hexdump(bytes[, addr[, preamble[, printfunc[, stepsize=16]]]])
Outputs bytes in hexdump format lines similar to the following (here
preamble='Bank1', stepsize=8, and len(bytes) == 15)::
Bank1: 0xFD020000: 7f45 4c46 0102 0100 *.ELF....*
Bank1: 0xFD020008: 0000 0000 0000 00 *....... *
Where stepsize controls the number of bytes per line. If addr is
omitted, the address portion of the hexdump will not be output.
Lines will be passed to printfunc for output, or Python's builtin
print, if printfunc is omitted.
If a byte is not in the range [32, 127), a period will rendered for
the character portion of the output.
"""
if preamble is None:
preamble = ""
bytes = bytearray(bytes)
size = len(bytes)
for n in xrange(0, size, stepsize):
if addr is not None:
dump = preamble + "0x%04X: " % (addr + n)
else:
dump = preamble
end = min(size, n + stepsize)
dump += hexdumpLine(bytes[n:end], stepsize)
if printfunc is None:
print dump
else:
printfunc(dump) | hexdump(bytes[, addr[, preamble[, printfunc[, stepsize=16]]]])
Outputs bytes in hexdump format lines similar to the following (here
preamble='Bank1', stepsize=8, and len(bytes) == 15)::
Bank1: 0xFD020000: 7f45 4c46 0102 0100 *.ELF....*
Bank1: 0xFD020008: 0000 0000 0000 00 *....... *
Where stepsize controls the number of bytes per line. If addr is
omitted, the address portion of the hexdump will not be output.
Lines will be passed to printfunc for output, or Python's builtin
print, if printfunc is omitted.
If a byte is not in the range [32, 127), a period will rendered for
the character portion of the output. | entailment |
def hexdumpLine (bytes, length=None):
"""hexdumpLine(bytes[, length])
Returns a single hexdump formatted line for bytes. If length is
greater than len(bytes), the line will be padded with ASCII space
characters to indicate no byte data is present.
Used by hexdump().
"""
line = ""
if length is None:
length = len(bytes)
for n in xrange(0, length, 2):
if n < len(bytes) - 1:
line += "%02x%02x " % (bytes[n], bytes[n + 1])
elif n < len(bytes):
line += "%02x " % bytes[n]
else:
line += " "
line += "*"
for n in xrange(length):
if n < len(bytes):
if bytes[n] in xrange(32, 127):
line += "%c" % bytes[n]
else:
line += "."
else:
line += " "
line += "*"
return line | hexdumpLine(bytes[, length])
Returns a single hexdump formatted line for bytes. If length is
greater than len(bytes), the line will be padded with ASCII space
characters to indicate no byte data is present.
Used by hexdump(). | entailment |
def parseArgs (argv, defaults):
"""parseArgs(argv, defaults) -> (dict, list)
Parses command-line arguments according to the given defaults. For
every key in defaults, an argument of the form --key=value will be
parsed. Numeric arguments are converted from strings with errors
reported via ait.core.log.error() and default values used instead.
Returns a copy of defaults with parsed option values and a list of
any non-flag arguments.
"""
options = dict(defaults)
numeric = \
[ k for k, v in options.items() if type(v) is float or type(v) is int ]
try:
longopts = [ "%s=" % key for key in options.keys() ]
opts, args = getopt.getopt(argv, "", longopts)
for key, value in opts:
if key.startswith("--"):
key = key[2:]
options[key] = value
except getopt.GetoptError, err:
log.error( str(err) )
usage( exit=True )
for key in numeric:
value = options[key]
if type(value) is str:
options[key] = util.toNumber(value)
if options[key] is None:
msg = "Option '%s': '%s' is not a number, using default '%s' instead."
log.error(msg, key, value, defaults[key])
options[key] = defaults[key]
return options, args | parseArgs(argv, defaults) -> (dict, list)
Parses command-line arguments according to the given defaults. For
every key in defaults, an argument of the form --key=value will be
parsed. Numeric arguments are converted from strings with errors
reported via ait.core.log.error() and default values used instead.
Returns a copy of defaults with parsed option values and a list of
any non-flag arguments. | entailment |
def usage (exit=False):
"""usage([exit])
Prints the usage statement at the top of a Python program. A usage
statement is any comment at the start of a line that begins with a
double hash marks (##). The double hash marks are removed before
the usage statement is printed. If exit is True, the program is
terminated with a return code of 2 (GNU standard status code for
incorrect usage).
"""
stream = open(sys.argv[0])
for line in stream.readlines():
if line.startswith("##"): print line.replace("##", ""),
stream.close()
if exit:
sys.exit(2) | usage([exit])
Prints the usage statement at the top of a Python program. A usage
statement is any comment at the start of a line that begins with a
double hash marks (##). The double hash marks are removed before
the usage statement is printed. If exit is True, the program is
terminated with a return code of 2 (GNU standard status code for
incorrect usage). | entailment |
def getip():
"""
getip()
Returns the IP address of the computer. Helpful for those hosts that might
sit behind gateways and report a hostname that is a little strange (I'm
looking at you oco3-sim1).
"""
return [(s.connect(('8.8.8.8', 80)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1] | getip()
Returns the IP address of the computer. Helpful for those hosts that might
sit behind gateways and report a hostname that is a little strange (I'm
looking at you oco3-sim1). | entailment |
def arg_parse(arguments, description=None):
"""
arg_parse()
Parses the arguments using argparse. Returns a Namespace object. The
arguments dictionary should match the argparse expected data structure:
.. code-block::python
arguments = {
'--port': {
'type' : int,
'default' : 3075,
'help' : 'Port on which to send data'
},
'--verbose': {
'action' : 'store_true',
'default' : False,
'help' : 'Hexdump of the raw command being sent.'
}
}
For positional arguments, be sure to pass in an OrderedDict:
.. code-block::python
arguments = {
'--port': {
'type' : int,
'default' : 3075,
'help' : 'Port on which to send data'
},
'--verbose': {
'action' : 'store_true',
'default' : False,
'help' : 'Hexdump of the raw command being sent.'
}
}
arguments['command'] = {
'type' : str,
'help' : 'Name of the command to send.'
}
arguments['arguments'] = {
'type' : util.toNumberOrStr,
'metavar' : 'argument',
'nargs' : '*',
'help' : 'Command arguments.'
}
"""
if not description:
description = ""
ap = argparse.ArgumentParser(
description = description,
formatter_class = argparse.ArgumentDefaultsHelpFormatter
)
for name, params in arguments.items():
ap.add_argument(name, **params)
args = ap.parse_args()
return args | arg_parse()
Parses the arguments using argparse. Returns a Namespace object. The
arguments dictionary should match the argparse expected data structure:
.. code-block::python
arguments = {
'--port': {
'type' : int,
'default' : 3075,
'help' : 'Port on which to send data'
},
'--verbose': {
'action' : 'store_true',
'default' : False,
'help' : 'Hexdump of the raw command being sent.'
}
}
For positional arguments, be sure to pass in an OrderedDict:
.. code-block::python
arguments = {
'--port': {
'type' : int,
'default' : 3075,
'help' : 'Port on which to send data'
},
'--verbose': {
'action' : 'store_true',
'default' : False,
'help' : 'Hexdump of the raw command being sent.'
}
}
arguments['command'] = {
'type' : str,
'help' : 'Name of the command to send.'
}
arguments['arguments'] = {
'type' : util.toNumberOrStr,
'metavar' : 'argument',
'nargs' : '*',
'help' : 'Command arguments.'
} | entailment |
def extant_file(file):
"""
'Type' for argparse - checks that file exists but does not open.
"""
if not os.path.exists(file):
# Argparse uses the ArgumentTypeError to give a rejection message like:
# error: argument input: file does not exist
raise argparse.ArgumentTypeError("{0} does not exist".format(file))
return file | 'Type' for argparse - checks that file exists but does not open. | entailment |
def dot (self, other):
"""dot (self, other) -> number
Returns the dot product of this Point with another.
"""
if self.z:
return (self.x * other.x) + (self.y * other.y) + (self.z * other.z)
else:
return (self.x * other.x) + (self.y * other.y) | dot (self, other) -> number
Returns the dot product of this Point with another. | entailment |
def slope (self):
"""slope () -> float
"""
return (self.p.y - self.q.y) / (self.p.x - self.q.x) | slope () -> float | entailment |
def intersect (self, line):
"""intersect (line) -> Point | None
Returns the intersection point of this line segment with another.
If this line segment and the other line segment are conincident,
the first point on this line segment is returned. If the line
segments do not intersect, None is returned.
See http://local.wasp.uwa.edu.au/~pbourke/geometry/lineline2d/
Examples:
>>> A = Line( Point(0.0, 0.0), Point(5.0, 5.0) )
>>> B = Line( Point(5.0, 0.0), Point(0.0, 5.0) )
>>> C = Line( Point(1.0, 3.0), Point(9.0, 3.0) )
>>> D = Line( Point(0.5, 3.0), Point(6.0, 4.0) )
>>> E = Line( Point(1.0, 1.0), Point(3.0, 8.0) )
>>> F = Line( Point(0.5, 2.0), Point(4.0, 7.0) )
>>> G = Line( Point(1.0, 2.0), Point(3.0, 6.0) )
>>> H = Line( Point(2.0, 4.0), Point(4.0, 8.0) )
>>> I = Line( Point(3.5, 9.0), Point(3.5, 0.5) )
>>> J = Line( Point(3.0, 1.0), Point(9.0, 1.0) )
>>> K = Line( Point(2.0, 3.0), Point(7.0, 9.0) )
>>> L = Line( Point(1.0, 2.0), Point(5.0, 7.0) )
>>> A.intersect(B)
Point(2.5, 2.5)
>>> C.intersect(D) == None
True
>>> E.intersect(F)
Point(1.8275862069, 3.89655172414)
>>> G.intersect(H)
Point(1.0, 2.0)
>>> I.intersect(J)
Point(3.5, 1.0)
>>> K.intersect(L) == None
True
"""
(x1, y1) = (self.p.x, self.p.y)
(x2, y2) = (self.q.x, self.q.y)
(x3, y3) = (line.p.x, line.p.y)
(x4, y4) = (line.q.x, line.q.y)
denom = ((y4 - y3) * (x2 - x1)) - ((x4 - x3) * (y2 - y1))
num1 = ((x4 - x3) * (y1 - y3)) - ((y4 - y3) * (x1 - x3))
num2 = ((x2 - x1) * (y1 - y3)) - ((y2 - y1) * (x1 - x3))
intersect = None
if num1 == 0 and num2 == 0 and denom == 0: # Coincident lines
intersect = self.p
elif denom != 0: # Parallel lines (denom == 0)
ua = float(num1) / denom
ub = float(num2) / denom
if ua >= 0.0 and ua <= 1.0 and ub >= 0.0 and ub <= 1.0:
x = x1 + (ua * (x2 - x1))
y = y1 + (ua * (y2 - y1))
intersect = Point(x, y)
return intersect | intersect (line) -> Point | None
Returns the intersection point of this line segment with another.
If this line segment and the other line segment are conincident,
the first point on this line segment is returned. If the line
segments do not intersect, None is returned.
See http://local.wasp.uwa.edu.au/~pbourke/geometry/lineline2d/
Examples:
>>> A = Line( Point(0.0, 0.0), Point(5.0, 5.0) )
>>> B = Line( Point(5.0, 0.0), Point(0.0, 5.0) )
>>> C = Line( Point(1.0, 3.0), Point(9.0, 3.0) )
>>> D = Line( Point(0.5, 3.0), Point(6.0, 4.0) )
>>> E = Line( Point(1.0, 1.0), Point(3.0, 8.0) )
>>> F = Line( Point(0.5, 2.0), Point(4.0, 7.0) )
>>> G = Line( Point(1.0, 2.0), Point(3.0, 6.0) )
>>> H = Line( Point(2.0, 4.0), Point(4.0, 8.0) )
>>> I = Line( Point(3.5, 9.0), Point(3.5, 0.5) )
>>> J = Line( Point(3.0, 1.0), Point(9.0, 1.0) )
>>> K = Line( Point(2.0, 3.0), Point(7.0, 9.0) )
>>> L = Line( Point(1.0, 2.0), Point(5.0, 7.0) )
>>> A.intersect(B)
Point(2.5, 2.5)
>>> C.intersect(D) == None
True
>>> E.intersect(F)
Point(1.8275862069, 3.89655172414)
>>> G.intersect(H)
Point(1.0, 2.0)
>>> I.intersect(J)
Point(3.5, 1.0)
>>> K.intersect(L) == None
True | entailment |
def intersect (self, line):
"""intersect(line) -> Point | None
Returns the point at which the line segment and Plane intersect
or None if they do not intersect.
"""
eps = 1e-8
d = (line.q - line.p)
dn = d.dot(self.n)
point = None
if abs(dn) >= eps:
mu = self.n.dot(self.p - line.p) / dn
if mu >= 0 and mu <= 1:
point = line.p + mu * d
return point | intersect(line) -> Point | None
Returns the point at which the line segment and Plane intersect
or None if they do not intersect. | entailment |
def area (self):
"""area() -> number
Returns the area of this Polygon.
"""
area = 0.0
for segment in self.segments():
area += ((segment.p.x * segment.q.y) - (segment.q.x * segment.p.y))/2
return area | area() -> number
Returns the area of this Polygon. | entailment |
def bounds (self):
"""bounds() -> Rect
Returns the bounding Rectangle for this Polygon.
"""
if self._dirty:
min = self.vertices[0].copy()
max = self.vertices[0].copy()
for point in self.vertices[1:]:
if point.x < min.x: min.x = point.x
if point.y < min.y: min.y = point.y
if point.x > max.x: max.x = point.x
if point.y > max.y: max.y = point.y
self._bounds = Rect(min, max)
self._dirty = False
return self._bounds | bounds() -> Rect
Returns the bounding Rectangle for this Polygon. | entailment |
def center (self):
"""center() -> (x, y)
Returns the center (of mass) point of this Polygon.
See http://en.wikipedia.org/wiki/Polygon
Examples:
>>> p = Polygon()
>>> p.vertices = [ Point(3, 8), Point(6, 4), Point(0, 3) ]
>>> p.center()
Point(2.89285714286, 4.82142857143)
"""
Cx = 0.0
Cy = 0.0
denom = 6.0 * self.area()
for segment in self.segments():
x = (segment.p.x + segment.q.x)
y = (segment.p.y + segment.q.y)
xy = (segment.p.x * segment.q.y) - (segment.q.x * segment.p.y)
Cx += (x * xy)
Cy += (y * xy)
Cx /= denom
Cy /= denom
return Point(Cx, Cy) | center() -> (x, y)
Returns the center (of mass) point of this Polygon.
See http://en.wikipedia.org/wiki/Polygon
Examples:
>>> p = Polygon()
>>> p.vertices = [ Point(3, 8), Point(6, 4), Point(0, 3) ]
>>> p.center()
Point(2.89285714286, 4.82142857143) | entailment |
def contains (self, p):
"""Returns True if point is contained inside this Polygon, False
otherwise.
This method uses the Ray Casting algorithm.
Examples:
>>> p = Polygon()
>>> p.vertices = [Point(1, 1), Point(1, -1), Point(-1, -1), Point(-1, 1)]
>>> p.contains( Point(0, 0) )
True
>>> p.contains( Point(2, 3) )
False
"""
inside = False
if p in self.bounds():
for s in self.segments():
if ((s.p.y > p.y) != (s.q.y > p.y) and
(p.x < (s.q.x - s.p.x) * (p.y - s.p.y) / (s.q.y - s.p.y) + s.p.x)):
inside = not inside
return inside | Returns True if point is contained inside this Polygon, False
otherwise.
This method uses the Ray Casting algorithm.
Examples:
>>> p = Polygon()
>>> p.vertices = [Point(1, 1), Point(1, -1), Point(-1, -1), Point(-1, 1)]
>>> p.contains( Point(0, 0) )
True
>>> p.contains( Point(2, 3) )
False | entailment |
def segments (self):
"""Return the Line segments that comprise this Polygon."""
for n in xrange(len(self.vertices) - 1):
yield Line(self.vertices[n], self.vertices[n + 1])
yield Line(self.vertices[-1], self.vertices[0]) | Return the Line segments that comprise this Polygon. | entailment |
def contains (self, point):
"""contains(point) -> True | False
Returns True if point is contained inside this Rectangle, False otherwise.
Examples:
>>> r = Rect( Point(-1, -1), Point(1, 1) )
>>> r.contains( Point(0, 0) )
True
>>> r.contains( Point(2, 3) )
False
"""
return (point.x >= self.ul.x and point.x <= self.lr.x) and \
(point.y >= self.ul.y and point.y <= self.lr.y) | contains(point) -> True | False
Returns True if point is contained inside this Rectangle, False otherwise.
Examples:
>>> r = Rect( Point(-1, -1), Point(1, 1) )
>>> r.contains( Point(0, 0) )
True
>>> r.contains( Point(2, 3) )
False | entailment |
def segments (self):
"""segments () -> [ Line, Line, Line, Line ]
Return a list of Line segments that comprise this Rectangle.
"""
ul = self.ul
lr = self.lr
ur = Point(lr.x, ul.y)
ll = Point(ul.x, lr.y)
return [ Line(ul, ur), Line(ur, lr), Line(lr, ll), Line(ll, ul) ] | segments () -> [ Line, Line, Line, Line ]
Return a list of Line segments that comprise this Rectangle. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.