repo stringlengths 2 99 | file stringlengths 13 225 | code stringlengths 0 18.3M | file_length int64 0 18.3M | avg_line_length float64 0 1.36M | max_line_length int64 0 4.26M | extension_type stringclasses 1 value |
|---|---|---|---|---|---|---|
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/DigitalElevationData.py | """
Types implementing the SIDD 3.0 DigitalElevationData Parameters
"""
__classification__ = 'UNCLASSIFIED'
# SIDD 3.0 reuses the SIDD 2.0 types. Make those symbols available in this module.
from sarpy.io.product.sidd2_elements.DigitalElevationData import (
GeographicCoordinatesType,
GeopositioningType,
AccuracyType,
PositionalAccuracyType,
DigitalElevationDataType,
)
# Tuple referencing the re-exported SIDD 2.0 types so that static analysis does
# not flag the imports above as unused; this tuple is not part of the public API.
__REUSED__ = ( # to avoid unused import lint errors
    GeographicCoordinatesType,
    GeopositioningType,
    AccuracyType,
    PositionalAccuracyType,
    DigitalElevationDataType,
)
| 580 | 25.409091 | 83 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/ExploitationFeatures.py | """
The ExploitationFeaturesType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import datetime
from typing import Union, List
import numpy
from sarpy.io.xml.base import Serializable, ParametersCollection
from sarpy.io.xml.descriptors import SerializableDescriptor, ParametersDescriptor, \
FloatDescriptor, FloatModularDescriptor, StringDescriptor, StringRegexDescriptor, \
DateTimeDescriptor, SerializableListDescriptor
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.geometry.geocoords import ecf_to_geodetic
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from sarpy.io.product.sidd2_elements.ExploitationFeatures import ExploitationCalculator as BaseExploitationCalculator
from sarpy.io.product.sidd2_elements.blocks import RowColDoubleType, RangeAzimuthType, \
RadarModeType
from .blocks import AngleZeroToExclusive360MagnitudeType
from sarpy.io.product.sidd2_elements.ExploitationFeatures import (
InputROIType,
)
__REUSED__ = (
InputROIType,
)
# regex alternatives for the receive polarization string descriptors below
RCV_POLARIZATION_PATTERN = 'V|H|X|Y|S|E|RHC|LHC|UNKNOWN|OTHER[^:]*'
# transmit additionally permits the 'SEQUENCE' token
TX_POLARIZATION_PATTERN = RCV_POLARIZATION_PATTERN + '|SEQUENCE'
logger = logging.getLogger(__name__)
# common error message templates used by the type validation checks in this module
_sicd_type_text = 'Requires instance of SICDType,\n\tgot type `{}`'
_exp_calc_text = 'Requires input which is an instance of ExploitationCalculator,\n\tgot type `{}`'
class ExploitationCalculator(BaseExploitationCalculator):
    # SIDD 3.0 specialization of the SIDD 2.0 exploitation calculator. The
    # vectors referenced below (ETP, slant_x, slant_z, normal_vector,
    # row_vector, col_vector, SCP, ARPVel) are attributes established by the
    # base class - presumably ECF geometry vectors; see the SIDD 2.0
    # ExploitationCalculator for their construction (TODO confirm).
    @property
    def Shadow(self):
        # type: () -> AngleZeroToExclusive360MagnitudeType
        """
        AngleZeroToExclusive360MagnitudeType: The shadow angle and magnitude,
        with the angle in degrees wrapped into [0, 360).
        """
        shadow = self.ETP - self.slant_x/(self.slant_x.dot(self.ETP))
        # project the shadow direction into the image plane along slant_z
        shadow_prime = shadow - (shadow.dot(self.normal_vector)/self.slant_z.dot(self.normal_vector))*self.slant_z
        # angle measured from the column direction towards the row direction
        theta_shadow = numpy.rad2deg(numpy.arctan2(self.row_vector.dot(shadow_prime), self.col_vector.dot(shadow_prime)))
        if theta_shadow < 0:
            theta_shadow += 360.0  # wrap arctan2 output (-180, 180] into [0, 360)
        return AngleZeroToExclusive360MagnitudeType(
            Angle=theta_shadow,
            Magnitude=numpy.linalg.norm(shadow_prime))
    @property
    def Layover(self):
        # type: () -> AngleZeroToExclusive360MagnitudeType
        """
        AngleZeroToExclusive360MagnitudeType: The layover angle and magnitude,
        with the angle in degrees wrapped into [0, 360).
        """
        layover = self.normal_vector - self.slant_z/(self.slant_z.dot(self.normal_vector))
        theta_layover = numpy.rad2deg(numpy.arctan2(self.row_vector.dot(layover), self.col_vector.dot(layover)))
        if theta_layover < 0:
            theta_layover += 360.0  # wrap into [0, 360)
        return AngleZeroToExclusive360MagnitudeType(
            Angle=theta_layover,
            Magnitude=numpy.linalg.norm(layover))
    @property
    def North(self):
        """Describes the clockwise angle from increasing column direction to north at the center of
        the image. Angle range: [0, 360)

        Returns
        -------
        float: The north angle.
        """
        lat, lon, _ = ecf_to_geodetic(self.SCP)
        lat_r = numpy.deg2rad(lat)
        lon_r = numpy.deg2rad(lon)
        # unit vector pointing geodetic north at the scene center point, in ECF coordinates
        north = numpy.array(
            [-numpy.sin(lat_r)*numpy.cos(lon_r),
             -numpy.sin(lat_r)*numpy.sin(lon_r),
             numpy.cos(lat_r)])
        # project the north vector into the image plane along slant_z
        north_prime = north - self.slant_z*(north.dot(self.normal_vector)/self.slant_z.dot(self.normal_vector))
        theta_north = numpy.rad2deg(numpy.arctan2(self.row_vector.dot(north_prime), self.col_vector.dot(north_prime)))
        if theta_north < 0:
            theta_north = theta_north + 360.0  # wrap into [0, 360)
        return theta_north
    @property
    def MultiPath(self):
        """
        float: The multipath angle, in degrees wrapped into [0, 360).
        """
        # slant_x with its component along the plane normal (taken through slant_z) removed
        multipath = self.slant_x - self.slant_z*(
            self.slant_x.dot(self.normal_vector)/self.slant_z.dot(self.normal_vector))
        theta_multipath = numpy.rad2deg(numpy.arctan2(self.row_vector.dot(multipath), self.col_vector.dot(multipath)))
        if theta_multipath < 0:
            theta_multipath += 360.0  # wrap into [0, 360)
        return theta_multipath
    @property
    def GroundTrack(self):
        """
        float: The ground track angle, in degrees wrapped into [0, 360).
        """
        # ARP velocity component perpendicular to the plane normal
        track = self.ARPVel - (self.ARPVel.dot(self.normal_vector))*self.normal_vector
        theta_track = numpy.rad2deg(numpy.arctan2(self.row_vector.dot(track), self.col_vector.dot(track)))
        if theta_track < 0:
            theta_track += 360.0  # wrap into [0, 360)
        return theta_track
def _extract_sicd_tx_rcv_pol(str_in):
"""
Extract the tx and rcv components from the sicd style tx/rcv polarization string.
Parameters
----------
str_in : str
Returns
-------
str, str
"""
if str_in is None:
return 'UNKNOWN', 'UNKNOWN'
if not isinstance(str_in, str):
raise TypeError('requires a string type input.')
if str_in in ['OTHER', 'UNKNOWN']:
return 'UNKNOWN', 'UNKNOWN'
return str_in.split(':')
class TxRcvPolarizationType(Serializable):
    """
    The transmit/receive polarization information for a single channel.
    """
    # declarative metadata consumed by the Serializable machinery - field order matters
    _fields = ('TxPolarization', 'RcvPolarization', 'RcvPolarizationOffset')
    _required = ('TxPolarization', 'RcvPolarization')
    _numeric_format = {'RcvPolarizationOffset': FLOAT_FORMAT}
    # Descriptor
    TxPolarization = StringRegexDescriptor(
        'TxPolarization', TX_POLARIZATION_PATTERN, _required, strict=DEFAULT_STRICT,
        docstring='Transmit polarization type.') # type: str
    RcvPolarization = StringRegexDescriptor(
        'RcvPolarization', RCV_POLARIZATION_PATTERN, _required, strict=DEFAULT_STRICT,
        docstring='Receive polarization type.') # type: str
    RcvPolarizationOffset = FloatModularDescriptor(
        'RcvPolarizationOffset', 180.0, _required, strict=DEFAULT_STRICT,
        docstring='Angle offset for the receive polarization defined at aperture center.') # type: float
    def __init__(self, TxPolarization=None, RcvPolarization=None, RcvPolarizationOffset=None, **kwargs):
        """
        Parameters
        ----------
        TxPolarization : str
        RcvPolarization : str
        RcvPolarizationOffset : None|float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TxPolarization = TxPolarization
        self.RcvPolarization = RcvPolarization
        self.RcvPolarizationOffset = RcvPolarizationOffset
        super(TxRcvPolarizationType, self).__init__(**kwargs)
    @classmethod
    def from_sicd_value(cls, str_in):
        """
        Construct from the sicd style tx/rcv polarization string (e.g. 'V:H').

        Parameters
        ----------
        str_in : str

        Returns
        -------
        TxRcvPolarizationType
        """
        tx, rcv = _extract_sicd_tx_rcv_pol(str_in)
        return cls(TxPolarization=tx, RcvPolarization=rcv)
class ProcTxRcvPolarizationType(Serializable):
    """
    The processed transmit/receive polarization.
    """
    # declarative metadata consumed by the Serializable machinery - field order matters
    _fields = ('TxPolarizationProc', 'RcvPolarizationProc')
    _required = ('TxPolarizationProc', 'RcvPolarizationProc')
    # Descriptor
    TxPolarizationProc = StringRegexDescriptor(
        'TxPolarizationProc', TX_POLARIZATION_PATTERN, _required, strict=DEFAULT_STRICT,
        docstring='Transmit polarization type.') # type: str
    RcvPolarizationProc = StringRegexDescriptor(
        'RcvPolarizationProc', RCV_POLARIZATION_PATTERN, _required, strict=DEFAULT_STRICT,
        docstring='Receive polarization type.') # type: str
    def __init__(self, TxPolarizationProc=None, RcvPolarizationProc=None, **kwargs):
        """
        Parameters
        ----------
        TxPolarizationProc : str
        RcvPolarizationProc : str
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TxPolarizationProc = TxPolarizationProc
        self.RcvPolarizationProc = RcvPolarizationProc
        super(ProcTxRcvPolarizationType, self).__init__(**kwargs)
    @classmethod
    def from_sicd_value(cls, str_in):
        """
        Construct from the sicd style tx/rcv polarization string (e.g. 'V:H').

        Parameters
        ----------
        str_in : str

        Returns
        -------
        ProcTxRcvPolarizationType
        """
        tx, rcv = _extract_sicd_tx_rcv_pol(str_in)
        return cls(TxPolarizationProc=tx, RcvPolarizationProc=rcv)
class ExploitationFeaturesCollectionInformationType(Serializable):
    """
    General collection information.
    """
    _fields = (
        'SensorName', 'RadarMode', 'CollectionDateTime', 'LocalDateTime', 'CollectionDuration',
        'Resolution', 'InputROI', 'Polarizations')
    _required = ('SensorName', 'RadarMode', 'CollectionDateTime', 'CollectionDuration')
    _collections_tags = {'Polarizations': {'array': False, 'child_tag': 'Polarization'}}
    _numeric_format = {'CollectionDuration': FLOAT_FORMAT}
    # Descriptor
    SensorName = StringDescriptor(
        'SensorName', _required, strict=DEFAULT_STRICT,
        docstring='The name of the sensor.') # str
    RadarMode = SerializableDescriptor(
        'RadarMode', RadarModeType, _required, strict=DEFAULT_STRICT,
        docstring='Radar collection mode.') # type: RadarModeType
    CollectionDateTime = DateTimeDescriptor(
        'CollectionDateTime', _required, strict=DEFAULT_STRICT,
        docstring='Collection date and time defined in Coordinated Universal Time (UTC). The seconds '
                  'should be followed by a Z to indicate UTC.') # type: numpy.datetime64
    CollectionDuration = FloatDescriptor(
        'CollectionDuration', _required, strict=DEFAULT_STRICT,
        docstring='The duration of the collection (units = seconds).') # type: float
    Resolution = SerializableDescriptor(
        'Resolution', RangeAzimuthType, _required, strict=DEFAULT_STRICT,
        docstring='Uniformly-weighted resolution (range and azimuth) processed in '
                  'the slant plane.') # type: Union[None, RangeAzimuthType]
    InputROI = SerializableDescriptor(
        'InputROI', InputROIType, _required, strict=DEFAULT_STRICT,
        docstring='ROI representing portion of input data used to make '
                  'this product.') # type: Union[None, InputROIType]
    Polarizations = SerializableListDescriptor(
        'Polarizations', TxRcvPolarizationType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Transmit and receive polarization(s).') # type: Union[None, List[TxRcvPolarizationType]]
    def __init__(self, SensorName=None, RadarMode=None, CollectionDateTime=None, LocalDateTime=None,
                 CollectionDuration=None, Resolution=None, Polarizations=None, **kwargs):
        """
        Parameters
        ----------
        SensorName : str
        RadarMode : RadarModeType
        CollectionDateTime : numpy.datetime64|datetime.datetime|datetime.date|str
        LocalDateTime : None|str|datetime.datetime
        CollectionDuration : float
        Resolution : None|RangeAzimuthType|numpy.ndarray|list|tuple
        Polarizations : None|List[TxRcvPolarizationType]
        kwargs
        """
        # NOTE(review): 'InputROI' is declared in _fields with a descriptor above,
        # but is not accepted as a constructor parameter and is never assigned
        # here - confirm whether it is intended to be settable only after
        # construction/deserialization.
        self._local_date_time = None
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SensorName = SensorName
        self.RadarMode = RadarMode
        self.CollectionDateTime = CollectionDateTime
        self.CollectionDuration = CollectionDuration
        self.LocalDateTime = LocalDateTime
        self.Resolution = Resolution
        self.Polarizations = Polarizations
        super(ExploitationFeaturesCollectionInformationType, self).__init__(**kwargs)
    @property
    def LocalDateTime(self):
        """None|str: The local date/time string of the collection. *Optional.*"""
        return self._local_date_time
    @LocalDateTime.setter
    def LocalDateTime(self, value): # type: (Union[None, str, datetime.datetime]) -> None
        # accepts None, an ISO-formatted string, or a datetime (converted to
        # ISO format); any other type is logged and coerced to None
        if value is None:
            self._local_date_time = None
            return
        elif isinstance(value, datetime.datetime):
            value = value.isoformat('T')
        if isinstance(value, str):
            self._local_date_time = value
        else:
            logger.error(
                'Attribute LocalDateTime of class ExploitationFeaturesCollectionInformationType\n\t'
                'requires a datetime.datetime or string. Got unsupported type {}.\n\t'
                'Setting value to None.'.format(type(value)))
            self._local_date_time = None
    @classmethod
    def from_sicd(cls, sicd):
        """
        Construct from a sicd element.

        Parameters
        ----------
        sicd : SICDType

        Returns
        -------
        ExploitationFeaturesCollectionInformationType
        """
        if not isinstance(sicd, SICDType):
            raise TypeError(_sicd_type_text.format(type(sicd)))
        # one polarization entry per SICD receive channel
        polarizations = [
            TxRcvPolarizationType.from_sicd_value(entry.TxRcvPolarization)
            for entry in sicd.RadarCollection.RcvChannels]
        # NOTE(review): Resolution is populated from the grid sample spacings
        # (Grid.Row.SS/Grid.Col.SS) rather than an impulse response width -
        # confirm this matches the intended "uniformly-weighted resolution".
        return cls(SensorName=sicd.CollectionInfo.CollectorName,
                   RadarMode=RadarModeType(**sicd.CollectionInfo.RadarMode.to_dict()),
                   CollectionDateTime=sicd.Timeline.CollectStart,
                   CollectionDuration=sicd.Timeline.CollectDuration,
                   Resolution=(sicd.Grid.Row.SS, sicd.Grid.Col.SS),
                   Polarizations=polarizations)
class ExploitationFeaturesCollectionGeometryType(Serializable):
    """
    Key geometry parameters independent of product processing. All values
    computed at the center time of the full collection.
    """
    _fields = ('Azimuth', 'Slope', 'Squint', 'Graze', 'Tilt', 'DopplerConeAngle', 'Extensions')
    _required = ()
    _collections_tags = {'Extensions': {'array': False, 'child_tag': 'Extension'}}
    _numeric_format = {
        'Azimuth': FLOAT_FORMAT, 'Slope': FLOAT_FORMAT, 'Squint': FLOAT_FORMAT, 'Graze': FLOAT_FORMAT,
        'Tilt': FLOAT_FORMAT, 'DopplerConeAngle': FLOAT_FORMAT}
    # Descriptor
    Azimuth = FloatDescriptor(
        'Azimuth', _required, strict=DEFAULT_STRICT, bounds=(0.0, 360.0),
        docstring='Angle clockwise from north indicating the ETP line of sight vector.') # type: float
    Slope = FloatDescriptor(
        'Slope', _required, strict=DEFAULT_STRICT, bounds=(0.0, 90.0),
        docstring='Angle between the ETP at scene center and the range vector perpendicular to '
                  'the direction of motion.') # type: float
    Squint = FloatModularDescriptor(
        'Squint', 180.0, _required, strict=DEFAULT_STRICT,
        docstring='Angle from the ground track to platform velocity vector at nadir. '
                  'Left-look is positive, right-look is negative.') # type: float
    Graze = FloatDescriptor(
        'Graze', _required, strict=DEFAULT_STRICT, bounds=(0.0, 90.0),
        docstring='Angle between the ETP and the line of sight vector.') # type: float
    Tilt = FloatModularDescriptor(
        'Tilt', 180.0, _required, strict=DEFAULT_STRICT,
        docstring='Angle between the ETP and the cross range vector. '
                  'Also known as the twist angle.') # type: float
    DopplerConeAngle = FloatDescriptor(
        'DopplerConeAngle', _required, strict=DEFAULT_STRICT, bounds=(0.0, 180.0),
        docstring='The angle between the velocity vector and the radar line-of-sight vector. '
                  'Also known as the slant plane squint angle.') # type: float
    Extensions = ParametersDescriptor(
        'Extensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Exploitation feature extension related to geometry for a '
                  'single input image.') # type: ParametersCollection
    def __init__(self, Azimuth=None, Slope=None, Squint=None, Graze=None, Tilt=None,
                 DopplerConeAngle=None, Extensions=None, **kwargs):
        """
        Parameters
        ----------
        Azimuth : None|float
        Slope : None|float
        Squint : None|float
        Graze : None|float
        Tilt : None|float
        DopplerConeAngle : None|float
        Extensions : None|ParametersCollection|dict
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Azimuth = Azimuth
        self.Slope = Slope
        self.Squint = Squint
        self.Graze = Graze
        self.Tilt = Tilt
        self.DopplerConeAngle = DopplerConeAngle
        self.Extensions = Extensions
        super(ExploitationFeaturesCollectionGeometryType, self).__init__(**kwargs)
    @classmethod
    def from_calculator(cls, calculator):
        """
        Create from an ExploitationCalculator object.

        Parameters
        ----------
        calculator : ExploitationCalculator

        Returns
        -------
        ExploitationFeaturesCollectionGeometryType

        Raises
        ------
        TypeError
            If `calculator` is not an ExploitationCalculator instance.
        """
        if not isinstance(calculator, ExploitationCalculator):
            raise TypeError(_exp_calc_text.format(type(calculator)))
        # Graze is the angle between the ETP and the line of sight vector (see
        # the Graze descriptor above), exposed by the calculator as GrazeAngle.
        # It was previously populated from SlopeAngle - a copy/paste defect
        # which duplicated the Slope field's source.
        return cls(Azimuth=calculator.AzimuthAngle,
                   Slope=calculator.SlopeAngle,
                   Graze=calculator.GrazeAngle,
                   Tilt=calculator.TiltAngle,
                   DopplerConeAngle=calculator.DopplerConeAngle,
                   Squint=calculator.SquintAngle)
class ExploitationFeaturesCollectionPhenomenologyType(Serializable):
    """
    Phenomenology related to both the geometry and the final product processing.
    All values computed at the center time of the full collection.
    """
    _fields = ('Shadow', 'Layover', 'MultiPath', 'GroundTrack', 'Extensions')
    _required = ()
    _collections_tags = {'Extensions': {'array': False, 'child_tag': 'Extension'}}
    _numeric_format = {'MultiPath': FLOAT_FORMAT, 'GroundTrack': FLOAT_FORMAT}
    # Descriptor
    Shadow = SerializableDescriptor(
        'Shadow', AngleZeroToExclusive360MagnitudeType, _required, strict=DEFAULT_STRICT,
        docstring='The phenomenon where vertical objects occlude radar '
                  'energy.') # type: Union[None, AngleZeroToExclusive360MagnitudeType]
    Layover = SerializableDescriptor(
        'Layover', AngleZeroToExclusive360MagnitudeType, _required, strict=DEFAULT_STRICT,
        docstring='The phenomenon where vertical objects appear as ground objects with '
                  'the same range/range rate.') # type: Union[None, AngleZeroToExclusive360MagnitudeType]
    MultiPath = FloatDescriptor(
        'MultiPath', _required, strict=DEFAULT_STRICT, bounds=(0.0, 360.0),
        docstring='This is a range dependent phenomenon which describes the energy from a '
                  'single scatter returned to the radar via more than one path and results '
                  'in a nominally constant direction in the ETP.') # type: Union[None, float]
    GroundTrack = FloatDescriptor(
        'GroundTrack', _required, strict=DEFAULT_STRICT, bounds=(0.0, 360.0),
        docstring='Counter-clockwise angle from increasing column direction to ground track '
                  'at the center of the image.') # type: Union[None, float]
    Extensions = ParametersDescriptor(
        'Extensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Exploitation feature extension related to geometry for a '
                  'single input image.') # type: ParametersCollection
    def __init__(self, Shadow=None, Layover=None, MultiPath=None, GroundTrack=None, Extensions=None, **kwargs):
        """
        Parameters
        ----------
        Shadow : None|AngleZeroToExclusive360MagnitudeType|numpy.ndarray|list|tuple
        Layover : None|AngleZeroToExclusive360MagnitudeType|numpy.ndarray|list|tuple
        MultiPath : None|float
        GroundTrack : None|float
        Extensions : None|ParametersCollection|dict
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Shadow = Shadow
        self.Layover = Layover
        self.MultiPath = MultiPath
        self.GroundTrack = GroundTrack
        self.Extensions = Extensions
        super(ExploitationFeaturesCollectionPhenomenologyType, self).__init__(**kwargs)
    @classmethod
    def from_calculator(cls, calculator):
        """
        Create from an ExploitationCalculator object.

        Parameters
        ----------
        calculator : ExploitationCalculator

        Returns
        -------
        ExploitationFeaturesCollectionPhenomenologyType

        Raises
        ------
        TypeError
            If `calculator` is not an ExploitationCalculator instance.
        """
        if not isinstance(calculator, ExploitationCalculator):
            raise TypeError(_exp_calc_text.format(type(calculator)))
        # the calculator properties already wrap angles into [0, 360)
        return cls(Shadow=calculator.Shadow, Layover=calculator.Layover,
                   MultiPath=calculator.MultiPath, GroundTrack=calculator.GroundTrack)
class CollectionType(Serializable):
    """
    Metadata regarding one of the input collections.
    """
    _fields = ('Information', 'Geometry', 'Phenomenology', 'identifier')
    _required = ('Information', 'identifier')
    # 'identifier' serializes as an XML attribute rather than a child element
    _set_as_attribute = ('identifier', )
    # Descriptor
    Information = SerializableDescriptor(
        'Information', ExploitationFeaturesCollectionInformationType, _required, strict=DEFAULT_STRICT,
        docstring='General collection information.') # type: ExploitationFeaturesCollectionInformationType
    Geometry = SerializableDescriptor(
        'Geometry', ExploitationFeaturesCollectionGeometryType, _required, strict=DEFAULT_STRICT,
        docstring='Key geometry parameters independent of product '
                  'processing.') # type: Union[None, ExploitationFeaturesCollectionGeometryType]
    Phenomenology = SerializableDescriptor(
        'Phenomenology', ExploitationFeaturesCollectionPhenomenologyType, _required, strict=DEFAULT_STRICT,
        docstring='Phenomenology related to both the geometry and the final '
                  'product processing.') # type: Union[None, ExploitationFeaturesCollectionPhenomenologyType]
    identifier = StringDescriptor(
        'identifier', _required, strict=DEFAULT_STRICT,
        docstring='The exploitation feature identifier.') # type: str
    def __init__(self, Information=None, Geometry=None, Phenomenology=None, identifier=None, **kwargs):
        """
        Parameters
        ----------
        Information : ExploitationFeaturesCollectionInformationType
        Geometry : None|ExploitationFeaturesCollectionGeometryType
        Phenomenology : None|ExploitationFeaturesCollectionPhenomenologyType
        identifier : str
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Information = Information
        self.Geometry = Geometry
        self.Phenomenology = Phenomenology
        self.identifier = identifier
        super(CollectionType, self).__init__(**kwargs)
    @classmethod
    def from_calculator(cls, calculator, sicd):
        """
        Create from an ExploitationCalculator object and the corresponding SICD.

        Parameters
        ----------
        calculator : ExploitationCalculator
        sicd : SICDType

        Returns
        -------
        CollectionType

        Raises
        ------
        TypeError
            If `calculator` is not an ExploitationCalculator instance.
        """
        if not isinstance(calculator, ExploitationCalculator):
            raise TypeError(_exp_calc_text.format(type(calculator)))
        # the identifier is taken from the SICD core name; the child elements
        # are populated by the corresponding from_* factory methods
        return cls(identifier=sicd.CollectionInfo.CoreName,
                   Information=ExploitationFeaturesCollectionInformationType.from_sicd(sicd),
                   Geometry=ExploitationFeaturesCollectionGeometryType.from_calculator(calculator),
                   Phenomenology=ExploitationFeaturesCollectionPhenomenologyType.from_calculator(calculator))
class ExploitationFeaturesProductType(Serializable):
    """
    Metadata regarding the product.
    """
    _fields = ('Resolution', 'Ellipticity', 'Polarizations', 'North', 'Extensions')
    _required = ('Resolution', 'Ellipticity', 'Polarizations')
    _collections_tags = {
        'Polarizations': {'array': False, 'child_tag': 'Polarization'},
        'Extensions': {'array': False, 'child_tag': 'Extension'}}
    _numeric_format = {'Ellipticity': FLOAT_FORMAT, 'North': FLOAT_FORMAT}
    # Descriptor
    Resolution = SerializableDescriptor(
        'Resolution', RowColDoubleType, _required, strict=DEFAULT_STRICT,
        docstring='Uniformly-weighted resolution projected into the Earth Tangent '
                  'Plane (ETP).') # type: RowColDoubleType
    Ellipticity = FloatDescriptor(
        'Ellipticity', _required, strict=DEFAULT_STRICT,
        docstring="Ellipticity of the 2D-IPR at the ORP, measured in the *Earth Geodetic "
                  "Tangent Plane (EGTP)*. Ellipticity is the ratio of the IPR ellipse's "
                  "major axis to minor axis.") # type: float
    Polarizations = SerializableListDescriptor(
        'Polarizations', ProcTxRcvPolarizationType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Describes the processed transmit and receive polarizations for the '
                  'product.') # type: List[ProcTxRcvPolarizationType]
    North = FloatDescriptor(
        'North', _required, strict=DEFAULT_STRICT, bounds=(0.0, 360.0),
        docstring='Clockwise angle from increasing column direction to north at the center'
                  'of the image.') # type: float
    Extensions = ParametersDescriptor(
        'Extensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Exploitation feature extension related to geometry for a '
                  'single input image.') # type: ParametersCollection
    def __init__(self, Resolution=None, Ellipticity=None, Polarizations=None,
                 North=None, Extensions=None, **kwargs):
        """
        Parameters
        ----------
        Resolution : RowColDoubleType|numpy.ndarray|list|tuple
        Ellipticity : float
        Polarizations : List[ProcTxRcvPolarizationType]
        North : None|float
        Extensions : None|ParametersCollection|dict
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Resolution = Resolution
        self.Ellipticity = Ellipticity
        self.Polarizations = Polarizations
        self.North = North
        self.Extensions = Extensions
        super(ExploitationFeaturesProductType, self).__init__(**kwargs)
    @classmethod
    def from_calculator(cls, calculator, sicd):
        """
        Construct from a calculator and a sicd element.

        Parameters
        ----------
        calculator : ExploitationCalculator
        sicd : SICDType

        Returns
        -------
        ExploitationFeaturesProductType
        """
        # NOTE(review): unlike the sibling from_calculator methods, `calculator`
        # is not type-checked here, although calculator.North is accessed below.
        if not isinstance(sicd, SICDType):
            raise TypeError(_sicd_type_text.format(type(sicd)))
        row_ground, col_ground = sicd.get_ground_resolution()
        # ratio of the larger of the two ground resolutions to the smaller (>= 1)
        ellipticity = row_ground/col_ground if row_ground >= col_ground else col_ground/row_ground
        return cls(Resolution=(row_ground, col_ground),
                   Ellipticity=ellipticity,
                   Polarizations=[
                       ProcTxRcvPolarizationType.from_sicd_value(sicd.ImageFormation.TxRcvPolarizationProc), ],
                   North=calculator.North)
class ExploitationFeaturesType(Serializable):
    """
    Computed metadata regarding the collect.
    """
    _fields = ('Collections', 'Products')
    _required = ('Collections', 'Products')
    _collections_tags = {
        'Collections': {'array': False, 'child_tag': 'Collection'},
        'Products': {'array': False, 'child_tag': 'Product'}}
    # Descriptor
    Collections = SerializableListDescriptor(
        'Collections', CollectionType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='') # type: List[CollectionType]
    Products = SerializableListDescriptor(
        'Products', ExploitationFeaturesProductType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='') # type: List[ExploitationFeaturesProductType]
    def __init__(self, Collections=None, Products=None, **kwargs):
        """
        Parameters
        ----------
        Collections : List[CollectionType]
        Products : List[ExploitationFeaturesProductType]
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Collections = Collections
        self.Products = Products
        super(ExploitationFeaturesType, self).__init__(**kwargs)
    @classmethod
    def from_sicd(cls, sicd, row_vector, col_vector):
        """
        Construct from a sicd element, or a list of sicd elements (one
        Collection/Product pair per element).

        Parameters
        ----------
        sicd : SICDType|List[SICDType]
        row_vector : numpy.ndarray
        col_vector : numpy.ndarray

        Returns
        -------
        ExploitationFeaturesType

        Raises
        ------
        TypeError
            If `sicd` is neither a SICDType nor a list/tuple of them.
        """
        if isinstance(sicd, (list, tuple)):
            collections = []
            feats = []
            # NB: this previously iterated `for i, entry in sicd:`, which tried
            # to unpack each SICDType element and raised TypeError for any
            # non-empty sequence; the index was never used.
            for entry in sicd:
                calculator = ExploitationCalculator.from_sicd(entry, row_vector, col_vector)
                collections.append(CollectionType.from_calculator(calculator, entry))
                feats.append(ExploitationFeaturesProductType.from_calculator(calculator, entry))
            return cls(Collections=collections, Products=feats)
        if not isinstance(sicd, SICDType):
            raise TypeError(_sicd_type_text.format(type(sicd)))
        calculator = ExploitationCalculator.from_sicd(sicd, row_vector, col_vector)
        return cls(
            Collections=[CollectionType.from_calculator(calculator, sicd), ],
            Products=[ExploitationFeaturesProductType.from_calculator(calculator, sicd)])
| 30,387 | 38.362694 | 121 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/SIDD.py | """
The SIDDType 3.0 definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union, Tuple
from collections import OrderedDict
from copy import deepcopy
import numpy
from sarpy.io.xml.base import Serializable, parse_xml_from_string, parse_xml_from_file
from sarpy.io.xml.descriptors import SerializableDescriptor
from sarpy.geometry import point_projection
from sarpy.io.product.sidd_schema import get_specification_identifier, \
get_urn_details, validate_xml_ns
from .base import DEFAULT_STRICT
from .ProductCreation import ProductCreationType
from .Display import ProductDisplayType
from .GeoData import GeoDataType
from .Measurement import MeasurementType
from .ExploitationFeatures import ExploitationFeaturesType
from .DownstreamReprocessing import DownstreamReprocessingType
from .Compression import CompressionType
from .DigitalElevationData import DigitalElevationDataType
from .ProductProcessing import ProductProcessingType
from ..sidd1_elements.SIDD import SIDDType as SIDDType1
from ..sidd2_elements.SIDD import SIDDType as SIDDType2
from .blocks import ErrorStatisticsType, RadiometricType, MatchInfoType
logger = logging.getLogger(__name__)
############
# Namespace validation, and definition of the required entries in the namespace
# dictionary. These are resolved once at import time from the registered SIDD
# schema details.
_SIDD_SPECIFICATION_IDENTIFIER = get_specification_identifier()
# the urn for the SIDD version rendered by this module
_SIDD_URN = 'urn:SIDD:3.0.0'
_sidd_details = get_urn_details(_SIDD_URN)
_SIDD_SPECIFICATION_VERSION = _sidd_details['version']
_SIDD_SPECIFICATION_DATE = _sidd_details['date']
# urns of the companion ISM, SFA and SICommon namespaces tied to this SIDD version
_ISM_URN = _sidd_details['ism_urn']
_SFA_URN = _sidd_details['sfa_urn']
_SICOMMON_URN = _sidd_details['sicommon_urn']
##########
# The SIDD object
class SIDDType(Serializable):
    """
    The root element of the SIDD 3.0 document.
    """
    _fields = (
        'ProductCreation', 'Display', 'GeoData', 'Measurement', 'ExploitationFeatures',
        'DownstreamReprocessing', 'ErrorStatistics', 'Radiometric', 'MatchInfo', 'Compression',
        'DigitalElevationData', 'ProductProcessing')
    _required = (
        'ProductCreation', 'Display', 'GeoData', 'Measurement', 'ExploitationFeatures')
    # Descriptor
    ProductCreation = SerializableDescriptor(
        'ProductCreation', ProductCreationType, _required, strict=DEFAULT_STRICT,
        docstring='Information related to processor, classification, and product type.') # type: ProductCreationType
    Display = SerializableDescriptor(
        'Display', ProductDisplayType, _required, strict=DEFAULT_STRICT,
        docstring='Contains information on the parameters needed to display the product in '
                  'an exploitation tool.') # type: ProductDisplayType
    GeoData = SerializableDescriptor(
        'GeoData', GeoDataType, _required, strict=DEFAULT_STRICT,
        docstring='Contains generic and extensible targeting and geographic region '
                  'information.') # type: GeoDataType
    Measurement = SerializableDescriptor(
        'Measurement', MeasurementType, _required, strict=DEFAULT_STRICT,
        docstring='Contains the metadata necessary for performing measurements.') # type: MeasurementType
    ExploitationFeatures = SerializableDescriptor(
        'ExploitationFeatures', ExploitationFeaturesType, _required, strict=DEFAULT_STRICT,
        docstring='Computed metadata regarding the input collections and '
                  'final product.') # type: ExploitationFeaturesType
    DownstreamReprocessing = SerializableDescriptor(
        'DownstreamReprocessing', DownstreamReprocessingType, _required, strict=DEFAULT_STRICT,
        docstring='Metadata describing any downstream processing of the '
                  'product.') # type: Union[None, DownstreamReprocessingType]
    ErrorStatistics = SerializableDescriptor(
        'ErrorStatistics', ErrorStatisticsType, _required, strict=DEFAULT_STRICT,
        docstring='Error statistics passed through from the SICD metadata.') # type: Union[None, ErrorStatisticsType]
    Radiometric = SerializableDescriptor(
        'Radiometric', RadiometricType, _required, strict=DEFAULT_STRICT,
        docstring='Radiometric information about the product.') # type: Union[None, RadiometricType]
    MatchInfo = SerializableDescriptor(
        'MatchInfo', MatchInfoType, _required, strict=DEFAULT_STRICT,
        docstring='Information about other collections that are matched to the current '
                  'collection. The current collection is the collection from which this '
                  'SIDD product was generated.') # type: MatchInfoType
    Compression = SerializableDescriptor(
        'Compression', CompressionType, _required, strict=DEFAULT_STRICT,
        docstring='Contains information regarding any compression that has occurred '
                  'to the image data.') # type: CompressionType
    DigitalElevationData = SerializableDescriptor(
        'DigitalElevationData', DigitalElevationDataType, _required, strict=DEFAULT_STRICT,
        docstring='This describes any Digital ElevationData included with '
                  'the SIDD product.') # type: DigitalElevationDataType
    ProductProcessing = SerializableDescriptor(
        'ProductProcessing', ProductProcessingType, _required, strict=DEFAULT_STRICT,
        docstring='Contains metadata related to algorithms used during '
                  'product generation.') # type: ProductProcessingType
    # NOTE: the Annotations field is omitted due to its incomplete definition
    # in the NGA.STND.0025 specification.
def __init__(self, ProductCreation=None, Display=None, GeoData=None,
Measurement=None, ExploitationFeatures=None, DownstreamReprocessing=None,
ErrorStatistics=None, Radiometric=None, MatchInfo=None, Compression=None,
DigitalElevationData=None, ProductProcessing=None, **kwargs):
"""
Parameters
----------
ProductCreation : ProductCreationType
Display : ProductDisplayType
GeoData : GeoDataType
Measurement : MeasurementType
ExploitationFeatures : ExploitationFeaturesType
DownstreamReprocessing : None|DownstreamReprocessingType
ErrorStatistics : None|ErrorStatisticsType
Radiometric : None|RadiometricType
MatchInfo : None|MatchInfoType
Compression : None|CompressionType
DigitalElevationData : None|DigitalElevationDataType
ProductProcessing : None|ProductProcessingType
kwargs
"""
if '_xml_ns' in kwargs:
self._xml_ns = kwargs['_xml_ns']
if '_xml_ns_key' in kwargs:
self._xml_ns_key = kwargs['_xml_ns_key']
nitf = kwargs.get('_NITF', {})
if not isinstance(nitf, dict):
raise TypeError('Provided NITF options are required to be in dictionary form.')
self._NITF = nitf
self._coa_projection = None
self.ProductCreation = ProductCreation
self.Display = Display
self.GeoData = GeoData
self.Measurement = Measurement
self.ExploitationFeatures = ExploitationFeatures
self.DownstreamReprocessing = DownstreamReprocessing
self.ErrorStatistics = ErrorStatistics
self.Radiometric = Radiometric
self.MatchInfo = MatchInfo
self.Compression = Compression
self.DigitalElevationData = DigitalElevationData
self.ProductProcessing = ProductProcessing
super(SIDDType, self).__init__(**kwargs)
    @property
    def coa_projection(self):
        """
        The Center Of Aperture (COA) Projection object, if it has been previously
        constructed via :func:`define_coa_projection`; `None` otherwise.

        Returns
        -------
        None|sarpy.geometry.point_projection.COAProjection
        """

        return self._coa_projection
    @property
    def NITF(self):
        """
        Optional dictionary of NITF header information, pertains only to subsequent
        SIDD file writing. Populated from the `_NITF` keyword argument of the
        constructor.

        Returns
        -------
        Dict
        """

        return self._NITF
def can_project_coordinates(self):
"""
Determines whether the necessary elements are populated to permit projection
between image and physical coordinates. If False, then the (first discovered)
reason why not will be logged at error level.
Returns
-------
bool
"""
if self._coa_projection is not None:
return True
if self.Measurement.ProjectionType != 'PlaneProjection':
logger.error(
'Formulating a projection is only supported for PlaneProjection, '
'got {}.'.format(self.Measurement.ProjectionType))
return False
return True
def define_coa_projection(self, delta_arp=None, delta_varp=None, range_bias=None,
adj_params_frame='ECF', override=True):
"""
Define the COAProjection object.
Parameters
----------
delta_arp : None|numpy.ndarray|list|tuple
ARP position adjustable parameter (ECF, m). Defaults to 0 in each coordinate.
delta_varp : None|numpy.ndarray|list|tuple
VARP position adjustable parameter (ECF, m/s). Defaults to 0 in each coordinate.
range_bias : float|int
Range bias adjustable parameter (m), defaults to 0.
adj_params_frame : str
One of ['ECF', 'RIC_ECF', 'RIC_ECI'], specifying the coordinate frame used for
expressing `delta_arp` and `delta_varp` parameters.
override : bool
should we redefine, if it is previously defined?
Returns
-------
None
"""
if not self.can_project_coordinates():
logger.error('The COAProjection object cannot be defined.')
return
if self._coa_projection is not None and not override:
return
self._coa_projection = point_projection.COAProjection.from_sidd(
self, delta_arp=delta_arp, delta_varp=delta_varp, range_bias=range_bias,
adj_params_frame=adj_params_frame)
def project_ground_to_image(self, coords, **kwargs):
"""
Transforms a 3D ECF point to pixel (row/column) coordinates. This is
implemented in accordance with the SICD Image Projections Description Document.
**Really Scene-To-Image projection.**"
Parameters
----------
coords : numpy.ndarray|tuple|list
ECF coordinate to map to scene coordinates, of size `N x 3`.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.ground_to_image` method.
Returns
-------
Tuple[numpy.ndarray, float, int]
* `image_points` - the determined image point array, of size `N x 2`. Following
the SICD convention, he upper-left pixel is [0, 0].
* `delta_gpn` - residual ground plane displacement (m).
* `iterations` - the number of iterations performed.
See Also
--------
sarpy.geometry.point_projection.ground_to_image
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.ground_to_image(coords, self, **kwargs)
def project_ground_to_image_geo(self, coords, ordering='latlong', **kwargs):
"""
Transforms a 3D Lat/Lon/HAE point to pixel (row/column) coordinates. This is
implemented in accordance with the SICD Image Projections Description Document.
**Really Scene-To-Image projection.**"
Parameters
----------
coords : numpy.ndarray|tuple|list
ECF coordinate to map to scene coordinates, of size `N x 3`.
ordering : str
If 'longlat', then the input is `[longitude, latitude, hae]`.
Otherwise, the input is `[latitude, longitude, hae]`. Passed through
to :func:`sarpy.geometry.geocoords.geodetic_to_ecf`.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.ground_to_image_geo` method.
Returns
-------
Tuple[numpy.ndarray, float, int]
* `image_points` - the determined image point array, of size `N x 2`. Following
the SICD convention, he upper-left pixel is [0, 0].
* `delta_gpn` - residual ground plane displacement (m).
* `iterations` - the number of iterations performed.
See Also
--------
sarpy.geometry.point_projection.ground_to_image_geo
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.ground_to_image_geo(coords, self, ordering=ordering, **kwargs)
def project_image_to_ground(self, im_points, projection_type='HAE', **kwargs):
"""
Transforms image coordinates to ground plane ECF coordinate via the algorithm(s)
described in SICD Image Projections document.
Parameters
----------
im_points : numpy.ndarray|list|tuple
the image coordinate array
projection_type : str
One of `['PLANE', 'HAE', 'DEM']`. Type `DEM` is a work in progress.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.image_to_ground` method.
Returns
-------
numpy.ndarray
Ground Plane Point (in ECF coordinates) corresponding to the input image coordinates.
See Also
--------
sarpy.geometry.point_projection.image_to_ground
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.image_to_ground(
im_points, self, projection_type=projection_type, **kwargs)
def project_image_to_ground_geo(self, im_points, ordering='latlong', projection_type='HAE', **kwargs):
"""
Transforms image coordinates to ground plane WGS-84 coordinate via the algorithm(s)
described in SICD Image Projections document.
Parameters
----------
im_points : numpy.ndarray|list|tuple
the image coordinate array
projection_type : str
One of `['PLANE', 'HAE', 'DEM']`. Type `DEM` is a work in progress.
ordering : str
Determines whether return is ordered as `[lat, long, hae]` or `[long, lat, hae]`.
Passed through to :func:`sarpy.geometry.geocoords.ecf_to_geodetic`.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.image_to_ground_geo` method.
Returns
-------
numpy.ndarray
Ground Plane Point (in ECF coordinates) corresponding to the input image coordinates.
See Also
--------
sarpy.geometry.point_projection.image_to_ground_geo
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.image_to_ground_geo(
im_points, self, ordering=ordering, projection_type=projection_type, **kwargs)
@staticmethod
def get_xmlns_collection():
"""
Gets the correct SIDD 3.0 dictionary of xml namespace details.
Returns
-------
dict
"""
return OrderedDict([
('xmlns', _SIDD_URN), ('xmlns:sicommon', _SICOMMON_URN),
('xmlns:sfa', _SFA_URN), ('xmlns:ism', _ISM_URN)])
@staticmethod
def get_des_details():
"""
Gets the correct SIDD 3.0 DES subheader details.
Returns
-------
dict
"""
return OrderedDict([
('DESSHSI', _SIDD_SPECIFICATION_IDENTIFIER),
('DESSHSV', _SIDD_SPECIFICATION_VERSION),
('DESSHSD', _SIDD_SPECIFICATION_DATE),
('DESSHTN', _SIDD_URN)])
    @classmethod
    def from_node(cls, node, xml_ns, ns_key='default', kwargs=None):
        """
        Deserialize from the given XML node, dispatching to the SIDD 1.x or 2.x
        object definitions when the namespace urn indicates an earlier version.
        """

        if ns_key is None:
            raise ValueError('ns_key must be defined.')
        if ns_key not in xml_ns:
            raise ValueError('ns_key {} is not in the xml namespace'.format(ns_key))
        # dispatch to the earlier-version SIDD structures based on the urn
        if xml_ns[ns_key].startswith('urn:SIDD:1.'):
            return SIDDType1.from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)
        elif xml_ns[ns_key].startswith('urn:SIDD:2.'):
            return SIDDType2.from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)
        # anything other than a SIDD 3.x urn is unusable here
        valid_ns = validate_xml_ns(xml_ns, ns_key)
        if not xml_ns[ns_key].startswith('urn:SIDD:3.'):
            raise ValueError('Cannot use urn {} for SIDD version 3.0'.format(xml_ns[ns_key]))
        # namespace validation failure is survivable, but likely to cause trouble later
        if not valid_ns:
            logger.warning(
                'SIDD namespace validation failed,\n\t'
                'which may lead to subsequent deserialization failures')
        return super(SIDDType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)
def to_xml_bytes(self, urn=None, tag='SIDD', check_validity=False, strict=DEFAULT_STRICT):
if urn is None:
urn = self.get_xmlns_collection()
return super(SIDDType, self).to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict)
def to_xml_string(self, urn=None, tag='SIDD', check_validity=False, strict=DEFAULT_STRICT):
return self.to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict).decode('utf-8')
def copy(self):
"""
Provides a deep copy.
Returns
-------
SIDDType
"""
out = super(SIDDType, self).copy()
out._NITF = deepcopy(self._NITF)
return out
@classmethod
def from_xml_file(cls, file_path):
"""
Construct the sidd object from a stand-alone xml file path.
Parameters
----------
file_path : str
Returns
-------
SIDDType
"""
root_node, xml_ns = parse_xml_from_file(file_path)
ns_key = 'default' if 'default' in xml_ns else None
return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)
@classmethod
def from_xml_string(cls, xml_string):
"""
Construct the sidd object from a xml string.
Parameters
----------
xml_string : str|bytes
Returns
-------
SIDDType
"""
root_node, xml_ns = parse_xml_from_string(xml_string)
ns_key = 'default' if 'default' in xml_ns else None
return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)
| 18,567 | 38.338983 | 118 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/ProductCreation.py | """
Types implementing the SIDD 3.0 Product Creation Parameters
"""
__classification__ = 'UNCLASSIFIED'
# SIDD 3.0 reuses the SIDD 2.0 types. Make those symbols available in this module.
from sarpy.io.product.sidd2_elements.ProductCreation import (
ProcessorInformationType,
ProductClassificationType,
ProductCreationType,
)
__REUSED__ = ( # to avoid unused import lint errors
ProcessorInformationType,
ProductClassificationType,
ProductCreationType,
)
| 481 | 25.777778 | 83 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/ProductProcessing.py | """
Types implementing the SIDD 3.0 ProductProcessing Parameters
"""
__classification__ = 'UNCLASSIFIED'
# SIDD 3.0 reuses the SIDD 2.0 types. Make those symbols available in this module.
from sarpy.io.product.sidd2_elements.ProductProcessing import (
ProcessingModuleType,
ProductProcessingType,
)
__REUSED__ = ( # to avoid unused import lint errors
ProcessingModuleType,
ProductProcessingType,
)
| 418 | 25.1875 | 83 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/__init__.py |
__classification__ = 'UNCLASSIFIED'
| 37 | 11.666667 | 35 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/GeoData.py | """
Types implementing the SIDD 3.0 GeoData Parameters
"""
__classification__ = 'UNCLASSIFIED'
# SIDD 3.0 reuses the SIDD 2.0 types. Make those symbols available in this module.
from sarpy.io.product.sidd2_elements.GeoData import GeoDataType
__REUSED__ = ( # to avoid unused import lint errors
GeoDataType,
)
| 317 | 25.5 | 83 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/Display.py | """
Types implementing the SIDD 3.0 Display Parameters
"""
__classification__ = 'UNCLASSIFIED'
# SIDD 3.0 reuses the SIDD 2.0 types. Make those symbols available in this module.
from sarpy.io.product.sidd2_elements.Display import (
BandLUTType,
BandLUTArray,
BandEqualizationType,
ProductGenerationOptionsType,
RRDSType,
NonInteractiveProcessingType,
ScalingType,
OrientationType,
GeometricTransformType,
SharpnessEnhancementType,
ColorManagementModuleType,
ColorSpaceTransformType,
DRAParametersType,
DRAOverridesType,
DynamicRangeAdjustmentType,
InteractiveProcessingType,
ProductDisplayType,
)
__REUSED__ = ( # to avoid unused import lint errors
BandLUTType,
BandLUTArray,
BandEqualizationType,
ProductGenerationOptionsType,
RRDSType,
NonInteractiveProcessingType,
ScalingType,
OrientationType,
GeometricTransformType,
SharpnessEnhancementType,
ColorManagementModuleType,
ColorSpaceTransformType,
DRAParametersType,
DRAOverridesType,
DynamicRangeAdjustmentType,
InteractiveProcessingType,
ProductDisplayType,
)
| 1,154 | 24.108696 | 83 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/DownstreamReprocessing.py | """
Types implementing the SIDD 3.0 DownstreamReprocessing Parameters
"""
__classification__ = 'UNCLASSIFIED'
# SIDD 3.0 reuses the SIDD 2.0 types. Make those symbols available in this module.
from sarpy.io.product.sidd2_elements.DownstreamReprocessing import (
GeometricChipType,
ProcessingEventType,
DownstreamReprocessingType,
)
__REUSED__ = ( # to avoid unused import lint errors
GeometricChipType,
ProcessingEventType,
DownstreamReprocessingType,
)
| 482 | 25.833333 | 83 | py |
sarpy | sarpy-master/sarpy/io/product/sidd3_elements/blocks.py | """
Multipurpose basic SIDD elements
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Optional
import numpy
from sarpy.io.xml.base import Serializable, Arrayable
from sarpy.io.xml.descriptors import FloatDescriptor, StringEnumDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
import sarpy.io.complex.sicd_elements.Radiometric as SicdRadiometric
# Reuse SIDD 2.0 types and make them available here
from sarpy.io.product.sidd2_elements.blocks import (
ErrorStatisticsType,
FilterType,
FilterBankType,
MatchInfoType,
NewLookupTableType,
PredefinedFilterType,
PredefinedLookupType,
Poly2DType,
ReferencePointType,
XYZPolyType,
XYZType,
)
__REUSED__ = ( # to avoid unused imports
ErrorStatisticsType,
FilterType,
FilterBankType,
MatchInfoType,
NewLookupTableType,
PredefinedFilterType,
PredefinedLookupType,
Poly2DType,
ReferencePointType,
XYZPolyType,
XYZType,
)
_len2_array_text = 'Expected array to be of length 2,\n\t' \
'and received `{}`'
_array_type_text = 'Expected array to be numpy.ndarray, list, or tuple,\n\t' \
'got `{}`'
############
# the SICommon namespace elements
class AngleZeroToExclusive360MagnitudeType(Serializable, Arrayable):
    """
    An (angle, magnitude) pair, with the angle expressed in degrees.
    """

    _fields = ('Angle', 'Magnitude')
    _required = ('Angle', 'Magnitude')
    _numeric_format = {key: FLOAT_FORMAT for key in _fields}
    _child_xml_ns_key = {'Angle': 'sicommon', 'Magnitude': 'sicommon'}
    # Descriptor
    Angle = FloatDescriptor(
        'Angle', _required, strict=DEFAULT_STRICT, bounds=(0.0, 360),
        docstring='The angle.')  # type: float
    Magnitude = FloatDescriptor(
        'Magnitude', _required, strict=DEFAULT_STRICT, bounds=(0.0, None),
        docstring='The magnitude.')  # type: float

    def __init__(self, Angle=None, Magnitude=None, **kwargs):
        """
        Parameters
        ----------
        Angle : float
        Magnitude : float
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Angle, self.Magnitude = Angle, Magnitude
        super(AngleZeroToExclusive360MagnitudeType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.float64):
        """
        Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form [Angle, Magnitude]
        """

        return numpy.array((self.Angle, self.Magnitude), dtype=dtype)

    @classmethod
    def from_array(cls, array):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Angle, Magnitude]

        Returns
        -------
        AngleZeroToExclusive360MagnitudeType
        """

        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(_array_type_text.format(type(array)))
        if len(array) < 2:
            raise ValueError(_len2_array_text.format(array))
        return cls(Angle=array[0], Magnitude=array[1])
# SIDD 3.0 Radiometric is the same as SICD, but with an added field
class RadiometricType(SicdRadiometric.RadiometricType):
    """
    SIDD 3.0 radiometric parameters - the SICD Radiometric structure extended
    with the `SigmaZeroSFIncidenceMap` field.
    """

    _fields = ('NoiseLevel', 'RCSSFPoly', 'SigmaZeroSFPoly', 'BetaZeroSFPoly', 'SigmaZeroSFIncidenceMap',
               'GammaZeroSFPoly')
    # all children are serialized in the SICommon namespace
    _child_xml_ns_key = {
        'NoiseLevel': 'sicommon', 'RCSSFPoly': 'sicommon', 'SigmaZeroSFPoly': 'sicommon',
        'BetaZeroSFPoly': 'sicommon', 'SigmaZeroSFIncidenceMap': 'sicommon', 'GammaZeroSFPoly': 'sicommon'}
    _SIGMA_ZERO_SF_INCIDENCE_MAP_VALUES = ('APPLIED', 'NOT_APPLIED')
    # descriptors
    SigmaZeroSFIncidenceMap = StringEnumDescriptor(
        'SigmaZeroSFIncidenceMap', _SIGMA_ZERO_SF_INCIDENCE_MAP_VALUES,
        SicdRadiometric.RadiometricType._required, strict=DEFAULT_STRICT,
        docstring='Allowed Values: “APPLIED” or “NOT_APPLIED”')  # type: str

    def __init__(
            self,
            NoiseLevel: Optional[SicdRadiometric.NoiseLevelType_] = None,
            RCSSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            SigmaZeroSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            BetaZeroSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            GammaZeroSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            SigmaZeroSFIncidenceMap: Optional[str] = None,
            **kwargs):
        """
        Parameters
        ----------
        NoiseLevel : NoiseLevelType_
        RCSSFPoly : Poly2DType|numpy.ndarray|list|tuple
        SigmaZeroSFPoly : Poly2DType|numpy.ndarray|list|tuple
        BetaZeroSFPoly : Poly2DType|numpy.ndarray|list|tuple
        GammaZeroSFPoly : Poly2DType|numpy.ndarray|list|tuple
        SigmaZeroSFIncidenceMap : str
        kwargs
        """

        # set the SIDD-only extension field, then delegate the SICD-common
        # fields to the parent constructor
        self.SigmaZeroSFIncidenceMap = SigmaZeroSFIncidenceMap
        super().__init__(NoiseLevel=NoiseLevel,
                         RCSSFPoly=RCSSFPoly,
                         SigmaZeroSFPoly=SigmaZeroSFPoly,
                         BetaZeroSFPoly=BetaZeroSFPoly,
                         GammaZeroSFPoly=GammaZeroSFPoly,
                         **kwargs)
# The end of the SICommon namespace
#####################
| 5,698 | 30.661111 | 107 | py |
sarpy | sarpy-master/sarpy/io/product/sidd1_elements/Measurement.py | """
The MeasurementType definition for SIDD 1.0.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union
from sarpy.io.product.sidd2_elements.base import DEFAULT_STRICT
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import SerializableDescriptor
from sarpy.io.product.sidd2_elements.blocks import RowColIntType, XYZPolyType
from sarpy.io.product.sidd2_elements.Measurement import PolynomialProjectionType, \
GeographicProjectionType, PlaneProjectionType, CylindricalProjectionType
class MeasurementType(Serializable):
    """
    Geometric SAR information required for measurement/geolocation.
    """

    _fields = (
        'PolynomialProjection', 'GeographicProjection', 'PlaneProjection', 'CylindricalProjection',
        'PixelFootprint', 'ARPPoly')
    _required = ('PixelFootprint', 'ARPPoly')
    _choice = ({'required': False, 'collection': ('PolynomialProjection', 'GeographicProjection',
                                                  'PlaneProjection', 'CylindricalProjection')}, )
    # Descriptor
    PolynomialProjection = SerializableDescriptor(
        'PolynomialProjection', PolynomialProjectionType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial pixel to ground. Should only be used for sensor systems where the radar '
                  'geometry parameters are not recorded.')  # type: Union[None, PolynomialProjectionType]
    GeographicProjection = SerializableDescriptor(
        'GeographicProjection', GeographicProjectionType, _required, strict=DEFAULT_STRICT,
        docstring='Geographic mapping of the pixel grid referred to as GGD in the '
                  'Design and Exploitation document.')  # type: Union[None, GeographicProjectionType]
    PlaneProjection = SerializableDescriptor(
        'PlaneProjection', PlaneProjectionType, _required, strict=DEFAULT_STRICT,
        docstring='Planar representation of the pixel grid referred to as PGD in the '
                  'Design and Exploitation document.')  # type: Union[None, PlaneProjectionType]
    CylindricalProjection = SerializableDescriptor(
        'CylindricalProjection', CylindricalProjectionType, _required, strict=DEFAULT_STRICT,
        docstring='Cylindrical mapping of the pixel grid referred to as CGD in the '
                  'Design and Exploitation document.')  # type: Union[None, CylindricalProjectionType]
    PixelFootprint = SerializableDescriptor(
        'PixelFootprint', RowColIntType, _required, strict=DEFAULT_STRICT,
        docstring='Size of the image in pixels.')  # type: RowColIntType
    ARPPoly = SerializableDescriptor(
        'ARPPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Center of aperture polynomial (units = m) based upon time into '
                  'the collect.')  # type: XYZPolyType

    def __init__(self, PolynomialProjection=None, GeographicProjection=None, PlaneProjection=None,
                 CylindricalProjection=None, ARPPoly=None, PixelFootprint=None, **kwargs):
        """
        Parameters
        ----------
        PolynomialProjection : PolynomialProjectionType
        GeographicProjection : GeographicProjectionType
        PlaneProjection : PlaneProjectionType
        CylindricalProjection : CylindricalProjectionType
        ARPPoly : XYZPolyType|numpy.ndarray|list|tuple
        PixelFootprint : RowColIntType|numpy.ndarray|list|tuple
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.PolynomialProjection = PolynomialProjection
        self.GeographicProjection = GeographicProjection
        self.PlaneProjection = PlaneProjection
        self.CylindricalProjection = CylindricalProjection
        self.ARPPoly = ARPPoly
        # PixelFootprint is a required field - previously it could only be
        # supplied through kwargs, now it is an explicit (keyword) parameter
        self.PixelFootprint = PixelFootprint
        super(MeasurementType, self).__init__(**kwargs)

    @property
    def ProjectionType(self):
        """str: *READ ONLY* Identifies the specific image projection type supplied."""

        for attribute in self._choice[0]['collection']:
            if getattr(self, attribute) is not None:
                return attribute
        return None

    @property
    def ReferencePoint(self):
        """
        None|ReferencePointType: *READ ONLY* Gets the reference point of the
        populated projection.
        """

        for attribute in self._choice[0]['collection']:
            # BUG FIX: `attribute` is the attribute *name* (a str) - the previous
            # implementation evaluated `attribute.ReferencePoint`, which raised
            # AttributeError. The projection object must be fetched first.
            projection = getattr(self, attribute)
            if projection is not None:
                return projection.ReferencePoint
        return None
| 4,466 | 45.051546 | 105 | py |
sarpy | sarpy-master/sarpy/io/product/sidd1_elements/ExploitationFeatures.py | """
The ExploitationFeaturesType definition for SIDD 1.0
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import datetime
from typing import Union, List
import numpy
from sarpy.io.product.sidd2_elements.base import DEFAULT_STRICT, FLOAT_FORMAT
from sarpy.io.xml.base import Serializable, ParametersCollection
from sarpy.io.xml.descriptors import SerializableDescriptor, SerializableListDescriptor, \
FloatDescriptor, FloatModularDescriptor, StringDescriptor, StringEnumDescriptor, \
DateTimeDescriptor, ParametersDescriptor, BooleanDescriptor
from sarpy.io.product.sidd2_elements.blocks import RowColDoubleType, RangeAzimuthType, RadarModeType
# noinspection PyProtectedMember
from sarpy.io.product.sidd2_elements.ExploitationFeatures import InputROIType, \
ExploitationFeaturesCollectionPhenomenologyType, _extract_sicd_tx_rcv_pol, \
ExploitationCalculator
from sarpy.io.complex.sicd_elements.blocks import POLARIZATION1_VALUES
from sarpy.io.complex.sicd_elements.SICD import SICDType
logger = logging.getLogger(__name__)
_sicd_type_text = 'Requires instance of SICDType,\n\tgot type `{}`'
class TxRcvPolarizationType(Serializable):
    """
    The transmit/receive polarization information.
    """

    _fields = ('TxPolarization', 'RcvPolarization', 'RcvPolarizationOffset', 'Processed')
    _required = ('TxPolarization', 'RcvPolarization')
    _numeric_format = {'RcvPolarizationOffset': FLOAT_FORMAT}
    # Descriptor
    TxPolarization = StringEnumDescriptor(
        'TxPolarization', POLARIZATION1_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Transmit polarization type.')  # type: str
    RcvPolarization = StringEnumDescriptor(
        'RcvPolarization', POLARIZATION1_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Receive polarization type.')  # type: str
    RcvPolarizationOffset = FloatModularDescriptor(
        'RcvPolarizationOffset', 180.0, _required, strict=DEFAULT_STRICT,
        docstring='Angle offset for the receive polarization defined at aperture '
                  'center.')  # type: Union[None, float]
    Processed = BooleanDescriptor(
        'Processed', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, bool]

    def __init__(self, TxPolarization=None, RcvPolarization=None, RcvPolarizationOffset=None,
                 Processed=None, **kwargs):
        """
        Parameters
        ----------
        TxPolarization : str
        RcvPolarization : str
        RcvPolarizationOffset : None|float
        Processed : None|bool
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # assign the fields via their descriptors
        for field_name, field_value in (
                ('TxPolarization', TxPolarization),
                ('RcvPolarization', RcvPolarization),
                ('RcvPolarizationOffset', RcvPolarizationOffset),
                ('Processed', Processed)):
            setattr(self, field_name, field_value)
        super(TxRcvPolarizationType, self).__init__(**kwargs)

    @classmethod
    def from_sicd_value(cls, str_in):
        """
        Construct from the sicd style tx/rcv polarization string.

        Parameters
        ----------
        str_in : str

        Returns
        -------
        TxRcvPolarizationType
        """

        tx_pol, rcv_pol = _extract_sicd_tx_rcv_pol(str_in)
        return cls(TxPolarization=tx_pol, RcvPolarization=rcv_pol)
class ExploitationFeaturesCollectionInformationType(Serializable):
    """
    General collection information.
    """

    _fields = (
        'SensorName', 'RadarMode', 'CollectionDateTime', 'LocalDateTime', 'CollectionDuration',
        'Resolution', 'InputROI', 'Polarizations')
    _required = ('SensorName', 'RadarMode', 'CollectionDateTime', 'CollectionDuration')
    _collections_tags = {'Polarizations': {'array': False, 'child_tag': 'Polarization'}}
    _numeric_format = {'CollectionDuration': FLOAT_FORMAT}
    # Descriptor
    SensorName = StringDescriptor(
        'SensorName', _required, strict=DEFAULT_STRICT,
        docstring='The name of the sensor.')  # type: str
    RadarMode = SerializableDescriptor(
        'RadarMode', RadarModeType, _required, strict=DEFAULT_STRICT,
        docstring='Radar collection mode.')  # type: RadarModeType
    CollectionDateTime = DateTimeDescriptor(
        'CollectionDateTime', _required, strict=DEFAULT_STRICT,
        docstring='Collection date and time defined in Coordinated Universal Time (UTC). The seconds '
                  'should be followed by a Z to indicate UTC.')  # type: numpy.datetime64
    CollectionDuration = FloatDescriptor(
        'CollectionDuration', _required, strict=DEFAULT_STRICT,
        docstring='The duration of the collection (units = seconds).')  # type: float
    Resolution = SerializableDescriptor(
        'Resolution', RangeAzimuthType, _required, strict=DEFAULT_STRICT,
        docstring='Uniformly-weighted resolution (range and azimuth) processed in '
                  'the slant plane.')  # type: Union[None, RangeAzimuthType]
    InputROI = SerializableDescriptor(
        'InputROI', InputROIType, _required, strict=DEFAULT_STRICT,
        docstring='ROI representing portion of input data used to make '
                  'this product.')  # type: Union[None, InputROIType]
    Polarizations = SerializableListDescriptor(
        'Polarizations', TxRcvPolarizationType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Transmit and receive polarization(s).')  # type: Union[None, List[TxRcvPolarizationType]]

    def __init__(self, SensorName=None, RadarMode=None, CollectionDateTime=None, LocalDateTime=None,
                 CollectionDuration=None, Resolution=None, Polarizations=None, **kwargs):
        """
        Parameters
        ----------
        SensorName : str
        RadarMode : RadarModeType
        CollectionDateTime : numpy.datetime64|datetime.datetime|datetime.date|str
        LocalDateTime : None|str|datetime.datetime
        CollectionDuration : float
        Resolution : None|RangeAzimuthType|numpy.ndarray|list|tuple
        Polarizations : None|List[TxRcvPolarizationType]
        kwargs
            Any other field (e.g. `InputROI`) may be supplied through kwargs.
        """

        self._local_date_time = None
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SensorName = SensorName
        self.RadarMode = RadarMode
        self.CollectionDateTime = CollectionDateTime
        self.CollectionDuration = CollectionDuration
        self.LocalDateTime = LocalDateTime
        self.Resolution = Resolution
        self.Polarizations = Polarizations
        super(ExploitationFeaturesCollectionInformationType, self).__init__(**kwargs)

    @property
    def LocalDateTime(self):
        """None|str: The local date/time string of the collection. *Optional.*"""

        return self._local_date_time

    @LocalDateTime.setter
    def LocalDateTime(self, value):  # type: (Union[None, str, datetime.datetime]) -> None
        # accepts None, a datetime (converted to isoformat), or a string;
        # anything else logs an error and resets to None
        if value is None:
            self._local_date_time = None
            return
        elif isinstance(value, datetime.datetime):
            value = value.isoformat('T')
        if isinstance(value, str):
            self._local_date_time = value
        else:
            logger.error(
                'Attribute LocalDateTime of class ExploitationFeaturesCollectionInformationType\n\t'
                'requires a datetime.datetime or string. Got unsupported type {}.\n\t'
                'Setting value to None.'.format(type(value)))
            self._local_date_time = None

    @classmethod
    def from_sicd(cls, sicd):
        """
        Construct from a sicd element.

        Parameters
        ----------
        sicd : SICDType

        Returns
        -------
        ExploitationFeaturesCollectionInformationType
        """

        if not isinstance(sicd, SICDType):
            raise TypeError(_sicd_type_text.format(type(sicd)))
        polarizations = [
            TxRcvPolarizationType.from_sicd_value(entry.TxRcvPolarization)
            for entry in sicd.RadarCollection.RcvChannels]
        # NOTE(review): Resolution is populated from the grid sample spacings
        # (Row.SS/Col.SS) rather than the impulse response widths - confirm
        # that this is the intended surrogate for resolution
        return cls(SensorName=sicd.CollectionInfo.CollectorName,
                   RadarMode=RadarModeType(**sicd.CollectionInfo.RadarMode.to_dict()),
                   CollectionDateTime=sicd.Timeline.CollectStart,
                   CollectionDuration=sicd.Timeline.CollectDuration,
                   Resolution=(sicd.Grid.Row.SS, sicd.Grid.Col.SS),
                   Polarizations=polarizations)
class ExploitationFeaturesCollectionGeometryType(Serializable):
    """
    Key geometry parameters independent of product processing. All values
    computed at the center time of the full collection.
    """

    _fields = ('Azimuth', 'Slope', 'Squint', 'Graze', 'Tilt', 'Extensions')
    _required = ()
    _collections_tags = {'Extensions': {'array': False, 'child_tag': 'Extension'}}
    _numeric_format = {
        'Azimuth': FLOAT_FORMAT, 'Slope': FLOAT_FORMAT, 'Squint': FLOAT_FORMAT, 'Graze': FLOAT_FORMAT,
        'Tilt': FLOAT_FORMAT}
    # Descriptor
    Azimuth = FloatDescriptor(
        'Azimuth', _required, strict=DEFAULT_STRICT, bounds=(0.0, 360.0),
        docstring='Angle clockwise from north indicating the ETP line of sight vector.')  # type: float
    Slope = FloatDescriptor(
        'Slope', _required, strict=DEFAULT_STRICT, bounds=(0.0, 90.0),
        docstring='Angle between the ETP at scene center and the range vector perpendicular to '
                  'the direction of motion.')  # type: float
    Squint = FloatModularDescriptor(
        'Squint', 180.0, _required, strict=DEFAULT_STRICT,
        docstring='Angle from the ground track to platform velocity vector at nadir. '
                  'Left-look is positive, right-look is negative.')  # type: float
    Graze = FloatDescriptor(
        'Graze', _required, strict=DEFAULT_STRICT, bounds=(0.0, 90.0),
        docstring='Angle between the ETP and the line of sight vector.')  # type: float
    Tilt = FloatModularDescriptor(
        'Tilt', 180.0, _required, strict=DEFAULT_STRICT,
        docstring='Angle between the ETP and the cross range vector. '
                  'Also known as the twist angle.')  # type: float
    Extensions = ParametersDescriptor(
        'Extensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Exploitation feature extension related to geometry for a '
                  'single input image.')  # type: ParametersCollection

    def __init__(self, Azimuth=None, Slope=None, Squint=None, Graze=None, Tilt=None,
                 Extensions=None, **kwargs):
        """
        Parameters
        ----------
        Azimuth : None|float
        Slope : None|float
        Squint : None|float
        Graze : None|float
        Tilt : None|float
        Extensions : None|ParametersCollection|dict
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # assign the fields via their descriptors
        for field_name, field_value in (
                ('Azimuth', Azimuth), ('Slope', Slope), ('Squint', Squint),
                ('Graze', Graze), ('Tilt', Tilt), ('Extensions', Extensions)):
            setattr(self, field_name, field_value)
        super(ExploitationFeaturesCollectionGeometryType, self).__init__(**kwargs)

    @classmethod
    def from_calculator(cls, calculator):
        """
        Create from an ExploitationCalculator object.

        Parameters
        ----------
        calculator : ExploitationCalculator

        Returns
        -------
        ExploitationFeaturesCollectionGeometryType
        """

        if not isinstance(calculator, ExploitationCalculator):
            raise TypeError(
                'Requires input which is an instance of ExploitationCalculator, got type {}'.format(type(calculator)))
        return cls(
            Azimuth=calculator.AzimuthAngle,
            Slope=calculator.SlopeAngle,
            Squint=calculator.SquintAngle,
            Graze=calculator.GrazeAngle,
            Tilt=calculator.TiltAngle)
class CollectionType(Serializable):
    """
    Metadata for a single input collection contributing to the product.
    """

    _fields = ('Information', 'Geometry', 'Phenomenology', 'identifier')
    _required = ('Information', 'identifier')
    _set_as_attribute = ('identifier', )
    # Descriptor
    Information = SerializableDescriptor(
        'Information', ExploitationFeaturesCollectionInformationType, _required, strict=DEFAULT_STRICT,
        docstring='General collection information.')  # type: ExploitationFeaturesCollectionInformationType
    Geometry = SerializableDescriptor(
        'Geometry', ExploitationFeaturesCollectionGeometryType, _required, strict=DEFAULT_STRICT,
        docstring='Key geometry parameters independent of product '
                  'processing.')  # type: Union[None, ExploitationFeaturesCollectionGeometryType]
    Phenomenology = SerializableDescriptor(
        'Phenomenology', ExploitationFeaturesCollectionPhenomenologyType, _required, strict=DEFAULT_STRICT,
        docstring='Phenomenology related to both the geometry and the final '
                  'product processing.')  # type: Union[None, ExploitationFeaturesCollectionPhenomenologyType]
    identifier = StringDescriptor(
        'identifier', _required, strict=DEFAULT_STRICT,
        docstring='The exploitation feature identifier.')  # type: str

    def __init__(self, Information=None, Geometry=None, Phenomenology=None, identifier=None, **kwargs):
        """
        Parameters
        ----------
        Information : ExploitationFeaturesCollectionInformationType
        Geometry : None|ExploitationFeaturesCollectionGeometryType
        Phenomenology : None|ExploitationFeaturesCollectionPhenomenologyType
        identifier : str
        kwargs
        """
        # pick up any xml namespace bookkeeping forwarded by the parser
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.Information = Information
        self.Geometry = Geometry
        self.Phenomenology = Phenomenology
        self.identifier = identifier
        super(CollectionType, self).__init__(**kwargs)

    @classmethod
    def from_calculator(cls, calculator, sicd):
        """
        Build an instance from a populated ExploitationCalculator and the
        SICD structure it was derived from.

        Parameters
        ----------
        calculator : ExploitationCalculator
        sicd : SICDType

        Returns
        -------
        CollectionType
        """
        if not isinstance(calculator, ExploitationCalculator):
            raise TypeError(
                'Requires input which is an instance of ExploitationCalculator, got type {}'.format(type(calculator)))
        # delegate construction of each child to its own factory method
        return cls(
            identifier=sicd.CollectionInfo.CoreName,
            Information=ExploitationFeaturesCollectionInformationType.from_sicd(sicd),
            Geometry=ExploitationFeaturesCollectionGeometryType.from_calculator(calculator),
            Phenomenology=ExploitationFeaturesCollectionPhenomenologyType.from_calculator(calculator))
class ExploitationFeaturesProductType(Serializable):
    """
    Metadata describing the derived product itself.
    """

    _fields = ('Resolution', 'North', 'Extensions')
    _required = ('Resolution', )
    _collections_tags = {
        'Extensions': {'array': False, 'child_tag': 'Extension'}}
    _numeric_format = {'North': FLOAT_FORMAT}
    # Descriptor
    Resolution = SerializableDescriptor(
        'Resolution', RowColDoubleType, _required, strict=DEFAULT_STRICT,
        docstring='Uniformly-weighted resolution projected into the Earth Tangent '
                  'Plane (ETP).')  # type: RowColDoubleType
    North = FloatModularDescriptor(
        'North', 180.0, _required, strict=DEFAULT_STRICT,
        docstring='Counter-clockwise angle from increasing row direction to north at the center '
                  'of the image.')  # type: float
    Extensions = ParametersDescriptor(
        'Extensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Exploitation feature extension related to geometry for a '
                  'single input image.')  # type: ParametersCollection

    def __init__(self, Resolution=None, North=None, Extensions=None, **kwargs):
        """
        Parameters
        ----------
        Resolution : RowColDoubleType|numpy.ndarray|list|tuple
        North : None|float
        Extensions : None|ParametersCollection|dict
        kwargs
        """
        # pick up any xml namespace bookkeeping forwarded by the parser
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.Resolution = Resolution
        self.North = North
        self.Extensions = Extensions
        super(ExploitationFeaturesProductType, self).__init__(**kwargs)

    @classmethod
    def from_sicd(cls, sicd):
        """
        Construct from the given SICD structure.

        Parameters
        ----------
        sicd : SICDType

        Returns
        -------
        ExploitationFeaturesProductType
        """
        if not isinstance(sicd, SICDType):
            raise TypeError(_sicd_type_text.format(type(sicd)))
        # ground-plane resolution comes back as a (row, column) pair
        return cls(Resolution=sicd.get_ground_resolution())
class ExploitationFeaturesType(Serializable):
    """
    Computed metadata regarding the collect.
    """
    _fields = ('Collections', 'Product')
    _required = ('Collections', 'Product')
    _collections_tags = {
        'Collections': {'array': False, 'child_tag': 'Collection'}}
    # Descriptor
    Collections = SerializableListDescriptor(
        'Collections', CollectionType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: List[CollectionType]
    Product = SerializableDescriptor(
        'Product', ExploitationFeaturesProductType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: ExploitationFeaturesProductType

    def __init__(self, Collections=None, Product=None, **kwargs):
        """
        Parameters
        ----------
        Collections : List[CollectionType]
        Product : ExploitationFeaturesProductType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Collections = Collections
        self.Product = Product
        super(ExploitationFeaturesType, self).__init__(**kwargs)

    @classmethod
    def from_sicd(cls, sicd, row_vector, col_vector):
        """
        Construct from a single sicd element, or a collection of sicd elements.

        Parameters
        ----------
        sicd : SICDType|List[SICDType]
        row_vector : numpy.ndarray
        col_vector : numpy.ndarray

        Returns
        -------
        ExploitationFeaturesType

        Raises
        ------
        ValueError
            If `sicd` is an empty list or tuple.
        TypeError
            If `sicd` (or any entry of it) is not a SICDType instance.
        """
        if isinstance(sicd, (list, tuple)):
            # an empty collection previously fell through to an obscure IndexError
            # when populating Product below - fail early with a clear message
            if len(sicd) == 0:
                raise ValueError('Requires at least one SICDType instance, got an empty collection.')
            # validate every entry up front (the scalar path below is type-checked,
            # so the collection path should be as well)
            for entry in sicd:
                if not isinstance(entry, SICDType):
                    raise TypeError(_sicd_type_text.format(type(entry)))
            sicd_list = list(sicd)
        elif isinstance(sicd, SICDType):
            sicd_list = [sicd, ]
        else:
            raise TypeError(_sicd_type_text.format(type(sicd)))
        return cls(
            Collections=[
                CollectionType.from_calculator(
                    ExploitationCalculator.from_sicd(entry, row_vector, col_vector), entry)
                for entry in sicd_list],
            # product level metadata is derived from the first collect
            Product=ExploitationFeaturesProductType.from_sicd(sicd_list[0]))
| 19,562 | 38.204409 | 118 | py |
sarpy | sarpy-master/sarpy/io/product/sidd1_elements/GeographicAndTarget.py | """
The ProductDisplayType definition for SIDD 1.0.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
from xml.etree import ElementTree
from collections import OrderedDict
import numpy
from sarpy.io.product.sidd2_elements.base import DEFAULT_STRICT
from sarpy.io.product.sidd2_elements.blocks import LatLonArrayElementType
from sarpy.io.xml.base import Serializable, SerializableArray, ParametersCollection, \
find_children
from sarpy.io.xml.descriptors import SerializableDescriptor, SerializableArrayDescriptor, \
SerializableListDescriptor, StringDescriptor, StringListDescriptor, ParametersDescriptor
class GeographicInformationType(Serializable):
    """
    Geographic details for the region covered by the product.
    """

    _fields = ('CountryCodes', 'SecurityInfo', 'GeographicInfoExtensions')
    _required = ()
    _collections_tags = {
        'CountryCodes': {'array': False, 'child_tag': 'CountryCode'},
        'GeographicInfoExtensions': {'array': False, 'child_tag': 'GeographicInfoExtension'}}
    # descriptors
    CountryCodes = StringListDescriptor(
        'CountryCodes', _required, strict=DEFAULT_STRICT,
        docstring="List of country codes for region covered by the image.")  # type: List[str]
    SecurityInfo = StringDescriptor(
        'SecurityInfo', _required, strict=DEFAULT_STRICT,
        docstring='Specifies classification level or special handling designators '
                  'for this geographic region.')  # type: Union[None, str]
    GeographicInfoExtensions = ParametersDescriptor(
        'GeographicInfoExtensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Implementation specific geographic information.')  # type: ParametersCollection

    def __init__(self, CountryCodes=None, SecurityInfo=None, GeographicInfoExtensions=None, **kwargs):
        """
        Parameters
        ----------
        CountryCodes : None|List[str]
        SecurityInfo : None|str
        GeographicInfoExtensions : None|ParametersCollection|dict
        kwargs
        """
        # pick up any xml namespace bookkeeping forwarded by the parser
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.CountryCodes = CountryCodes
        self.SecurityInfo = SecurityInfo
        self.GeographicInfoExtensions = GeographicInfoExtensions
        super(GeographicInformationType, self).__init__(**kwargs)
class TargetInformationType(Serializable):
    """
    Details for the target of the collection/product.
    """

    _fields = ('Identifiers', 'Footprint', 'TargetInformationExtensions')
    _required = ()
    _collections_tags = {
        'Identifiers': {'array': False, 'child_tag': 'Identifier'},
        'Footprint': {'array': True, 'child_tag': 'Vertex'},
        'TargetInformationExtensions': {'array': False, 'child_tag': 'TargetInformationExtension'}}
    # Descriptors
    Identifiers = ParametersDescriptor(
        'Identifiers', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Target may have one or more identifiers. Examples: names, BE numbers, etc. Use '
                  'the "name" attribute to describe what this is.')  # type: ParametersCollection
    Footprint = SerializableArrayDescriptor(
        'Footprint', LatLonArrayElementType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Target footprint as defined by polygonal '
                  'shape.')  # type: Union[SerializableArray, List[LatLonArrayElementType]]
    TargetInformationExtensions = ParametersDescriptor(
        'TargetInformationExtensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Generic extension. Could be used to indicate type of target, '
                  'terrain, etc.')  # type: ParametersCollection

    def __init__(self, Identifiers=None, Footprint=None, TargetInformationExtensions=None, **kwargs):
        """
        Parameters
        ----------
        Identifiers : None|ParametersCollection|dict
        Footprint : None|List[LatLonArrayElementType]|numpy.ndarray|list|tuple
        TargetInformationExtensions : None|ParametersCollection|dict
        kwargs
        """
        # pick up any xml namespace bookkeeping forwarded by the parser
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.Identifiers = Identifiers
        self.Footprint = Footprint
        self.TargetInformationExtensions = TargetInformationExtensions
        super(TargetInformationType, self).__init__(**kwargs)
class GeographicCoverageType(Serializable):
    """
    The geographic coverage area for the product.

    Note that the recursive ``SubRegions`` collection is deliberately absent
    from `_fields` - it is handled manually in the `from_node`/`to_node`/`to_dict`
    overrides below, since each sub-region is itself a GeographicCoverageType.
    """
    _fields = ('GeoregionIdentifiers', 'Footprint', 'GeographicInfo')
    _required = ('Footprint', )
    # 'SubRegions' appears here (but not in _fields) so the child tag name is
    # available to the custom serialization overrides
    _collections_tags = {
        'GeoregionIdentifiers': {'array': False, 'child_tag': 'GeoregionIdentifier'},
        'Footprint': {'array': True, 'child_tag': 'Vertex'},
        'SubRegions': {'array': False, 'child_tag': 'SubRegion'}}
    # Descriptors
    # NOTE(review): this docstring appears copied from TargetInformationType.Identifiers -
    # it describes identifiers for the geographic region, not a target
    GeoregionIdentifiers = ParametersDescriptor(
        'GeoregionIdentifiers', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Target may have one or more identifiers. Examples: names, BE numbers, etc. Use '
                  'the "name" attribute to describe what this is.')  # type: ParametersCollection
    Footprint = SerializableArrayDescriptor(
        'Footprint', LatLonArrayElementType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Estimated ground footprint of the '
                  'product.')  # type: Union[None, SerializableArray, List[LatLonArrayElementType]]
    GeographicInfo = SerializableDescriptor(
        'GeographicInfo', GeographicInformationType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, GeographicInformationType]

    def __init__(self, GeoregionIdentifiers=None, Footprint=None, SubRegions=None, GeographicInfo=None, **kwargs):
        """
        Parameters
        ----------
        GeoregionIdentifiers : None|ParametersCollection|dict
        Footprint : None|List[LatLonArrayElementType]|numpy.ndarray|list|tuple
        SubRegions : None|List[GeographicCoverageType]
        GeographicInfo : None|GeographicInformationType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.GeoregionIdentifiers = GeoregionIdentifiers
        self.Footprint = Footprint
        self.GeographicInfo = GeographicInfo
        # sub-regions are accumulated through addSubRegion so that each entry
        # is coerced/validated to a GeographicCoverageType
        self._SubRegions = []
        if SubRegions is None:
            pass
        elif isinstance(SubRegions, GeographicCoverageType):
            self.addSubRegion(SubRegions)
        elif isinstance(SubRegions, (list, tuple)):
            for el in SubRegions:
                self.addSubRegion(el)
        else:
            raise ValueError('SubRegions got unexpected type {}'.format(type(SubRegions)))
        super(GeographicCoverageType, self).__init__(**kwargs)

    @property
    def SubRegions(self):
        """
        List[GeographicCoverageType]: list of sub-regions.
        """
        return self._SubRegions

    def addSubRegion(self, value):
        """
        Add the given SubRegion to the SubRegions list.

        Accepts an ElementTree node or a dict, which are deserialized first.

        Parameters
        ----------
        value : GeographicCoverageType

        Returns
        -------
        None
        """
        if isinstance(value, ElementTree.Element):
            # NOTE(review): assumes the base class provides defaults for
            # _xml_ns/_xml_ns_key when they were not set in __init__ - confirm
            value = GeographicCoverageType.from_node(value, self._xml_ns, ns_key=self._xml_ns_key)
        elif isinstance(value, dict):
            value = GeographicCoverageType.from_dict(value)
        if isinstance(value, GeographicCoverageType):
            self._SubRegions.append(value)
        else:
            raise TypeError('Trying to set SubRegion element with unexpected type {}'.format(type(value)))

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        # gather the recursive SubRegion children by hand (not covered by _fields),
        # then let the standard machinery handle the rest
        if kwargs is None:
            kwargs = OrderedDict()
        kwargs['SubRegions'] = find_children(node, 'SubRegion', xml_ns, ns_key)
        return super(GeographicCoverageType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        node = super(GeographicCoverageType, self).to_node(
            doc, tag, ns_key=ns_key, parent=parent, check_validity=check_validity, strict=strict, exclude=exclude)
        # slap on the SubRegion children
        sub_key = self._child_xml_ns_key.get('SubRegions', ns_key)
        for entry in self._SubRegions:
            entry.to_node(doc, 'SubRegion', ns_key=sub_key, parent=node, strict=strict)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = super(GeographicCoverageType, self).to_dict(check_validity=check_validity, strict=strict, exclude=exclude)
        # slap on the SubRegion children
        if len(self.SubRegions) > 0:
            out['SubRegions'] = [
                entry.to_dict(check_validity=check_validity, strict=strict) for entry in self._SubRegions]
        return out
class GeographicAndTargetType(Serializable):
    """
    Container specifying the image coverage area in geographic coordinates, as
    well as optional data about the target of the collection/product.
    """

    _fields = ('GeographicCoverage', 'TargetInformations')
    _required = ('GeographicCoverage', )
    _collections_tags = {'TargetInformations': {'array': False, 'child_tag': 'TargetInformation'}}
    # Descriptors
    GeographicCoverage = SerializableDescriptor(
        'GeographicCoverage', GeographicCoverageType, _required, strict=DEFAULT_STRICT,
        docstring='Provides geographic coverage information.')  # type: GeographicCoverageType
    TargetInformations = SerializableListDescriptor(
        'TargetInformations', TargetInformationType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Provides target specific geographic '
                  'information.')  # type: Union[None, List[TargetInformationType]]

    def __init__(self, GeographicCoverage=None, TargetInformations=None, **kwargs):
        """
        Parameters
        ----------
        GeographicCoverage : GeographicCoverageType
        TargetInformations : None|List[TargetInformationType]
        kwargs
        """
        # pick up any xml namespace bookkeeping forwarded by the parser
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.GeographicCoverage = GeographicCoverage
        self.TargetInformations = TargetInformations
        super(GeographicAndTargetType, self).__init__(**kwargs)
| 10,863 | 41.272374 | 120 | py |
sarpy | sarpy-master/sarpy/io/product/sidd1_elements/SIDD.py | """
The SIDDType 1.0 definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union
from collections import OrderedDict
from copy import deepcopy
from sarpy.io.xml.base import Serializable, parse_xml_from_file, parse_xml_from_string
from sarpy.io.xml.descriptors import SerializableDescriptor
from .ProductCreation import ProductCreationType
from .Display import ProductDisplayType
from .GeographicAndTarget import GeographicAndTargetType
from .Measurement import MeasurementType
from .ExploitationFeatures import ExploitationFeaturesType
from sarpy.io.product.sidd2_elements.DownstreamReprocessing import DownstreamReprocessingType
from sarpy.io.product.sidd2_elements.ProductProcessing import ProductProcessingType
from sarpy.io.product.sidd2_elements.Annotations import AnnotationsType
from sarpy.io.product.sidd2_elements.blocks import ErrorStatisticsType, RadiometricType
from sarpy.geometry import point_projection
from sarpy.io.product.sidd_schema import get_specification_identifier, \
get_urn_details, validate_xml_ns
# non-strict validation is the default for all descriptors in this module
DEFAULT_STRICT = False
logger = logging.getLogger(__name__)

############
# namespace validate and definition of required entries in the namespace dictionary
_SIDD_SPECIFICATION_IDENTIFIER = get_specification_identifier()
# this module implements SIDD version 1.0 only - see from_node enforcement below
_SIDD_URN = 'urn:SIDD:1.0.0'
# registered details (version/date and companion namespaces) for the SIDD 1.0 urn
_sidd_details = get_urn_details(_SIDD_URN)
_SIDD_SPECIFICATION_VERSION = _sidd_details['version']
_SIDD_SPECIFICATION_DATE = _sidd_details['date']
_ISM_URN = _sidd_details['ism_urn']
_SFA_URN = _sidd_details['sfa_urn']
_SICOMMON_URN = _sidd_details['sicommon_urn']
##########
# The SIDD object
class SIDDType(Serializable):
    """
    The root element of the SIDD 1.0 document.
    """
    _fields = (
        'ProductCreation', 'Display', 'GeographicAndTarget', 'Measurement', 'ExploitationFeatures',
        'DownstreamReprocessing', 'ErrorStatistics', 'Radiometric', 'ProductProcessing', 'Annotations')
    _required = (
        'ProductCreation', 'Display', 'GeographicAndTarget', 'Measurement', 'ExploitationFeatures')
    # Descriptor
    ProductCreation = SerializableDescriptor(
        'ProductCreation', ProductCreationType, _required, strict=DEFAULT_STRICT,
        docstring='Information related to processor, classification, '
                  'and product type.')  # type: ProductCreationType
    Display = SerializableDescriptor(
        'Display', ProductDisplayType, _required, strict=DEFAULT_STRICT,
        docstring='Contains information on the parameters needed to display the product in '
                  'an exploitation tool.')  # type: ProductDisplayType
    GeographicAndTarget = SerializableDescriptor(
        'GeographicAndTarget', GeographicAndTargetType, _required, strict=DEFAULT_STRICT,
        docstring='Contains generic and extensible targeting and geographic region '
                  'information.')  # type: GeographicAndTargetType
    Measurement = SerializableDescriptor(
        'Measurement', MeasurementType, _required, strict=DEFAULT_STRICT,
        docstring='Contains the metadata necessary for performing '
                  'measurements.')  # type: MeasurementType
    ExploitationFeatures = SerializableDescriptor(
        'ExploitationFeatures', ExploitationFeaturesType, _required, strict=DEFAULT_STRICT,
        docstring='Computed metadata regarding the input collections and '
                  'final product.')  # type: ExploitationFeaturesType
    DownstreamReprocessing = SerializableDescriptor(
        'DownstreamReprocessing', DownstreamReprocessingType, _required, strict=DEFAULT_STRICT,
        docstring='Metadata describing any downstream processing of the '
                  'product.')  # type: Union[None, DownstreamReprocessingType]
    ErrorStatistics = SerializableDescriptor(
        'ErrorStatistics', ErrorStatisticsType, _required, strict=DEFAULT_STRICT,
        docstring='Error statistics passed through from the SICD '
                  'metadata.')  # type: Union[None, ErrorStatisticsType]
    Radiometric = SerializableDescriptor(
        'Radiometric', RadiometricType, _required, strict=DEFAULT_STRICT,
        docstring='Radiometric information about the product.')  # type: Union[None, RadiometricType]
    ProductProcessing = SerializableDescriptor(
        'ProductProcessing', ProductProcessingType, _required, strict=DEFAULT_STRICT,
        docstring='Contains metadata related to algorithms used during '
                  'product generation.')  # type: ProductProcessingType
    Annotations = SerializableDescriptor(
        'Annotations', AnnotationsType, _required, strict=DEFAULT_STRICT,
        docstring='List of annotations for the imagery.')  # type: AnnotationsType

    def __init__(self, ProductCreation=None, Display=None, GeographicAndTarget=None,
                 Measurement=None, ExploitationFeatures=None, DownstreamReprocessing=None,
                 ErrorStatistics=None, Radiometric=None, ProductProcessing=None,
                 Annotations=None, **kwargs):
        """
        Parameters
        ----------
        ProductCreation : ProductCreationType
        Display : ProductDisplayType
        GeographicAndTarget : GeographicAndTargetType
        Measurement : MeasurementType
        ExploitationFeatures : ExploitationFeaturesType
        DownstreamReprocessing : None|DownstreamReprocessingType
        ErrorStatistics : None|ErrorStatisticsType
        Radiometric : None|RadiometricType
        ProductProcessing : None|ProductProcessingType
        Annotations : None|AnnotationsType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # optional NITF header options, consumed only when writing a SIDD file
        nitf = kwargs.get('_NITF', {})
        if not isinstance(nitf, dict):
            raise TypeError('Provided NITF options are required to be in dictionary form.')
        self._NITF = nitf
        # lazily populated via define_coa_projection
        self._coa_projection = None
        self.ProductCreation = ProductCreation
        self.Display = Display
        self.GeographicAndTarget = GeographicAndTarget
        self.Measurement = Measurement
        self.ExploitationFeatures = ExploitationFeatures
        self.DownstreamReprocessing = DownstreamReprocessing
        self.ErrorStatistics = ErrorStatistics
        self.Radiometric = Radiometric
        self.ProductProcessing = ProductProcessing
        self.Annotations = Annotations
        super(SIDDType, self).__init__(**kwargs)

    @property
    def coa_projection(self):
        """
        The COA Projection object, if previously defined through using :func:`define_coa_projection`.

        Returns
        -------
        None|sarpy.geometry.point_projection.COAProjection
        """
        return self._coa_projection

    @property
    def NITF(self):
        """
        Optional dictionary of NITF header information, pertains only to subsequent
        SIDD file writing.

        Returns
        -------
        Dict
        """
        return self._NITF

    def can_project_coordinates(self):
        """
        Determines whether the necessary elements are populated to permit projection
        between image and physical coordinates. If False, then the (first discovered)
        reason why not will be logged at error level.

        Returns
        -------
        bool
        """
        # an already-established projection is sufficient
        if self._coa_projection is not None:
            return True
        # only the plane projection case is supported for formulating a new projection
        if self.Measurement.ProjectionType != 'PlaneProjection':
            logger.error(
                'Formulating a projection is only supported for PlaneProjection, '
                'got {}.'.format(self.Measurement.ProjectionType))
            return False
        return True

    def define_coa_projection(self, delta_arp=None, delta_varp=None, range_bias=None,
                              adj_params_frame='ECF', override=True):
        """
        Define the COAProjection object.

        Parameters
        ----------
        delta_arp : None|numpy.ndarray|list|tuple
            ARP position adjustable parameter (ECF, m). Defaults to 0 in each coordinate.
        delta_varp : None|numpy.ndarray|list|tuple
            VARP position adjustable parameter (ECF, m/s). Defaults to 0 in each coordinate.
        range_bias : float|int
            Range bias adjustable parameter (m), defaults to 0.
        adj_params_frame : str
            One of ['ECF', 'RIC_ECF', 'RIC_ECI'], specifying the coordinate frame used for
            expressing `delta_arp` and `delta_varp` parameters.
        override : bool
            should we redefine, if it is previously defined?

        Returns
        -------
        None
        """
        if not self.can_project_coordinates():
            logger.error('The COAProjection object cannot be defined.')
            return
        # respect an existing projection unless told to replace it
        if self._coa_projection is not None and not override:
            return
        self._coa_projection = point_projection.COAProjection.from_sidd(
            self, delta_arp=delta_arp, delta_varp=delta_varp, range_bias=range_bias,
            adj_params_frame=adj_params_frame)

    def project_ground_to_image(self, coords, **kwargs):
        """
        Transforms a 3D ECF point to pixel (row/column) coordinates. This is
        implemented in accordance with the SICD Image Projections Description Document.
        **Really Scene-To-Image projection.**

        Parameters
        ----------
        coords : numpy.ndarray|tuple|list
            ECF coordinate to map to scene coordinates, of size `N x 3`.
        kwargs
            The keyword arguments for the :func:`sarpy.geometry.point_projection.ground_to_image` method.

        Returns
        -------
        Tuple[numpy.ndarray, float, int]
            * `image_points` - the determined image point array, of size `N x 2`. Following
              the SICD convention, the upper-left pixel is [0, 0].
            * `delta_gpn` - residual ground plane displacement (m).
            * `iterations` - the number of iterations performed.

        See Also
        --------
        sarpy.geometry.point_projection.ground_to_image
        """
        # prefer the projection defined on this structure, unless the caller says otherwise
        if 'use_structure_coa' not in kwargs:
            kwargs['use_structure_coa'] = True
        return point_projection.ground_to_image(coords, self, **kwargs)

    def project_ground_to_image_geo(self, coords, ordering='latlong', **kwargs):
        """
        Transforms a 3D Lat/Lon/HAE point to pixel (row/column) coordinates. This is
        implemented in accordance with the SICD Image Projections Description Document.
        **Really Scene-To-Image projection.**

        Parameters
        ----------
        coords : numpy.ndarray|tuple|list
            Geodetic (Lat/Lon/HAE) coordinate to map to scene coordinates, of size `N x 3`.
        ordering : str
            If 'longlat', then the input is `[longitude, latitude, hae]`.
            Otherwise, the input is `[latitude, longitude, hae]`. Passed through
            to :func:`sarpy.geometry.geocoords.geodetic_to_ecf`.
        kwargs
            The keyword arguments for the :func:`sarpy.geometry.point_projection.ground_to_image_geo` method.

        Returns
        -------
        Tuple[numpy.ndarray, float, int]
            * `image_points` - the determined image point array, of size `N x 2`. Following
              the SICD convention, the upper-left pixel is [0, 0].
            * `delta_gpn` - residual ground plane displacement (m).
            * `iterations` - the number of iterations performed.

        See Also
        --------
        sarpy.geometry.point_projection.ground_to_image_geo
        """
        if 'use_structure_coa' not in kwargs:
            kwargs['use_structure_coa'] = True
        return point_projection.ground_to_image_geo(coords, self, ordering=ordering, **kwargs)

    def project_image_to_ground(self, im_points, projection_type='HAE', **kwargs):
        """
        Transforms image coordinates to ground plane ECF coordinate via the algorithm(s)
        described in SICD Image Projections document.

        Parameters
        ----------
        im_points : numpy.ndarray|list|tuple
            the image coordinate array
        projection_type : str
            One of `['PLANE', 'HAE', 'DEM']`. Type `DEM` is a work in progress.
        kwargs
            The keyword arguments for the :func:`sarpy.geometry.point_projection.image_to_ground` method.

        Returns
        -------
        numpy.ndarray
            Ground Plane Point (in ECF coordinates) corresponding to the input image coordinates.

        See Also
        --------
        sarpy.geometry.point_projection.image_to_ground
        """
        if 'use_structure_coa' not in kwargs:
            kwargs['use_structure_coa'] = True
        return point_projection.image_to_ground(
            im_points, self, projection_type=projection_type, **kwargs)

    def project_image_to_ground_geo(self, im_points, ordering='latlong', projection_type='HAE', **kwargs):
        """
        Transforms image coordinates to ground plane WGS-84 coordinate via the algorithm(s)
        described in SICD Image Projections document.

        Parameters
        ----------
        im_points : numpy.ndarray|list|tuple
            the image coordinate array
        projection_type : str
            One of `['PLANE', 'HAE', 'DEM']`. Type `DEM` is a work in progress.
        ordering : str
            Determines whether return is ordered as `[lat, long, hae]` or `[long, lat, hae]`.
            Passed through to :func:`sarpy.geometry.geocoords.ecf_to_geodetic`.
        kwargs
            The keyword arguments for the :func:`sarpy.geometry.point_projection.image_to_ground_geo` method.

        Returns
        -------
        numpy.ndarray
            Ground Plane Point, in geodetic (WGS-84) coordinates ordered per `ordering`,
            corresponding to the input image coordinates.

        See Also
        --------
        sarpy.geometry.point_projection.image_to_ground_geo
        """
        if 'use_structure_coa' not in kwargs:
            kwargs['use_structure_coa'] = True
        return point_projection.image_to_ground_geo(
            im_points, self, ordering=ordering, projection_type=projection_type, **kwargs)

    @staticmethod
    def get_xmlns_collection():
        """
        Gets the correct SIDD 1.0 dictionary of xml namespace details.

        Returns
        -------
        dict
        """
        return OrderedDict([
            ('xmlns', _SIDD_URN), ('xmlns:sicommon', _SICOMMON_URN),
            ('xmlns:sfa', _SFA_URN), ('xmlns:ism', _ISM_URN)])

    @staticmethod
    def get_des_details():
        """
        Gets the correct SIDD 1.0 DES subheader details.

        Returns
        -------
        dict
        """
        return OrderedDict([
            ('DESSHSI', _SIDD_SPECIFICATION_IDENTIFIER),
            ('DESSHSV', _SIDD_SPECIFICATION_VERSION),
            ('DESSHSD', _SIDD_SPECIFICATION_DATE),
            ('DESSHTN', _SIDD_URN)])

    @classmethod
    def from_node(cls, node, xml_ns, ns_key='default', kwargs=None):
        """
        Deserialize from an ElementTree node, enforcing that the namespace
        corresponds to some SIDD 1.x urn before delegating to the base class.
        """
        if ns_key is None:
            raise ValueError('ns_key must be defined.')
        if ns_key not in xml_ns:
            raise ValueError('ns_key {} is not in the xml namespace'.format(ns_key))
        valid_ns = validate_xml_ns(xml_ns, ns_key)
        # this class only handles SIDD version 1.x documents
        if not xml_ns[ns_key].startswith('urn:SIDD:1.'):
            raise ValueError('Cannot use urn {} for SIDD version 1.0'.format(xml_ns[ns_key]))
        if not valid_ns:
            logger.warning(
                'SIDD namespace validation failed,\n\t'
                'which may lead to subsequent deserialization failures')
        return super(SIDDType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)

    def to_xml_bytes(self, urn=None, tag='SIDD', check_validity=False, strict=DEFAULT_STRICT):
        """
        Serialize to xml bytes, defaulting the namespace collection to the
        SIDD 1.0 values when `urn` is not supplied.
        """
        if urn is None:
            urn = self.get_xmlns_collection()
        return super(SIDDType, self).to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict)

    def to_xml_string(self, urn=None, tag='SIDD', check_validity=False, strict=DEFAULT_STRICT):
        """
        Serialize to a utf-8 xml string - see :func:`to_xml_bytes`.
        """
        return self.to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict).decode('utf-8')

    def copy(self):
        """
        Provides a deep copy.

        Returns
        -------
        SIDDType
        """
        out = super(SIDDType, self).copy()
        # the NITF options dictionary is not part of the serializable fields,
        # so it must be copied explicitly
        out._NITF = deepcopy(self._NITF)
        return out

    @classmethod
    def from_xml_file(cls, file_path):
        """
        Construct the sidd object from a stand-alone xml file path.

        Parameters
        ----------
        file_path : str

        Returns
        -------
        SIDDType
        """
        root_node, xml_ns = parse_xml_from_file(file_path)
        ns_key = 'default' if 'default' in xml_ns else None
        return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)

    @classmethod
    def from_xml_string(cls, xml_string):
        """
        Construct the sidd object from an xml string.

        Parameters
        ----------
        xml_string : str|bytes

        Returns
        -------
        SIDDType
        """
        root_node, xml_ns = parse_xml_from_string(xml_string)
        ns_key = 'default' if 'default' in xml_ns else None
        return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)
| 17,347 | 37.551111 | 113 | py |
sarpy | sarpy-master/sarpy/io/product/sidd1_elements/ProductCreation.py | """
The ProductCreationType definition for version 1.0.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union
from datetime import datetime
import numpy
from sarpy.io.product.sidd2_elements.base import DEFAULT_STRICT
from sarpy.io.product.sidd2_elements.ProductCreation import ProcessorInformationType, \
extract_classification_from_sicd
from sarpy.io.xml.base import Serializable, ParametersCollection
from sarpy.io.xml.descriptors import SerializableDescriptor, StringDescriptor, \
StringEnumDescriptor, IntegerDescriptor, ParametersDescriptor
from sarpy.io.complex.sicd_elements.SICD import SICDType
class ProductClassificationType(Serializable):
    """
    The overall classification of the product.
    """
    # The tuples/dicts below are serialization metadata consumed by the Serializable
    # base class - they define which attributes exist, which are required, and how
    # each is rendered to/parsed from xml.
    _fields = (
        'DESVersion', 'resourceElement', 'createDate', 'compliesWith',
        'classification', 'ownerProducer', 'SCIcontrols', 'SARIdentifier',
        'disseminationControls', 'FGIsourceOpen', 'FGIsourceProtected', 'releasableTo',
        'nonICmarkings', 'classifiedBy', 'compilationReason', 'derivativelyClassifiedBy',
        'classificationReason', 'nonUSControls', 'derivedFrom', 'declassDate',
        'declassEvent', 'declassException', 'typeOfExemptedSource', 'dateOfExemptedSource',
        'SecurityExtensions')
    _required = ('DESVersion', 'createDate', 'classification', 'ownerProducer')
    _collections_tags = {'SecurityExtensions': {'array': False, 'child_tag': 'SecurityExtension'}}
    # every field except SecurityExtensions is emitted as an xml attribute rather
    # than a child element
    _set_as_attribute = (
        'DESVersion', 'resourceElement', 'createDate', 'compliesWith',
        'classification', 'ownerProducer', 'SCIcontrols', 'SARIdentifier',
        'disseminationControls', 'FGIsourceOpen', 'FGIsourceProtected', 'releasableTo',
        'nonICmarkings', 'classifiedBy', 'compilationReason', 'derivativelyClassifiedBy',
        'classificationReason', 'nonUSControls', 'derivedFrom', 'declassDate',
        'declassEvent', 'declassException', 'typeOfExemptedSource', 'dateOfExemptedSource')
    # the attribute fields live in the 'ism' xml namespace
    _child_xml_ns_key = {the_field: 'ism' for the_field in _fields if the_field != 'SecurityExtensions'}
    # Descriptor
    DESVersion = IntegerDescriptor(
        'DESVersion', _required, strict=DEFAULT_STRICT, default_value=4,
        docstring='The version number of the DES. Should there be multiple specified in an instance document '
                  'the one at the root node is the one that will apply to the entire document.')  # type: int
    createDate = StringDescriptor(
        'createDate', _required, strict=DEFAULT_STRICT,
        docstring='This should be a date of format :code:`YYYY-MM-DD`, but this is not checked.')  # type: str
    compliesWith = StringEnumDescriptor(
        'compliesWith', ('ICD-710', 'DoD5230.24'), _required, strict=DEFAULT_STRICT, default_value=None,
        docstring='')  # type: Union[None, str]
    classification = StringEnumDescriptor(
        'classification', ('U', 'C', 'R', 'S', 'TS'), _required, strict=DEFAULT_STRICT,
        docstring='')  # type: str
    ownerProducer = StringDescriptor(
        'ownerProducer', _required, strict=DEFAULT_STRICT,
        docstring='Three letter country code')  # type: str
    SCIcontrols = StringDescriptor(
        'SCIcontrols', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    SARIdentifier = StringDescriptor(
        'SARIdentifier', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    disseminationControls = StringDescriptor(
        'disseminationControls', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    FGIsourceOpen = StringDescriptor(
        'FGIsourceOpen', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    FGIsourceProtected = StringDescriptor(
        'FGIsourceProtected', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    releasableTo = StringDescriptor(
        'releasableTo', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    nonICmarkings = StringDescriptor(
        'nonICmarkings', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    classifiedBy = StringDescriptor(
        'classifiedBy', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    compilationReason = StringDescriptor(
        'compilationReason', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    derivativelyClassifiedBy = StringDescriptor(
        'derivativelyClassifiedBy', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    classificationReason = StringDescriptor(
        'classificationReason', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    nonUSControls = StringDescriptor(
        'nonUSControls', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    derivedFrom = StringDescriptor(
        'derivedFrom', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    declassDate = StringDescriptor(
        'declassDate', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    declassEvent = StringDescriptor(
        'declassEvent', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    declassException = StringDescriptor(
        'declassException', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    typeOfExemptedSource = StringDescriptor(
        'typeOfExemptedSource', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    dateOfExemptedSource = StringDescriptor(
        'dateOfExemptedSource', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, str]
    SecurityExtensions = ParametersDescriptor(
        'SecurityExtensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Extensible parameters used to support profile-specific needs related to '
                  'product security.')  # type: ParametersCollection
    def __init__(self, DESVersion=4, createDate=None, compliesWith=None,
                 classification='U', ownerProducer='USA', SCIcontrols=None, SARIdentifier=None,
                 disseminationControls=None, FGIsourceOpen=None, FGIsourceProtected=None, releasableTo=None,
                 nonICmarkings=None, classifiedBy=None, compilationReason=None, derivativelyClassifiedBy=None,
                 classificationReason=None, nonUSControls=None, derivedFrom=None, declassDate=None,
                 declassEvent=None, declassException=None, typeOfExemptedSource=None, dateOfExemptedSource=None,
                 SecurityExtensions=None, **kwargs):
        """
        Parameters
        ----------
        DESVersion : int
        createDate : str
        compliesWith : None|str
        classification : str
        ownerProducer : str
        SCIcontrols : None|str
        SARIdentifier : None|str
        disseminationControls : None|str
        FGIsourceOpen : None|str
        FGIsourceProtected : None|str
        releasableTo : None|str
        nonICmarkings : None|str
        classifiedBy : None|str
        compilationReason : None|str
        derivativelyClassifiedBy : None|str
        classificationReason : None|str
        nonUSControls : None|str
        derivedFrom : None|str
        declassDate : None|str
        declassEvent : None|str
        declassException : None|str
        typeOfExemptedSource : None|str
        dateOfExemptedSource : None|str
        SecurityExtensions : None|ParametersCollection|dict
        kwargs
        """
        # xml namespace bookkeeping must be set before the descriptor assignments below
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.DESVersion = DESVersion
        self.createDate = createDate
        self.compliesWith = compliesWith
        self.classification = classification
        self.ownerProducer = ownerProducer
        self.SCIcontrols = SCIcontrols
        self.SARIdentifier = SARIdentifier
        self.disseminationControls = disseminationControls
        self.FGIsourceOpen = FGIsourceOpen
        self.FGIsourceProtected = FGIsourceProtected
        self.releasableTo = releasableTo
        self.nonICmarkings = nonICmarkings
        self.classifiedBy = classifiedBy
        self.compilationReason = compilationReason
        self.derivativelyClassifiedBy = derivativelyClassifiedBy
        self.classificationReason = classificationReason
        self.nonUSControls = nonUSControls
        self.derivedFrom = derivedFrom
        self.declassDate = declassDate
        self.declassEvent = declassEvent
        self.declassException = declassException
        self.typeOfExemptedSource = typeOfExemptedSource
        self.dateOfExemptedSource = dateOfExemptedSource
        self.SecurityExtensions = SecurityExtensions
        super(ProductClassificationType, self).__init__(**kwargs)
    @property
    def resourceElement(self):
        """str: fixed attribute value - always the literal `'true'`."""
        return 'true'
    @classmethod
    def from_sicd(cls, sicd, create_date=None):
        """
        Extract best guess from SICD.

        Parameters
        ----------
        sicd : SICDType
        create_date : str

        Returns
        -------
        ProductClassificationType
        """
        clas = extract_classification_from_sicd(sicd)
        if create_date is None:
            # NOTE(review): defaults to local time for the creation date - confirm
            # that UTC is not required here
            create_date = datetime.now().strftime('%Y-%m-%d')
        return cls(classification=clas, createDate=create_date)
class ProductCreationType(Serializable):
    """
    Contains general information about product creation.
    """
    # serialization metadata consumed by the Serializable base class
    _fields = (
        'ProcessorInformation', 'Classification', 'ProductName', 'ProductClass',
        'ProductType', 'ProductCreationExtensions')
    _required = (
        'ProcessorInformation', 'Classification', 'ProductName', 'ProductClass')
    _collections_tags = {'ProductCreationExtensions': {'array': False, 'child_tag': 'ProductCreationExtension'}}
    # Descriptors
    ProcessorInformation = SerializableDescriptor(
        'ProcessorInformation', ProcessorInformationType, _required, strict=DEFAULT_STRICT,
        docstring='Details regarding processor.')  # type: ProcessorInformationType
    Classification = SerializableDescriptor(
        'Classification', ProductClassificationType, _required, strict=DEFAULT_STRICT,
        docstring='The overall classification of the product.')  # type: ProductClassificationType
    ProductName = StringDescriptor(
        'ProductName', _required, strict=DEFAULT_STRICT,
        docstring='The output product name defined by the processor.')  # type: str
    ProductClass = StringDescriptor(
        'ProductClass', _required, strict=DEFAULT_STRICT,
        docstring='Class of product. Examples - :code:`Dynamic Image, Amplitude Change Detection, '
                  'Coherent Change Detection`')  # type: str
    ProductType = StringDescriptor(
        'ProductType', _required, strict=DEFAULT_STRICT,
        docstring='Type of sub-product. Examples - :code:`Frame #, Reference, Match`. '
                  'This field is only needed if there is a suite of associated '
                  'products.')  # type: Union[None, str]
    ProductCreationExtensions = ParametersDescriptor(
        'ProductCreationExtensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Extensible parameters used to support profile-specific needs related to '
                  'product creation.')  # type: ParametersCollection
    def __init__(self, ProcessorInformation=None, Classification=None, ProductName=None,
                 ProductClass=None, ProductType=None, ProductCreationExtensions=None, **kwargs):
        """
        Parameters
        ----------
        ProcessorInformation : ProcessorInformationType
        Classification : ProductClassificationType
        ProductName : str
        ProductClass : str
        ProductType : str
        ProductCreationExtensions : ParametersCollection|dict
        kwargs
        """
        # xml namespace bookkeeping must be set before the descriptor assignments below
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.ProcessorInformation = ProcessorInformation
        self.Classification = Classification
        self.ProductName = ProductName
        self.ProductClass = ProductClass
        self.ProductType = ProductType
        self.ProductCreationExtensions = ProductCreationExtensions
        super(ProductCreationType, self).__init__(**kwargs)
    @classmethod
    def from_sicd(cls, sicd, product_class):
        """
        Generate from a SICD for the given product class.

        Parameters
        ----------
        sicd : SICDType
        product_class : str

        Returns
        -------
        ProductCreationType
        """
        if not isinstance(sicd, SICDType):
            raise TypeError('Requires SICDType instance, got type {}'.format(type(sicd)))
        # deferred import - presumably to avoid a circular import at module load; confirm
        from sarpy.__about__ import __title__, __version__
        proc_info = ProcessorInformationType(
            Application='{} {}'.format(__title__, __version__),
            # NOTE(review): naive local time - confirm UTC is not required
            ProcessingDateTime=numpy.datetime64(datetime.now()),
            Site='Unknown')
        classification = ProductClassificationType.from_sicd(sicd)
        return cls(ProcessorInformation=proc_info,
                   Classification=classification,
                   ProductName=product_class,
                   ProductClass=product_class)
| 13,653 | 43.620915 | 112 | py |
sarpy | sarpy-master/sarpy/io/product/sidd1_elements/__init__.py |
__classification__ = 'UNCLASSIFIED'
| 37 | 11.666667 | 35 | py |
sarpy | sarpy-master/sarpy/io/product/sidd1_elements/Display.py | """
The ProductDisplayType definition for SIDD 1.0.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union
from collections import OrderedDict
import numpy
from sarpy.io.product.sidd2_elements.base import DEFAULT_STRICT, FLOAT_FORMAT
from sarpy.io.xml.base import Serializable, Arrayable, ParametersCollection, \
create_text_node, create_new_node, get_node_value, find_first_child
from sarpy.io.xml.descriptors import SerializableDescriptor, IntegerDescriptor, \
FloatDescriptor, StringDescriptor, StringEnumDescriptor, ParametersDescriptor
logger = logging.getLogger(__name__)
class ColorDisplayRemapType(Serializable, Arrayable):
    """
    LUT-base remap indicating that the color display is done through index-based color.
    """

    __slots__ = ('_remap_lut', )

    def __init__(self, RemapLUT=None, **kwargs):
        """
        Parameters
        ----------
        RemapLUT : numpy.ndarray|list|tuple
            The N x 3 lookup table mapping index -> (R, G, B).
        kwargs
        """
        self._remap_lut = None
        # xml namespace bookkeeping must be set before attribute assignment
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.RemapLUT = RemapLUT
        super(ColorDisplayRemapType, self).__init__(**kwargs)

    @property
    def RemapLUT(self):
        r"""
        numpy.ndarray: the two dimensional (:math:`N \times 3`) look-up table, where the dtype must be
        `uint8` or `uint16`. The first dimension should correspond to entries (i.e. size of the lookup table), and the
        second dimension must have size 3 and corresponds to `RGB` bands.
        """
        return self._remap_lut

    @RemapLUT.setter
    def RemapLUT(self, value):
        if value is None:
            self._remap_lut = None
            return
        if isinstance(value, (tuple, list)):
            value = numpy.array(value, dtype=numpy.uint8)
        if not isinstance(value, numpy.ndarray) or value.dtype.name not in ('uint8', 'uint16'):
            raise ValueError(
                'RemapLUT for class ColorDisplayRemapType must be a numpy.ndarray of dtype uint8 or uint16.')
        # Fixed: the previous check `value.ndim != 2 and value.shape[1] != 3` raised
        # IndexError for 1-d input and accepted 2-d arrays whose second dimension was
        # not 3. `or` short-circuits, so shape[1] is only read when ndim == 2.
        if value.ndim != 2 or value.shape[1] != 3:
            raise ValueError('RemapLUT for class ColorDisplayRemapType must be an N x 3 array.')
        self._remap_lut = value

    @property
    def size(self):
        """
        int: the size of the lookup table
        """
        if self._remap_lut is None:
            return 0
        else:
            return self._remap_lut.shape[0]

    def __len__(self):
        if self._remap_lut is None:
            return 0
        return self._remap_lut.shape[0]

    def __getitem__(self, item):
        return self._remap_lut[item]

    @classmethod
    def from_array(cls, array):
        """
        Create from the lookup table array.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            Must be two-dimensional. If not a numpy.ndarray, this will be naively
            interpreted as `uint8`.

        Returns
        -------
        ColorDisplayRemapType
        """
        return cls(RemapLUT=array)

    def get_array(self, dtype=numpy.uint8):
        """
        Gets **a copy** of the coefficient array of specified data type.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            the lookup table array
        """
        return numpy.array(self._remap_lut, dtype=dtype)

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        lut_key = cls._child_xml_ns_key.get('RemapLUT', ns_key)
        lut_node = find_first_child(node, 'RemapLUT', xml_ns, lut_key)
        if lut_node is not None:
            dim1 = int(lut_node.attrib['size'])
            dim2 = 3
            # parse as uint16 first, then narrow to uint8 if the values permit
            arr = numpy.zeros((dim1, dim2), dtype=numpy.uint16)
            entries = get_node_value(lut_node).split()
            i = 0
            for entry in entries:
                if len(entry) == 0:
                    continue
                sentry = entry.split(',')
                if len(sentry) != 3:
                    logger.error(
                        'Parsing RemapLUT is likely compromised.\n\t'
                        'Got entry {}, which we are skipping.'.format(entry))
                    continue
                # Fixed: populate the row from the split "r,g,b" triplet. The previous
                # code iterated the raw entry string character-by-character, which
                # raised ValueError on the ',' separators.
                arr[i, :] = [int(el) for el in sentry]
                i += 1
            if numpy.max(arr) < 256:
                # all values fit in 8 bits, so narrow the dtype
                # (astype replaces numpy.cast[...], which was removed in numpy 2.0)
                arr = arr.astype(numpy.uint8)
            return cls(RemapLUT=arr)
        return cls()

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        if parent is None:
            parent = doc.getroot()
        if ns_key is None:
            node = create_new_node(doc, tag, parent=parent)
        else:
            node = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)
        # determine the (possibly namespace-qualified) tag for the lookup table child
        if 'RemapLUT' in self._child_xml_ns_key:
            rtag = '{}:RemapLUT'.format(self._child_xml_ns_key['RemapLUT'])
        elif ns_key is not None:
            rtag = '{}:RemapLUT'.format(ns_key)
        else:
            rtag = 'RemapLUT'
        if self._remap_lut is not None:
            # serialize as whitespace separated "r,g,b" triplets
            value = ' '.join('{0:d},{1:d},{2:d}'.format(*entry) for entry in self._remap_lut)
            entry = create_text_node(doc, rtag, value, parent=node)
            entry.attrib['size'] = str(self.size)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = OrderedDict()
        if self.RemapLUT is not None:
            out['RemapLUT'] = self.RemapLUT.tolist()
        return out
class MonochromeDisplayRemapType(Serializable):
    """
    This remap works by taking the input space and using the LUT to map it to a log space (for 8-bit only).

    From the log space the C0 and Ch fields are applied to get to display-ready density space. The density
    should then be rendered by the TTC and monitor comp. This means that the default DRA should not apply
    anything besides the clip points. If a different contrast/brightness is applied it should be done through
    modification of the clip points via DRA.
    """

    _fields = ('RemapType', 'RemapLUT', 'RemapParameters')
    _required = ('RemapType', )
    _collections_tags = {'RemapParameters': {'array': False, 'child_tag': 'RemapParameter'}}
    # Descriptor
    RemapType = StringDescriptor(
        'RemapType', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: str
    RemapParameters = ParametersDescriptor(
        'RemapParameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Textual remap parameter. Filled based upon remap type (for informational purposes only). '
                  'For example, if the data is linlog encoded a RemapParameter could be used to describe any '
                  'amplitude scaling that was performed prior to linlog encoding '
                  'the data.')  # type: Union[None, ParametersCollection]

    def __init__(self, RemapType=None, RemapLUT=None, RemapParameters=None, **kwargs):
        """
        Parameters
        ----------
        RemapType : str
        RemapLUT : None|numpy.ndarray
        RemapParameters : None|ParametersCollection|dict
        kwargs
        """
        self._remap_lut = None
        # xml namespace bookkeeping must be set before attribute assignment
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.RemapType = RemapType
        self.RemapLUT = RemapLUT
        self.RemapParameters = RemapParameters
        super(MonochromeDisplayRemapType, self).__init__(**kwargs)

    @property
    def RemapLUT(self):
        """
        numpy.ndarray: the one dimensional Lookup table for remap to log amplitude for display,
        where the dtype must be `uint8`. Used during the "Product Generation Option" portion of the SIPS
        display chain. Required for 8-bit data, and not to be used for 16-bit data.
        """
        return self._remap_lut

    @RemapLUT.setter
    def RemapLUT(self, value):
        if value is None:
            self._remap_lut = None
            return
        if isinstance(value, (tuple, list)):
            value = numpy.array(value, dtype=numpy.uint8)
        if not isinstance(value, numpy.ndarray) or value.dtype.name != 'uint8':
            raise ValueError(
                'RemapLUT for class MonochromeDisplayRemapType must be a numpy.ndarray of dtype uint8.')
        if value.ndim != 1:
            raise ValueError('RemapLUT for class MonochromeDisplayRemapType must be a one-dimensional array.')
        self._remap_lut = value

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        if kwargs is None:
            kwargs = {}
        lut_key = cls._child_xml_ns_key.get('RemapLUT', ns_key)
        lut_node = find_first_child(node, 'RemapLUT', xml_ns, lut_key)
        if lut_node is not None:
            dim1 = int(lut_node.attrib['size'])
            arr = numpy.zeros((dim1, ), dtype=numpy.uint8)
            entries = get_node_value(lut_node).split()
            i = 0
            for entry in entries:
                if len(entry) == 0:
                    continue
                arr[i] = int(entry)
                i += 1
            kwargs['RemapLUT'] = arr
        # Fixed: hand the populated dict to the base implementation through the
        # `kwargs` parameter. Spreading it as **kwargs supplied `RemapLUT` as an
        # unexpected keyword argument to the base `from_node(cls, node, xml_ns,
        # ns_key=None, kwargs=None)` signature (see ColorDisplayRemapType.from_node).
        return super(MonochromeDisplayRemapType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        node = super(MonochromeDisplayRemapType, self).to_node(
            doc, tag, ns_key=ns_key, parent=parent, check_validity=check_validity, strict=strict)
        # determine the (possibly namespace-qualified) tag for the lookup table child
        if 'RemapLUT' in self._child_xml_ns_key:
            rtag = '{}:RemapLUT'.format(self._child_xml_ns_key['RemapLUT'])
        elif ns_key is not None:
            rtag = '{}:RemapLUT'.format(ns_key)
        else:
            rtag = 'RemapLUT'
        if self._remap_lut is not None:
            value = ' '.join('{0:d}'.format(entry) for entry in self._remap_lut)
            entry = create_text_node(doc, rtag, value, parent=node)
            entry.attrib['size'] = str(self._remap_lut.size)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = super(MonochromeDisplayRemapType, self).to_dict(
            check_validity=check_validity, strict=strict, exclude=exclude)
        if self.RemapLUT is not None:
            out['RemapLUT'] = self.RemapLUT.tolist()
        return out
class RemapChoiceType(Serializable):
    """
    The remap choice type - at most one of `ColorDisplayRemap` or
    `MonochromeDisplayRemap` is expected to be populated.
    """
    _fields = ('ColorDisplayRemap', 'MonochromeDisplayRemap')
    _required = ()
    # the `_choice` entry tells the Serializable machinery that these two fields
    # form a mutually exclusive choice
    _choice = ({'required': False, 'collection': ('ColorDisplayRemap', 'MonochromeDisplayRemap')}, )
    # Descriptor
    ColorDisplayRemap = SerializableDescriptor(
        'ColorDisplayRemap', ColorDisplayRemapType, _required, strict=DEFAULT_STRICT,
        docstring='Information for proper color display of the '
                  'data.')  # type: Union[None, ColorDisplayRemapType]
    MonochromeDisplayRemap = SerializableDescriptor(
        'MonochromeDisplayRemap', MonochromeDisplayRemapType, _required, strict=DEFAULT_STRICT,
        docstring='Information for proper monochrome display of the '
                  'data.')  # type: Union[None, MonochromeDisplayRemapType]
    def __init__(self, ColorDisplayRemap=None, MonochromeDisplayRemap=None, **kwargs):
        """
        Parameters
        ----------
        ColorDisplayRemap : None|ColorDisplayRemapType|numpy.ndarray|list|tuple
        MonochromeDisplayRemap : None|MonochromeDisplayRemapType
        kwargs
        """
        # xml namespace bookkeeping must be set before the descriptor assignments below
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.ColorDisplayRemap = ColorDisplayRemap
        self.MonochromeDisplayRemap = MonochromeDisplayRemap
        super(RemapChoiceType, self).__init__(**kwargs)
class MonitorCompensationAppliedType(Serializable):
    """
    Monitor compensation parameters (gamma and Xmin) pre-applied to the image.
    """
    _fields = ('Gamma', 'XMin')
    _required = ('Gamma', 'XMin')
    # serialize both floats with the module standard float format
    _numeric_format = {key: FLOAT_FORMAT for key in _fields}
    # Descriptor
    Gamma = FloatDescriptor(
        'Gamma', _required, strict=DEFAULT_STRICT,
        docstring='Gamma value for monitor compensation pre-applied to the image.')  # type: float
    XMin = FloatDescriptor(
        'XMin', _required, strict=DEFAULT_STRICT,
        docstring='Xmin value for monitor compensation pre-applied to the image.')  # type: float
    def __init__(self, Gamma=None, XMin=None, **kwargs):
        """
        Parameters
        ----------
        Gamma : float
        XMin : float
        kwargs
        """
        # xml namespace bookkeeping must be set before the descriptor assignments below
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Gamma = Gamma
        self.XMin = XMin
        super(MonitorCompensationAppliedType, self).__init__(**kwargs)
class DRAHistogramOverridesType(Serializable):
    """
    Dynamic range adjustment override parameters.
    """
    _fields = ('ClipMin', 'ClipMax')
    _required = ('ClipMin', 'ClipMax')
    # Descriptor
    ClipMin = IntegerDescriptor(
        'ClipMin', _required, strict=DEFAULT_STRICT,
        docstring='Suggested override for the lower end-point of the display histogram in the '
                  'ELT DRA application. Referred to as Pmin in SIPS documentation.')  # type: int
    ClipMax = IntegerDescriptor(
        'ClipMax', _required, strict=DEFAULT_STRICT,
        docstring='Suggested override for the upper end-point of the display histogram in the '
                  'ELT DRA application. Referred to as Pmax in SIPS documentation.')  # type: int
    def __init__(self, ClipMin=None, ClipMax=None, **kwargs):
        """
        Parameters
        ----------
        ClipMin : int
        ClipMax : int
        kwargs
        """
        # xml namespace bookkeeping must be set before the descriptor assignments below
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.ClipMin = ClipMin
        self.ClipMax = ClipMax
        super(DRAHistogramOverridesType, self).__init__(**kwargs)
class ProductDisplayType(Serializable):
    """
    Information on the parameters needed for proper display of the product.
    """
    # serialization metadata consumed by the Serializable base class
    _fields = (
        'PixelType', 'RemapInformation', 'MagnificationMethod', 'DecimationMethod',
        'DRAHistogramOverrides', 'MonitorCompensationApplied', 'DisplayExtensions')
    _required = ('PixelType', )
    _collections_tags = {
        'DisplayExtensions': {'array': False, 'child_tag': 'DisplayExtension'}}
    # Descriptors
    PixelType = StringEnumDescriptor(
        'PixelType', ('MONO8I', 'MONO8LU', 'MONO16I', 'RGB8LU', 'RGB24I'),
        _required, strict=DEFAULT_STRICT,
        docstring='Enumeration of the pixel type. Definition in '
                  'Design and Exploitation document.')  # type: str
    RemapInformation = SerializableDescriptor(
        'RemapInformation', RemapChoiceType, _required, strict=DEFAULT_STRICT,
        docstring='Information regarding the encoding of the pixel data. '
                  'Used for 8-bit pixel types.')  # type: Union[None, RemapChoiceType]
    MagnificationMethod = StringEnumDescriptor(
        'MagnificationMethod', ('NEAREST_NEIGHBOR', 'BILINEAR', 'LAGRANGE'),
        _required, strict=DEFAULT_STRICT,
        docstring='Recommended ELT magnification method for this data.')  # type: Union[None, str]
    DecimationMethod = StringEnumDescriptor(
        'DecimationMethod', ('NEAREST_NEIGHBOR', 'BILINEAR', 'BRIGHTEST_PIXEL', 'LAGRANGE'),
        _required, strict=DEFAULT_STRICT,
        docstring='Recommended ELT decimation method for this data. Also used as default for '
                  'reduced resolution dataset generation (if applicable).')  # type: Union[None, str]
    DRAHistogramOverrides = SerializableDescriptor(
        'DRAHistogramOverrides', DRAHistogramOverridesType, _required, strict=DEFAULT_STRICT,
        docstring='Recommended ELT DRA overrides.')  # type: Union[None, DRAHistogramOverridesType]
    MonitorCompensationApplied = SerializableDescriptor(
        'MonitorCompensationApplied', MonitorCompensationAppliedType, _required, strict=DEFAULT_STRICT,
        docstring='Describes monitor compensation that may have been applied to the product '
                  'during processing.')  # type: Union[None, MonitorCompensationAppliedType]
    DisplayExtensions = ParametersDescriptor(
        'DisplayExtensions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Optional extensible parameters used to support profile-specific needs related to '
                  'product display. Predefined filter types.')  # type: Union[None, ParametersCollection]
    def __init__(self, PixelType=None, RemapInformation=None, MagnificationMethod=None, DecimationMethod=None,
                 DRAHistogramOverrides=None, MonitorCompensationApplied=None, DisplayExtensions=None, **kwargs):
        """
        Parameters
        ----------
        PixelType : str
        RemapInformation : None|RemapChoiceType
        MagnificationMethod : None|str
        DecimationMethod : None|str
        DRAHistogramOverrides : None|DRAHistogramOverridesType
        MonitorCompensationApplied : None|MonitorCompensationAppliedType
        DisplayExtensions : None|ParametersCollection|dict
        kwargs
        """
        # xml namespace bookkeeping must be set before the descriptor assignments below
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.PixelType = PixelType
        self.RemapInformation = RemapInformation
        self.MagnificationMethod = MagnificationMethod
        self.DecimationMethod = DecimationMethod
        self.DRAHistogramOverrides = DRAHistogramOverrides
        self.MonitorCompensationApplied = MonitorCompensationApplied
        self.DisplayExtensions = DisplayExtensions
        super(ProductDisplayType, self).__init__(**kwargs)
    def get_pixel_size(self) -> int:
        """
        Gets the raw size per pixel, in bytes.

        Returns
        -------
        int

        Raises
        ------
        ValueError
            For an unrecognized `PixelType` value.
        """
        # 8-bit mono (direct or LUT indexed) and 8-bit LUT color are one byte per
        # pixel, 16-bit mono is two, and 24-bit true color is three
        if self.PixelType == 'MONO8I':
            return 1
        elif self.PixelType == 'MONO8LU':
            return 1
        elif self.PixelType == 'MONO16I':
            return 2
        elif self.PixelType == 'RGB8LU':
            return 1
        elif self.PixelType == 'RGB24I':
            return 3
        else:
            raise ValueError('Got unhandled pixel type `{}`'.format(self.PixelType))
sarpy | sarpy-master/sarpy/io/xml/base.py | """
This module contains the base objects for use in base xml/serializable functionality.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from xml.etree import ElementTree
import json
from datetime import date, datetime
from collections import OrderedDict
import copy
import re
from io import StringIO
from typing import Dict, Optional
import numpy
from sarpy.compliance import bytes_to_string
try:
from lxml import etree
except ImportError:
etree = None
logger = logging.getLogger(__name__)
valid_logger = logging.getLogger('validation')
DEFAULT_STRICT = False
#################
# dom helper functions
def get_node_value(nod: ElementTree.Element) -> Optional[str]:
    """
    XML parsing helper for extracting text value from an ElementTree Element. No error checking performed.

    Parameters
    ----------
    nod : ElementTree.Element
        the xml dom element

    Returns
    -------
    None|str
        the stripped text of the node, or None when there is no non-whitespace text.
    """
    text = nod.text
    if text is None:
        return None
    stripped = text.strip()
    return stripped if len(stripped) > 0 else None
def create_new_node(
        doc: ElementTree.ElementTree,
        tag: str,
        parent: Optional[ElementTree.Element] = None) -> ElementTree.Element:
    """
    XML ElementTree node creation helper function.

    Parameters
    ----------
    doc : ElementTree.ElementTree
        The xml Document object.
    tag : str
        Name/tag for new xml element.
    parent : None|ElementTree.Element
        The parent element for the new element. Defaults to the document root element if unspecified.

    Returns
    -------
    ElementTree.Element
        The new element populated as a child of `parent`.
    """
    attach_to = parent if parent is not None else doc.getroot()
    if attach_to is not None:
        return ElementTree.SubElement(attach_to, tag)
    # the document has no root yet - the new element becomes the root
    root_element = ElementTree.Element(tag)
    # noinspection PyProtectedMember, PyUnresolvedReferences
    doc._setroot(root_element)
    return root_element
def create_text_node(
        doc: ElementTree.ElementTree,
        tag: str,
        value: str,
        parent: Optional[ElementTree.Element] = None) -> ElementTree.Element:
    """
    XML ElementTree text node creation helper function.

    Parameters
    ----------
    doc : ElementTree.ElementTree
        The xml Document object.
    tag : str
        Name/tag for new xml element.
    value : str
        The text value for the new element.
    parent : None|ElementTree.Element
        The parent element for the new element. Defaults to the document root element if unspecified.

    Returns
    -------
    ElementTree.Element
        The new element populated as a child of `parent`.
    """
    element = create_new_node(doc, tag, parent=parent)
    element.text = value
    return element
def find_first_child(
        node: ElementTree.Element,
        tag: str,
        xml_ns: Optional[Dict[str, str]],
        ns_key: Optional[str]) -> ElementTree.Element:
    """
    Finds the first child node with the given tag, honoring the namespace mapping.

    Parameters
    ----------
    node : ElementTree.Element
    tag : str
    xml_ns : None|dict
    ns_key : None|str

    Returns
    -------
    ElementTree.Element
    """
    if xml_ns is None:
        return node.find(tag)
    # fall back to the 'default' namespace prefix when no key is given
    prefix = 'default' if ns_key is None else ns_key
    return node.find('{}:{}'.format(prefix, tag), xml_ns)
def find_children(node, tag, xml_ns, ns_key):
    """
    Finds the collection of children nodes with the given tag, honoring the
    namespace mapping.

    Parameters
    ----------
    node : ElementTree.Element
    tag : str
    xml_ns : None|dict
    ns_key : None|str

    Returns
    -------
    list
        The list of matching child elements.
    """
    if xml_ns is None:
        return node.findall(tag)
    # fall back to the 'default' namespace prefix when no key is given
    prefix = 'default' if ns_key is None else ns_key
    return node.findall('{}:{}'.format(prefix, tag), xml_ns)
def parse_xml_from_string(xml_string):
    """
    Parse the ElementTree root node and xml namespace dict from a xml string.

    Parameters
    ----------
    xml_string : str|bytes

    Returns
    -------
    root_node: ElementTree.Element
    xml_ns: Dict[str, str]
    """
    xml_string = bytes_to_string(xml_string, encoding='utf-8')
    root_node = ElementTree.fromstring(xml_string)
    # collect the declared namespaces by re-walking the document for start-ns events
    xml_ns = {}
    for _, ns_entry in ElementTree.iterparse(StringIO(xml_string), events=('start-ns', )):
        prefix, uri = ns_entry
        xml_ns[prefix] = uri
    if not xml_ns:
        xml_ns = None
    elif '' in xml_ns:
        xml_ns['default'] = xml_ns['']
    else:
        # no anonymous namespace declared - the default is the root node's namespace
        namespace_match = re.match(r'\{.*\}', root_node.tag)
        if namespace_match is None:
            raise ValueError('Trouble finding the default namespace for tag {}'.format(root_node.tag))
        xml_ns['default'] = namespace_match[0][1:-1]
    return root_node, xml_ns
def parse_xml_from_file(xml_file_path):
    """
    Parse the ElementTree root node and xml namespace dict from a xml file.

    Parameters
    ----------
    xml_file_path : str

    Returns
    -------
    root_node: ElementTree.Element
    xml_ns: Dict[str, str]
    """
    # read the raw bytes and delegate to the string variant
    with open(xml_file_path, 'rb') as fi:
        return parse_xml_from_string(fi.read())
def validate_xml_from_string(xml_string, xsd_path, output_logger=None):
    """
    Validate a xml string against a given xsd document.

    Parameters
    ----------
    xml_string : str|bytes
    xsd_path : str
        The path to the relevant xsd document.
    output_logger
        A desired output logger.

    Returns
    -------
    bool
        `True` if valid, `False` otherwise. Failure reasons will be
        logged at `'error'` level by the module.
    """
    if etree is None:
        raise ImportError(
            'The lxml package was not successfully imported,\n\t'
            'and this xml validation requires lxml.')
    document = etree.fromstring(xml_string)
    schema = etree.XMLSchema(file=xsd_path)
    is_valid = schema.validate(document)
    if is_valid:
        return is_valid
    # report every schema violation through the requested logger
    sink = logger if output_logger is None else output_logger
    for entry in schema.error_log:
        sink.error(
            'XML validation error on line {}\n\t{}'.format(
                entry.line, entry.message.encode('utf-8')))
    return is_valid
def validate_xml_from_file(xml_path, xsd_path, output_logger=None):
    """
    Validate a xml file against a given xsd document.

    Parameters
    ----------
    xml_path : str
        The path to the relevant xml file
    xsd_path : str
        The path to the relevant xsd document.
    output_logger
        A desired output logger.

    Returns
    -------
    bool
        `True` if valid, `False` otherwise. Failure reasons will be
        logged at `'error'` level by the module.
    """

    # slurp the file contents and delegate to the string-based validator
    with open(xml_path, 'rb') as the_file:
        contents = the_file.read()
    return validate_xml_from_string(contents, xsd_path, output_logger=output_logger)
###
# parsing functions - for reusable functionality in descriptors or other property definitions
def parse_str(value, name, instance):
    """Parse a string valued field for `name` of `instance`, from a str or xml node."""
    if value is None:
        return None
    if isinstance(value, str):
        return value
    if isinstance(value, ElementTree.Element):
        # an empty xml node yields None from get_node_value - map that to ''
        extracted = get_node_value(value)
        return extracted if extracted is not None else ""
    raise TypeError(
        'field {} of class {} requires a string value.'.format(name, instance.__class__.__name__))
def parse_bool(value, name, instance):
    """Parse a boolean valued field for `name` of `instance`, from bool/int/str or an xml node."""

    def from_text(text):
        # xml booleans may be spelled 0/1 or false/true (any case)
        lowered = text.lower()
        if lowered in ['0', 'false']:
            return False
        if lowered in ['1', 'true']:
            return True
        raise ValueError(
            'Boolean field {} of class {} cannot assign from string value {}. '
            'It must be one of ["0", "false", "1", "true"]'.format(name, instance.__class__.__name__, text))

    if value is None:
        return None
    if isinstance(value, bool):
        return value
    if isinstance(value, (int, numpy.bool_)):
        return bool(value)
    if isinstance(value, ElementTree.Element):
        # from XML deserialization
        return from_text(get_node_value(value))
    if isinstance(value, str):
        return from_text(value)
    raise ValueError('Boolean field {} of class {} cannot assign from type {}.'.format(
        name, instance.__class__.__name__, type(value)))
def parse_int(value, name, instance):
    """Parse an integer valued field for `name` of `instance`, from int/str/xml node/castable."""
    if value is None:
        return None
    if isinstance(value, int):
        return value
    if isinstance(value, ElementTree.Element):
        # from XML deserialization - recurse on the text content
        return parse_int(get_node_value(value), name, instance)
    if isinstance(value, str):
        try:
            return int(value)
        except ValueError as exc:
            logger.warning(
                'Got non-integer value {}\n\t'
                'for integer valued field {} of class {}'.format(
                    value, name, instance.__class__.__name__))
            # noinspection PyBroadException
            try:
                # fall back to truncating a float-formatted string
                return int(float(value))
            except Exception:
                raise exc
    # user or json deserialization
    return int(value)
# noinspection PyUnusedLocal
def parse_float(value, name, instance):
    """Parse a float valued field for `name` of `instance`, from a float, xml node, or castable value."""
    if value is None:
        return None
    if isinstance(value, float):
        return value
    if isinstance(value, ElementTree.Element):
        # from XML deserialization
        return float(get_node_value(value))
    # user or json deserialization
    return float(value)
def parse_complex(value, name, instance):
    """Parse a complex valued field for `name` of `instance`, from a complex, xml node, or dict."""
    if value is None:
        return None
    if isinstance(value, complex):
        return value
    if isinstance(value, ElementTree.Element):
        xml_ns = getattr(instance, '_xml_ns', None)
        # noinspection PyProtectedMember
        if hasattr(instance, '_child_xml_ns_key') and name in instance._child_xml_ns_key:
            # noinspection PyProtectedMember
            xml_ns_key = instance._child_xml_ns_key[name]
        else:
            xml_ns_key = getattr(instance, '_xml_ns_key', None)
        # from XML deserialization - expect exactly one Real and one Imag child
        real_nodes = find_children(value, 'Real', xml_ns, xml_ns_key)
        imag_nodes = find_children(value, 'Imag', xml_ns, xml_ns_key)
        if len(real_nodes) != 1:
            raise ValueError(
                'There must be exactly one Real component of a complex type node '
                'defined for field {} of class {}.'.format(name, instance.__class__.__name__))
        if len(imag_nodes) != 1:
            raise ValueError(
                'There must be exactly one Imag component of a complex type node '
                'defined for field {} of class {}.'.format(name, instance.__class__.__name__))
        return complex(
            float(get_node_value(real_nodes[0])),
            float(get_node_value(imag_nodes[0])))
    if isinstance(value, dict):
        # from json deserialization - accept a few spellings for each component
        real_part = None
        for key in ['re', 'real', 'Real']:
            real_part = value.get(key, real_part)
        imag_part = None
        for key in ['im', 'imag', 'Imag']:
            imag_part = value.get(key, imag_part)
        if real_part is None or imag_part is None:
            raise ValueError(
                'Cannot convert dict {} to a complex number for field {} of '
                'class {}.'.format(value, name, instance.__class__.__name__))
        return complex(real_part, imag_part)
    # from user - I can't imagine that this would ever work
    return complex(value)
def parse_datetime(value, name, instance, units='us'):
    """Parse a numpy.datetime64 valued field for `name` of `instance`, at the given units."""
    if value is None:
        return None
    if isinstance(value, numpy.datetime64):
        return value
    if isinstance(value, str):
        # strip an explicit Z suffix - any timezone identifier is deprecated
        # by numpy.datetime64
        stripped = value[:-1] if value[-1] == 'Z' else value
        return numpy.datetime64(stripped, units)
    if isinstance(value, ElementTree.Element):
        # from XML deserialization - extract the string and recurse
        return parse_datetime(get_node_value(value), name, instance, units=units)
    if isinstance(value, (date, datetime, numpy.int64, numpy.float64)):
        return numpy.datetime64(value, units)
    if isinstance(value, int):
        # this is less safe, because the units are unknown...
        return numpy.datetime64(value, units)
    raise TypeError(
        'Field {} for class {} expects datetime convertible input, and '
        'got {}'.format(name, instance.__class__.__name__, type(value)))
def parse_serializable(value, name, instance, the_type):
    """Parse a `the_type` valued field for `name` of `instance`, from dict, xml node, or array input."""
    if value is None:
        return None
    if isinstance(value, the_type):
        return value
    if isinstance(value, dict):
        return the_type.from_dict(value)
    if isinstance(value, ElementTree.Element):
        # determine the xml namespace key, honoring any per-attribute override
        xml_ns = getattr(instance, '_xml_ns', None)
        if hasattr(instance, '_child_xml_ns_key'):
            # noinspection PyProtectedMember
            xml_ns_key = instance._child_xml_ns_key.get(name, getattr(instance, '_xml_ns_key', None))
        else:
            xml_ns_key = getattr(instance, '_xml_ns_key', None)
        return the_type.from_node(value, xml_ns, ns_key=xml_ns_key)
    if isinstance(value, (numpy.ndarray, list, tuple)):
        # array-like input is only sensible for Arrayable types
        if issubclass(the_type, Arrayable):
            return the_type.from_array(value)
        raise TypeError(
            'Field {} of class {} is of type {} (not a subclass of Arrayable) and '
            'got an argument of type {}.'.format(name, instance.__class__.__name__, the_type, type(value)))
    raise TypeError(
        'Field {} of class {} is expecting type {}, but got an instance of incompatible '
        'type {}.'.format(name, instance.__class__.__name__, the_type, type(value)))
def parse_serializable_array(value, name, instance, child_type, child_tag):
    """
    Parse field `name` of `instance` into a one-dimensional object-dtype numpy
    array of `child_type` elements, from a single element, ndarray, xml parent
    node (with `child_tag` children), or list/tuple input. Returns None for
    None input.
    """
    if value is None:
        return None
    if isinstance(value, child_type):
        # this is the child element
        return numpy.array([value, ], dtype='object')
    elif isinstance(value, numpy.ndarray):
        if value.dtype.name != 'object':
            if issubclass(child_type, Arrayable):
                # each entry of the numeric array defines one child element
                return numpy.array([child_type.from_array(array) for array in value], dtype='object')
            else:
                raise ValueError(
                    'Attribute {} of array type functionality belonging to class {} got an ndarray of dtype {},'
                    'and child type is not a subclass of Arrayable.'.format(
                        name, instance.__class__.__name__, value.dtype))
        elif len(value.shape) != 1:
            raise ValueError(
                'Attribute {} of array type functionality belonging to class {} got an ndarray of shape {},'
                'but requires a one dimensional array.'.format(
                    name, instance.__class__.__name__, value.shape))
        elif not isinstance(value[0], child_type):
            # NB: only the first element's type is verified here
            raise TypeError(
                'Attribute {} of array type functionality belonging to class {} got an ndarray containing '
                'first element of incompatible type {}.'.format(
                    name, instance.__class__.__name__, type(value[0])))
        return value
    elif isinstance(value, ElementTree.Element):
        # determine the xml namespace key, honoring any per-attribute override
        xml_ns = getattr(instance, '_xml_ns', None)
        if hasattr(instance, '_child_xml_ns_key'):
            # noinspection PyProtectedMember
            xml_ns_key = instance._child_xml_ns_key.get(name, getattr(instance, '_xml_ns_key', None))
        else:
            xml_ns_key = getattr(instance, '_xml_ns_key', None)

        # this is the parent node from XML deserialization
        size = int(value.attrib.get('size', -1))  # NB: Corner Point arrays don't have
        # extract child nodes at top level
        child_nodes = find_children(value, child_tag, xml_ns, xml_ns_key)

        if size == -1:  # fill in, if it's missing
            size = len(child_nodes)
        if len(child_nodes) != size:
            raise ValueError(
                'Attribute {} of array type functionality belonging to class {} got a ElementTree element '
                'with size attribute {}, but has {} child nodes with tag {}.'.format(
                    name, instance.__class__.__name__, size, len(child_nodes), child_tag))
        new_value = numpy.empty((size, ), dtype='object')
        for i, entry in enumerate(child_nodes):
            new_value[i] = child_type.from_node(entry, xml_ns, ns_key=xml_ns_key)
        return new_value
    elif isinstance(value, (list, tuple)):
        # this would arrive from users or json deserialization
        if len(value) == 0:
            return numpy.empty((0,), dtype='object')
        elif isinstance(value[0], child_type):
            return numpy.array(value, dtype='object')
        elif isinstance(value[0], dict):
            # NB: charming errors are possible here if something stupid has been done.
            return numpy.array([child_type.from_dict(node) for node in value], dtype='object')
        elif isinstance(value[0], (numpy.ndarray, list, tuple)):
            if issubclass(child_type, Arrayable):
                return numpy.array([child_type.from_array(array) for array in value], dtype='object')
            elif hasattr(child_type, 'Coefs'):
                # polynomial-like types constructed directly from coefficient arrays
                return numpy.array([child_type(Coefs=array) for array in value], dtype='object')
            else:
                raise ValueError(
                    'Attribute {} of array type functionality belonging to class {} got an list '
                    'containing elements type {} and construction failed.'.format(
                        name, instance.__class__.__name__, type(value[0])))
        else:
            raise TypeError(
                'Attribute {} of array type functionality belonging to class {} got a list containing first '
                'element of incompatible type {}.'.format(name, instance.__class__.__name__, type(value[0])))
    else:
        raise TypeError(
            'Attribute {} of array type functionality belonging to class {} got incompatible type {}.'.format(
                name, instance.__class__.__name__, type(value)))
def parse_serializable_list(value, name, instance, child_type):
    """
    Parse a list of `child_type` elements for field `name` of `instance`.

    Accepts a single `child_type` instance, a single xml node, or a list/tuple
    whose entries are `child_type` instances, dicts, or xml nodes. Returns
    None (for None input) or a list of `child_type` instances.
    """

    if value is None:
        return None
    if isinstance(value, child_type):
        # this is the child element
        return [value, ]

    # determine the xml namespace key, honoring any per-attribute override
    xml_ns = getattr(instance, '_xml_ns', None)
    if hasattr(instance, '_child_xml_ns_key'):
        # noinspection PyProtectedMember
        xml_ns_key = instance._child_xml_ns_key.get(name, getattr(instance, '_xml_ns_key', None))
    else:
        xml_ns_key = getattr(instance, '_xml_ns_key', None)

    if isinstance(value, ElementTree.Element):
        # this is the child
        return [child_type.from_node(value, xml_ns, ns_key=xml_ns_key), ]
    elif isinstance(value, (list, tuple)):
        # NB: previously only `list` was recognized here, and other input was
        # probed via `value[0]` - which crashed on an empty tuple and rejected
        # tuples of dicts/nodes that this branch handles perfectly well
        if len(value) == 0:
            return list(value)
        elif isinstance(value[0], child_type):
            return [entry for entry in value]
        elif isinstance(value[0], dict):
            # NB: charming errors are possible if something stupid has been done.
            return [child_type.from_dict(node) for node in value]
        elif isinstance(value[0], ElementTree.Element):
            return [child_type.from_node(node, xml_ns, ns_key=xml_ns_key) for node in value]
        else:
            raise TypeError(
                'Field {} of list type functionality belonging to class {} got a '
                'list containing first element of incompatible type '
                '{}.'.format(name, instance.__class__.__name__, type(value[0])))
    else:
        raise TypeError(
            'Field {} of class {} got incompatible type {}.'.format(
                name, instance.__class__.__name__, type(value)))
def parse_parameters_collection(value, name, instance):
    """Parse a name->value parameters mapping for field `name` of `instance`, from a dict or list of xml nodes."""
    if value is None:
        return None
    if isinstance(value, dict):
        return value
    if isinstance(value, list):
        parsed = OrderedDict()
        if len(value) == 0:
            return parsed
        if isinstance(value[0], ElementTree.Element):
            # each node carries its key in the `name` attribute
            for node in value:
                parsed[node.attrib['name']] = get_node_value(node)
            return parsed
        raise TypeError(
            'Field {} of list type functionality belonging to class {} got a '
            'list containing first element of incompatible type '
            '{}.'.format(name, instance.__class__.__name__, type(value[0])))
    raise TypeError(
        'Field {} of class {} got incompatible type {}.'.format(
            name, instance.__class__.__name__, type(value)))
##################
# Main class defining structure
class Serializable(object):
    """
    Basic abstract class specifying the serialization pattern. There are no clearly defined Python conventions
    for this issue. Every effort has been made to select sensible choices, but this is an individual effort.
    Notes
    -----
    All fields MUST BE LISTED in the `_fields` tuple. Everything listed in `_required` tuple will be checked
    for inclusion in `_fields` tuple. Note that special care must be taken to ensure compatibility of `_fields`
    tuple, if inheriting from an extension of this class.
    """

    # NB: each of the following class-level declarations is intended to be
    # overridden by subclasses - the values here are the empty defaults
    _fields = ()
    """collection of field names"""
    _required = ()
    """subset of `_fields` defining the required (for the given object, according to the sicd standard) fields"""
    _tag_override = {}
    """On occasion, the xml tag and the corresponding variable name may need to differ.
    This dictionary should be populated as `{<variable name> : <tag name>}`."""
    _collections_tags = {}
    """
    Entries only appropriate for list/array type objects. Entry formatting:

    * `{'array': True, 'child_tag': <child_name>}` represents an array object, which will have int attribute `size`.
      It has *size* children with tag=<child_name>, each of which has an attribute `index`, which is not always an
      integer. Even when it is an integer, it apparently sometimes follows the matlab convention (1 based), and
      sometimes the standard convention (0 based). In this case, I will deserialize as though the objects are
      properly ordered, and the deserialized objects will have the `index` property from the xml, but it will not
      be used to determine array order - which will be simply carried over from file order.

    * `{'array': False, 'child_tag': <child_name>}` represents a collection of things with tag=<child_name>.
      This entries are not directly below one coherent container tag, but just dumped into an object.
      For example of such usage search for "Parameter" in the SICD standard.
      In this case, I've have to create an ephemeral variable in the class that doesn't exist in the standard,
      and it's not clear what the intent is for this unstructured collection, so used a list object.
      For example, I have a variable called `Parameters` in `CollectionInfoType`, whose job it to contain the
      parameters objects.
    """
    _numeric_format = {}
    """define dict entries of numeric formatting for serialization"""
    _set_as_attribute = ()
    """serialize these fields as xml attributes"""
    _choice = ()
    """
    Entries appropriate for choice selection between attributes. Entry formatting:

    * `{'required': True, 'collection': <tuple of attribute names>}` - indicates that EXACTLY only one of the
      attributes should be populated.

    * `{'required': False, 'collection': <tuple of attribute names>}` - indicates that no more than one of the
      attributes should be populated.
    """
    _child_xml_ns_key = {}
    """
    The expected namespace key for attributes. No entry indicates the default namespace.
    This is important for SIDD handling, but not required for SICD handling.
    """

    # NB: it may be good practice to use __slots__ to further control class functionality?
def __init__(self, **kwargs):
"""
The default constructor. For each attribute name in `self._fields`, fetches the value (or None) from
the `kwargs` dict, and sets the class instance attribute value. The details for attribute value validation,
present for virtually every attribute, will be implemented specifically as descriptors.
Parameters
----------
**kwargs :
the keyword arguments dictionary - the possible entries match the attributes.
"""
if '_xml_ns' in kwargs:
self._xml_ns = kwargs['_xml_ns']
unexpected_args = [key for key in kwargs if key not in self._fields and key[0] != '_']
if len(unexpected_args) > 0:
raise ValueError(
'Received unexpected construction argument {} for attribute '
'collection {}'.format(unexpected_args, self._fields))
for attribute in self._fields:
if attribute in kwargs:
try:
setattr(self, attribute, kwargs.get(attribute, None))
except AttributeError:
# NB: this is included to allow for read only properties without breaking the paradigm
# Silently catching errors can potentially cover up REAL issues.
pass
def __str__(self):
return '{}(**{})'.format(self.__class__.__name__, json.dumps(self.to_dict(check_validity=False), indent=1))
def __repr__(self):
return '{}(**{})'.format(self.__class__.__name__, self.to_dict(check_validity=False))
def __setattr__(self, key, value):
if not (key.startswith('_') or (key in self._fields) or hasattr(self.__class__, key) or hasattr(self, key)):
# not expected attribute - descriptors, properties, etc
logger.warning(
'Class {} instance receiving unexpected attribute {}.\n\t'
'Ensure that this is not a typo of an expected field name.'.format(self.__class__.__name__, key))
object.__setattr__(self, key, value)
def __getstate__(self):
"""
Method for allowing copying and/or pickling of state.
Returns
-------
dict
The dict representation for the object.
"""
return self.to_dict(check_validity=False, strict=False)
def __setstate__(self, the_dict):
"""
Method for reconstructing from the serialized state.
"""
return self.__init__(**the_dict)
def set_numeric_format(self, attribute, format_string):
"""Sets the numeric format string for the given attribute.
Parameters
----------
attribute : str
attribute for which the format applies - must be in `_fields`.
format_string : str
format string to be applied
Returns
-------
None
"""
# Extend this to include format function capabilities. Maybe numeric_format is not the right name?
if attribute not in self._fields:
raise ValueError('attribute {} is not permitted for class {}'.format(attribute, self.__class__.__name__))
self._numeric_format[attribute] = format_string
def _get_formatter(self, attribute):
"""Return a formatting function for the given attribute. This will default to `str` if no other
option is presented.
Parameters
----------
attribute : str
the given attribute name
Returns
-------
Callable
format function
"""
entry = self._numeric_format.get(attribute, None)
if isinstance(entry, str):
fmt_str = '{0:' + entry + '}'
return fmt_str.format
elif callable(entry):
return entry
else:
return str
def log_validity_error(self, msg):
"""
Log a validity check error message.
Parameters
----------
msg : str
"""
valid_logger.error('{}: {}'.format(self.__class__.__name__, msg))
def log_validity_warning(self, msg):
"""
Log a validity check warning message.
Parameters
----------
msg : str
"""
valid_logger.warning('{}: {}'.format(self.__class__.__name__, msg))
def log_validity_info(self, msg):
"""
Log a validation info message.
Parameters
----------
msg : str
"""
valid_logger.info('{}: {}'.format(self.__class__.__name__, msg))
def is_valid(self, recursive=False, stack=False):
"""Returns the validity of this object according to the schema. This is done by inspecting that all required
fields (i.e. entries of `_required`) are not `None`.
Parameters
----------
recursive : bool
True if we recursively check that child are also valid. This may result in verbose (i.e. noisy) logging.
stack : bool
Print a recursive error message?
Returns
-------
bool
condition for validity of this element
"""
all_required = self._basic_validity_check()
if not recursive:
return all_required
valid_children = self._recursive_validity_check(stack=stack)
return all_required & valid_children
def _basic_validity_check(self):
"""
Perform the basic validity check on the direct attributes with no recursive checking.
Returns
-------
bool
True if all requirements *AT THIS LEVEL* are satisfied, otherwise False.
"""
all_required = True
for attribute in self._required:
present = (getattr(self, attribute) is not None)
if not present:
self.log_validity_error("Missing required attribute {}".format(attribute))
all_required &= present
choices = True
for entry in self._choice:
required = entry.get('required', False)
collect = entry['collection']
# verify that no more than one of the entries in collect is set.
present = []
for attribute in collect:
if getattr(self, attribute) is not None:
present.append(attribute)
if len(present) == 0 and required:
self.log_validity_error(
"Exactly one of the attributes {} should be set, but none are set".format(collect))
choices = False
elif len(present) > 1:
self.log_validity_error(
"Exactly one of the attributes {} should be set, but multiple ({}) are set".format(collect,
present))
choices = False
return all_required and choices
def _recursive_validity_check(self, stack=False):
"""
Perform a recursive validity check on all present attributes.
Parameters
----------
stack : bool
Print a recursive error message?
Returns
-------
bool
True if requirements are recursively satisfied *BELOW THIS LEVEL*, otherwise False.
"""
def check_item(value):
if isinstance(value, (Serializable, SerializableArray)):
return value.is_valid(recursive=True, stack=stack)
return True
valid_children = True
for attribute in self._fields:
val = getattr(self, attribute)
good = True
if isinstance(val, (Serializable, SerializableArray)):
good = check_item(val)
elif isinstance(val, list):
for entry in val:
good &= check_item(entry)
# any issues will be logged as discovered, but should we help with the "stack"?
if not good and stack:
self.log_validity_error(
"Issue discovered with attribute {} of type {}.".format(attribute, type(val)))
valid_children &= good
return valid_children
    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        """For XML deserialization.

        Parameters
        ----------
        node : ElementTree.Element
            dom element for serialized class instance
        xml_ns : None|dict
            The xml namespace dictionary.
        ns_key : None|str
            The xml namespace key. If `xml_ns` is None, then this is ignored. If `None` and `xml_ns` is not None,
            then the string `default` will be used. This will be recursively passed down,
            unless overridden by an entry of the cls._child_xml_ns_key dictionary.
        kwargs : None|dict
            `None` or dictionary of previously serialized attributes. For use in inheritance call, when certain
            attributes require specific deserialization.

        Returns
        -------
        Corresponding class instance
        """

        if len(node) == 0 and len(node.attrib) == 0:
            logger.warning(
                'There are no children or attributes associated\n\t'
                'with node {}\n\t'
                'for class {}.'.format(node, cls))
            # return None

        def handle_attribute(the_attribute, the_tag, the_xml_ns_key):
            # fetch an xml attribute value, qualifying the name by namespace when one applies
            if the_xml_ns_key is not None:  # handle namespace, if necessary
                fetch_tag = '{' + xml_ns[the_xml_ns_key] + '}' + the_tag
            else:
                fetch_tag = the_tag
            kwargs[the_attribute] = node.attrib.get(fetch_tag, None)

        def handle_single(the_attribute, the_tag, the_xml_ns_key):
            # fetch the first matching child node (None when absent)
            kwargs[the_attribute] = find_first_child(node, the_tag, xml_ns, the_xml_ns_key)

        def handle_list(attrib, ch_tag, the_xml_ns_key):
            # fetch all matching child nodes, only setting the entry when some exist
            cnodes = find_children(node, ch_tag, xml_ns, the_xml_ns_key)
            if len(cnodes) > 0:
                kwargs[attrib] = cnodes

        if kwargs is None:
            kwargs = {}
        kwargs['_xml_ns'] = xml_ns
        kwargs['_xml_ns_key'] = ns_key

        if not isinstance(kwargs, dict):
            raise ValueError(
                "Named input argument kwargs for class {} must be dictionary instance".format(cls))

        for attribute in cls._fields:
            if attribute in kwargs:
                continue

            kwargs[attribute] = None
            # This value will be replaced if tags are present
            # Note that we want to try explicitly setting to None to trigger descriptor behavior
            # for required fields (warning or error)

            base_tag_name = cls._tag_override.get(attribute, attribute)

            # determine any expected xml namespace for the given entry
            if attribute in cls._child_xml_ns_key:
                xml_ns_key = cls._child_xml_ns_key[attribute]
            else:
                xml_ns_key = ns_key
            # verify that the xml namespace will work
            if xml_ns_key is not None:
                if xml_ns is None:
                    raise ValueError('Attribute {} in class {} expects a xml namespace entry of {}, '
                                     'but xml_ns is None.'.format(attribute, cls, xml_ns_key))
                elif xml_ns_key not in xml_ns:
                    raise ValueError('Attribute {} in class {} expects a xml namespace entry of {}, '
                                     'but xml_ns does not contain this key.'.format(attribute, cls, xml_ns_key))

            if attribute in cls._set_as_attribute:
                xml_ns_key = cls._child_xml_ns_key.get(attribute, None)
                handle_attribute(attribute, base_tag_name, xml_ns_key)
            elif attribute in cls._collections_tags:
                # it's a collection type parameter
                array_tag = cls._collections_tags[attribute]
                array = array_tag.get('array', False)
                child_tag = array_tag.get('child_tag', None)
                if array:
                    handle_single(attribute, base_tag_name, xml_ns_key)
                elif child_tag is not None:
                    handle_list(attribute, child_tag, xml_ns_key)
                else:
                    # the metadata is broken
                    raise ValueError(
                        'Attribute {} in class {} is listed in the _collections_tags dictionary, but the '
                        '`child_tag` value is either not populated or None.'.format(attribute, cls))
            else:
                # it's a regular property
                handle_single(attribute, base_tag_name, xml_ns_key)
        return cls.from_dict(kwargs)
    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        """For XML serialization, to a dom element.

        Parameters
        ----------
        doc : ElementTree.ElementTree
            The xml Document
        tag : None|str
            The tag name. Defaults to the value of `self._tag` and then the class name if unspecified.
        ns_key : None|str
            The namespace prefix. This will be recursively passed down, unless overridden by an entry in the
            _child_xml_ns_key dictionary.
        parent : None|ElementTree.Element
            The parent element. Defaults to the document root element if unspecified.
        check_validity : bool
            Check whether the element is valid before serializing, by calling :func:`is_valid`.
        strict : bool
            Only used if `check_validity = True`. In that case, if `True` then raise an
            Exception (of appropriate type) if the structure is not valid, if `False` then log a
            hopefully helpful message.
        exclude : tuple
            Attribute names to exclude from this generic serialization. This allows for child classes
            to provide specific serialization for special properties, after using this super method.

        Returns
        -------
        ElementTree.Element
            The constructed dom element, already assigned to the parent element.
        """

        def serialize_attribute(node, the_tag, val, format_function, the_xml_ns_key):
            # set an xml attribute, qualified by namespace prefix when one applies
            if the_xml_ns_key is None or the_xml_ns_key == 'default':
                node.attrib[the_tag] = format_function(val)
            else:
                node.attrib['{}:{}'.format(the_xml_ns_key, the_tag)] = format_function(val)

        def serialize_array(node, the_tag, ch_tag, val, format_function, size_attrib, the_xml_ns_key):
            # serialize a one-dimensional float64 ndarray as an indexed container node
            if not isinstance(val, numpy.ndarray):
                # this should really never happen, unless someone broke the class badly by fiddling with
                # _collections_tag or the descriptor at runtime
                raise TypeError(
                    'The value associated with attribute {} is an instance of class {} should be an array based on '
                    'the metadata in the _collections_tags dictionary, but we received an instance of '
                    'type {}'.format(attribute, self.__class__.__name__, type(val)))
            if not len(val.shape) == 1:
                # again, I have no idea how we'd find ourselves here, unless inconsistencies have been introduced
                # into the descriptor
                raise ValueError(
                    'The value associated with attribute {} is an instance of class {}, if None, is required to be'
                    'a one-dimensional numpy.ndarray, but it has shape {}'.format(
                        attribute, self.__class__.__name__, val.shape))

            if val.size == 0:
                return  # serializing an empty array is dumb

            if val.dtype.name == 'float64':
                if the_xml_ns_key is None:
                    anode = create_new_node(doc, the_tag, parent=node)
                else:
                    anode = create_new_node(doc, '{}:{}'.format(the_xml_ns_key, the_tag), parent=node)
                anode.attrib[size_attrib] = str(val.size)
                # NB: `val` is rebound to each element below - the array itself is not needed afterwards
                for i, val in enumerate(val):
                    vnode = create_text_node(doc, ch_tag, format_function(val), parent=anode)
                    # Amplitude entries use zero-based indices; everything else is one-based
                    vnode.attrib['index'] = str(i) if ch_tag == 'Amplitude' else str(i + 1)
            else:
                # I have no idea how we'd find ourselves here, unless inconsistencies have been introduced
                # into the descriptor
                raise ValueError(
                    'The value associated with attribute {} is an instance of class {}, if None, is required to be'
                    'a numpy.ndarray of dtype float64 or object, but it has dtype {}'.format(
                        attribute, self.__class__.__name__, val.dtype))

        def serialize_list(node, ch_tag, val, format_function, the_xml_ns_key):
            # serialize each non-None entry of a list as its own child node
            if not isinstance(val, list):
                # this should really never happen, unless someone broke the class badly by fiddling with
                # _collections_tags or the descriptor?
                raise TypeError(
                    'The value associated with attribute {} is an instance of class {} should be a list based on '
                    'the metadata in the _collections_tags dictionary, but we received an instance of '
                    'type {}'.format(attribute, self.__class__.__name__, type(val)))

            if len(val) == 0:
                return  # serializing an empty list is dumb
            else:
                for entry in val:
                    if entry is not None:
                        serialize_plain(node, ch_tag, entry, format_function, the_xml_ns_key)

        def serialize_plain(node, the_tag, val, format_function, the_xml_ns_key):
            # may be called not at top level - if object array or list is present
            prim_tag = '{}:{}'.format(the_xml_ns_key, the_tag) if the_xml_ns_key is not None else the_tag
            if isinstance(val, (Serializable, SerializableArray)):
                val.to_node(doc, the_tag, ns_key=the_xml_ns_key, parent=node,
                            check_validity=check_validity, strict=strict)
            elif isinstance(val, ParametersCollection):
                val.to_node(doc, ns_key=the_xml_ns_key, parent=node, check_validity=check_validity, strict=strict)
            elif isinstance(val, bool):  # this must come before int, where it would evaluate as true
                create_text_node(doc, prim_tag, 'true' if val else 'false', parent=node)
            elif isinstance(val, str):
                create_text_node(doc, prim_tag, val, parent=node)
            elif isinstance(val, int):
                create_text_node(doc, prim_tag, format_function(val), parent=node)
            elif isinstance(val, float):
                create_text_node(doc, prim_tag, format_function(val), parent=node)
            elif isinstance(val, numpy.datetime64):
                out2 = str(val)
                # ensure an explicit trailing Z on the serialized datetime text
                out2 = out2 + 'Z' if out2[-1] != 'Z' else out2
                create_text_node(doc, prim_tag, out2, parent=node)
            elif isinstance(val, complex):
                # complex values serialize as a node with Real and Imag children
                cnode = create_new_node(doc, prim_tag, parent=node)
                if the_xml_ns_key is None:
                    create_text_node(doc, 'Real', format_function(val.real), parent=cnode)
                    create_text_node(doc, 'Imag', format_function(val.imag), parent=cnode)
                else:
                    create_text_node(doc, '{}:Real'.format(the_xml_ns_key), format_function(val.real), parent=cnode)
                    create_text_node(doc, '{}:Imag'.format(the_xml_ns_key), format_function(val.imag), parent=cnode)
            elif isinstance(val, date):  # should never exist at present
                create_text_node(doc, prim_tag, val.isoformat(), parent=node)
            elif isinstance(val, datetime):  # should never exist at present
                create_text_node(doc, prim_tag, val.isoformat(sep='T'), parent=node)
            else:
                raise ValueError(
                    'An entry for class {} using tag {} is of type {},\n'
                    'and serialization has not been implemented'.format(self.__class__.__name__, the_tag, type(val)))

        if check_validity:
            if not self.is_valid(stack=False):
                msg = "{} is not valid,\n\t" \
                      "and cannot be SAFELY serialized to XML according to the " \
                      "SICD standard.".format(self.__class__.__name__)
                if strict:
                    raise ValueError(msg)
                logger.warning(msg)

        # create the main node
        if (ns_key is not None and ns_key != 'default') and not tag.startswith(ns_key + ':'):
            nod = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)
        else:
            nod = create_new_node(doc, tag, parent=parent)

        # serialize the attributes
        for attribute in self._fields:
            if attribute in exclude:
                continue

            value = getattr(self, attribute)
            if value is None:
                continue

            fmt_func = self._get_formatter(attribute)
            base_tag_name = self._tag_override.get(attribute, attribute)
            if attribute in self._set_as_attribute:
                xml_ns_key = self._child_xml_ns_key.get(attribute, ns_key)
                serialize_attribute(nod, base_tag_name, value, fmt_func, xml_ns_key)
            else:
                # should we be using some namespace?
                if attribute in self._child_xml_ns_key:
                    xml_ns_key = self._child_xml_ns_key[attribute]
                else:
                    xml_ns_key = getattr(self, '_xml_ns_key', ns_key)
                if xml_ns_key == 'default':
                    xml_ns_key = None

                if isinstance(value, (numpy.ndarray, list)):
                    array_tag = self._collections_tags.get(attribute, None)
                    if array_tag is None:
                        raise AttributeError(
                            'The value associated with attribute {} in an instance of class {} is of type {}, '
                            'but nothing is populated in the _collection_tags dictionary.'.format(
                                attribute, self.__class__.__name__, type(value)))
                    child_tag = array_tag.get('child_tag', None)
                    if child_tag is None:
                        raise AttributeError(
                            'The value associated with attribute {} in an instance of class {} is of type {}, '
                            'but `child_tag` is not populated in the _collection_tags dictionary.'.format(
                                attribute, self.__class__.__name__, type(value)))
                    size_attribute = array_tag.get('size_attribute', 'size')
                    if isinstance(value, numpy.ndarray):
                        serialize_array(nod, base_tag_name, child_tag, value, fmt_func, size_attribute, xml_ns_key)
                    else:
                        serialize_list(nod, child_tag, value, fmt_func, xml_ns_key)
                else:
                    serialize_plain(nod, base_tag_name, value, fmt_func, xml_ns_key)
        return nod
@classmethod
def from_dict(cls, input_dict):
    """
    Deserialize from a dict (json-style) representation.

    The dictionary entries are mapped directly onto the constructor
    keyword arguments.

    Parameters
    ----------
    input_dict : dict
        Appropriate parameters dict instance for deserialization

    Returns
    -------
    Corresponding class instance
    """

    return cls(**input_dict)
def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
    """
    For json serialization.

    Parameters
    ----------
    check_validity : bool
        Check whether the element is valid before serializing, by calling :func:`is_valid`.
    strict : bool
        Only used if `check_validity = True`. In that case, if `True` then raise an
        Exception (of appropriate type) if the structure is not valid, if `False` then log a
        hopefully helpful message.
    exclude : tuple
        Attribute names to exclude from this generic serialization. This allows for child classes
        to provide specific serialization for special properties, after using this super method.

    Returns
    -------
    OrderedDict
        dict representation of class instance appropriate for direct json serialization.
    """

    # noinspection PyUnusedLocal
    def serialize_array(ch_tag, val):
        # a one-dimensional float64 numpy array attribute serializes as a list of floats;
        # NB: `attribute` in the messages below is the enclosing loop variable
        if not len(val.shape) == 1:
            # again, I have no idea how we'd find ourselves here, unless inconsistencies have been introduced
            # into the descriptor
            raise ValueError(
                'The value associated with attribute {} is an instance of class {}, if None, is required to be'
                'a one-dimensional numpy.ndarray, but it has shape {}'.format(
                    attribute, self.__class__.__name__, val.shape))
        if val.size == 0:
            return []
        if val.dtype.name == 'float64':
            return [float(el) for el in val]
        else:
            # I have no idea how we'd find ourselves here, unless inconsistencies have been introduced
            # into the descriptor
            raise ValueError(
                'The value associated with attribute {} is an instance of class {}. This is expected to be'
                'a numpy.ndarray of dtype float64, but it has dtype {}'.format(
                    attribute, self.__class__.__name__, val.dtype))

    def serialize_list(ch_tag, val):
        # None entries are silently dropped from the serialized list
        if len(val) == 0:
            return []
        else:
            return [serialize_plain(ch_tag, entry) for entry in val if entry is not None]

    def serialize_plain(field, val):
        # may be called not at top level - if object array or list is present
        if isinstance(val, Serializable):
            return val.to_dict(check_validity=check_validity, strict=strict)
        elif isinstance(val, SerializableArray):
            return val.to_json_list(check_validity=check_validity, strict=strict)
        elif isinstance(val, ParametersCollection):
            return val.to_dict()
        elif isinstance(val, (int, str, float)):
            return val
        elif isinstance(val, numpy.datetime64):
            out2 = str(val)
            return out2 + 'Z' if out2[-1] != 'Z' else out2
        elif isinstance(val, complex):
            return {'Real': val.real, 'Imag': val.imag}
        elif isinstance(val, datetime):  # probably never present
            # BUGFIX: datetime must be tested BEFORE date - datetime is a subclass
            # of date, so with the former ordering this branch was unreachable
            return val.isoformat(sep='T')
        elif isinstance(val, date):  # probably never present
            return val.isoformat()
        else:
            raise ValueError(
                'a entry for class {} using tag {} is of type {}, and serialization has not '
                'been implemented'.format(self.__class__.__name__, field, type(val)))

    if check_validity:
        if not self.is_valid(stack=False):
            msg = "{} is not valid,\n\t" \
                  "and cannot be SAFELY serialized to a dictionary valid in " \
                  "the SICD standard.".format(self.__class__.__name__)
            if strict:
                raise ValueError(msg)
            logger.warning(msg)

    out = OrderedDict()
    # serialize the fields in declaration order, skipping excluded/unset entries
    for attribute in self._fields:
        if attribute in exclude:
            continue

        value = getattr(self, attribute)
        if value is None:
            continue

        if isinstance(value, (numpy.ndarray, list)):
            # collection-valued attributes require metadata from _collections_tags
            array_tag = self._collections_tags.get(attribute, None)
            if array_tag is None:
                raise AttributeError(
                    'The value associated with attribute {} in an instance of class {} is of type {}, '
                    'but nothing is populated in the _collection_tags dictionary.'.format(
                        attribute, self.__class__.__name__, type(value)))
            child_tag = array_tag.get('child_tag', None)
            if child_tag is None:
                raise AttributeError(
                    'The value associated with attribute {} in an instance of class {} is of type {}, '
                    'but `child_tag` is not populated in the _collection_tags dictionary.'.format(
                        attribute, self.__class__.__name__, type(value)))
            if isinstance(value, numpy.ndarray):
                out[attribute] = serialize_array(child_tag, value)
            else:
                out[attribute] = serialize_list(child_tag, value)
        else:
            out[attribute] = serialize_plain(attribute, value)
    return out
def copy(self):
    """
    Create a deep copy of this instance, by round-tripping through the
    dictionary representation.

    Returns
    -------
    The deep copy.
    """

    serialized = self.to_dict(check_validity=False)
    return self.__class__.from_dict(copy.deepcopy(serialized))
def to_xml_bytes(self, urn=None, tag=None, check_validity=False, strict=DEFAULT_STRICT):
    """
    Gets a bytes array, which corresponds to the xml string in utf-8 encoding,
    identified as using the namespace given by `urn` (if given).

    Parameters
    ----------
    urn : None|str|dict
        The xml namespace string or dictionary describing the xml namespace.
    tag : None|str
        The root node tag to use. If not given, then the class name will be used.
    check_validity : bool
        Check whether the element is valid before serializing, by calling :func:`is_valid`.
    strict : bool
        Only used if `check_validity = True`. In that case, if `True` then raise an
        Exception (of appropriate type) if the structure is not valid, if `False` then log a
        hopefully helpful message.

    Returns
    -------
    bytes
        bytes array from :func:`ElementTree.tostring()` call.
    """

    root_tag = self.__class__.__name__ if tag is None else tag
    etree = ElementTree.ElementTree()
    root_node = self.to_node(
        etree, root_tag, ns_key=getattr(self, '_xml_ns_key', None),
        check_validity=check_validity, strict=strict)
    # stamp the requested namespace declaration(s) onto the root node
    if isinstance(urn, str):
        root_node.attrib['xmlns'] = urn
    elif isinstance(urn, dict):
        for key, entry in urn.items():
            root_node.attrib[key] = entry
    elif urn is not None:
        raise TypeError('Expected string or dictionary of string for urn, got type {}'.format(type(urn)))
    return ElementTree.tostring(root_node, encoding='utf-8', method='xml')
def to_xml_string(self, urn=None, tag=None, check_validity=False, strict=DEFAULT_STRICT):
    """
    Gets a xml string with utf-8 encoding, identified as using the namespace
    given by `urn` (if given).

    Parameters
    ----------
    urn : None|str|dict
        The xml namespace or dictionary describing the xml namespace.
    tag : None|str
        The root node tag to use. If not given, then the class name will be used.
    check_validity : bool
        Check whether the element is valid before serializing, by calling :func:`is_valid`.
    strict : bool
        Only used if `check_validity = True`. In that case, if `True` then raise an
        Exception (of appropriate type) if the structure is not valid, if `False` then log a
        hopefully helpful message.

    Returns
    -------
    str
        xml string from :func:`ElementTree.tostring()` call.
    """

    # simply decode the bytes rendering
    raw_bytes = self.to_xml_bytes(
        urn=urn, tag=tag, check_validity=check_validity, strict=strict)
    return raw_bytes.decode('utf-8')
class Arrayable(object):
    """
    Abstract mix-in specifying the minimal functionality for converting a
    class instance to/from an array type representation.
    """

    @classmethod
    def from_array(cls, array):
        """
        Construct an instance from an array type object.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple

        Returns
        -------
        Arrayable
        """

        raise NotImplementedError

    def get_array(self, dtype=numpy.float64):
        """
        Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
        """

        raise NotImplementedError

    def __getitem__(self, item):
        # indexing delegates directly to the array representation
        return self.get_array()[item]
class SerializableArray(object):
    """
    Wrapper for an array of homogeneous `Serializable` children, corresponding
    to an xml node which contains a collection of identically tagged child nodes.
    """

    __slots__ = (
        '_child_tag', '_child_type', '_array', '_name', '_minimum_length',
        '_maximum_length', '_xml_ns', '_xml_ns_key')
    _default_minimum_length = 0
    _default_maximum_length = 2**32
    # whether the serialized parent node is stamped with a size attribute
    _set_size = True
    _size_var_name = 'size'
    # whether a (1-based) index attribute is maintained on each child
    _set_index = True
    _index_var_name = 'index'

    def __init__(self, coords=None, name=None, child_tag=None, child_type=None,
                 minimum_length=None, maximum_length=None, _xml_ns=None, _xml_ns_key=None):
        """
        Parameters
        ----------
        coords : None|numpy.ndarray|list|tuple
            The initial array contents - see :func:`set_array`.
        name : str
            The name of this attribute in the parent structure - required.
        child_tag : str
            The xml tag for each serialized child node - required.
        child_type : type
            The type of each child - required to be a `Serializable` subclass.
        minimum_length : None|int
            Minimum permitted array size - defaults to `_default_minimum_length`.
        maximum_length : None|int
            Maximum permitted array size - defaults to `_default_maximum_length`,
            and is never permitted below `minimum_length`.
        _xml_ns : None|dict
            The xml namespace dictionary.
        _xml_ns_key : None|str
            The xml namespace key.
        """
        self._xml_ns = _xml_ns
        self._xml_ns_key = _xml_ns_key
        self._array = None
        if name is None:
            raise ValueError('The name parameter is required.')
        if not isinstance(name, str):
            raise TypeError(
                'The name parameter is required to be an instance of str, got {}'.format(type(name)))
        self._name = name
        if child_tag is None:
            raise ValueError('The child_tag parameter is required.')
        if not isinstance(child_tag, str):
            raise TypeError(
                'The child_tag parameter is required to be an instance of str, got {}'.format(type(child_tag)))
        self._child_tag = child_tag
        if child_type is None:
            raise ValueError('The child_type parameter is required.')
        if not issubclass(child_type, Serializable):
            raise TypeError('The child_type is required to be a subclass of Serializable.')
        self._child_type = child_type
        if minimum_length is None:
            self._minimum_length = self._default_minimum_length
        else:
            self._minimum_length = max(int(minimum_length), 0)
        if maximum_length is None:
            self._maximum_length = max(self._default_maximum_length, self._minimum_length)
        else:
            self._maximum_length = max(int(maximum_length), self._minimum_length)
        self.set_array(coords)

    def __len__(self):
        # length is the size of the underlying array (0 when unpopulated)
        return self.size

    def __getitem__(self, index):
        # NB: raises TypeError if the array is unpopulated (self._array is None)
        return self._array[index]

    def __setitem__(self, index, value):
        # value is coerced to the child type; None is explicitly disallowed
        if value is None:
            raise TypeError('Elements of {} must be of type {}, not None'.format(self._name, self._child_type))
        self._array[index] = parse_serializable(value, self._name, self, self._child_type)

    def log_validity_error(self, msg):
        """
        Log a validation error message.

        Parameters
        ----------
        msg : str
        """

        valid_logger.error('{}:{} {}'.format(self.__class__.__name__, self._name, msg))

    def log_validity_warning(self, msg):
        """
        Log a validation warning message.

        Parameters
        ----------
        msg : str
        """

        valid_logger.warning('{}:{} {}'.format(self.__class__.__name__, self._name, msg))

    def log_validity_info(self, msg):
        """
        Log a validation info message.

        Parameters
        ----------
        msg : str
        """

        valid_logger.info('{}:{} {}'.format(self.__class__.__name__, self._name, msg))

    def is_valid(self, recursive=False, stack=False):
        """Returns the validity of this object according to the schema. This is done by inspecting that the
        array is populated.

        Parameters
        ----------
        recursive : bool
            True if we recursively check that children are also valid. This may result in verbose (i.e. noisy) logging.
        stack : bool
            Should we print error messages recursively, for a stack type situation?

        Returns
        -------
        bool
            condition for validity of this element
        """

        if self._array is None:
            self.log_validity_error("Unpopulated array")
            return False
        if not recursive:
            return True
        valid_children = True
        for i, entry in enumerate(self._array):
            good = entry.is_valid(recursive=True, stack=stack)
            if not good and stack:
                self.log_validity_error("Issue discovered with entry {}".format(i))
            # accumulate - one invalid child makes the whole array invalid
            valid_children &= good
        return valid_children

    @property
    def size(self):  # type: () -> int
        """
        int: the size of the array.
        """

        if self._array is None:
            return 0
        else:
            return self._array.size

    def get_array(self, dtype='object', **kwargs):
        """Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return.
        kwargs : keyword arguments for calls of the form child.get_array(**kwargs)

        Returns
        -------
        numpy.ndarray
            * If `dtype` == 'object'`, then the literal array of
              child objects is returned. *Note: Beware of mutating the elements.*
            * If `dtype` has any other value, then the return value will be tried
              as `numpy.array([child.get_array(dtype=dtype, **kwargs) for child in array]`.
            * If there is any error, then `None` is returned.
        """

        if dtype in ['object', numpy.dtype('object')]:
            return self._array
        else:
            # noinspection PyBroadException
            try:
                return numpy.array(
                    [child.get_array(dtype=dtype, **kwargs) for child in self._array], dtype=dtype)
            except Exception:
                # deliberately best-effort - any failure yields None
                return None

    def set_array(self, coords):
        """
        Sets the underlying array.

        Parameters
        ----------
        coords : numpy.ndarray|list|tuple

        Returns
        -------
        None
        """

        if coords is None:
            self._array = None
            return
        array = parse_serializable_array(
            coords, 'coords', self, self._child_type, self._child_tag)
        if not (self._minimum_length <= array.size <= self._maximum_length):
            raise ValueError(
                'Field {} is required to be an array with {} <= length <= {}, and input of length {} '
                'was received'.format(self._name, self._minimum_length, self._maximum_length, array.size))

        self._array = array
        self._check_indices()

    def _check_indices(self):
        # best-effort re-stamp of the (1-based) index attribute on each child;
        # children which reject the attribute are silently skipped
        if not self._set_index:
            return
        for i, entry in enumerate(self._array):
            try:
                setattr(entry, self._index_var_name, i+1)
            except (AttributeError, ValueError, TypeError):
                continue

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT):
        """
        Serialize this array to an xml node (with one child node per entry),
        attached to the given parent. Returns None for an empty array.
        """
        if self.size == 0:
            return None  # nothing to be done
        if ns_key is None:
            anode = create_new_node(doc, tag, parent=parent)
        else:
            anode = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)
        if self._set_size:
            anode.attrib[self._size_var_name] = str(self.size)
        for i, entry in enumerate(self._array):
            entry.to_node(doc, self._child_tag, ns_key=ns_key, parent=anode,
                          check_validity=check_validity, strict=strict)
        return anode

    @classmethod
    def from_node(cls, node, name, child_tag, child_type, **kwargs):
        # the xml node is simply handed to the constructor as the coords argument
        return cls(coords=node, name=name, child_tag=child_tag, child_type=child_type, **kwargs)

    def to_json_list(self, check_validity=False, strict=DEFAULT_STRICT):
        """
        For json serialization.

        Parameters
        ----------
        check_validity : bool
            passed through to child_type.to_dict() method.
        strict : bool
            passed through to child_type.to_dict() method.

        Returns
        -------
        List[dict]
        """

        if self.size == 0:
            return []
        return [entry.to_dict(check_validity=check_validity, strict=strict) for entry in self._array]
class ParametersCollection(object):
    """
    Collection of named, free-form string parameters, corresponding to a
    collection of xml nodes sharing one tag and distinguished by a `name`
    attribute.
    """

    __slots__ = ('_name', '_child_tag', '_dict', '_xml_ns', '_xml_ns_key')

    def __init__(self, collection=None, name=None, child_tag='Parameters', _xml_ns=None, _xml_ns_key=None):
        """
        Parameters
        ----------
        collection : None|dict
            The initial parameters dictionary, if any.
        name : str
            The name of this attribute in the parent structure - required.
        child_tag : str
            The xml tag for each serialized child node - required.
        _xml_ns : None|dict
            The xml namespace dictionary.
        _xml_ns_key : None|str
            The xml namespace key.
        """
        self._dict = None
        self._xml_ns = _xml_ns
        self._xml_ns_key = _xml_ns_key
        if name is None:
            raise ValueError('The name parameter is required.')
        if not isinstance(name, str):
            raise TypeError(
                'The name parameter is required to be an instance of str, got {}'.format(type(name)))
        self._name = name
        if child_tag is None:
            raise ValueError('The child_tag parameter is required.')
        if not isinstance(child_tag, str):
            raise TypeError(
                'The child_tag parameter is required to be an instance of str, got {}'.format(type(child_tag)))
        self._child_tag = child_tag
        self.set_collection(collection)

    def __delitem__(self, key):
        # deleting from an unpopulated collection is silently a no-op
        if self._dict is not None:
            del self._dict[key]

    def __getitem__(self, key):
        if self._dict is not None:
            return self._dict[key]
        raise KeyError('Dictionary does not contain key {}'.format(key))

    def __setitem__(self, name, value):
        # both the parameter name and its value are required to be strings
        if not isinstance(name, str):
            raise ValueError('Parameter name must be of type str, got {}'.format(type(name)))
        if not isinstance(value, str):
            # BUGFIX: this message previously (wrongly) referred to the parameter name
            raise ValueError('Parameter value must be of type str, got {}'.format(type(value)))

        if self._dict is None:
            self._dict = OrderedDict()
        self._dict[name] = value

    def get(self, key, default=None):
        """Analogous to `dict.get` - fetch by key with a fallback default."""
        if self._dict is not None:
            return self._dict.get(key, default)
        return default

    def set_collection(self, value):
        """Replace the underlying dictionary; `None` clears the collection."""
        if value is None:
            self._dict = None
        else:
            self._dict = parse_parameters_collection(value, self._name, self)

    def get_collection(self):
        """Gets the underlying dictionary (possibly None). Beware of mutation."""
        return self._dict

    # noinspection PyUnusedLocal
    def to_node(self, doc, ns_key=None, parent=None, check_validity=False, strict=False):
        """Serialize each entry as a text node with a `name` attribute, attached to parent."""
        if self._dict is None:
            return None  # nothing to be done
        for name in self._dict:
            value = self._dict[name]
            # defensive - entries set through __setitem__ are always str already
            if not isinstance(value, str):
                value = str(value)
            if ns_key is None:
                node = create_text_node(doc, self._child_tag, value, parent=parent)
            else:
                node = create_text_node(doc, '{}:{}'.format(ns_key, self._child_tag), value, parent=parent)
            node.attrib['name'] = name

    # noinspection PyUnusedLocal
    def to_dict(self, check_validity=False, strict=False):
        """For json serialization - returns a deep copy of the underlying dictionary."""
        return copy.deepcopy(self._dict)
| 68,337 | 38.207114 | 119 | py |
sarpy | sarpy-master/sarpy/io/xml/descriptors.py | """
This module contains the base objects for use in base xml/serializable functionality.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import re
from xml.etree import ElementTree
from weakref import WeakKeyDictionary
import numpy
from numpy.linalg import norm
from sarpy.io.xml.base import DEFAULT_STRICT, get_node_value, find_children, \
Arrayable, ParametersCollection, SerializableArray, \
parse_str, parse_bool, parse_int, parse_float, parse_complex, parse_datetime, \
parse_serializable, parse_serializable_list
logger = logging.getLogger(__name__)
_length_text = 'Specified minimum length is `{}`,\n\t' \
'while specified maximum length is `{}`'
_type_text = 'Field {} of class {} got incompatible type {}.'
class BasicDescriptor(object):
    """A descriptor object for reusable properties. Note that it is required that the calling instance is hashable."""
    _typ_string = None

    def __init__(self, name, required, strict=DEFAULT_STRICT, default_value=None, docstring=''):
        """
        Parameters
        ----------
        name : str
            The attribute name.
        required : tuple|list|set
            The collection of required attribute names - membership determines
            whether this attribute is required.
        strict : bool
            If `True`, then raise on missing/invalid values, otherwise log.
        default_value : object
            The default value for the attribute, if any.
        docstring : str
            The docstring body, which gets decorated with type/length/required details.
        """
        self.data = WeakKeyDictionary()  # our instance reference dictionary
        # WeakDictionary use is subtle here. A reference to a particular class instance in this dictionary
        # should not be the thing keeping a particular class instance from being destroyed.
        self.name = name
        self.required = (name in required)
        self.strict = strict
        self.default_value = default_value

        self.__doc__ = docstring
        self._format_docstring()

    def _format_docstring(self):
        # assemble the full docstring from the type prefix, the provided body,
        # any subclass-specific suffix, length details and required/optional status
        docstring = self.__doc__
        if docstring is None:
            docstring = ''
        if (self._typ_string is not None) and (not docstring.startswith(self._typ_string)):
            docstring = '{} {}'.format(self._typ_string, docstring)

        suff = self._docstring_suffix()
        if suff is not None:
            docstring = '{} {}'.format(docstring, suff)

        lenstr = self._len_string()
        if lenstr is not None:
            docstring = '{} {}'.format(docstring, lenstr)

        if self.required:
            docstring = '{} {}'.format(docstring, ' **Required.**')
        else:
            docstring = '{} {}'.format(docstring, ' **Optional.**')
        self.__doc__ = docstring

    def _len_string(self):
        """Form the length constraint contribution to the docstring, or None."""
        minl = getattr(self, 'minimum_length', None)
        # BUGFIX: this previously fetched 'minimum_length' a second time, so the
        # generated docstring always saw minl == maxl and reported " exactly ..."
        maxl = getattr(self, 'maximum_length', None)
        def_minl = getattr(self, '_DEFAULT_MIN_LENGTH', None)
        def_maxl = getattr(self, '_DEFAULT_MAX_LENGTH', None)
        if minl is not None and maxl is not None:
            if minl == def_minl and maxl == def_maxl:
                return None

            lenstr = ' Must have length '
            if minl == def_minl:
                lenstr += '<= {0:d}.'.format(maxl)
            elif maxl == def_maxl:
                lenstr += '>= {0:d}.'.format(minl)
            elif minl == maxl:
                lenstr += ' exactly {0:d}.'.format(minl)
            else:
                lenstr += 'in the range [{0:d}, {1:d}].'.format(minl, maxl)
            return lenstr
        else:
            return None

    def _docstring_suffix(self):
        # hook for subclasses to append enumeration/bounds details
        return None

    def __get__(self, instance, owner):
        """The getter.

        Parameters
        ----------
        instance : object
            the calling class instance
        owner : object
            the type of the class - that is, the actual object to which this descriptor is assigned

        Returns
        -------
        object
            the return value
        """

        if instance is None:
            # this has been access on the class, so return the class
            return self

        fetched = self.data.get(instance, self.default_value)
        if fetched is not None or not self.required:
            return fetched
        else:
            msg = 'Required field {} of class {} is not populated.'.format(self.name, instance.__class__.__name__)
            if self.strict:
                raise AttributeError(msg)
            else:
                logger.debug(msg)  # NB: this is at debug level to not be too verbose
            return fetched

    def __set__(self, instance, value):
        """The setter method.

        Parameters
        ----------
        instance : object
            the calling class instance
        value
            the value to use in setting - the type depends on the specific extension of this base class

        Returns
        -------
        bool
            this base class, and only this base class, handles the required compliance and None behavior and has
            a return. This returns True if this the setting value was None, and False otherwise.
        """

        # NOTE: This is intended to handle this case for every extension of this class. Hence, the boolean return,
        # which extensions SHOULD NOT implement. This is merely to follow DRY principles.
        if value is None:
            if self.default_value is not None:
                self.data[instance] = self.default_value
                return True
            elif self.required:
                if self.strict:
                    raise ValueError(
                        'Attribute {} of class {} cannot be assigned '
                        'None.'.format(self.name, instance.__class__.__name__))
                else:
                    logger.debug(  # NB: this is at debuglevel to not be too verbose
                        'Required attribute {} of class {} has been set to None.'.format(
                            self.name, instance.__class__.__name__))
            self.data[instance] = None
            return True
        # note that the remainder must be implemented in each extension
        return False  # this is probably a bad habit, but this returns something for convenience alone
class StringDescriptor(BasicDescriptor):
    """Descriptor managing a simple string-valued property."""
    _typ_string = 'str:'

    def __init__(self, name, required, strict=DEFAULT_STRICT, default_value=None, docstring=None):
        super(StringDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)

    def __set__(self, instance, value):
        # the base class consumes None values (returning True); otherwise parse and store
        handled = super(StringDescriptor, self).__set__(instance, value)
        if not handled:
            self.data[instance] = parse_str(value, self.name, instance)
class StringListDescriptor(BasicDescriptor):
    """A descriptor for properties for an array type item for specified extension of string"""
    _typ_string = 'List[str]:'
    _DEFAULT_MIN_LENGTH = 0
    _DEFAULT_MAX_LENGTH = 2 ** 32

    def __init__(self, name, required, strict=DEFAULT_STRICT, minimum_length=None, maximum_length=None,
                 default_value=None, docstring=None):
        self.minimum_length = self._DEFAULT_MIN_LENGTH if minimum_length is None else int(minimum_length)
        self.maximum_length = self._DEFAULT_MAX_LENGTH if maximum_length is None else int(maximum_length)
        if self.minimum_length > self.maximum_length:
            raise ValueError(_length_text.format(self.minimum_length, self.maximum_length))
        super(StringListDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)

    def __set__(self, instance, value):
        def set_value(new_value):
            # BUGFIX: these messages previously used `value.size` - `value` may be a
            # str/Element/list, none of which have a `size` attribute, so composing
            # the message itself raised AttributeError. Use len(new_value) instead.
            if len(new_value) < self.minimum_length:
                msg = 'Attribute {} of class {} is a string list of size {},\n\t' \
                      'and must have length at least {}.'.format(
                          self.name, instance.__class__.__name__, len(new_value), self.minimum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.error(msg)
            if len(new_value) > self.maximum_length:
                msg = 'Attribute {} of class {} is a string list of size {},\n\t' \
                      'and must have length no greater than {}.'.format(
                          self.name, instance.__class__.__name__, len(new_value), self.maximum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.error(msg)
            self.data[instance] = new_value

        if super(StringListDescriptor, self).__set__(instance, value):  # the None handler...kinda hacky
            return
        if isinstance(value, str):
            set_value([value, ])
        elif isinstance(value, ElementTree.Element):
            set_value([get_node_value(value), ])
        elif isinstance(value, list):
            if len(value) == 0 or isinstance(value[0], str):
                set_value(value)
            elif isinstance(value[0], ElementTree.Element):
                set_value([get_node_value(nod) for nod in value])
            # NOTE(review): a list whose first entry is neither str nor Element is
            # silently ignored here - preserved as-is to avoid a behavior change
        else:
            raise TypeError(_type_text.format(self.name, instance.__class__.__name__, type(value)))
class StringEnumDescriptor(BasicDescriptor):
    """Descriptor for a string property restricted to an enumerated collection of values.

    **This implicitly assumes that the valid entries are upper case.**"""
    _typ_string = 'str:'

    def __init__(self, name, values, required, strict=DEFAULT_STRICT, default_value=None, docstring=None):
        self.values = values
        super(StringEnumDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)
        # a default outside the enumeration is meaningless - discard it
        if (self.default_value is not None) and (self.default_value not in self.values):
            self.default_value = None

    def _docstring_suffix(self):
        suff = ' Takes values in :code:`{}`.'.format(self.values)
        if self.default_value is not None:
            suff += ' Default value is :code:`{}`.'.format(self.default_value)
        return suff

    def __set__(self, instance, value):
        if value is None:
            # fall back to the default when one exists, otherwise defer to base handling
            if self.default_value is None:
                super(StringEnumDescriptor, self).__set__(instance, value)
            else:
                self.data[instance] = self.default_value
            return

        parsed = parse_str(value, self.name, instance)
        if parsed in self.values:
            self.data[instance] = parsed
            return

        msg = 'Attribute {} of class {} received {},\n\t' \
              'but values ARE REQUIRED to be one of {}'.format(
                  self.name, instance.__class__.__name__, value, self.values)
        if self.strict:
            raise ValueError(msg)
        logger.error(msg)
        # lenient mode retains the out-of-enumeration value
        self.data[instance] = parsed
class StringRegexDescriptor(BasicDescriptor):
    """Descriptor for a string property constrained to fully match a regular expression."""
    _typ_string = 'str:'

    def __init__(self, name, pattern, required, strict=DEFAULT_STRICT, default_value=None, docstring=None):
        self.pattern = pattern
        self.matcher = re.compile(pattern)
        super(StringRegexDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)
        # a default which fails the pattern itself is meaningless - discard it
        if (self.default_value is not None) and (not self.matcher.fullmatch(self.default_value)):
            self.default_value = None

    def _docstring_suffix(self):
        suff = ' Takes values matching :code:`{}`.'.format(self.pattern)
        if self.default_value is not None:
            suff += ' Default value is :code:`{}`.'.format(self.default_value)
        return suff

    def __set__(self, instance, value):
        if value is None:
            # fall back to the default when one exists, otherwise defer to base handling
            if self.default_value is None:
                super(StringRegexDescriptor, self).__set__(instance, value)
            else:
                self.data[instance] = self.default_value
            return

        parsed = parse_str(value, self.name, instance)
        if self.matcher.fullmatch(parsed):
            self.data[instance] = parsed
            return

        msg = 'Attribute {} of class {} received {},\n\t' \
              'but values ARE REQUIRED to match {}'.format(
                  self.name, instance.__class__.__name__, value, self.pattern)
        if self.strict:
            raise ValueError(msg)
        logger.error(msg)
        # lenient mode retains the non-conforming value
        self.data[instance] = parsed
class BooleanDescriptor(BasicDescriptor):
    """Descriptor managing a boolean-valued property."""
    _typ_string = 'bool:'

    def __init__(self, name, required, strict=DEFAULT_STRICT, default_value=None, docstring=None):
        super(BooleanDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)

    def __set__(self, instance, value):
        # the base class consumes None values (returning True)
        if super(BooleanDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_bool(value, self.name, instance)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to `bool`\n\t'
                'for field {} of class {} with exception {} - {}\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.name, instance.__class__.__name__, type(e), e))
            parsed = None
        self.data[instance] = parsed
class IntegerDescriptor(BasicDescriptor):
    """Descriptor managing an integer-valued property, optionally bounded."""
    _typ_string = 'int:'

    def __init__(self, name, required, strict=DEFAULT_STRICT, bounds=None, default_value=None, docstring=None):
        self.bounds = bounds
        super(IntegerDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)
        # an out-of-bounds default is meaningless - discard it
        if (self.default_value is not None) and not self._in_bounds(self.default_value):
            self.default_value = None

    def _docstring_suffix(self):
        if self.bounds is None:
            return ''
        return 'Must be in the range [{}, {}]'.format(*self.bounds)

    def _in_bounds(self, value):
        # an absent bound (None) imposes no constraint on that side
        if self.bounds is None:
            return True
        lower, upper = self.bounds
        lower_ok = (lower is None) or (lower <= value)
        upper_ok = (upper is None) or (value <= upper)
        return lower_ok and upper_ok

    def __set__(self, instance, value):
        # the base class consumes None values (returning True)
        if super(IntegerDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_int(value, self.name, instance)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to `int`\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.name, instance.__class__.__name__, type(e), e))
            self.data[instance] = None
            return
        if not self._in_bounds(parsed):
            msg = 'Attribute {} of class {} is required by standard\n\t' \
                  'to take value between {}. Invalid value {}'.format(
                      self.name, instance.__class__.__name__, self.bounds, parsed)
            if self.strict:
                raise ValueError(msg)
            # lenient mode logs the violation, but retains the value
            logger.error(msg)
        self.data[instance] = parsed
class IntegerEnumDescriptor(BasicDescriptor):
    """Descriptor for an integer property restricted to an enumerated collection of values."""
    _typ_string = 'int:'

    def __init__(self, name, values, required, strict=DEFAULT_STRICT, default_value=None, docstring=None):
        self.values = values
        super(IntegerEnumDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)
        # a default outside the enumeration is meaningless - discard it
        if (self.default_value is not None) and (self.default_value not in self.values):
            self.default_value = None

    def _docstring_suffix(self):
        return 'Must take one of the values in {}.'.format(self.values)

    def __set__(self, instance, value):
        # the base class consumes None values (returning True)
        if super(IntegerEnumDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_int(value, self.name, instance)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to `int`\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.name, instance.__class__.__name__, type(e), e))
            self.data[instance] = None
            return
        if parsed not in self.values:
            msg = 'Attribute {} of class {} must take value in {}.\n\t' \
                  'Invalid value {}.'.format(
                      self.name, instance.__class__.__name__, self.values, parsed)
            if self.strict:
                raise ValueError(msg)
            # lenient mode logs the violation, but retains the value
            logger.error(msg)
        self.data[instance] = parsed
class IntegerListDescriptor(BasicDescriptor):
    """A descriptor for integer list type properties"""
    _typ_string = 'list[int]:'
    _DEFAULT_MIN_LENGTH = 0
    _DEFAULT_MAX_LENGTH = 2 ** 32

    def __init__(self, name, tag_dict, required, strict=DEFAULT_STRICT,
                 minimum_length=None, maximum_length=None, docstring=None):
        self.child_tag = tag_dict[name]['child_tag']
        self.minimum_length = self._DEFAULT_MIN_LENGTH if minimum_length is None else int(minimum_length)
        self.maximum_length = self._DEFAULT_MAX_LENGTH if maximum_length is None else int(maximum_length)
        if self.minimum_length > self.maximum_length:
            raise ValueError(_length_text.format(self.minimum_length, self.maximum_length))
        super(IntegerListDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)

    def __set__(self, instance, value):
        def set_value(new_value):
            # BUGFIX: these messages previously used `value.size` - `value` may be an
            # int/Element/list, none of which have a `size` attribute, so composing
            # the message itself raised AttributeError. Use len(new_value) instead.
            if len(new_value) < self.minimum_length:
                msg = 'Attribute {} of class {} is an integer list of size {},\n\t' \
                      'and must have size at least {}.'.format(
                          self.name, instance.__class__.__name__, len(new_value), self.minimum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.info(msg)
            if len(new_value) > self.maximum_length:
                msg = 'Attribute {} of class {} is an integer list of size {},\n\t' \
                      'and must have size no larger than {}.'.format(
                          self.name, instance.__class__.__name__, len(new_value), self.maximum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.info(msg)
            self.data[instance] = new_value

        if super(IntegerListDescriptor, self).__set__(instance, value):  # the None handler...kinda hacky
            return
        if isinstance(value, int):
            set_value([value, ])
        elif isinstance(value, ElementTree.Element):
            set_value([int(get_node_value(value)), ])
        elif isinstance(value, list):
            if len(value) == 0 or isinstance(value[0], int):
                set_value(value)
            elif isinstance(value[0], ElementTree.Element):
                set_value([int(get_node_value(nod)) for nod in value])
            # NOTE(review): a list whose first entry is neither int nor Element is
            # silently ignored here - preserved as-is to avoid a behavior change
        else:
            raise TypeError(_type_text.format(self.name, instance.__class__.__name__, type(value)))
class FloatDescriptor(BasicDescriptor):
    """Descriptor managing a float-valued property, optionally bounded."""
    _typ_string = 'float:'

    def __init__(self, name, required, strict=DEFAULT_STRICT, bounds=None, default_value=None, docstring=None):
        self.bounds = bounds
        super(FloatDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)
        # an out-of-bounds default is meaningless - discard it
        if (self.default_value is not None) and not self._in_bounds(self.default_value):
            self.default_value = None

    def _docstring_suffix(self):
        if self.bounds is None:
            return ''
        return 'Must be in the range [{}, {}]'.format(*self.bounds)

    def _in_bounds(self, value):
        # an absent bound (None) imposes no constraint on that side
        if self.bounds is None:
            return True
        lower, upper = self.bounds
        lower_ok = (lower is None) or (lower <= value)
        upper_ok = (upper is None) or (value <= upper)
        return lower_ok and upper_ok

    def __set__(self, instance, value):
        # the base class consumes None values (returning True)
        if super(FloatDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_float(value, self.name, instance)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to `float`\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.name, instance.__class__.__name__, type(e), e))
            self.data[instance] = None
            return
        if not self._in_bounds(parsed):
            msg = 'Attribute {} of class {}\n\t' \
                  'is required by standard to take value between {}.'.format(
                      self.name, instance.__class__.__name__, self.bounds)
            if self.strict:
                raise ValueError(msg)
            # lenient mode logs the violation (at info level), but retains the value
            logger.info(msg)
        self.data[instance] = parsed
class FloatListDescriptor(BasicDescriptor):
    """A descriptor for float list type properties"""
    _typ_string = 'list[float]:'
    _DEFAULT_MIN_LENGTH = 0
    _DEFAULT_MAX_LENGTH = 2 ** 32
    def __init__(self, name, tag_dict, required, strict=DEFAULT_STRICT,
                 minimum_length=None, maximum_length=None, docstring=None):
        self.child_tag = tag_dict[name]['child_tag']
        self.minimum_length = self._DEFAULT_MIN_LENGTH if minimum_length is None else int(minimum_length)
        self.maximum_length = self._DEFAULT_MAX_LENGTH if maximum_length is None else int(maximum_length)
        if self.minimum_length > self.maximum_length:
            raise ValueError(_length_text.format(self.minimum_length, self.maximum_length))
        super(FloatListDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        """
        Parse and set the float list value from a float, XML node(s), or list.

        Length violations raise ValueError when strict, otherwise are logged.
        """
        def set_value(new_value):
            # report sizes via len(new_value) - `value` may be a plain list,
            # which has no `.size` attribute, so the previous messages raised
            # AttributeError exactly when a length violation was being reported
            if len(new_value) < self.minimum_length:
                msg = 'Attribute {} of class {} is a float list of size {},\n\t' \
                      'and must have size at least {}.'.format(
                        self.name, instance.__class__.__name__, len(new_value), self.minimum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.info(msg)
            if len(new_value) > self.maximum_length:
                msg = 'Attribute {} of class {} is a float list of size {},\n\t' \
                      'and must have size no larger than {}.'.format(
                        self.name, instance.__class__.__name__, len(new_value), self.maximum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.info(msg)
            self.data[instance] = new_value
        if super(FloatListDescriptor, self).__set__(instance, value):  # the None handler...kinda hacky
            return
        if isinstance(value, float):
            set_value([value, ])
        elif isinstance(value, ElementTree.Element):
            set_value([float(get_node_value(value)), ])
        elif isinstance(value, list):
            # NOTE(review): a non-empty list whose first entry is neither float
            # nor ElementTree element is silently ignored - preserved behavior
            if len(value) == 0 or isinstance(value[0], float):
                set_value(value)
            elif isinstance(value[0], ElementTree.Element):
                set_value([float(get_node_value(nod)) for nod in value])
        else:
            raise TypeError(_type_text.format(self.name, instance.__class__.__name__, type(value)))
class ComplexDescriptor(BasicDescriptor):
    """Descriptor for complex valued properties."""
    _typ_string = 'complex:'
    def __init__(self, name, required, strict=DEFAULT_STRICT, default_value=None, docstring=None):
        super(ComplexDescriptor, self).__init__(
            name, required, strict=strict, default_value=default_value, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(ComplexDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_complex(value, self.name, instance)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to `complex`\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.name, instance.__class__.__name__, type(e), e))
            parsed = None
        self.data[instance] = parsed
class FloatArrayDescriptor(BasicDescriptor):
    """A descriptor for one-dimensional float64 array type properties"""
    _DEFAULT_MIN_LENGTH = 0
    _DEFAULT_MAX_LENGTH = 2 ** 32
    _typ_string = 'numpy.ndarray[float64]:'
    def __init__(self, name, tag_dict, required, strict=DEFAULT_STRICT, minimum_length=None, maximum_length=None,
                 docstring=None):
        self.child_tag = tag_dict[name]['child_tag']
        self.minimum_length = self._DEFAULT_MIN_LENGTH if minimum_length is None else int(minimum_length)
        self.maximum_length = self._DEFAULT_MAX_LENGTH if maximum_length is None else int(maximum_length)
        if self.minimum_length > self.maximum_length:
            raise ValueError(_length_text.format(self.minimum_length, self.maximum_length))
        super(FloatArrayDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        """
        Parse and set the array value from an ndarray, XML node, or list.

        Length violations raise ValueError when strict, otherwise are logged.
        """
        def set_value(new_val):
            # report sizes via len(new_val) - `value` may be a list or an
            # ElementTree element, neither of which has a `.size` attribute
            if len(new_val) < self.minimum_length:
                msg = 'Attribute {} of class {} is a double array of size {},\n\t' \
                      'and must have size at least {}.'.format(
                        self.name, instance.__class__.__name__, len(new_val), self.minimum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.error(msg)
            if len(new_val) > self.maximum_length:
                msg = 'Attribute {} of class {} is a double array of size {},\n\t' \
                      'and must have size no larger than {}.'.format(
                        self.name, instance.__class__.__name__, len(new_val), self.maximum_length)
                if self.strict:
                    raise ValueError(msg)
                else:
                    logger.error(msg)
            self.data[instance] = new_val
        if super(FloatArrayDescriptor, self).__set__(instance, value):  # the None handler...kinda hacky
            return
        if isinstance(value, numpy.ndarray):
            # the old test `not (len(value) == 1) and (numpy.dtype.name == 'float64')`
            # was wrong - `numpy.dtype.name` is a truthy class attribute, so it
            # rejected every array of length != 1 and never checked dtype/dimension
            if not (value.ndim == 1 and value.dtype.name == 'float64'):
                raise ValueError('Only one-dimensional ndarrays of dtype float64 are supported here.')
            set_value(value)
        elif isinstance(value, ElementTree.Element):
            xml_ns = getattr(instance, '_xml_ns', None)
            # noinspection PyProtectedMember
            if hasattr(instance, '_child_xml_ns_key') and self.name in instance._child_xml_ns_key:
                # noinspection PyProtectedMember
                xml_ns_key = instance._child_xml_ns_key[self.name]
            else:
                xml_ns_key = getattr(instance, '_xml_ns_key', None)
            size = int(value.attrib['size'])
            child_nodes = find_children(value, self.child_tag, xml_ns, xml_ns_key)
            if len(child_nodes) != size:
                raise ValueError(
                    'Field {} of double array type functionality belonging to class {} got a ElementTree element '
                    'with size attribute {}, but has {} child nodes with tag {}.'.format(
                        self.name, instance.__class__.__name__, size, len(child_nodes), self.child_tag))
            new_value = numpy.empty((size,), dtype=numpy.float64)
            for i, node in enumerate(child_nodes):
                new_value[i] = float(get_node_value(node))
            set_value(new_value)
        elif isinstance(value, list):
            # user or json deserialization
            set_value(numpy.array(value, dtype=numpy.float64))
        else:
            raise TypeError(_type_text.format(self.name, instance.__class__.__name__, type(value)))
class DateTimeDescriptor(BasicDescriptor):
    """Descriptor for date/time valued properties, stored as numpy.datetime64."""
    _typ_string = 'numpy.datetime64:'
    def __init__(self, name, required, strict=DEFAULT_STRICT, docstring=None, numpy_datetime_units='us'):
        # s, ms, us, ns are likely unit choices here, depending on needs
        self.units = numpy_datetime_units
        super(DateTimeDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(DateTimeDescriptor, self).__set__(instance, value):
            return
        self.data[instance] = parse_datetime(value, self.name, instance, self.units)
class FloatModularDescriptor(BasicDescriptor):
    """
    Descriptor for float valued properties reduced into the range
    [-limit, limit] via modular arithmetic.
    """
    _typ_string = 'float:'
    def __init__(self, name, limit, required, strict=DEFAULT_STRICT, docstring=None):
        self.limit = float(limit)
        super(FloatModularDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(FloatModularDescriptor, self).__set__(instance, value):
            return
        try:
            raw = parse_float(value, self.name, instance)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to `float`\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.name, instance.__class__.__name__, type(e), e))
            self.data[instance] = None
            return
        # reduce into [0, 2*limit), then shift anything above limit down by a full period
        period = 2*self.limit
        reduced = raw % period
        self.data[instance] = reduced if reduced <= self.limit else reduced - period
class SerializableDescriptor(BasicDescriptor):
    """Descriptor for properties whose type is assumed to extend Serializable."""
    def __init__(self, name, the_type, required, strict=DEFAULT_STRICT, docstring=None):
        self.the_type = the_type
        # derive the doc type string from the class name, e.g. "<class 'a.B'>" -> "B:"
        self._typ_string = str(the_type).strip().split('.')[-1][:-2] + ':'
        super(SerializableDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(SerializableDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_serializable(value, self.name, instance, self.the_type)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to Serializable type {}\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard.'.format(
                    value, type(value), self.the_type, self.name, instance.__class__.__name__, type(e), e))
            parsed = None
        self.data[instance] = parsed
class UnitVectorDescriptor(BasicDescriptor):
    """Descriptor for Arrayable-valued properties which are stored as unit vectors."""
    def __init__(self, name, the_type, required, strict=DEFAULT_STRICT, docstring=None):
        if not issubclass(the_type, Arrayable):
            raise TypeError(
                'The input type {} for field {} must be a subclass of Arrayable.'.format(the_type, name))
        self.the_type = the_type
        # derive the doc type string from the class name, e.g. "<class 'a.B'>" -> "B:"
        self._typ_string = str(the_type).strip().split('.')[-1][:-2] + ':'
        super(UnitVectorDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(UnitVectorDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_serializable(value, self.name, instance, self.the_type)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to Unit Vector Type type {}\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.the_type, self.name, instance.__class__.__name__, type(e), e))
            self.data[instance] = None
            return None
        # noinspection PyTypeChecker
        coords = parsed.get_array(dtype=numpy.float64)
        magnitude = norm(coords)
        if magnitude == 0:
            logger.error(
                'The input for field {} is expected to be made into a unit vector.\n\t'
                'In this case, the norm of the input is 0.\n\t'
                'The value is set to None, which may be against the standard.'.format(
                    self.name))
            self.data[instance] = None
        elif magnitude == 1:
            # exactly unit length - store as parsed without renormalization
            self.data[instance] = parsed
        else:
            self.data[instance] = self.the_type.from_array(coords/magnitude)
class ParametersDescriptor(BasicDescriptor):
    """Descriptor for Parameter collection (i.e. dictionary-like) properties."""
    def __init__(self, name, tag_dict, required, strict=DEFAULT_STRICT, docstring=None):
        self.child_tag = tag_dict[name]['child_tag']
        self._typ_string = 'ParametersCollection:'
        super(ParametersDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(ParametersDescriptor, self).__set__(instance, value):
            return
        if isinstance(value, ParametersCollection):
            self.data[instance] = value
            return
        current = self.data.get(instance, None)
        if current is not None:
            # update the existing collection in place
            current.set_collection(value)
            return
        # first assignment - determine the xml namespace context and build the collection
        xml_ns = getattr(instance, '_xml_ns', None)
        # noinspection PyProtectedMember
        if hasattr(instance, '_child_xml_ns_key') and self.name in instance._child_xml_ns_key:
            # noinspection PyProtectedMember
            xml_ns_key = instance._child_xml_ns_key[self.name]
        else:
            xml_ns_key = getattr(instance, '_xml_ns_key', None)
        self.data[instance] = ParametersCollection(
            collection=value, name=self.name, child_tag=self.child_tag,
            _xml_ns=xml_ns, _xml_ns_key=xml_ns_key)
class SerializableListDescriptor(BasicDescriptor):
    """Descriptor for list-valued properties whose entries extend Serializable."""
    def __init__(self, name, child_type, tag_dict, required, strict=DEFAULT_STRICT, docstring=None):
        self.child_type = child_type
        tags = tag_dict[name]
        self.array = tags.get('array', False)
        # this descriptor handles plain lists only - arrays use SerializableArrayDescriptor
        if self.array:
            raise ValueError(
                'Attribute {} is populated in the `_collection_tags` dictionary with `array`=True. '
                'This is inconsistent with using SerializableListDescriptor.'.format(name))
        self.child_tag = tags['child_tag']
        self._typ_string = 'List[{}]:'.format(str(child_type).strip().split('.')[-1][:-2])
        super(SerializableListDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(SerializableListDescriptor, self).__set__(instance, value):
            return
        try:
            parsed = parse_serializable_list(value, self.name, instance, self.child_type)
        except Exception as e:
            logger.error(
                'Failed converting {} of type {} to serializable list of type {}\n\t'
                'for field {} of class {} with exception {} - {}.\n\t'
                'Setting value to None, which may be against the standard'.format(
                    value, type(value), self.child_type, self.name, instance.__class__.__name__, type(e), e))
            parsed = None
        self.data[instance] = parsed
class SerializableArrayDescriptor(BasicDescriptor):
    """Descriptor for array-valued properties whose entries extend Serializable."""
    _DEFAULT_MIN_LENGTH = 0
    _DEFAULT_MAX_LENGTH = 2 ** 32
    def __init__(self, name, child_type, tag_dict, required, strict=DEFAULT_STRICT,
                 minimum_length=None, maximum_length=None, docstring=None,
                 array_extension=SerializableArray):
        if not issubclass(array_extension, SerializableArray):
            raise TypeError('array_extension must be a subclass of SerializableArray.')
        self.child_type = child_type
        tags = tag_dict[name]
        self.array = tags.get('array', False)
        # this descriptor handles arrays only - plain lists use SerializableListDescriptor
        if not self.array:
            raise ValueError(
                'Attribute {} is populated in the `_collection_tags` dictionary without `array`=True. '
                'This is inconsistent with using SerializableArrayDescriptor.'.format(name))
        self.child_tag = tags['child_tag']
        self._typ_string = 'numpy.ndarray[{}]:'.format(str(child_type).strip().split('.')[-1][:-2])
        self.array_extension = array_extension
        self.minimum_length = self._DEFAULT_MIN_LENGTH if minimum_length is None else int(minimum_length)
        self.maximum_length = self._DEFAULT_MAX_LENGTH if maximum_length is None else int(maximum_length)
        if self.minimum_length > self.maximum_length:
            raise ValueError(_length_text.format(self.minimum_length, self.maximum_length))
        super(SerializableArrayDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        # delegate None/default handling to the base implementation
        if super(SerializableArrayDescriptor, self).__set__(instance, value):
            return
        if isinstance(value, self.array_extension):
            self.data[instance] = value
            return
        current = self.data.get(instance, None)
        if current is not None:
            # update the existing array wrapper in place
            current.set_array(value)
            return
        # first assignment - build the array wrapper around the provided value
        xml_ns = getattr(instance, '_xml_ns', None)
        xml_ns_key = getattr(instance, '_xml_ns_key', None)
        self.data[instance] = self.array_extension(
            coords=value, name=self.name, child_tag=self.child_tag, child_type=self.child_type,
            minimum_length=self.minimum_length, maximum_length=self.maximum_length, _xml_ns=xml_ns,
            _xml_ns_key=xml_ns_key)
| 39,512 | 43.597065 | 118 | py |
sarpy | sarpy-master/sarpy/io/xml/__init__.py |
__classification__ = 'UNCLASSIFIED'
| 37 | 11.666667 | 35 | py |
sarpy | sarpy-master/sarpy/io/phase_history/base.py | """
Base structures for phase history readers and usage
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Tuple, Sequence, Dict, Optional
import numpy
from sarpy.io.general.base import BaseReader
from sarpy.io.general.data_segment import DataSegment
from sarpy.io.phase_history.cphd1_elements.CPHD import CPHDType as CPHDType1_0
from sarpy.io.phase_history.cphd0_3_elements.CPHD import CPHDType as CPHDType0_3
class CPHDTypeReader(BaseReader):
    """
    Common read-side functionality for CPHD readers.

    The support-array/PVP/signal accessors declared here form the interface;
    concrete readers override the ones they support.

    **Updated in version 1.3.0**
    """
    def __init__(
            self,
            data_segment: Union[None, DataSegment, Sequence[DataSegment]],
            cphd_meta: Union[None, CPHDType1_0, CPHDType0_3],
            close_segments: bool = True,
            delete_files: Union[None, str, Sequence[str]] = None):
        """
        Parameters
        ----------
        data_segment : None|DataSegment|Sequence[DataSegment]
        cphd_meta : None|CPHDType1_0|CPHDType0_3
            The CPHD metadata object.
        close_segments : bool
            Call segment.close() for each data segment on reader.close()?
        delete_files : None|Sequence[str]
            Any temp files which should be cleaned up on reader.close()?
            This will occur after closing segments.
        """
        # validate the metadata type up front, before delegating to BaseReader
        if cphd_meta is not None and not isinstance(cphd_meta, (CPHDType1_0, CPHDType0_3)):
            raise TypeError(
                'The cphd_meta must be of type CPHDType, got `{}`'.format(type(cphd_meta)))
        self._cphd_meta = cphd_meta
        BaseReader.__init__(
            self, data_segment, reader_type='CPHD', close_segments=close_segments, delete_files=delete_files)
    @property
    def cphd_meta(self) -> Union[None, CPHDType1_0, CPHDType0_3]:
        """
        None|CPHDType1_0|CPHDType0_3: the cphd meta_data.
        """
        return self._cphd_meta
    def read_support_array(
            self,
            index: Union[int, str],
            *ranges: Sequence[Union[None, int, Tuple[int, ...], slice]]) -> numpy.ndarray:
        """
        Read the identified support array.

        Parameters
        ----------
        index : int|str
            The support array integer index.
        ranges : Sequence[None|int|Tuple[int, ...]|slice]
            The slice definition appropriate for support array usage.

        Returns
        -------
        numpy.ndarray

        Raises
        ------
        TypeError
            If called on a reader which doesn't support this.
        """
        raise TypeError('Class {} does not provide support arrays'.format(type(self)))
    def read_support_block(self) -> Dict[str, numpy.ndarray]:
        """
        Read the entirety of the support block(s).

        Returns
        -------
        Dict[str, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the support arrays.

        Raises
        ------
        TypeError
            If called on a reader which doesn't support this.
        """
        raise TypeError('Class {} does not provide support arrays'.format(type(self)))
    def read_pvp_variable(
            self,
            variable: str,
            index: Union[int, str],
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> Optional[numpy.ndarray]:
        """
        Read the vector parameter for the given `variable` and CPHD channel.

        Parameters
        ----------
        variable : str
        index : int|str
            The channel index or identifier.
        the_range : None|int|Tuple[int, ...]|slice
            The indices for the vector parameter. `None` returns all, an
            integer returns the single value at that location, otherwise
            the input determines a slice.

        Returns
        -------
        None|numpy.ndarray
            None when there is no such variable, otherwise the data.
        """
        raise NotImplementedError
    def read_pvp_array(
            self,
            index: Union[int, str],
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> numpy.ndarray:
        """
        Read the PVP array from the requested channel.

        Parameters
        ----------
        index : int|str
            The channel integer index (of cphd.Data.Channels list) or identifier.
        the_range : None|int|Tuple[int, ...]|slice
            The indices for the vector parameter. `None` returns all, an
            integer returns the single value at that location, otherwise
            the input determines a slice.

        Returns
        -------
        pvp_array : numpy.ndarray
        """
        raise NotImplementedError
    def read_pvp_block(self) -> Dict[Union[int, str], numpy.ndarray]:
        """
        Read the entirety of the PVP block(s).

        Returns
        -------
        Dict[Union[int, str], numpy.ndarray]
            Dictionary containing the PVP arrays.
        """
        raise NotImplementedError
    def read_signal_block(self) -> Dict[Union[int, str], numpy.ndarray]:
        """
        Read the entirety of the signal block(s), with data formatted as
        complex64 (after accounting for AmpSF).

        Returns
        -------
        Dict[Union[int, str], numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the signal arrays.
        """
        raise NotImplementedError
    def read_signal_block_raw(self) -> Dict[Union[int, str], numpy.ndarray]:
        """
        Read the entirety of the signal block(s), with data kept in file
        storage format (no conversion to complex, no consideration of AmpSF).

        Returns
        -------
        Dict[Union[int, str], numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the signal arrays.
        """
        raise NotImplementedError
| 5,831 | 29.857143 | 109 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd.py | """
Module for reading and writing CPHD files. Support reading CPHD version 0.3 and 1
and writing version 1.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import os
from typing import Union, List, Tuple, Dict, BinaryIO, Optional, Sequence
from collections import OrderedDict
import numpy
from sarpy.io.general.utils import is_file_like, is_real_file
from sarpy.io.general.base import BaseReader, BaseWriter, SarpyIOError
from sarpy.io.general.data_segment import DataSegment, NumpyArraySegment, \
NumpyMemmapSegment
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.slice_parsing import verify_subscript, verify_slice
from sarpy.io.phase_history.base import CPHDTypeReader
from sarpy.io.phase_history.cphd1_elements.CPHD import CPHDType as CPHDType1, \
CPHDHeader as CPHDHeader1, CPHD_SECTION_TERMINATOR
from sarpy.io.phase_history.cphd0_3_elements.CPHD import CPHDType as CPHDType0_3, \
CPHDHeader as CPHDHeader0_3
from sarpy.io.phase_history.cphd_schema import get_namespace, get_default_tuple
# module-level logger
logger = logging.getLogger(__name__)
# common error/exception message templates used throughout this module
_unhandled_version_text = 'Got unhandled CPHD version number `{}`'
_missing_channel_identifier_text = 'Cannot find CPHD channel for identifier `{}`'
_index_range_text = 'index must be in the range `[0, {})`'
#########
# Helper object for initially parses CPHD elements
class AmpScalingFunction(ComplexFormatFunction):
    """
    Complex (IQ) format function which additionally applies a per-vector
    amplitude scaling multiplier, supporting the AmpSF usage of CPHD/CRSD.
    """
    __slots__ = (
        '_amplitude_scaling', )
    # only IQ ordering is sensible for CPHD data
    _allowed_ordering = ('IQ', )
    def __init__(
            self,
            raw_dtype: Union[str, numpy.dtype],
            raw_shape: Optional[Tuple[int, ...]] = None,
            formatted_shape: Optional[Tuple[int, ...]] = None,
            reverse_axes: Optional[Tuple[int, ...]] = None,
            transpose_axes: Optional[Tuple[int, ...]] = None,
            band_dimension: int = -1,
            amplitude_scaling: Optional[numpy.ndarray] = None):
        """
        Parameters
        ----------
        raw_dtype : str|numpy.dtype
            The raw datatype. Valid options dependent on the value of order.
        raw_shape : None|Tuple[int, ...]
        formatted_shape : None|Tuple[int, ...]
        reverse_axes : None|Tuple[int, ...]
        transpose_axes : None|Tuple[int, ...]
        band_dimension : int
            Which band is the complex dimension, **after** the transpose operation.
        amplitude_scaling : None|numpy.ndarray
            This is here to support the presence of a scaling in CPHD or CRSD usage.
            This requires that `raw_dtype` in `[int8, int16]`, `band_dimension`
            is the final dimension and neither `reverse_axes` nor `transpose_axes`
            is populated.
        """
        ComplexFormatFunction.__init__(
            self, raw_dtype, 'IQ', raw_shape=raw_shape, formatted_shape=formatted_shape,
            reverse_axes=reverse_axes, transpose_axes=transpose_axes, band_dimension=band_dimension)
        self._amplitude_scaling = None
        self.set_amplitude_scaling(amplitude_scaling)
    @property
    def amplitude_scaling(self) -> Optional[numpy.ndarray]:
        """
        The scaling multiplier array, for CPHD/CRSD usage.

        Returns
        -------
        Optional[numpy.ndarray]
        """
        return self._amplitude_scaling
    def set_amplitude_scaling(
            self,
            array: Optional[numpy.ndarray]) -> None:
        """
        Set the amplitude scaling array.

        Parameters
        ----------
        array : None|numpy.ndarray
            One-dimensional float32/float64 array of per-vector multipliers;
            float64 input is converted to float32.

        Returns
        -------
        None
        """
        if array is None:
            self._amplitude_scaling = None
            return
        if self.order != 'IQ':
            raise ValueError(
                'CPHD requires data in IQ order.')
        if not isinstance(array, numpy.ndarray):
            raise ValueError('requires a numpy.ndarray, got {}'.format(type(array)))
        if array.ndim != 1:
            raise ValueError('requires a one dimensional array')
        if array.dtype.name not in ['float32', 'float64']:
            raise ValueError('requires a numpy.ndarray of float32 or 64 dtype, got {}'.format(array.dtype))
        if array.dtype.name != 'float32':
            # use ndarray.astype - numpy.cast was deprecated in numpy 1.25
            # and removed in numpy 2.0
            array = array.astype(numpy.float32)
        # NB: more validation as part of validate_shapes
        if self._raw_dtype.name not in ['int8', 'int16']:
            raise ValueError(
                'A scaling multiplier has been supplied,\n\t'
                'but the raw datatype is not `int8` or `int16`.')
        self._amplitude_scaling = array
        self._validate_amplitude_scaling()
    def _validate_amplitude_scaling(self) -> None:
        # verify the structural preconditions for applying the per-vector multiplier
        if self._amplitude_scaling is None or self._raw_shape is None:
            return
        if self.band_dimension not in [-1, self.raw_ndim-1]:
            raise ValueError('Use of scaling multiplier requires band is the final dimension')
        if self.transpose_axes is not None or self.reverse_axes is not None:
            raise ValueError('Use of scaling multiplier requires null reverse_axes and transpose_axes')
        if self._amplitude_scaling.size != self.raw_shape[0]:
            raise ValueError(
                'Use of scaling multiplier requires the array length\n\t'
                'and the first dimension of raw_shape match.')
    def _forward_functional_step(
            self,
            data: numpy.ndarray,
            subscript: Tuple[slice, ...]) -> numpy.ndarray:
        out = ComplexFormatFunction._forward_functional_step(self, data, subscript)
        # NB: subscript is in raw coordinates, but we have verified that
        # the first dimension is unchanged
        if self._amplitude_scaling is not None:
            out = self._amplitude_scaling[subscript[0]][:, numpy.newaxis] * out
        return out
    def _reverse_functional_step(
            self,
            data: numpy.ndarray,
            subscript: Tuple[slice, ...]) -> numpy.ndarray:
        # NB: subscript is in formatted coordinates, but we have verified that
        # transpose_axes is None and band_dimension is the final dimension
        if self._amplitude_scaling is not None:
            data = numpy.rint((1./self._amplitude_scaling[subscript[0]])[:, numpy.newaxis] * data)
        return ComplexFormatFunction._reverse_functional_step(self, data, subscript)
    def validate_shapes(self) -> None:
        ComplexFormatFunction.validate_shapes(self)
        self._validate_amplitude_scaling()
class CPHDDetails(object):
    """
    The basic CPHD element parser.

    Opens (or accepts) the file, verifies the `CPHD` magic bytes, then
    extracts the version, the header, and the parsed XML metadata structure.
    """
    __slots__ = (
        '_file_name', '_file_object', '_closed', '_close_after', '_cphd_version', '_cphd_header', '_cphd_meta')
    def __init__(self, file_object: Union[str, BinaryIO]):
        """
        Parameters
        ----------
        file_object : str|BinaryIO
            The path to or file like object referencing the CPHD file.
        """
        self._closed = False
        self._close_after = None
        self._cphd_version = None
        self._cphd_header = None
        self._cphd_meta = None
        self._file_object = None  # type: Optional[BinaryIO]
        if isinstance(file_object, str):
            if not os.path.exists(file_object) or not os.path.isfile(file_object):
                raise SarpyIOError('path {} does not exist or is not a file'.format(file_object))
            self._file_name = file_object
            # we opened the file here, so we own closing it
            self._file_object = open(file_object, 'rb')
            self._close_after = True
        elif is_file_like(file_object):
            # caller-provided file object - the caller retains closing responsibility
            self._file_object = file_object
            if hasattr(file_object, 'name') and isinstance(file_object.name, str):
                self._file_name = file_object.name
            else:
                self._file_name = '<file like object>'
            self._close_after = False
        else:
            raise TypeError('Got unsupported input type {}'.format(type(file_object)))
        # sanity check the magic bytes at the head of the file
        self._file_object.seek(0, os.SEEK_SET)
        head_bytes = self._file_object.read(10)
        if not isinstance(head_bytes, bytes):
            raise ValueError('Input file like object not open in bytes mode.')
        if not head_bytes.startswith(b'CPHD'):
            raise SarpyIOError('File {} does not appear to be a CPHD file.'.format(self.file_name))
        # these must run in order - each leaves the file pointer where the next expects it
        self._extract_version()
        self._extract_header()
        self._extract_cphd()
    @property
    def file_name(self) -> str:
        """
        str: The CPHD filename.
        """
        return self._file_name
    @property
    def file_object(self) -> BinaryIO:
        """
        BinaryIO: The binary file object
        """
        # noinspection PyTypeChecker
        return self._file_object
    @property
    def cphd_version(self) -> str:
        """
        str: The CPHD version.
        """
        return self._cphd_version
    @property
    def cphd_meta(self) -> Union[CPHDType1, CPHDType0_3]:
        """
        CPHDType1|CPHDType0_3: The CPHD metadata object, which is version dependent.
        """
        return self._cphd_meta
    @property
    def cphd_header(self) -> Union[CPHDHeader1, CPHDHeader0_3]:
        """
        CPHDHeader1|CPHDHeader0_3: The CPHD header object, which is version dependent.
        """
        return self._cphd_header
    def _extract_version(self) -> None:
        """
        Extract the version number from the file. This will advance the file
        object to the end of the initial header line.
        """
        self.file_object.seek(0, os.SEEK_SET)
        head_line = self.file_object.readline().strip()
        # the first line is of the form b'CPHD/<version>'
        parts = head_line.split(b'/')
        if len(parts) != 2:
            raise ValueError('Cannot extract CPHD version number from line {}'.format(head_line))
        cphd_version = parts[1].strip().decode('utf-8')
        self._cphd_version = cphd_version
    def _extract_header(self) -> None:
        """
        Extract the header from the file. The file object is assumed to be advanced
        to the header location. This will advance to the file object to the end of
        the header section.
        """
        # the header layout differs between the 0.3 and 1.x standards
        if self.cphd_version.startswith('0.3'):
            self._cphd_header = CPHDHeader0_3.from_file_object(self._file_object)
        elif self.cphd_version.startswith('1.'):
            self._cphd_header = CPHDHeader1.from_file_object(self._file_object)
        else:
            raise ValueError(_unhandled_version_text.format(self.cphd_version))
    def _extract_cphd(self) -> None:
        """
        Extract and interpret the CPHD structure from the file.
        """
        xml = self.get_cphd_bytes()
        if self.cphd_version.startswith('0.3'):
            the_type = CPHDType0_3
        elif self.cphd_version.startswith('1.'):
            the_type = CPHDType1
        else:
            raise ValueError(_unhandled_version_text.format(self.cphd_version))
        self._cphd_meta = the_type.from_xml_string(xml)
    def get_cphd_bytes(self) -> bytes:
        """
        Extract the (uninterpreted) bytes representation of the CPHD structure.

        Returns
        -------
        bytes
        """
        header = self.cphd_header
        if header is None:
            raise ValueError('No cphd_header populated.')
        # the xml offset/size field names differ between the header versions
        if self.cphd_version.startswith('0.3'):
            assert isinstance(header, CPHDHeader0_3)
            # extract the xml data
            self.file_object.seek(header.XML_BYTE_OFFSET, os.SEEK_SET)
            xml = self.file_object.read(header.XML_DATA_SIZE)
        elif self.cphd_version.startswith('1.'):
            assert isinstance(header, CPHDHeader1)
            # extract the xml data
            self.file_object.seek(header.XML_BLOCK_BYTE_OFFSET, os.SEEK_SET)
            xml = self.file_object.read(header.XML_BLOCK_SIZE)
        else:
            raise ValueError(_unhandled_version_text.format(self.cphd_version))
        return xml
    def close(self):
        # idempotent - safe to call repeatedly
        if self._closed:
            return
        if self._close_after:
            if hasattr(self.file_object, 'close'):
                self.file_object.close()
        self._file_object = None
        self._closed = True
    def __del__(self):
        self.close()
def _validate_cphd_details(
        cphd_details: Union[str, CPHDDetails],
        version: Union[None, str, Sequence[str]] = None) -> CPHDDetails:
    """
    Validate the input argument.

    Parameters
    ----------
    cphd_details : str|CPHDDetails
    version : None|str|Sequence[str]
        An acceptable version prefix, or sequence of acceptable version prefixes.

    Returns
    -------
    CPHDDetails

    Raises
    ------
    TypeError
        The input was neither path to a CPHD file nor a CPHDDetails instance
    ValueError
        The CPHD file was the incorrect (specified) version
    """
    if isinstance(cphd_details, str):
        cphd_details = CPHDDetails(cphd_details)
    if not isinstance(cphd_details, CPHDDetails):
        # NB: previously this formatted the object itself into a "got type" message
        raise TypeError('cphd_details is required to be a file path to a CPHD file '
                        'or CPHDDetails, got type {}'.format(type(cphd_details)))
    if version is not None:
        if isinstance(version, str):
            # NB: the string case must not fall through to the sequence handling
            # below - iterating a string yields its characters
            if not cphd_details.cphd_version.startswith(version):
                raise ValueError(
                    'This CPHD file is required to be version {},\n\t'
                    'got {}'.format(version, cphd_details.cphd_version))
        elif not any(cphd_details.cphd_version.startswith(entry) for entry in version):
            raise ValueError(
                'This CPHD file is required to be one of version {},\n\t'
                'got {}'.format(version, cphd_details.cphd_version))
    return cphd_details
##########
# Reading
class CPHDReader(CPHDTypeReader):
    """
    The Abstract CPHD reader instance, which just selects the proper CPHD reader
    class based on the CPHD version. Note that there is no __init__ method for
    this class, and it would be skipped regardless. Ensure that you make a direct
    call to the BaseReader.__init__() method when extending this class.

    **Updated in version 1.3.0** for reading changes.
    """
    __slots__ = ('_cphd_details', )
    def __new__(cls, *args, **kwargs):
        # dispatch to the version-appropriate subclass based on the version
        # parsed from the file header; the subclass handles its own __init__
        if len(args) == 0:
            raise ValueError(
                'The first argument of the constructor is required to be a file_path '
                'or CPHDDetails instance.')
        if is_file_like(args[0]):
            raise ValueError('File like object input not supported for CPHD reading at this time.')
        cphd_details = _validate_cphd_details(args[0])
        if cphd_details.cphd_version.startswith('0.3'):
            return object.__new__(CPHDReader0_3)
        elif cphd_details.cphd_version.startswith('1.'):
            return object.__new__(CPHDReader1)
        else:
            raise ValueError('Got unhandled CPHD version {}'.format(cphd_details.cphd_version))
    @property
    def cphd_details(self) -> CPHDDetails:
        """
        CPHDDetails: The cphd details object.
        """
        return self._cphd_details
    @property
    def cphd_version(self) -> str:
        """
        str: The CPHD version.
        """
        return self.cphd_details.cphd_version
    @property
    def cphd_header(self) -> Union[CPHDHeader1, CPHDHeader0_3]:
        """
        CPHDHeader1|CPHDHeader0_3: The CPHD header object, which is version dependent.
        """
        return self.cphd_details.cphd_header
    @property
    def file_name(self) -> str:
        # str: the file name from the underlying details object
        return self.cphd_details.file_name
    # the following accessors are abstract here - implemented by the
    # version-specific subclasses
    def read_pvp_variable(
            self,
            variable: str,
            index: Union[int, str],
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> Optional[numpy.ndarray]:
        raise NotImplementedError
    def read_pvp_array(
            self,
            index: Union[int, str],
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> numpy.ndarray:
        raise NotImplementedError
    def read_pvp_block(self) -> Dict[Union[int, str], numpy.ndarray]:
        raise NotImplementedError
    def read_signal_block(self) -> Dict[Union[int, str], numpy.ndarray]:
        raise NotImplementedError
    def read_signal_block_raw(self) -> Dict[Union[int, str], numpy.ndarray]:
        raise NotImplementedError
    def close(self):
        CPHDTypeReader.close(self)
        # hasattr guards protect against partially-constructed instances
        if hasattr(self, '_cphd_details'):
            if hasattr(self._cphd_details, 'close'):
                self._cphd_details.close()
            del self._cphd_details
class CPHDReader1(CPHDReader):
    """
    The CPHD version 1 reader.

    **Updated in version 1.3.0** for reading changes.
    """

    # version prefixes accepted by this reader
    _allowed_versions = ('1.0', '1.1')

    def __new__(cls, *args, **kwargs):
        # we must override here, to avoid recursion with
        # the CPHDReader parent
        return object.__new__(cls)

    def __init__(self, cphd_details: Union[str, CPHDDetails]):
        """
        Parameters
        ----------
        cphd_details : str|CPHDDetails
            Path to a CPHD version 1.x file, or a corresponding CPHDDetails.
        """
        self._channel_map = None  # type: Union[None, Dict[str, int]]
        self._pvp_memmap = None  # type: Union[None, Dict[str, numpy.ndarray]]
        self._support_array_memmap = None  # type: Union[None, Dict[str, numpy.ndarray]]
        self._cphd_details = _validate_cphd_details(cphd_details, version=self._allowed_versions)
        CPHDTypeReader.__init__(self, None, self._cphd_details.cphd_meta)
        # set data segments after setting up the pvp information, because
        # we need the AmpSf to set up the format function for the data segment
        self._create_pvp_memmaps()
        self._create_support_array_memmaps()
        data_segments = self._create_data_segments()
        BaseReader.__init__(self, data_segments, reader_type='CPHD')

    @property
    def cphd_meta(self) -> CPHDType1:
        """
        CPHDType1: The CPHD structure.
        """
        return self._cphd_meta

    @property
    def cphd_header(self) -> CPHDHeader1:
        """
        CPHDHeader1: The CPHD header object.
        """
        return self.cphd_details.cphd_header

    def _create_data_segments(self) -> List[DataSegment]:
        """
        Helper method for creating the various signal data segments.

        Returns
        -------
        List[DataSegment]
        """
        data_segments = []
        data = self.cphd_meta.Data
        # map the signal array format to the raw (big-endian) component dtype;
        # each complex sample is stored as an interleaved (real, imag) pair
        sample_type = data.SignalArrayFormat
        if sample_type == "CF8":
            raw_dtype = numpy.dtype('>f4')
        elif sample_type == "CI4":
            raw_dtype = numpy.dtype('>i2')
        elif sample_type == "CI2":
            raw_dtype = numpy.dtype('>i1')
        else:
            raise ValueError('Got unhandled signal array format {}'.format(sample_type))

        block_offset = self.cphd_header.SIGNAL_BLOCK_BYTE_OFFSET
        for entry in data.Channels:
            # the per-vector amplitude scale factor (None if absent) is applied
            # by the format function when converting raw samples to complex64
            amp_sf = self.read_pvp_variable('AmpSF', entry.Identifier)
            format_function = AmpScalingFunction(raw_dtype, amplitude_scaling=amp_sf)
            raw_shape = (entry.NumVectors, entry.NumSamples, 2)
            data_offset = entry.SignalArrayByteOffset
            data_segments.append(
                NumpyMemmapSegment(
                    self.cphd_details.file_object, block_offset+data_offset,
                    raw_dtype, raw_shape, formatted_dtype='complex64', formatted_shape=raw_shape[:2],
                    format_function=format_function, close_file=False))
        return data_segments

    def _create_pvp_memmaps(self) -> None:
        """
        Helper method which creates the pvp mem_maps.

        Returns
        -------
        None
        """
        self._pvp_memmap = None
        if self.cphd_meta.Data.Channels is None:
            logger.error('No Data.Channels defined.')
            return
        if self.cphd_meta.PVP is None:
            logger.error('No PVP object defined.')
            return

        # one structured-array memmap per channel, keyed by channel identifier
        pvp_dtype = self.cphd_meta.PVP.get_vector_dtype()
        self._pvp_memmap = OrderedDict()
        self._channel_map = OrderedDict()
        for i, entry in enumerate(self.cphd_meta.Data.Channels):
            self._channel_map[entry.Identifier] = i
            offset = self.cphd_header.PVP_BLOCK_BYTE_OFFSET + entry.PVPArrayByteOffset
            shape = (entry.NumVectors, )
            self._pvp_memmap[entry.Identifier] = numpy.memmap(
                self.cphd_details.file_name, dtype=pvp_dtype, mode='r', offset=offset, shape=shape)

    def _create_support_array_memmaps(self) -> None:
        """
        Helper method which creates the support array mem_maps.

        Returns
        -------
        None
        """
        if self.cphd_meta.Data.SupportArrays is None:
            self._support_array_memmap = None
            return

        self._support_array_memmap = OrderedDict()
        for i, entry in enumerate(self.cphd_meta.Data.SupportArrays):
            # extract the support array metadata details
            details = self.cphd_meta.SupportArray.find_support_array(entry.Identifier)
            # determine array byte offset
            offset = self.cphd_header.SUPPORT_BLOCK_BYTE_OFFSET + entry.ArrayByteOffset
            # determine numpy dtype and depth of array
            dtype, depth = details.get_numpy_format()
            # set up the numpy memory map
            shape = (entry.NumRows, entry.NumCols) if depth == 1 else (entry.NumRows, entry.NumCols, depth)
            self._support_array_memmap[entry.Identifier] = numpy.memmap(
                self.cphd_details.file_name, dtype=dtype, mode='r', offset=offset, shape=shape)

    def _validate_index(self, index: Union[int, str]) -> int:
        """
        Get corresponding integer index for CPHD channel.

        Parameters
        ----------
        index : int|str

        Returns
        -------
        int
        """
        cphd_meta = self.cphd_details.cphd_meta
        if isinstance(index, str):
            if index in self._channel_map:
                return self._channel_map[index]
            else:
                raise KeyError(_missing_channel_identifier_text.format(index))
        else:
            int_index = int(index)
            if not (0 <= int_index < cphd_meta.Data.NumCPHDChannels):
                raise ValueError(_index_range_text.format(cphd_meta.Data.NumCPHDChannels))
            return int_index

    def _validate_index_key(self, index: Union[int, str]) -> str:
        """
        Gets the corresponding identifier for the CPHD channel.

        Parameters
        ----------
        index : int|str

        Returns
        -------
        str
        """
        cphd_meta = self.cphd_details.cphd_meta
        if isinstance(index, str):
            if index in self._channel_map:
                return index
            else:
                raise KeyError(_missing_channel_identifier_text.format(index))
        else:
            int_index = int(index)
            if not (0 <= int_index < cphd_meta.Data.NumCPHDChannels):
                raise ValueError(_index_range_text.format(cphd_meta.Data.NumCPHDChannels))
            return cphd_meta.Data.Channels[int_index].Identifier

    def read_support_array(
            self,
            index: Union[int, str],
            *ranges: Sequence[Union[None, int, Tuple[int, ...], slice]]) -> numpy.ndarray:
        """
        Read (a copy of) the given support array, optionally subset by ranges.
        """
        # find the support array identifier
        if isinstance(index, int):
            the_entry = self.cphd_meta.Data.SupportArrays[index]
            index = the_entry.Identifier
        if not isinstance(index, str):
            raise TypeError('Got unexpected type {} for identifier'.format(type(index)))

        the_memmap = self._support_array_memmap[index]

        if len(ranges) == 0:
            return numpy.copy(the_memmap[:])

        # noinspection PyTypeChecker
        subscript = verify_subscript(ranges, the_memmap.shape)
        return numpy.copy(the_memmap[subscript])

    def read_support_block(self) -> Dict[str, numpy.ndarray]:
        """
        Read all support arrays, keyed by identifier (empty dict if none).
        """
        if self.cphd_meta.Data.SupportArrays:
            return {
                sa.Identifier: self.read_support_array(sa.Identifier)
                for sa in self.cphd_meta.Data.SupportArrays}
        else:
            return {}

    def read_pvp_variable(
            self,
            variable: str,
            index: Union[int, str],
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> Optional[numpy.ndarray]:
        """
        Read a single named PVP variable for the channel; None if the variable
        is not part of the PVP structure.
        """
        index_key = self._validate_index_key(index)
        the_memmap = self._pvp_memmap[index_key]
        the_slice = verify_slice(the_range, the_memmap.shape[0])
        if variable in the_memmap.dtype.fields:
            return numpy.copy(the_memmap[variable][the_slice])
        else:
            return None

    def read_pvp_array(
            self,
            index: Union[int, str],
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> numpy.ndarray:
        """
        Read (a copy of) the full structured PVP array for the channel.
        """
        index_key = self._validate_index_key(index)
        the_memmap = self._pvp_memmap[index_key]
        the_slice = verify_slice(the_range, the_memmap.shape[0])
        return numpy.copy(the_memmap[the_slice])

    def read_pvp_block(self) -> Dict[str, numpy.ndarray]:
        """
        Read the PVP arrays for all channels, keyed by channel identifier.
        """
        return {chan.Identifier: self.read_pvp_array(chan.Identifier)
                for chan in self.cphd_meta.Data.Channels}

    def read_signal_block(self) -> Dict[str, numpy.ndarray]:
        """
        Read the formatted (complex) signal arrays for all channels.
        """
        return {chan.Identifier: numpy.copy(self.read(index=chan.Identifier))
                for chan in self.cphd_meta.Data.Channels}

    def read_signal_block_raw(self) -> Dict[Union[int, str], numpy.ndarray]:
        """
        Read the raw (unformatted) signal arrays for all channels.
        """
        return {chan.Identifier: numpy.copy(self.read_raw(index=chan.Identifier))
                for chan in self.cphd_meta.Data.Channels}

    def read_chip(
            self,
            *ranges: Sequence[Union[None, int, Tuple[int, ...], slice]],
            index: Union[int, str] = 0,
            squeeze: bool = True) -> numpy.ndarray:
        """
        This is identical to :meth:`read`, and presented for backwards compatibility.

        Parameters
        ----------
        ranges : Sequence[Union[None, int, Tuple[int, ...], slice]]
        index : int|str
        squeeze : bool

        Returns
        -------
        numpy.ndarray

        See Also
        --------
        :meth:`read`.
        """
        return self.__call__(*ranges, index=index, raw=False, squeeze=squeeze)

    def read(
            self,
            *ranges: Sequence[Union[None, int, Tuple[int, ...], slice]],
            index: Union[int, str] = 0,
            squeeze: bool = True) -> numpy.ndarray:
        """
        Read formatted data from the given data segment. Note this is an alias to the
        :meth:`__call__` called as
        :code:`reader(*ranges, index=index, raw=False, squeeze=squeeze)`.

        Parameters
        ----------
        ranges : Sequence[Union[None, int, Tuple[int, ...], slice]]
            The slice definition appropriate for `data_segment[index].read()` usage.
        index : int|str
            The data_segment index or channel identifier.
        squeeze : bool
            Squeeze length 1 dimensions out of the shape of the return array?

        Returns
        -------
        numpy.ndarray

        See Also
        --------
        See :meth:`sarpy.io.general.data_segment.DataSegment.read`.
        """
        return self.__call__(*ranges, index=index, raw=False, squeeze=squeeze)

    def read_raw(
            self,
            *ranges: Sequence[Union[None, int, Tuple[int, ...], slice]],
            index: Union[int, str] = 0,
            squeeze: bool = True) -> numpy.ndarray:
        """
        Read raw data from the given data segment. Note this is an alias to the
        :meth:`__call__` called as
        :code:`reader(*ranges, index=index, raw=True, squeeze=squeeze)`.

        Parameters
        ----------
        ranges : Sequence[Union[None, int, Tuple[int, ...], slice]]
            The slice definition appropriate for `data_segment[index].read()` usage.
        index : int|str
            The data_segment index or cphd channel identifier.
        squeeze : bool
            Squeeze length 1 dimensions out of the shape of the return array?

        Returns
        -------
        numpy.ndarray

        See Also
        --------
        See :meth:`sarpy.io.general.data_segment.DataSegment.read_raw`.
        """
        return self.__call__(*ranges, index=index, raw=True, squeeze=squeeze)

    def __call__(
            self,
            *ranges: Sequence[Union[None, int, slice]],
            index: int = 0,
            raw: bool = False,
            squeeze: bool = True) -> numpy.ndarray:
        # resolve a channel identifier to its integer segment index before
        # delegating to the base reader
        index = self._validate_index(index)
        return BaseReader.__call__(self, *ranges, index=index, raw=raw, squeeze=squeeze)
class CPHDReader0_3(CPHDReader):
    """
    The CPHD version 0.3 reader.

    **Updated in version 1.3.0** for reading changes.
    """

    def __new__(cls, *args, **kwargs):
        # we must override here, to avoid recursion with
        # the CPHDReader parent
        return object.__new__(cls)

    def __init__(self, cphd_details: Union[str, CPHDDetails]):
        """
        Parameters
        ----------
        cphd_details : str|CPHDDetails
            Path to a CPHD version 0.3 file, or a corresponding CPHDDetails.
        """
        self._cphd_details = _validate_cphd_details(cphd_details, version='0.3')
        CPHDTypeReader.__init__(self, None, self._cphd_details.cphd_meta)
        self._create_pvp_memmaps()

        data_segments = self._create_data_segment()
        BaseReader.__init__(self, data_segments, reader_type="CPHD")

    @property
    def cphd_meta(self) -> CPHDType0_3:
        """
        CPHDType0_3: The CPHD structure, which is version dependent.
        """
        return self._cphd_meta

    @property
    def cphd_header(self) -> CPHDHeader0_3:
        """
        CPHDHeader0_3: The CPHD header object.
        """
        return self.cphd_details.cphd_header

    def _validate_index(self, index: int) -> int:
        """
        Validate integer index value for CPHD channel.

        Parameters
        ----------
        index : int

        Returns
        -------
        int
        """
        int_index = int(index)
        if not (0 <= int_index < self.cphd_meta.Data.NumCPHDChannels):
            raise ValueError(_index_range_text.format(self.cphd_meta.Data.NumCPHDChannels))
        return int_index

    def _create_data_segment(self) -> List[DataSegment]:
        """
        Helper method creating one signal data segment per channel; channel
        arrays are laid out contiguously following CPHD_BYTE_OFFSET.
        """
        data_segments = []
        data = self.cphd_meta.Data
        # map the sample type to the raw (big-endian) component dtype;
        # each complex sample is stored as an interleaved (real, imag) pair
        sample_type = data.SampleType
        if sample_type == "RE32F_IM32F":
            raw_dtype = numpy.dtype('>f4')
        elif sample_type == "RE16I_IM16I":
            raw_dtype = numpy.dtype('>i2')
        elif sample_type == "RE08I_IM08I":
            raw_dtype = numpy.dtype('>i1')
        else:
            raise ValueError('Got unhandled sample type {}'.format(sample_type))

        data_offset = self.cphd_header.CPHD_BYTE_OFFSET
        for index, entry in enumerate(data.ArraySize):
            amp_sf = self.read_pvp_variable('AmpSF', index)
            format_function = AmpScalingFunction(raw_dtype, amplitude_scaling=amp_sf)
            raw_shape = (entry.NumVectors, entry.NumSamples, 2)
            data_segments.append(
                NumpyMemmapSegment(
                    self.cphd_details.file_object, data_offset,
                    raw_dtype, raw_shape, formatted_dtype='complex64', formatted_shape=raw_shape[:2],
                    format_function=format_function, close_file=False))
            # advance past this channel's interleaved I/Q samples
            data_offset += raw_shape[0]*raw_shape[1]*2*raw_dtype.itemsize
        return data_segments

    def _create_pvp_memmaps(self) -> None:
        """
        Helper method which creates the pvp mem_maps.

        Returns
        -------
        None
        """
        self._pvp_memmap = None
        pvp_dtype = self.cphd_meta.VectorParameters.get_vector_dtype()
        self._pvp_memmap = []
        for i, entry in enumerate(self.cphd_meta.Data.ArraySize):
            # NOTE(review): the offset advances by NumBytesVBP per channel
            # *index*, not per accumulated vector count - for multi-channel
            # data this looks suspicious; verify against the CPHD 0.3 layout
            offset = self.cphd_header.VB_BYTE_OFFSET + self.cphd_meta.Data.NumBytesVBP*i
            shape = (entry.NumVectors, )
            self._pvp_memmap.append(
                numpy.memmap(
                    self.cphd_details.file_name, dtype=pvp_dtype, mode='r', offset=offset, shape=shape))

    def read_pvp_variable(
            self,
            variable: str,
            index: int,
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> Optional[numpy.ndarray]:
        """
        Read a single named PVP variable for the channel; None if the variable
        is not part of the vector parameters structure.
        """
        int_index = self._validate_index(index)
        the_memmap = self._pvp_memmap[int_index]
        the_slice = verify_slice(the_range, the_memmap.shape[0])
        if variable in the_memmap.dtype.fields:
            return numpy.copy(the_memmap[variable][the_slice])
        else:
            return None

    def read_pvp_array(
            self,
            index: int,
            the_range: Union[None, int, Tuple[int, ...], slice] = None) -> numpy.ndarray:
        """
        Read (a copy of) the full structured PVP array for the channel.
        """
        int_index = self._validate_index(index)
        the_memmap = self._pvp_memmap[int_index]
        the_slice = verify_slice(the_range, the_memmap.shape[0])
        return numpy.copy(the_memmap[the_slice])

    def read_pvp_block(self) -> Dict[int, numpy.ndarray]:
        """
        Reads the entirety of the PVP block(s).

        Returns
        -------
        Dict[int, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the PVP arrays.
        """
        return {chan: self.read_pvp_array(chan) for chan in range(self.cphd_meta.Data.NumCPHDChannels)}

    def read_signal_block(self) -> Dict[int, numpy.ndarray]:
        """
        Read the formatted (complex) signal arrays for all channels, keyed by
        integer channel index.
        """
        return {chan: self.read(index=chan) for chan in range(self.cphd_meta.Data.NumCPHDChannels)}

    def read_signal_block_raw(self) -> Dict[int, numpy.ndarray]:
        """
        Read the raw (unformatted) signal arrays for all channels, keyed by
        integer channel index.
        """
        return {chan: self.read_raw(index=chan) for chan in range(self.cphd_meta.Data.NumCPHDChannels)}

    def __call__(
            self,
            *ranges: Sequence[Union[None, int, slice]],
            index: int = 0,
            raw: bool = False,
            squeeze: bool = True) -> numpy.ndarray:
        # validate the channel index before delegating to the base reader
        index = self._validate_index(index)
        return BaseReader.__call__(self, *ranges, index=index, raw=raw, squeeze=squeeze)
def is_a(file_name: str) -> Optional[CPHDReader]:
    """
    Tests whether a given file_name corresponds to a CPHD file. Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    CPHDReader|None
        Appropriate `CPHDTypeReader` instance if CPHD file, `None` otherwise
    """

    try:
        details = CPHDDetails(file_name)
        logger.info('File {} is determined to be a CPHD version {} file.'.format(file_name, details.cphd_version))
        return CPHDReader(details)
    except SarpyIOError:
        # only header-recognition failures indicate "not a CPHD file";
        # parsing errors are deliberately allowed to propagate, for now
        return None
###########
# Writing
class ElementDetails(object):
    """
    Bookkeeping for one element of a CPHD file (e.g. a PVP, support, or
    signal array): its byte offset, its raw bytes (if determined), and
    whether it has been written to the file yet. Offset and bytes are
    effectively write-once.
    """

    __slots__ = (
        '_item_offset', '_item_bytes', '_item_written')

    def __init__(self, item_offset: int, item_bytes: Optional[bytes] = None):
        """
        Parameters
        ----------
        item_offset : int
            The absolute byte offset of the element in the file.
        item_bytes : None|bytes
            The raw element bytes, if already available.
        """
        self._item_offset = None
        self._item_bytes = None
        self._item_written = False
        self.item_offset = item_offset
        self.item_bytes = item_bytes

    @property
    def item_offset(self) -> Optional[int]:
        """
        int: The item offset.
        """
        return self._item_offset

    @item_offset.setter
    def item_offset(self, value: int) -> None:
        new_offset = int(value)
        # re-assigning the identical value is permitted; changing it is not
        if self._item_offset is not None and new_offset != self._item_offset:
            raise ValueError("item_offset is read only after being initially defined.")
        self._item_offset = new_offset

    @property
    def item_bytes(self) -> Optional[bytes]:
        """
        None|bytes: The item bytes.
        """
        return self._item_bytes

    @item_bytes.setter
    def item_bytes(self, value: bytes) -> None:
        if self._item_bytes is not None:
            raise ValueError("item_bytes is read only after being initially defined.")
        if value is not None and not isinstance(value, bytes):
            raise TypeError('item_bytes must be of type bytes')
        self._item_bytes = value

    @property
    def item_written(self) -> bool:
        """
        bool: Has the item been written?
        """
        return self._item_written

    @item_written.setter
    def item_written(self, value: bool):
        flag = bool(value)
        if self._item_written and not flag:
            raise ValueError(
                'item_written has already been set to True,\n\t'
                'it cannot be reverted to False')
        self._item_written = flag

    def write_item(self, file_object: BinaryIO) -> None:
        """
        Write the item bytes (if populated), at its specified offset, to the
        file. This requires that the subheader has previously be written. If
        writing occurs, the file location will be advanced to the end of the item
        location.

        Parameters
        ----------
        file_object : BinaryIO

        Returns
        -------
        None
        """
        # nothing to do when already written, or offset/bytes are unpopulated
        if self.item_written or self.item_offset is None or self.item_bytes is None:
            return
        file_object.seek(self.item_offset, os.SEEK_SET)
        file_object.write(self.item_bytes)
        self.item_written = True
class CPHDWritingDetails(object):
    """
    Manages the file header and the per-element offset/written-state
    bookkeeping (PVP, support, and signal blocks) for writing a CPHD
    version 1 file.
    """

    __slots__ = (
        '_header', '_header_written', '_meta',
        '_channel_map', '_support_map',
        '_pvp_details', '_support_details', '_signal_details')

    def __init__(self, meta: CPHDType1, check_older_version: bool = False):
        """
        Parameters
        ----------
        meta : CPHDType1
            The CPHD metadata structure.
        check_older_version : bool
            Use the oldest CPHD version which the metadata permits
            (via `meta.version_required()`), instead of the default?
        """
        self._header = None
        self._header_written = False
        self._meta = None
        self._channel_map = {}
        self._support_map = {}
        self._pvp_details = None
        self._support_details = None
        self._signal_details = None
        self.meta = meta
        self._set_header(check_older_version)
        # initialize the information for the pvp, support, and signal details
        self._populate_pvp_details()
        self._populate_support_details()
        self._populate_signal_details()

    @property
    def header(self) -> CPHDHeader1:
        """
        CPHDHeader1: The file header constructed from the metadata.
        """
        return self._header

    def _set_header(self, check_older_version: bool):
        # construct the file header for the chosen CPHD version
        if check_older_version:
            use_version_tuple = self.meta.version_required()
        else:
            use_version_tuple = get_default_tuple()
        use_version_string = '{}.{}.{}'.format(*use_version_tuple)
        self._header = self.meta.make_file_header(use_version=use_version_string)

    @property
    def use_version(self) -> str:
        """
        str: The CPHD version string being written.
        """
        return self.header.use_version

    @property
    def meta(self) -> CPHDType1:
        """
        CPHDType1: The metadata
        """
        return self._meta

    @meta.setter
    def meta(self, value):
        if self._meta is not None:
            raise ValueError('meta is read only once initialized.')
        if not isinstance(value, CPHDType1):
            raise TypeError('meta must be of type {}'.format(CPHDType1))
        self._meta = value

    def _populate_pvp_details(self) -> None:
        """
        Initialize the PVP element details, one per channel. May only be
        performed once.
        """
        if self._pvp_details is not None:
            raise ValueError('pvp_details can not be initialized again')
        pvp_details = []
        for i, entry in enumerate(self.meta.Data.Channels):
            self._channel_map[entry.Identifier] = i
            offset = self.header.PVP_BLOCK_BYTE_OFFSET + entry.PVPArrayByteOffset
            pvp_details.append(ElementDetails(offset))
        self._pvp_details = tuple(pvp_details)

    def _populate_support_details(self) -> None:
        """
        Initialize the support array element details, one per support array
        entry. May only be performed once.
        """
        if self._support_details is not None:
            raise ValueError('support_details can not be initialized again')
        if self.meta.Data.SupportArrays is None:
            # NB: fixed - this previously (erroneously) assigned _signal_details
            self._support_details = None
            return
        support_details = []
        for i, entry in enumerate(self.meta.Data.SupportArrays):
            self._support_map[entry.Identifier] = i
            offset = self.header.SUPPORT_BLOCK_BYTE_OFFSET + entry.ArrayByteOffset
            support_details.append(ElementDetails(offset))
        self._support_details = tuple(support_details)

    def _populate_signal_details(self) -> None:
        """
        Initialize the signal array element details, one per channel. May only
        be performed once.
        """
        if self._signal_details is not None:
            raise ValueError('signal_details can not be initialized again')
        signal_details = []
        for i, entry in enumerate(self.meta.Data.Channels):
            offset = self.header.SIGNAL_BLOCK_BYTE_OFFSET + entry.SignalArrayByteOffset
            signal_details.append(ElementDetails(offset))
        self._signal_details = tuple(signal_details)

    @property
    def pvp_details(self) -> Optional[Tuple[ElementDetails, ...]]:
        """
        None|Tuple[ElementDetails, ...]: Per-channel PVP element details.
        """
        return self._pvp_details

    @property
    def support_details(self) -> Optional[Tuple[ElementDetails, ...]]:
        """
        None|Tuple[ElementDetails, ...]: Per-array support element details.
        """
        return self._support_details

    @property
    def signal_details(self) -> Optional[Tuple[ElementDetails, ...]]:
        """
        None|Tuple[ElementDetails, ...]: Per-channel signal element details.
        """
        return self._signal_details

    @property
    def channel_map(self) -> Dict[str, int]:
        """
        Dict[str, int]: Maps channel identifier to channel index.
        """
        return self._channel_map

    @property
    def support_map(self) -> Optional[Dict[str, int]]:
        """
        None|Dict[str, int]: Maps support array identifier to array index.
        """
        return self._support_map

    def _write_items(
            self,
            details: Optional[Sequence[ElementDetails]],
            file_object: BinaryIO) -> None:
        # write every populated-but-unwritten element in the collection
        if details is None:
            return
        for entry in details:
            entry.write_item(file_object)

    def _verify_item_written(
            self,
            details: Optional[Sequence[ElementDetails]],
            name: str) -> None:
        # log an error for every element which has not been written
        if details is None:
            return
        for index, entry in enumerate(details):
            if not entry.item_written:
                logger.error('{} data at index {} not written'.format(name, index))

    def write_header(
            self,
            file_object: BinaryIO,
            overwrite: bool = False) -> None:
        """
        Write the header. The file object will be advanced to the end of the
        block, if writing occurs.

        Parameters
        ----------
        file_object : BinaryIO
        overwrite : bool
            Overwrite, if previously written?

        Returns
        -------
        None
        """
        if self._header_written and not overwrite:
            return
        file_object.write(self.header.to_string().encode())
        file_object.write(CPHD_SECTION_TERMINATOR)
        # write xml
        file_object.seek(self.header.XML_BLOCK_BYTE_OFFSET, os.SEEK_SET)
        file_object.write(self.meta.to_xml_bytes(urn=get_namespace(self.use_version)))
        file_object.write(CPHD_SECTION_TERMINATOR)
        self._header_written = True

    def write_all_populated_items(self, file_object: BinaryIO) -> None:
        """
        Write everything populated. This assumes that the header will start at the
        beginning (position 0) of the file-like object.

        Parameters
        ----------
        file_object : BinaryIO

        Returns
        -------
        None
        """
        self.write_header(file_object, overwrite=False)
        self._write_items(self.pvp_details, file_object)
        self._write_items(self.support_details, file_object)
        self._write_items(self.signal_details, file_object)

    def verify_all_written(self) -> None:
        """
        Log an error for the header and any element which remains unwritten.
        """
        if not self._header_written:
            logger.error('header not written')
        self._verify_item_written(self.pvp_details, 'pvp')
        self._verify_item_written(self.support_details, 'support')
        self._verify_item_written(self.signal_details, 'signal')
class CPHDWriter1(BaseWriter):
    """
    The CPHD version 1 writer.

    **Updated in version 1.3.0** for writing changes.
    """

    # factory hook, permitting subclasses to substitute their own details type
    _writing_details_type = CPHDWritingDetails

    __slots__ = (
        '_file_name', '_file_object', '_in_memory', '_writing_details',
        '_pvp_memmaps', '_support_memmaps', '_signal_data_segments',
        '_can_write_regular_data')
    def __init__(
            self,
            file_object: Union[str, BinaryIO],
            meta: Optional[CPHDType1] = None,
            writing_details: Optional[CPHDWritingDetails] = None,
            check_older_version: bool = False,
            check_existence: bool = True):
        """
        Parameters
        ----------
        file_object : str|BinaryIO
            Path to the output file, or an open writable binary file object.
        meta : None|CPHDType1
        writing_details : None|CPHDWritingDetails
            If omitted, constructed from `meta`; one of the two must be given.
        check_older_version : bool
            Try to create an older version CPHD for compliance with other
            NGA applications
        check_existence : bool
            Should we check if the given file already exists, and raises an exception if so?
        """
        self._writing_details = None
        if isinstance(file_object, str):
            if check_existence and os.path.exists(file_object):
                raise SarpyIOError(
                    'Given file {} already exists, and a new CPHD file cannot be created here.'.format(file_object))
            file_object = open(file_object, 'wb')

        if not is_file_like(file_object):
            raise ValueError('file_object requires a file path or BinaryIO object')

        self._file_object = file_object
        # a real on-disk file allows direct memmap writing; otherwise buffers
        # are held in memory and flushed through the writing details
        if is_real_file(file_object):
            self._file_name = file_object.name
            self._in_memory = False
        else:
            self._file_name = None
            self._in_memory = True

        if meta is None and writing_details is None:
            raise ValueError('One of meta or writing_details must be provided.')
        if writing_details is None:
            writing_details = self._writing_details_type(meta, check_older_version=check_older_version)
        self.writing_details = writing_details

        self._pvp_memmaps = None  # type: Optional[Dict[str, numpy.ndarray]]
        self._support_memmaps = None  # type: Optional[Dict[str, numpy.ndarray]]
        self._signal_data_segments = None  # type: Optional[Dict[str, DataSegment]]
        self._can_write_regular_data = None  # type: Optional[Dict[str, bool]]
        self._closed = False

        data_segment = self._initialize_data()
        BaseWriter.__init__(self, data_segment)
    @property
    def writing_details(self) -> CPHDWritingDetails:
        """
        CPHDWritingDetails: The header and element-offset bookkeeping object.
        """
        return self._writing_details

    @writing_details.setter
    def writing_details(self, value):
        # write-once: assignable only while still unset (see __init__)
        if self._writing_details is not None:
            raise ValueError('writing_details is read-only')
        if not isinstance(value, CPHDWritingDetails):
            raise TypeError('writing_details must be of type {}'.format(CPHDWritingDetails))
        self._writing_details = value

    @property
    def file_name(self) -> Optional[str]:
        """
        None|str: The output file path, if writing to a real file, else None.
        """
        return self._file_name

    @property
    def meta(self) -> CPHDType1:
        """
        CPHDType1: The metadata
        """
        return self.writing_details.meta
@staticmethod
def _verify_dtype(
obs_dtype: numpy.dtype,
exp_dtype: numpy.dtype,
purpose: str) -> None:
"""
This is a helper function for comparing two structured array dtypes.
Parameters
----------
obs_dtype : numpy.dtype
exp_dtype : numpy.dtype
purpose : str
"""
if obs_dtype.fields is None or exp_dtype.fields is None:
raise ValueError('structure array dtype required.')
observed_dtype = sorted(
[(field, dtype_info) for field, dtype_info in obs_dtype.fields.items()],
key=lambda x: x[1][1])
expected_dtype = sorted(
[(field, dtype_info) for field, dtype_info in exp_dtype.fields.items()],
key=lambda x: x[1][1])
if len(observed_dtype) != len(expected_dtype):
raise ValueError('Observed dtype for {} does not match the expected dtype.'.format(purpose))
for obs_entry, exp_entry in zip(observed_dtype, expected_dtype):
if obs_entry[1][1] != exp_entry[1][1]:
raise ValueError(
'Observed dtype for {} does not match the expected dtype\nobserved {}\nexpected {}.'.format(
purpose, observed_dtype, expected_dtype))
if obs_entry[0] != exp_entry[0]:
logger.warning(
'Got mismatched field names (observed {}, expected {}) for {}.'.format(
obs_entry[0], exp_entry[0], purpose))
def _validate_channel_index(self, index: Union[int, str]) -> int:
"""
Get corresponding integer index for CPHD channel.
Parameters
----------
index : int|str
Returns
-------
int
"""
if isinstance(index, str):
if index in self.writing_details.channel_map:
return self.writing_details.channel_map[index]
else:
raise KeyError(_missing_channel_identifier_text.format(index))
else:
int_index = int(index)
if not (0 <= int_index < self.meta.Data.NumCPHDChannels):
raise ValueError(_index_range_text.format(self.meta.Data.NumCPHDChannels))
return int_index
def _validate_channel_key(self, index: Union[int, str]) -> str:
"""
Gets the corresponding identifier for the CPHD channel.
Parameters
----------
index : int|str
Returns
-------
str
"""
if isinstance(index, str):
if index in self.writing_details.channel_map:
return index
else:
raise KeyError(_missing_channel_identifier_text.format(index))
else:
int_index = int(index)
if not (0 <= int_index < self.meta.Data.NumCPHDChannels):
raise ValueError(_index_range_text.format(self.meta.Data.NumCPHDChannels))
return self.meta.Data.Channels[int_index].Identifier
def _validate_support_index(self, index: Union[int, str]) -> int:
"""
Get corresponding integer index for support array.
Parameters
----------
index : int|str
Returns
-------
int
"""
if isinstance(index, str):
if index in self.writing_details.support_map:
return self.writing_details.support_map[index]
else:
raise KeyError('Cannot find support array for identifier {}'.format(index))
else:
int_index = int(index)
if not (0 <= int_index < len(self.meta.Data.SupportArrays)):
raise ValueError(_index_range_text.format(len(self.meta.Data.SupportArrays)))
return int_index
def _validate_support_key(self, index: Union[int, str]) -> str:
"""
Gets the corresponding identifier for the support array.
Parameters
----------
index : int|str
Returns
-------
str
"""
if isinstance(index, str):
if index in self.writing_details.support_map:
return index
else:
raise KeyError('Cannot find support array for identifier {}'.format(index))
else:
int_index = int(index)
if not (0 <= int_index < len(self.meta.Data.SupportArrays)):
raise ValueError(_index_range_text.format(len(self.meta.Data.SupportArrays)))
return self.meta.Data.SupportArrays[int_index].Identifier
    def _initialize_data(self) -> List[DataSegment]:
        """
        Set up the PVP and support array write targets (file memmaps, or plain
        arrays when writing in memory) and the per-channel signal data
        segments.

        Returns
        -------
        List[DataSegment]
            The per-channel signal data segments, in metadata channel order.
        """
        self._pvp_memmaps = {}

        # set up the PVP memmaps
        pvp_dtype = self.meta.PVP.get_vector_dtype()
        for i, entry in enumerate(self.meta.Data.Channels):
            # create the pvp mem map
            offset = self.writing_details.pvp_details[i].item_offset
            shape = (entry.NumVectors, )
            if self._in_memory:
                self._pvp_memmaps[entry.Identifier] = numpy.empty(shape, dtype=pvp_dtype)
            else:
                self._pvp_memmaps[entry.Identifier] = numpy.memmap(
                    self._file_name, dtype=pvp_dtype, mode='r+', offset=offset, shape=shape)

        self._support_memmaps = {}
        if self.meta.Data.SupportArrays is not None:
            for i, entry in enumerate(self.meta.Data.SupportArrays):
                # extract the support array metadata details
                details = self.meta.SupportArray.find_support_array(entry.Identifier)
                offset = self.writing_details.support_details[i].item_offset
                # determine numpy dtype and depth of array
                dtype, depth = details.get_numpy_format()
                # set up the numpy memory map
                shape = (entry.NumRows, entry.NumCols) if depth == 1 else (entry.NumRows, entry.NumCols, depth)
                if self._in_memory:
                    self._support_memmaps[entry.Identifier] = numpy.empty(shape, dtype=dtype)
                else:
                    self._support_memmaps[entry.Identifier] = numpy.memmap(
                        self._file_name, dtype=dtype, mode='r+', offset=offset, shape=shape)

        # set up the signal data_segment (this is used for formatting issues)
        # regular (complex) data may only be written once AmpSF is known,
        # which is immediately when the metadata defines no AmpSF at all
        no_amp_sf = (self.meta.PVP.AmpSF is None)
        self._signal_data_segments = {}
        self._can_write_regular_data = {}
        signal_data_segments = []
        # map the signal array format to the raw (big-endian) component dtype;
        # each complex sample is stored as an interleaved (real, imag) pair
        signal_array_format = self.meta.Data.SignalArrayFormat
        if signal_array_format == 'CI2':
            signal_dtype = numpy.dtype('>i1')
        elif signal_array_format == 'CI4':
            signal_dtype = numpy.dtype('>i2')
        elif signal_array_format == 'CF8':
            signal_dtype = numpy.dtype('>f4')
        else:
            raise ValueError('Got unhandled SignalArrayFormat {}'.format(signal_array_format))
        for i, entry in enumerate(self.meta.Data.Channels):
            self._can_write_regular_data[entry.Identifier] = no_amp_sf
            raw_shape = (entry.NumVectors, entry.NumSamples, 2)
            format_function = AmpScalingFunction(signal_dtype)
            offset = self.writing_details.signal_details[i].item_offset
            if self._in_memory:
                underlying_array = numpy.full(raw_shape, 0, dtype=signal_dtype)
                data_segment = NumpyArraySegment(
                    underlying_array, 'complex64', formatted_shape=raw_shape[:2],
                    format_function=format_function, mode='w')
            else:
                data_segment = NumpyMemmapSegment(
                    self._file_object.name, offset, signal_dtype, raw_shape,
                    formatted_dtype='complex64', formatted_shape=raw_shape[:2],
                    format_function=format_function, mode='w', close_file=False)
            signal_data_segments.append(data_segment)
            self._signal_data_segments[entry.Identifier] = data_segment
        return signal_data_segments
def write_support_array(self,
identifier: Union[int, str],
data: numpy.ndarray) -> None:
"""
Write support array data to the file.
Parameters
----------
identifier : int|str
data : numpy.ndarray
"""
self._validate_closed()
int_index = self._validate_support_index(identifier)
identifier = self._validate_support_key(identifier)
out_array = self._support_memmaps[identifier]
if data.shape != out_array.shape:
raise ValueError(
'Support data shape {} is not compatible with\n\t'
'that provided in metadata {}'.format(data.shape, out_array.shape))
# write the data
out_array[:] = data
# mark it as written
details = self.writing_details.support_details[int_index]
if self._in_memory:
# TODO: we can delete the memmap now?
details.item_bytes = out_array.tobytes()
else:
details.item_written = True
def write_pvp_array(self,
identifier: Union[int, str],
data: numpy.ndarray) -> None:
"""
Write the PVP array data to the file.
Parameters
----------
identifier : int|str
data : numpy.ndarray
"""
self._validate_closed()
def validate_dtype():
self._verify_dtype(data.dtype, self._pvp_memmaps[identifier].dtype, 'PVP channel {}'.format(identifier))
int_index = self._validate_channel_index(identifier)
identifier = self._validate_channel_key(identifier)
entry = self.meta.Data.Channels[int_index]
validate_dtype()
if data.ndim != 1:
raise ValueError('Provided data is required to be one dimensional')
if data.shape[0] != entry.NumVectors:
raise ValueError('Provided data must have size determined by NumVectors')
if self.meta.PVP.AmpSF is not None:
amp_sf = numpy.copy(data['AmpSF'][:])
# noinspection PyUnresolvedReferences
self._signal_data_segments[identifier].format_function.set_amplitude_scaling(amp_sf)
self._can_write_regular_data[identifier] = True
# write the data
self._pvp_memmaps[identifier][:] = data
# mark it as written
details = self.writing_details.pvp_details[int_index]
if self._in_memory:
# TODO: we can likely delete the memmap now?
details.item_bytes = self._pvp_memmaps[identifier].tobytes()
else:
details.item_written = True
def write_support_block(self, support_block: Dict[Union[int, str], numpy.ndarray]) -> None:
"""
Write support block to the file.
Parameters
----------
support_block: dict
Dictionary of `numpy.ndarray` containing the support arrays.
"""
expected_support_ids = {s.Identifier for s in self.meta.Data.SupportArrays}
assert expected_support_ids == set(support_block), 'support_block keys do not match those in meta'
for identifier, array in support_block.items():
self.write_support_array(identifier, array)
def write_pvp_block(self, pvp_block: Dict[Union[int, str], numpy.ndarray]) -> None:
"""
Write PVP block to the file.
Parameters
----------
pvp_block: dict
Dictionary of `numpy.ndarray` containing the PVP arrays.
"""
expected_channels = {c.Identifier for c in self.meta.Data.Channels}
assert expected_channels == set(pvp_block), 'pvp_block keys do not match those in meta'
for identifier, array in pvp_block.items():
self.write_pvp_array(identifier, array)
def write_signal_block(self, signal_block: Dict[Union[int, str], numpy.ndarray]) -> None:
"""
Write signal block to the file.
Parameters
----------
signal_block: dict
Dictionary of `numpy.ndarray` containing the signal arrays in complex64 format.
"""
expected_channels = {c.Identifier for c in self.meta.Data.Channels}
assert expected_channels == set(signal_block), 'signal_block keys do not match those in meta'
for identifier, array in signal_block.items():
self.write(array, index=identifier)
def write_signal_block_raw(self, signal_block):
"""
Write signal block to the file.
Parameters
----------
signal_block: dict
Dictionary of `numpy.ndarray` containing the raw formatted
(i.e. file storage format) signal arrays.
"""
expected_channels = {c.Identifier for c in self.meta.Data.Channels}
assert expected_channels == set(signal_block), 'signal_block keys do not match those in meta'
for identifier, array in signal_block.items():
self.write_raw(array, index=identifier)
    def write_file(
            self,
            pvp_block: Dict[Union[int, str], numpy.ndarray],
            signal_block: Dict[Union[int, str], numpy.ndarray],
            support_block: Optional[Dict[Union[int, str], numpy.ndarray]] = None):
        """
        Write the blocks to the file.

        Parameters
        ----------
        pvp_block: Dict[str, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the PVP arrays.
            Keys must be consistent with `self.meta`
        signal_block: Dict[str, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the complex64 formatted signal
            arrays.
            Keys must be consistent with `self.meta`
        support_block: None|Dict[str, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the support arrays.
        """

        # PVP data is written first - for any channel with AmpSF, writing the
        # PVP array establishes the amplitude scaling that must be in place
        # before complex signal data may be written (see __call__)
        self.write_pvp_block(pvp_block)
        if support_block:
            self.write_support_block(support_block)
        self.write_signal_block(signal_block)
    def write_file_raw(
            self,
            pvp_block: Dict[Union[int, str], numpy.ndarray],
            signal_block: Dict[Union[int, str], numpy.ndarray],
            support_block: Optional[Dict[Union[int, str], numpy.ndarray]] = None):
        """
        Write the blocks to the file.

        Parameters
        ----------
        pvp_block: Dict[str, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the PVP arrays.
            Keys must be consistent with `self.meta`
        signal_block: Dict[str, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the raw formatted
            (i.e. file storage format) signal arrays.
            Keys must be consistent with `self.meta`
        support_block: None|Dict[str, numpy.ndarray]
            Dictionary of `numpy.ndarray` containing the support arrays.
        """

        # same block ordering as write_file (pvp, support, then signal)
        self.write_pvp_block(pvp_block)
        if support_block:
            self.write_support_block(support_block)
        self.write_signal_block_raw(signal_block)
def write_chip(
self,
data: numpy.ndarray,
start_indices: Union[None, int, Tuple[int, ...]] = None,
subscript: Union[None, Tuple[slice, ...]] = None,
index: Union[int, str] = 0) -> None:
self.__call__(data, start_indices=start_indices, subscript=subscript, index=index, raw=False)
def write(
self,
data: numpy.ndarray,
start_indices: Union[None, int, Tuple[int, ...]] = None,
subscript: Union[None, Tuple[slice, ...]] = None,
index: Union[int, str] = 0) -> None:
self.__call__(data, start_indices=start_indices, subscript=subscript, index=index, raw=False)
def write_raw(
self,
data: numpy.ndarray,
start_indices: Union[None, int, Tuple[int, ...]] = None,
subscript: Union[None, Tuple[slice, ...]] = None,
index: Union[int, str] = 0) -> None:
self.__call__(data, start_indices=start_indices, subscript=subscript, index=index, raw=True)
    def __call__(
            self,
            data: numpy.ndarray,
            start_indices: Union[None, int, Tuple[int, ...]] = None,
            subscript: Union[None, Tuple[slice, ...]] = None,
            index: Union[int, str] = 0,
            raw: bool = False) -> None:
        """
        Write signal data for the given channel.

        Parameters
        ----------
        data : numpy.ndarray
        start_indices : None|int|Tuple[int, ...]
        subscript : None|Tuple[slice, ...]
        index : int|str
            The channel, by integer index or string identifier.
        raw : bool
            If True, `data` is in raw (file storage) form; otherwise it is
            complex data to be formatted on write.
        """

        int_index = self._validate_channel_index(index)
        identifier = self._validate_channel_key(index)
        # complex (non-raw) writing requires the AmpSF scaling to be in place,
        # which happens when the channel's PVP array is written
        if not raw and not self._can_write_regular_data[identifier]:
            raise ValueError(
                'The channel `{}` has an AmpSF which has not been determined,\n\t'
                'but the corresponding PVP block has not yet been written'.format(identifier))

        BaseWriter.__call__(self, data, start_indices=start_indices, subscript=subscript, index=int_index, raw=raw)

        # check if it's fully written
        # NB: this could be refactored out, but leaving it makes the most logical
        # sense given the pvp/support approach
        fully_written = self.data_segment[int_index].check_fully_written(warn=False)
        if fully_written:
            self.writing_details.signal_details[int_index].item_written = True
    def flush(self, force: bool = False) -> None:
        """
        Push all populated items to the file.

        Parameters
        ----------
        force : bool
            If True, in-memory signal segments are serialized even if they
            have not been fully written.
        """

        self._validate_closed()
        BaseWriter.flush(self, force=force)

        try:
            if self._in_memory:
                if self.data_segment is not None:
                    for index, entry in enumerate(self.data_segment):
                        details = self.writing_details.signal_details[index]
                        if details.item_written:
                            continue  # already pushed out to the file
                        if details.item_bytes is not None:
                            continue  # raw bytes already captured
                        if force or entry.check_fully_written(warn=force):
                            details.item_bytes = entry.get_raw_bytes(warn=False)

            self.writing_details.write_all_populated_items(self._file_object)
        except AttributeError:
            # attributes may be absent in a partially initialized or already
            # torn-down state - deliberately treat flush as a no-op then
            return
    def close(self):
        """
        This should perform any necessary final steps, like closing
        open file handles, deleting any temp files, etc.

        Trying to read newly created file without closing may raise a ValueError.
        """

        if hasattr(self, '_closed') and self._closed:
            return  # already closed - close() is idempotent

        BaseWriter.close(self)  # NB: flush called here

        try:
            if self.writing_details is not None:
                # NOTE(review): presumably raises/warns on items never written - confirm
                self.writing_details.verify_all_written()
        except AttributeError:
            pass
        # drop references for garbage collection
        self._writing_details = None
        self._file_object = None
| 66,150 | 34.412741 | 119 | py |
sarpy | sarpy-master/sarpy/io/phase_history/converter.py | """
This module provide utilities for reading essentially Compensated Phase History Data.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import os
from typing import Callable
from sarpy.io.general.base import SarpyIOError, check_for_openers
from sarpy.io.phase_history.base import CPHDTypeReader
###########
# Module variables
_openers = []
_parsed_openers = False
def register_opener(open_func: Callable) -> None:
    """
    Register an additional phase history opener.

    Parameters
    ----------
    open_func : Callable
        This is required to be a function which takes a single argument (file name).
        This function should return a sarpy.io.phase_history.base.CPHDTypeReader instance
        if the referenced file is viable for the underlying type, and None otherwise.

    Returns
    -------
    None
    """

    if not callable(open_func):
        raise TypeError('open_func must be a callable')
    # avoid registering the same opener more than once
    if open_func in _openers:
        return
    _openers.append(open_func)
def parse_openers() -> None:
    """
    Automatically find the viable openers (i.e. :func:`is_a`) in the various modules.

    This is a one-shot operation; any call after the first returns immediately.
    """

    global _parsed_openers

    if _parsed_openers:
        return
    # set the flag before scanning, so the scan is attempted only once
    _parsed_openers = True
    check_for_openers('sarpy.io.phase_history', register_opener)
def open_phase_history(file_name: str) -> CPHDTypeReader:
    """
    Given a file, try to find and return the appropriate reader object.

    Parameters
    ----------
    file_name : str

    Returns
    -------
    CPHDTypeReader

    Raises
    ------
    SarpyIOError
        If the file does not exist, or no registered opener claims it.
    """

    if not os.path.exists(file_name):
        raise SarpyIOError('File {} does not exist.'.format(file_name))

    # make sure the opener collection has been populated
    parse_openers()

    # trial and error - the first opener yielding a reader wins
    for opener in _openers:
        reader = opener(file_name)
        if reader is not None:
            return reader

    # nothing claimed the file
    raise SarpyIOError('Unable to determine phase history image format.')
| 2,063 | 23.282353 | 89 | py |
sarpy | sarpy-master/sarpy/io/phase_history/__init__.py | """
This package contains the elements for interpreting phase history data.
"""
__classification__ = 'UNCLASSIFIED'
# NOTE: intentionally shadows the builtin `open` within this package namespace
def open(*args, **kwargs):
    """
    Open a phase history file and return the appropriate reader.

    Delegates to :func:`sarpy.io.phase_history.converter.open_phase_history`;
    the local import defers loading the converter machinery until first use.
    """
    from .converter import open_phase_history
    return open_phase_history(*args, **kwargs)
| 239 | 20.818182 | 71 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/SceneCoordinates.py | """
The SceneCoordinates type definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
from sarpy.io.xml.base import Serializable, SerializableArray, create_text_node
from sarpy.io.xml.descriptors import FloatDescriptor, IntegerDescriptor, \
StringDescriptor, StringEnumDescriptor, \
SerializableDescriptor, SerializableArrayDescriptor, UnitVectorDescriptor
from sarpy.io.complex.sicd_elements.base import SerializableCPArray, SerializableCPArrayDescriptor
from sarpy.io.complex.sicd_elements.blocks import XYZType, LatLonType, LatLonCornerType
from sarpy.io.complex.sicd_elements.GeoData import SCPType
from .base import DEFAULT_STRICT
from .blocks import AreaType, LSType, LSVertexType
class IARPType(SCPType):
    """
    The Image Area Reference Point (IARP), which is the origin of the Image Area Coordinate system.

    Note that setting one of ECF or LLH will implicitly set the other to its corresponding
    matched value. All fields and behavior are inherited unchanged from :class:`SCPType`.
    """
class ECFPlanarType(Serializable):
    """
    Parameters for a planar surface defined in ECF coordinates. The reference
    surface is a plane that contains the IARP.
    """

    _fields = ('uIAX', 'uIAY')
    _required = _fields
    # descriptors
    uIAX = UnitVectorDescriptor(
        'uIAX', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Image Area X-coordinate (IAX) unit vector in ECF coordinates. '
                  'For stripmap collections, uIAX ALWAYS points in the direction '
                  'of the scanning footprint.')  # type: XYZType
    uIAY = UnitVectorDescriptor(
        'uIAY', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Image Area Y-coordinate (IAY) unit vector in ECF '
                  'coordinates. This should be perpendicular to '
                  'uIAX.')  # type: XYZType

    def __init__(self, uIAX=None, uIAY=None, **kwargs):
        """
        Parameters
        ----------
        uIAX : XYZType|numpy.ndarray|list|tuple
        uIAY : XYZType|numpy.ndarray|list|tuple
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.uIAX = uIAX
        self.uIAY = uIAY
        super(ECFPlanarType, self).__init__(**kwargs)
class LLPlanarType(Serializable):
    """
    Parameters for Lat/Lon planar surface of constant HAE, implicitly assumed to be
    the HAE at the `IARP`.
    """

    _fields = ('uIAXLL', 'uIAYLL')
    _required = _fields
    # descriptors
    uIAXLL = SerializableDescriptor(
        'uIAXLL', LatLonType, _required, strict=DEFAULT_STRICT,
        docstring='Image coordinate IAX *"unit vector"* expressed as an increment '
                  'in latitude and longitude corresponding to a 1.0 meter increment '
                  'in image coordinate `IAX`.')  # type: LatLonType
    uIAYLL = SerializableDescriptor(
        'uIAYLL', LatLonType, _required, strict=DEFAULT_STRICT,
        docstring='Image coordinate IAY *"unit vector"* expressed as an increment '
                  'in latitude and longitude corresponding to a 1.0 meter increment '
                  'in image coordinate `IAY`.')  # type: LatLonType

    def __init__(self, uIAXLL=None, uIAYLL=None, **kwargs):
        """
        Parameters
        ----------
        uIAXLL : LatLonType|numpy.ndarray|list|tuple
        uIAYLL : LatLonType|numpy.ndarray|list|tuple
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.uIAXLL = uIAXLL
        self.uIAYLL = uIAYLL
        super(LLPlanarType, self).__init__(**kwargs)
class ReferenceSurfaceType(Serializable):
    """
    Parameters that define the Reference Surface used for the product.
    """

    _fields = ('Planar', 'HAE')
    _required = ()
    # Planar and HAE form a required choice - a single one should be populated
    _choice = ({'required': True, 'collection': _fields}, )
    # descriptors
    Planar = SerializableDescriptor(
        'Planar', ECFPlanarType, _required, strict=DEFAULT_STRICT,
        docstring='The ECF planar surface definition.')  # type: Union[None, ECFPlanarType]
    HAE = SerializableDescriptor(
        'HAE', LLPlanarType, _required, strict=DEFAULT_STRICT,
        docstring='The HAE surface definition.')  # type: Union[None, LLPlanarType]

    def __init__(self, Planar=None, HAE=None, **kwargs):
        """
        Parameters
        ----------
        Planar : ECFPlanarType|None
        HAE : LLPlanarType|None
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Planar = Planar
        self.HAE = HAE
        super(ReferenceSurfaceType, self).__init__(**kwargs)
###########
# Image grid definition
class IAXExtentType(Serializable):
    """
    Increasing line index is in the +IAX direction.
    """

    _fields = ('LineSpacing', 'FirstLine', 'NumLines')
    _required = _fields
    _numeric_format = {'LineSpacing': '0.17G'}
    # descriptors
    LineSpacing = FloatDescriptor(
        'LineSpacing', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The line spacing, in meters.')  # type: float
    FirstLine = IntegerDescriptor(
        'FirstLine', _required, strict=DEFAULT_STRICT,
        docstring='Index of the first line.')  # type: int
    NumLines = IntegerDescriptor(
        'NumLines', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of lines.')  # type: int

    def __init__(self, LineSpacing=None, FirstLine=None, NumLines=None, **kwargs):
        """
        Parameters
        ----------
        LineSpacing : float
        FirstLine : int
        NumLines : int
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.LineSpacing = LineSpacing
        self.FirstLine = FirstLine
        self.NumLines = NumLines
        super(IAXExtentType, self).__init__(**kwargs)
class IAYExtentType(Serializable):
    """
    Increasing sample index is in the +IAY direction.
    """

    _fields = ('SampleSpacing', 'FirstSample', 'NumSamples')
    _required = _fields
    _numeric_format = {'SampleSpacing': '0.17G'}
    # descriptors
    SampleSpacing = FloatDescriptor(
        'SampleSpacing', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Sample spacing, in meters.')  # type: float
    FirstSample = IntegerDescriptor(
        'FirstSample', _required, strict=DEFAULT_STRICT,
        docstring='Index of the first sample.')  # type: int
    NumSamples = IntegerDescriptor(
        'NumSamples', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of samples.')  # type: int

    def __init__(self, SampleSpacing=None, FirstSample=None, NumSamples=None, **kwargs):
        """
        Parameters
        ----------
        SampleSpacing : float
        FirstSample : int
        NumSamples : int
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SampleSpacing = SampleSpacing
        self.FirstSample = FirstSample
        self.NumSamples = NumSamples
        super(IAYExtentType, self).__init__(**kwargs)
class SegmentType(Serializable):
    """
    Rectangle segment.
    """

    _fields = ('Identifier', 'StartLine', 'StartSample', 'EndLine', 'EndSample', 'SegmentPolygon')
    _required = ('Identifier', 'StartLine', 'StartSample', 'EndLine', 'EndSample')
    _collections_tags = {'SegmentPolygon': {'array': True, 'child_tag': 'SV'}}
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies the Image Segment.')  # type: str
    StartLine = IntegerDescriptor(
        'StartLine', _required, strict=DEFAULT_STRICT,
        docstring='Start line of the segment.')  # type: int
    StartSample = IntegerDescriptor(
        'StartSample', _required, strict=DEFAULT_STRICT,
        docstring='Start sample of the segment.')  # type: int
    EndLine = IntegerDescriptor(
        'EndLine', _required, strict=DEFAULT_STRICT,
        docstring='End line of the segment.')  # type: int
    EndSample = IntegerDescriptor(
        'EndSample', _required, strict=DEFAULT_STRICT,
        docstring='End sample of the segment.')  # type: int
    SegmentPolygon = SerializableArrayDescriptor(
        'SegmentPolygon', LSVertexType, _collections_tags, _required,
        strict=DEFAULT_STRICT, minimum_length=3,
        docstring='Polygon that describes a portion of the segment '
                  'rectangle.')  # type: Union[SerializableArray, List[LSVertexType]]

    def __init__(self, Identifier=None, StartLine=None, StartSample=None, EndLine=None,
                 EndSample=None, SegmentPolygon=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        StartLine : int
        StartSample : int
        EndLine : int
        EndSample : int
        SegmentPolygon : SerializableArray|List[LSVertexType]|numpy.ndarray|list|tuple
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.StartLine = StartLine
        self.StartSample = StartSample
        self.EndLine = EndLine
        self.EndSample = EndSample
        self.SegmentPolygon = SegmentPolygon
        super(SegmentType, self).__init__(**kwargs)
class SegmentListType(SerializableArray):
    # array extension which serializes an additional NumSegments count element
    _set_size = False
    _set_index = False

    @property
    def NumSegments(self):
        """int: The number of segments contained in the list."""
        return self.size

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT):
        # serialize the array as usual, then append the NumSegments count node
        anode = super(SegmentListType, self).to_node(
            doc, tag, ns_key=ns_key, parent=parent, check_validity=check_validity, strict=strict)
        create_text_node(
            doc, 'NumSegments' if ns_key is None else '{}:NumSegments'.format(ns_key),
            '{0:d}'.format(self.NumSegments), parent=anode)
        return anode
class ImageGridType(Serializable):
    """
    Parameters that describe a geo-referenced image grid for image data products
    that may be formed from the CPHD signal array(s).
    """

    _fields = ('Identifier', 'IARPLocation', 'IAXExtent', 'IAYExtent', 'SegmentList')
    _required = ('IARPLocation', 'IAXExtent', 'IAYExtent')
    _collections_tags = {'SegmentList': {'array': True, 'child_tag': 'Segment'}}
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies the Image Grid.')  # type: Union[None, str]
    IARPLocation = SerializableDescriptor(
        'IARPLocation', LSType, _required, strict=DEFAULT_STRICT,
        docstring='IARP grid location. Grid locations indexed by (line, sample) or (L,S). '
                  'Image grid line and sample are pixel-centered indices.')  # type: LSType
    IAXExtent = SerializableDescriptor(
        'IAXExtent', IAXExtentType, _required, strict=DEFAULT_STRICT,
        docstring='Increasing line index is in the +IAX direction.')  # type: IAXExtentType
    IAYExtent = SerializableDescriptor(
        'IAYExtent', IAYExtentType, _required, strict=DEFAULT_STRICT,
        docstring='Increasing sample index is in the +IAY direction.')  # type: IAYExtentType
    SegmentList = SerializableArrayDescriptor(
        'SegmentList', SegmentType, _collections_tags, _required, strict=DEFAULT_STRICT,
        array_extension=SegmentListType,
        docstring='List of image grid segments defined relative to the image '
                  'grid.')  # type: Union[SegmentListType, List[SegmentType]]

    def __init__(self, Identifier=None, IARPLocation=None, IAXExtent=None, IAYExtent=None,
                 SegmentList=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : None|str
        IARPLocation : LSType
        IAXExtent : IAXExtentType
        IAYExtent : IAYExtentType
        SegmentList : SegmentListType|List[SegmentType]|numpy.array|list|tuple
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.IARPLocation = IARPLocation
        self.IAXExtent = IAXExtent
        self.IAYExtent = IAYExtent
        self.SegmentList = SegmentList
        super(ImageGridType, self).__init__(**kwargs)
###############
class SceneCoordinatesType(Serializable):
    """
    Parameters that define geographic coordinates in the imaged scene.
    """

    _fields = (
        'EarthModel', 'IARP', 'ReferenceSurface', 'ImageArea', 'ImageAreaCornerPoints',
        'ExtendedArea', 'ImageGrid')
    _required = ('EarthModel', 'IARP', 'ReferenceSurface', 'ImageArea', 'ImageAreaCornerPoints')
    _collections_tags = {
        'ImageAreaCornerPoints': {'array': True, 'child_tag': 'IACP'}}
    # descriptors
    EarthModel = StringEnumDescriptor(
        'EarthModel', ('WGS_84', ), _required, strict=DEFAULT_STRICT, default_value='WGS_84',
        docstring='Specifies the earth model used for specifying geodetic coordinates. All heights are '
                  'Height Above the Ellipsoid (HAE) unless specifically '
                  'noted.')  # type: str
    IARP = SerializableDescriptor(
        'IARP', IARPType, _required, strict=DEFAULT_STRICT,
        docstring='Image Area Reference Point (IARP). The IARP is the origin of '
                  'the Image Area Coordinate system.')  # type: IARPType
    ReferenceSurface = SerializableDescriptor(
        'ReferenceSurface', ReferenceSurfaceType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that define the Reference Surface used for the '
                  'product.')  # type: ReferenceSurfaceType
    ImageArea = SerializableDescriptor(
        'ImageArea', AreaType, _required, strict=DEFAULT_STRICT,
        docstring='Image Area is defined by a rectangle aligned with Image Area coordinates (IAX, IAY). '
                  'May be reduced by the optional polygon.')  # type: AreaType
    ImageAreaCornerPoints = SerializableCPArrayDescriptor(
        'ImageAreaCornerPoints', LatLonCornerType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Image Area Corner Points (IACPs) that bound the full resolution '
                  'image area.')  # type: Union[SerializableCPArray, List[LatLonCornerType]]
    ExtendedArea = SerializableDescriptor(
        'ExtendedArea', AreaType, _required, strict=DEFAULT_STRICT,
        docstring='Extended Area is defined by a rectangle aligned with Image Area coordinates '
                  '(IAX, IAY). May be reduced by the optional polygon.')  # type: Union[None, AreaType]
    ImageGrid = SerializableDescriptor(
        'ImageGrid', ImageGridType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe a geo-referenced image grid for image data '
                  'products that may be formed from the CPHD signal '
                  'array(s).')  # type: ImageGridType

    def __init__(self, EarthModel='WGS_84', IARP=None, ReferenceSurface=None,
                 ImageArea=None, ImageAreaCornerPoints=None, ExtendedArea=None,
                 ImageGrid=None, **kwargs):
        """
        Parameters
        ----------
        EarthModel : None|str
        IARP : IARPType
        ReferenceSurface : ReferenceSurfaceType
        ImageArea : AreaType
        ImageAreaCornerPoints : SerializableCPArray|List[LatLonCornerType]|numpy.ndarray|list|tuple
        ExtendedArea : None|AreaType
        ImageGrid : None|ImageGridType
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.EarthModel = EarthModel
        self.IARP = IARP
        self.ReferenceSurface = ReferenceSurface
        self.ImageArea = ImageArea
        self.ImageAreaCornerPoints = ImageAreaCornerPoints
        self.ExtendedArea = ExtendedArea
        self.ImageGrid = ImageGrid
        super(SceneCoordinatesType, self).__init__(**kwargs)
| 16,766 | 38.359155 | 105 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/Dwell.py | """
The Dwell parameters definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import StringDescriptor, SerializableDescriptor, SerializableListDescriptor
from sarpy.io.complex.sicd_elements.blocks import Poly2DType
from .base import DEFAULT_STRICT
class CODTimeType(Serializable):
    """
    Center of Dwell (COD) time polynomial object.
    """

    _fields = ('Identifier', 'CODTimePoly')
    _required = _fields
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this COD Time '
                  'polynomial.')  # type: str
    CODTimePoly = SerializableDescriptor(
        'CODTimePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='The polynomial.')  # type: Poly2DType

    def __init__(self, Identifier=None, CODTimePoly=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        CODTimePoly : Poly2DType|numpy.ndarray|list|tuple
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.CODTimePoly = CODTimePoly
        super(CODTimeType, self).__init__(**kwargs)
class DwellTimeType(Serializable):
    """
    The dwell time polynomial object.
    """

    _fields = ('Identifier', 'DwellTimePoly')
    _required = _fields
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this Dwell Time '
                  'polynomial.')  # type: str
    DwellTimePoly = SerializableDescriptor(
        'DwellTimePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='The polynomial.')  # type: Poly2DType

    def __init__(self, Identifier=None, DwellTimePoly=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        DwellTimePoly : Poly2DType|numpy.ndarray|list|tuple
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.DwellTimePoly = DwellTimePoly
        super(DwellTimeType, self).__init__(**kwargs)
class DwellType(Serializable):
    """
    Parameters that specify the dwell time supported by the signal arrays
    contained in the CPHD product.
    """

    _fields = ('NumCODTimes', 'CODTimes', 'NumDwellTimes', 'DwellTimes')
    _required = ('CODTimes', 'DwellTimes')
    _collections_tags = {
        'CODTimes': {'array': False, 'child_tag': 'CODTime'},
        'DwellTimes': {'array': False, 'child_tag': 'DwellTime'}}
    # descriptors - the Num* fields are realized as read-only properties below
    CODTimes = SerializableListDescriptor(
        'CODTimes', CODTimeType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The Center of Dwell (COD) time polynomials.')  # type: List[CODTimeType]
    DwellTimes = SerializableListDescriptor(
        'DwellTimes', DwellTimeType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The dwell time polynomials.')  # type: List[DwellTimeType]

    def __init__(self, CODTimes=None, DwellTimes=None, **kwargs):
        """
        Parameters
        ----------
        CODTimes : List[CODTimeType]
        DwellTimes : List[DwellTimeType]
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CODTimes = CODTimes
        self.DwellTimes = DwellTimes
        super(DwellType, self).__init__(**kwargs)

    @property
    def NumCODTimes(self):
        """
        int: The number of cod time polynomial elements.
        """

        if self.CODTimes is None:
            return 0
        else:
            return len(self.CODTimes)

    @property
    def NumDwellTimes(self):
        """
        int: The number of dwell time polynomial elements.
        """

        if self.DwellTimes is None:
            return 0
        else:
            return len(self.DwellTimes)
| 4,422 | 29.503448 | 105 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/base.py |
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"

# default strictness flag handed to the Serializable descriptors across this package
DEFAULT_STRICT = False
# float format string referenced by the `_numeric_format` maps in this package
FLOAT_FORMAT = '0.17E'
| 118 | 13.875 | 35 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/TxRcv.py | """
The TxRcv type definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import FloatDescriptor, StringDescriptor, \
StringEnumDescriptor, SerializableListDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import POLARIZATION_TYPE
class TxWFParametersType(Serializable):
    """
    Parameters that describe a Transmit Waveform.
    """

    _fields = (
        'Identifier', 'PulseLength', 'RFBandwidth', 'FreqCenter', 'LFMRate',
        'Polarization', 'Power')
    _required = (
        'Identifier', 'PulseLength', 'RFBandwidth', 'FreqCenter', 'Polarization')
    _numeric_format = {
        'PulseLength': FLOAT_FORMAT, 'RFBandwidth': FLOAT_FORMAT, 'FreqCenter': FLOAT_FORMAT,
        'LFMRate': FLOAT_FORMAT, 'Power': FLOAT_FORMAT}
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this Transmit '
                  'Waveform.')  # type: str
    PulseLength = FloatDescriptor(
        'PulseLength', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Length of transmitted pulse, '
                  'in seconds.')  # type: float
    RFBandwidth = FloatDescriptor(
        'RFBandwidth', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Bandwidth of transmitted pulse, '
                  'in Hz.')  # type: float
    FreqCenter = FloatDescriptor(
        'FreqCenter', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Center frequency of the transmitted waveform, '
                  'in Hz.')  # type: float
    LFMRate = FloatDescriptor(
        'LFMRate', _required, strict=DEFAULT_STRICT,
        docstring='Chirp rate of transmitted pulse if LFM, '
                  'in Hz/s.')  # type: Union[None, float]
    Polarization = StringEnumDescriptor(
        'Polarization', POLARIZATION_TYPE, _required, strict=DEFAULT_STRICT,
        docstring='The transmit polarization mode.')  # type: str
    Power = FloatDescriptor(
        'Power', _required, strict=DEFAULT_STRICT,
        docstring='Peak transmitted power at the interface to the antenna '
                  'in dBW.')  # type: Union[None, float]

    def __init__(self, Identifier=None, PulseLength=None, RFBandwidth=None,
                 FreqCenter=None, LFMRate=None, Polarization=None, Power=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        PulseLength : float
        RFBandwidth : float
        FreqCenter : float
        LFMRate : None|float
        Polarization : str
        Power : None|float
        kwargs
        """

        # capture optional xml namespace info passed through kwargs
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.PulseLength = PulseLength
        self.RFBandwidth = RFBandwidth
        self.FreqCenter = FreqCenter
        self.LFMRate = LFMRate
        self.Polarization = Polarization
        self.Power = Power
        super(TxWFParametersType, self).__init__(**kwargs)
class RcvParametersType(Serializable):
    """
    Parameters that describe a Receive configuration.
    """
    # Metadata consumed by the Serializable base machinery: XML child order,
    # mandatory fields, and per-field numeric string formats.
    _fields = (
        'Identifier', 'WindowLength', 'SampleRate', 'IFFilterBW', 'FreqCenter',
        'LFMRate', 'Polarization', 'PathGain')
    _required = (
        'Identifier', 'WindowLength', 'SampleRate', 'IFFilterBW', 'FreqCenter',
        'Polarization')
    _numeric_format = {
        'WindowLength': FLOAT_FORMAT, 'SampleRate': FLOAT_FORMAT, 'IFFilterBW': FLOAT_FORMAT,
        'FreqCenter': FLOAT_FORMAT, 'LFMRate': FLOAT_FORMAT, 'PathGain': FLOAT_FORMAT}
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this Receive '
                  'configuration.')  # type: str
    WindowLength = FloatDescriptor(
        'WindowLength', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Length of the receive window, in seconds.')  # type: float
    SampleRate = FloatDescriptor(
        'SampleRate', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Rate at which the signal in the receive window is sampled, '
                  'in Hz.')  # type: float
    IFFilterBW = FloatDescriptor(
        'IFFilterBW', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Bandwidth of the anti-aliasing filter prior to '
                  'sampling.')  # type: float
    FreqCenter = FloatDescriptor(
        'FreqCenter', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Center frequency of the demodulation signal, '
                  'in Hz.')  # type: float
    LFMRate = FloatDescriptor(
        'LFMRate', _required, strict=DEFAULT_STRICT,
        docstring='Chirp rate of the demodulation signal if LFM, '
                  'in Hz/s.')  # type: Union[None, float]
    Polarization = StringEnumDescriptor(
        'Polarization', POLARIZATION_TYPE, _required, strict=DEFAULT_STRICT,
        docstring='The receive polarization mode.')  # type: str
    PathGain = FloatDescriptor(
        'PathGain', _required, strict=DEFAULT_STRICT,
        docstring='Receiver gain from the antenna interface to the ADC, '
                  'in dB.')  # type: Union[None, float]
    def __init__(self, Identifier=None, WindowLength=None, SampleRate=None, IFFilterBW=None,
                 FreqCenter=None, LFMRate=None, Polarization=None, PathGain=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        WindowLength : float
        SampleRate : float
        IFFilterBW : float
        FreqCenter : float
        LFMRate : None|float
        Polarization : str
        PathGain : None|float
        kwargs
        """
        # Capture XML namespace context before descriptor-backed assignments.
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # Each assignment is validated by the matching class-level descriptor.
        self.Identifier = Identifier
        self.WindowLength = WindowLength
        self.SampleRate = SampleRate
        self.IFFilterBW = IFFilterBW
        self.FreqCenter = FreqCenter
        self.LFMRate = LFMRate
        self.Polarization = Polarization
        self.PathGain = PathGain
        super(RcvParametersType, self).__init__(**kwargs)
class TxRcvType(Serializable):
    """
    Describes the transmitted waveform(s) and the receiver configurations
    employed during the collection.
    """

    _fields = ('NumTxWFs', 'TxWFParameters', 'NumRcvs', 'RcvParameters')
    _required = ('TxWFParameters', 'RcvParameters')
    _collections_tags = {
        'TxWFParameters': {'array': False, 'child_tag': 'TxWFParameters'},
        'RcvParameters': {'array': False, 'child_tag': 'RcvParameters'}}
    # descriptors
    TxWFParameters = SerializableListDescriptor(
        'TxWFParameters', TxWFParametersType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe a Transmit Waveform.')  # type: List[TxWFParametersType]
    RcvParameters = SerializableListDescriptor(
        'RcvParameters', RcvParametersType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe a Receive configuration.')  # type: List[RcvParametersType]

    def __init__(self, TxWFParameters=None, RcvParameters=None, **kwargs):
        """
        Parameters
        ----------
        TxWFParameters : List[TxWFParametersType]
        RcvParameters : List[RcvParametersType]
        kwargs
        """
        # Pull XML namespace context out of kwargs, if the deserializer set it.
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.TxWFParameters = TxWFParameters
        self.RcvParameters = RcvParameters
        super(TxRcvType, self).__init__(**kwargs)

    @property
    def NumTxWFs(self):
        """
        int: The number of transmit waveforms used.
        """
        return 0 if self.TxWFParameters is None else len(self.TxWFParameters)

    @property
    def NumRcvs(self):
        """
        int: The number of receive configurations used.
        """
        return 0 if self.RcvParameters is None else len(self.RcvParameters)
| 8,516 | 37.022321 | 103 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/Antenna.py | """
The Antenna type definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List, Tuple, Optional
import numpy
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from sarpy.io.complex.sicd_elements.blocks import Poly1DType, XYZType, \
XYZPolyType, Poly2DType
from sarpy.io.xml.base import Serializable, Arrayable
from sarpy.io.xml.descriptors import FloatDescriptor, StringDescriptor, SerializableDescriptor, \
BooleanDescriptor, SerializableListDescriptor
class AntCoordFrameType(Serializable):
    """
    Orientation of an Antenna Coordinate Frame (ACF) expressed as
    time-varying unit vectors in ECF coordinates.
    """

    _fields = ('Identifier', 'XAxisPoly', 'YAxisPoly', 'UseACFPVP')
    _required = ('Identifier', 'XAxisPoly', 'YAxisPoly')
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this ACF.')  # type: str
    XAxisPoly = SerializableDescriptor(
        'XAxisPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Antenna X-Axis unit vector in ECF coordinates as a function '
                  'of time.')  # type: XYZPolyType
    YAxisPoly = SerializableDescriptor(
        'YAxisPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Antenna Y-Axis unit vector in ECF coordinates as a function '
                  'of time.')  # type: XYZPolyType
    UseACFPVP = BooleanDescriptor(
        'UseACFPVP', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: bool

    def __init__(self, Identifier: str = None, XAxisPoly: XYZPolyType = None,
                 YAxisPoly: XYZPolyType = None, UseACFPVP: Optional[bool] = None,
                 **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        XAxisPoly : XYZPolyType
        YAxisPoly : XYZPolyType
        UseACFPVP : None|bool
        kwargs
        """
        # Pull XML namespace context out of kwargs, if the deserializer set it.
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.Identifier = Identifier
        self.XAxisPoly = XAxisPoly
        self.YAxisPoly = YAxisPoly
        self.UseACFPVP = UseACFPVP
        super(AntCoordFrameType, self).__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        # The optional UseACFPVP element only exists in CPHD 1.1.0 and later.
        return (1, 1, 0) if self.UseACFPVP is not None else (1, 0, 1)
class AntPhaseCenterType(Serializable):
    """
    Parameters that describe each Antenna Phase Center (APC).
    """
    # Serializable metadata: XML child order; all fields are mandatory.
    _fields = ('Identifier', 'ACFId', 'APCXYZ')
    _required = _fields
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this APC.')  # type: str
    ACFId = StringDescriptor(
        'ACFId', _required, strict=DEFAULT_STRICT,
        docstring='Identifier of Antenna Coordinate Frame used for computing the '
                  'antenna gain and phase patterns.')  # type: str
    APCXYZ = SerializableDescriptor(
        'APCXYZ', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='The APC location in the ACF XYZ coordinate '
                  'frame.')  # type: XYZType
    def __init__(self, Identifier=None, ACFId=None, APCXYZ=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        ACFId : str
        APCXYZ : XYZType|numpy.ndarray|list|tuple
        kwargs
        """
        # Capture XML namespace context before descriptor-backed assignments.
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.ACFId = ACFId
        self.APCXYZ = APCXYZ
        super(AntPhaseCenterType, self).__init__(**kwargs)
class GainPhaseArrayType(Serializable):
    """
    Parameters that identify 2-D sampled Gain & Phase patterns at single
    frequency value.
    """
    # Serializable metadata: XML child order, mandatory fields (ElementId is
    # optional), and numeric string format for Freq.
    _fields = ('Freq', 'ArrayId', 'ElementId')
    _required = ('Freq', 'ArrayId')
    _numeric_format = {'Freq': FLOAT_FORMAT}
    # descriptors
    Freq = FloatDescriptor(
        'Freq', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Frequency value for which the sampled Array and Element '
                  'pattern(s) are provided, in Hz.')  # type: float
    ArrayId = StringDescriptor(
        'ArrayId', _required, strict=DEFAULT_STRICT,
        docstring='Support array identifier of the sampled gain/phase of the array '
                  'at ref Frequency.')  # type: str
    ElementId = StringDescriptor(
        'ElementId', _required, strict=DEFAULT_STRICT,
        docstring='Support array identifier of the sampled gain/phase of the element '
                  'at ref frequency.')  # type: str
    def __init__(self, Freq=None, ArrayId=None, ElementId=None, **kwargs):
        """
        Parameters
        ----------
        Freq : float
        ArrayId : str
        ElementId : None|str
        kwargs
        """
        # Capture XML namespace context before descriptor-backed assignments.
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Freq = Freq
        self.ArrayId = ArrayId
        self.ElementId = ElementId
        super(GainPhaseArrayType, self).__init__(**kwargs)
class FreqSFType(Serializable, Arrayable):
    """
    Pair of frequency scale factors (DCXSF, DCYSF), each in [0, 1].
    """

    _fields = ('DCXSF', 'DCYSF')
    _required = _fields
    _numeric_format = {key: '0.17E' for key in _fields}
    DCXSF = FloatDescriptor(
        'DCXSF', _required, strict=DEFAULT_STRICT, bounds=(0.0, 1.0),
        docstring='')  # type: float
    DCYSF = FloatDescriptor(
        'DCYSF', _required, strict=DEFAULT_STRICT, bounds=(0.0, 1.0),
        docstring='')  # type: float

    def __init__(self, DCXSF=None, DCYSF=None, **kwargs):
        # Pull XML namespace context out of kwargs, if the deserializer set it.
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.DCXSF = DCXSF
        self.DCYSF = DCYSF
        super(FreqSFType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Array representation of this instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form [DCXSF, DCYSF]
        """
        return numpy.array([self.DCXSF, self.DCYSF], dtype=dtype)

    @classmethod
    def from_array(cls, array: numpy.ndarray):
        """
        Construct from an iterable of at least two elements; ``None`` yields ``None``.

        Parameters
        ----------
        array : numpy.ndarray|list|tuple

        Returns
        -------
        FreqSFType
        """
        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(
                'Expected array to be numpy.ndarray, list, or tuple,\n\t'
                'got `{}`'.format(type(array)))
        if len(array) < 2:
            raise ValueError(
                'Expected array to be of length 2,\n\t'
                'and received `{}`'.format(array))
        return cls(DCXSF=array[0], DCYSF=array[1])
class AntPolRefType(Serializable, Arrayable):
    """
    Polarization reference type.
    """

    _fields = ('AmpX', 'AmpY', 'PhaseY')
    _required = _fields
    _numeric_format = {key: '0.17E' for key in _fields}
    AmpX = FloatDescriptor(
        'AmpX', _required, strict=DEFAULT_STRICT, bounds=(0.0, 1.0),
        docstring='E-field relative amplitude in ACF X direction')  # type: float
    AmpY = FloatDescriptor(
        'AmpY', _required, strict=DEFAULT_STRICT, bounds=(0.0, 1.0),
        docstring='E-field relative amplitude in ACF Y direction')  # type: float
    PhaseY = FloatDescriptor(
        'PhaseY', _required, strict=DEFAULT_STRICT, bounds=(-0.5, 0.5),
        docstring='Relative phase of the Y E-field '
                  'relative to the X E-field at f=f_0')  # type: float

    def __init__(self, AmpX: float = None, AmpY: float = None,
                 PhaseY: float = None, **kwargs):
        # Pull XML namespace context out of kwargs, if the deserializer set it.
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.AmpX = AmpX
        self.AmpY = AmpY
        self.PhaseY = PhaseY
        super(AntPolRefType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Array representation of this instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form [AmpX, AmpY, PhaseY]
        """
        return numpy.array([self.AmpX, self.AmpY, self.PhaseY], dtype=dtype)

    @classmethod
    def from_array(cls, array: numpy.ndarray):
        """
        Construct from an iterable of at least three elements; ``None`` yields ``None``.

        Parameters
        ----------
        array : numpy.ndarray|list|tuple

        Returns
        -------
        AntPolRefType
        """
        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(
                'Expected array to be numpy.ndarray, list, or tuple,\n\t'
                'got `{}`'.format(type(array)))
        if len(array) < 3:
            raise ValueError(
                'Expected array to be of length 3,\n\t'
                'and received `{}`'.format(array))
        return cls(AmpX=array[0], AmpY=array[1], PhaseY=array[2])
class EBType(Serializable):
    """
    Electrical boresight (EB) steering directions for an electronically steered array.
    """

    _fields = ('DCXPoly', 'DCYPoly', 'UseEBPVP')
    _required = ('DCXPoly', 'DCYPoly')
    # descriptors
    DCXPoly = SerializableDescriptor(
        'DCXPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Electrical boresight steering *X-axis direction cosine (DCX)* as a function of '
                  'slow time ``(variable 1)``.')  # type: Poly1DType
    DCYPoly = SerializableDescriptor(
        'DCYPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Electrical boresight steering *Y-axis direction cosine (DCY)* as a function of '
                  'slow time ``(variable 1)``.')  # type: Poly1DType
    UseEBPVP = BooleanDescriptor(
        'UseEBPVP', _required, strict=DEFAULT_STRICT,
        docstring="")  # type: Optional[bool]

    def __init__(self, DCXPoly=None, DCYPoly=None, UseEBPVP=None, **kwargs):
        """
        Parameters
        ----------
        DCXPoly : Poly1DType|numpy.ndarray|list|tuple
        DCYPoly : Poly1DType|numpy.ndarray|list|tuple
        UseEBPVP : None|bool
        kwargs
        """
        # Pull XML namespace context out of kwargs, if the deserializer set it.
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.DCXPoly = DCXPoly
        self.DCYPoly = DCYPoly
        self.UseEBPVP = UseEBPVP
        super(EBType, self).__init__(**kwargs)

    def __call__(self, t):
        """
        Evaluate both steering polynomials at points `t`, delegating to the
        `DCXPoly`/`DCYPoly` components. Returns `None` when either polynomial
        is not populated.

        Parameters
        ----------
        t : float|int|numpy.ndarray
            The point(s) at which to evaluate.

        Returns
        -------
        None|numpy.ndarray
        """
        if self.DCXPoly is not None and self.DCYPoly is not None:
            return numpy.array([self.DCXPoly(t), self.DCYPoly(t)])
        return None

    def version_required(self) -> Tuple[int, int, int]:
        # The optional UseEBPVP element only exists in CPHD 1.1.0 and later.
        return (1, 1, 0) if self.UseEBPVP is not None else (1, 0, 1)
class GainPhasePolyType(Serializable):
    """A container for the Gain and Phase Polygon definitions."""

    _fields = ('GainPoly', 'PhasePoly', 'AntGPid')
    _required = ('GainPoly', 'PhasePoly')
    # descriptors
    GainPoly = SerializableDescriptor(
        'GainPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='One-way signal gain (in dB) as a function of X-axis direction cosine (DCX) (variable 1) '
                  'and Y-axis direction cosine (DCY) (variable 2). Gain relative to gain at DCX = 0 '
                  'and DCY = 0, so constant coefficient is always 0.0.')  # type: Poly2DType
    PhasePoly = SerializableDescriptor(
        'PhasePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='One-way signal phase (in cycles) as a function of DCX (variable 1) and '
                  'DCY (variable 2). Phase relative to phase at DCX = 0 and DCY = 0, '
                  'so constant coefficient is always 0.0.')  # type: Poly2DType
    AntGPid = StringDescriptor(
        'AntGPid', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[str]

    def __init__(self, GainPoly=None, PhasePoly=None, AntGPid=None, **kwargs):
        """
        Parameters
        ----------
        GainPoly : Poly2DType|numpy.ndarray|list|tuple
        PhasePoly : Poly2DType|numpy.ndarray|list|tuple
        AntGPid : None|str
        kwargs
        """
        # Pull XML namespace context out of kwargs, if the deserializer set it.
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.GainPoly = GainPoly
        self.PhasePoly = PhasePoly
        self.AntGPid = AntGPid
        super(GainPhasePolyType, self).__init__(**kwargs)

    def __call__(self, x, y):
        """
        Evaluate both polynomials at points [`x`, `y`], delegating to each
        component's call method. Returns `None` when either polynomial is
        not populated.

        Parameters
        ----------
        x : float|int|numpy.ndarray
            The first dependent variable of point(s) at which to evaluate.
        y : float|int|numpy.ndarray
            The second dependent variable of point(s) at which to evaluate.

        Returns
        -------
        numpy.ndarray
        """
        if self.GainPoly is not None and self.PhasePoly is not None:
            return numpy.array([self.GainPoly(x, y), self.PhasePoly(x, y)], dtype=numpy.float64)
        return None

    def minimize_order(self):
        """
        Trim the trailing zeros for each component coefficient array. This
        modifies the object in place.

        Returns
        -------
        None
        """
        for poly in (self.GainPoly, self.PhasePoly):
            poly.minimize_order()

    def version_required(self) -> Tuple[int, int, int]:
        # The optional AntGPid element only exists in CPHD 1.1.0 and later.
        return (1, 1, 0) if self.AntGPid is not None else (1, 0, 1)
class AntPatternType(Serializable):
    """
    Parameter set that defines each Antenna Pattern as function time.
    """
    # FIX: `_fields` previously listed 'EBFreqShift' twice and omitted
    # 'EBFreqShiftSF', so the declared EBFreqShiftSF descriptor was never
    # included in the serialized field set.
    _fields = (
        'Identifier', 'FreqZero', 'GainZero', 'EBFreqShift', 'EBFreqShiftSF',
        'MLFreqDilation', 'MLFreqDilationSF', 'GainBSPoly', 'AntPolRef',
        'EB', 'Array', 'Element', 'GainPhaseArray')
    _required = (
        'Identifier', 'FreqZero', 'EB', 'Array', 'Element')
    _collections_tags = {
        'GainPhaseArray': {'array': False, 'child_tag': 'GainPhaseArray'}}
    _numeric_format = {'FreqZero': FLOAT_FORMAT, 'GainZero': FLOAT_FORMAT}
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this ACF.')  # type: str
    FreqZero = FloatDescriptor(
        'FreqZero', _required, strict=DEFAULT_STRICT,
        docstring='The reference frequency value for which the Electrical Boresight '
                  'and array pattern polynomials are computed.')  # type: float
    GainZero = FloatDescriptor(
        'GainZero', _required, strict=DEFAULT_STRICT,
        docstring='The reference antenna gain at zero steering angle at the '
                  'reference frequency, measured in dB.')  # type: float
    EBFreqShift = BooleanDescriptor(
        'EBFreqShift', _required, strict=DEFAULT_STRICT,
        docstring="Parameter indicating whether the electronic boresite shifts with "
                  "frequency.")  # type: bool
    EBFreqShiftSF = SerializableDescriptor(
        'EBFreqShiftSF', FreqSFType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[FreqSFType]
    MLFreqDilation = BooleanDescriptor(
        'MLFreqDilation', _required, strict=DEFAULT_STRICT,
        docstring="Parameter indicating the mainlobe (ML) width changes with "
                  "frequency.")  # type: bool
    MLFreqDilationSF = SerializableDescriptor(
        'MLFreqDilationSF', FreqSFType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[FreqSFType]
    GainBSPoly = SerializableDescriptor(
        'GainBSPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Gain polynomial *(in dB)* as a function of frequency for boresight *(BS)* '
                  'at :math:`DCX=0, DCY=0`. '
                  'Frequency ratio :math:`(f-f0)/f0` is the input variable, and the constant '
                  'coefficient is always `0.0`.')  # type: Poly1DType
    AntPolRef = SerializableDescriptor(
        'AntPolRef', AntPolRefType, _required, strict=DEFAULT_STRICT,
        docstring='Polarization parameters for the EB steered to mechanical '
                  'boresight (EB_DCX = 0 and EB_DCY = 0).')  # type: AntPolRefType
    EB = SerializableDescriptor(
        'EB', EBType, _required, strict=DEFAULT_STRICT,
        docstring='Electrical boresight *(EB)* steering directions for an electronically '
                  'steered array.')  # type: EBType
    Array = SerializableDescriptor(
        'Array', GainPhasePolyType, _required, strict=DEFAULT_STRICT,
        docstring='Array pattern polynomials that define the shape of the '
                  'main-lobe.')  # type: GainPhasePolyType
    Element = SerializableDescriptor(
        'Element', GainPhasePolyType, _required, strict=DEFAULT_STRICT,
        docstring='Element array pattern polynomials for electronically steered '
                  'arrays.')  # type: GainPhasePolyType
    GainPhaseArray = SerializableListDescriptor(
        'GainPhaseArray', GainPhaseArrayType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Array of parameters that identify 2-D sampled Gain and Phase patterns at '
                  'single frequency value.')  # type: Union[None, List[GainPhaseArrayType]]

    def __init__(
            self,
            Identifier=None,
            FreqZero=None,
            GainZero=None,
            EBFreqShift=None,
            EBFreqShiftSF: Optional[FreqSFType] = None,
            MLFreqDilation=None,
            MLFreqDilationSF: Optional[FreqSFType] = None,
            GainBSPoly=None,
            AntPolRef: Union[None, AntPolRefType, numpy.ndarray, list, tuple] = None,
            EB=None,
            Array=None,
            Element=None,
            GainPhaseArray=None,
            **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        FreqZero : float
        GainZero : float
        EBFreqShift : bool
        EBFreqShiftSF : None|FreqSFType|numpy.ndarray|list|tuple
        MLFreqDilation : bool
        MLFreqDilationSF : None|FreqSFType|numpy.ndarray|list|tuple
        GainBSPoly : None|Poly1DType|numpy.ndarray|list|tuple
        AntPolRef : None|AntPolRefType|numpy.ndarray|list|tuple
        EB : EBType
        Array : GainPhasePolyType
        Element : GainPhasePolyType
        GainPhaseArray : None|List[GainPhaseArrayType]
        kwargs
        """
        # Capture XML namespace context before descriptor-backed assignments.
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.FreqZero = FreqZero
        self.GainZero = GainZero
        self.EBFreqShift = EBFreqShift
        self.EBFreqShiftSF = EBFreqShiftSF
        self.MLFreqDilation = MLFreqDilation
        self.MLFreqDilationSF = MLFreqDilationSF
        self.GainBSPoly = GainBSPoly
        self.AntPolRef = AntPolRef
        self.EB = EB
        self.Array = Array
        self.Element = Element
        self.GainPhaseArray = GainPhaseArray
        super(AntPatternType, self).__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version (as a (major, minor, patch) tuple) required
        to represent the populated fields.
        """
        required = (1, 0, 1)
        for fld in ['EB', 'Array', 'Element']:
            val = getattr(self, fld)
            if val is not None:
                required = max(required, val.version_required())
        # These optional elements only exist in CPHD 1.1.0 and later.
        # FIX: previously `required = (required, (1, 1, 0))`, which returned a
        # nested tuple instead of taking the max of the version triples.
        if self.EBFreqShiftSF is not None or \
                self.MLFreqDilationSF is not None or \
                self.AntPolRef is not None:
            required = max(required, (1, 1, 0))
        return required
class AntennaType(Serializable):
    """
    Parameters that describe the transmit and receive antennas used to collect
    the signal array(s).
    """

    _fields = (
        'NumACFs', 'NumAPCs', 'NumAntPats', 'AntCoordFrame', 'AntPhaseCenter', 'AntPattern')
    _required = ('AntCoordFrame', 'AntPhaseCenter', 'AntPattern')
    _collections_tags = {
        'AntCoordFrame': {'array': False, 'child_tag': 'AntCoordFrame'},
        'AntPhaseCenter': {'array': False, 'child_tag': 'AntPhaseCenter'},
        'AntPattern': {'array': False, 'child_tag': 'AntPattern'}}
    # descriptors
    AntCoordFrame = SerializableListDescriptor(
        'AntCoordFrame', AntCoordFrameType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Unit vectors that describe the orientation of an Antenna Coordinate Frame (ACF) '
                  'as function of time. Parameter set repeated for '
                  'each ACF.')  # type: List[AntCoordFrameType]
    AntPhaseCenter = SerializableListDescriptor(
        'AntPhaseCenter', AntPhaseCenterType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe each Antenna Phase Center (APC). Parameter '
                  'set repeated for each APC.')  # type: List[AntPhaseCenterType]
    AntPattern = SerializableListDescriptor(
        'AntPattern', AntPatternType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameter set that defines each Antenna Pattern as function time. Parameters '
                  'set repeated for each Antenna Pattern.')  # type: List[AntPatternType]

    def __init__(self, AntCoordFrame=None, AntPhaseCenter=None, AntPattern=None, **kwargs):
        """
        Parameters
        ----------
        AntCoordFrame : List[AntCoordFrameType]
        AntPhaseCenter : List[AntPhaseCenterType]
        AntPattern : List[AntPatternType]
        kwargs
        """
        # Pull XML namespace context out of kwargs, if the deserializer set it.
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.AntCoordFrame = AntCoordFrame
        self.AntPhaseCenter = AntPhaseCenter
        self.AntPattern = AntPattern
        super(AntennaType, self).__init__(**kwargs)

    @property
    def NumACFs(self):
        """
        int: The number of antenna coordinate frame elements.
        """
        return 0 if self.AntCoordFrame is None else len(self.AntCoordFrame)

    @property
    def NumAPCs(self):
        """
        int: The number of antenna phase center elements.
        """
        return 0 if self.AntPhaseCenter is None else len(self.AntPhaseCenter)

    @property
    def NumAntPats(self):
        """
        int: The number of antenna pattern elements.
        """
        return 0 if self.AntPattern is None else len(self.AntPattern)

    def version_required(self) -> Tuple[int, int, int]:
        # Aggregate the version demanded by each coordinate frame entry.
        required = (1, 0, 1)
        for entry in (self.AntCoordFrame or []):
            required = max(required, entry.version_required())
        return required
| 24,539 | 35.57228 | 108 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/ProductInfo.py | """
The ProductInfo elements.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
import numpy
from sarpy.io.xml.base import Serializable, ParametersCollection
from sarpy.io.xml.descriptors import StringDescriptor, DateTimeDescriptor, \
ParametersDescriptor, SerializableListDescriptor
from .base import DEFAULT_STRICT
class CreationInfoType(Serializable):
    """
    Parameters that provide general information about the CPHD product generation.
    """
    # Serializable metadata: XML child order; only DateTime is mandatory.
    _fields = ('Application', 'DateTime', 'Site', 'Parameters')
    _required = ('DateTime', )
    _collections_tags = {'Parameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    Application = StringDescriptor(
        'Application', _required, strict=DEFAULT_STRICT,
        docstring='Name and version of the application used to create the CPHD.')  # type: str
    DateTime = DateTimeDescriptor(
        'DateTime', _required, strict=DEFAULT_STRICT, numpy_datetime_units='us',
        docstring='Date and time the image creation application processed the image (UTC).')  # type: numpy.datetime64
    Site = StringDescriptor(
        'Site', _required, strict=DEFAULT_STRICT,
        docstring='The creation site of this CPHD product.')  # type: str
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Additional parameters.')  # type: Union[None, ParametersCollection]
    def __init__(self, Application=None, DateTime=None, Site=None, Parameters=None, **kwargs):
        """
        Parameters
        ----------
        Application : str
        DateTime : numpy.datetime64|datetime|date|str
        Site : str
        Parameters : None|ParametersCollection|dict
        kwargs
        """
        # Capture XML namespace context before descriptor-backed assignments.
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Application = Application
        self.DateTime = DateTime
        self.Site = Site
        self.Parameters = Parameters
        super(CreationInfoType, self).__init__(**kwargs)
class ProductInfoType(Serializable):
    """
    Parameters that provide general information about the CPHD product and/or the
    derived products that may be created from it.
    """
    # Serializable metadata: XML child order; every field is optional.
    _fields = ('Profile', 'CreationInfos', 'Parameters')
    _required = ()
    _collections_tags = {
        'CreationInfos': {'array': False, 'child_tag': 'CreationInfo'},
        'Parameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    Profile = StringDescriptor(
        'Profile', _required, strict=DEFAULT_STRICT,
        docstring='Identifies what profile was used to create this CPHD product.')  # type: str
    CreationInfos = SerializableListDescriptor(
        'CreationInfos', CreationInfoType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that provide general information about the CPHD '
                  'product generation.')  # type: Union[None, List[CreationInfoType]]
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Additional parameters.')  # type: Union[None, ParametersCollection]
    def __init__(self, Profile=None, CreationInfos=None, Parameters=None, **kwargs):
        """
        Parameters
        ----------
        Profile : str
        CreationInfos : None|List[CreationInfoType]
        Parameters : None|ParametersCollection|dict
        kwargs
        """
        # Capture XML namespace context before descriptor-backed assignments.
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Profile = Profile
        self.CreationInfos = CreationInfos
        self.Parameters = Parameters
        super(ProductInfoType, self).__init__(**kwargs)
| 3,959 | 36.358491 | 118 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/Data.py | """
The DataType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List
from .base import DEFAULT_STRICT
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import StringDescriptor, StringEnumDescriptor, \
IntegerDescriptor, SerializableListDescriptor
from .utils import binary_format_string_to_dtype
class ChannelSizeType(Serializable):
    """
    Parameters that define the Channel signal array and PVP array size and location.
    """
    # Serializable metadata: CompressedSignalSize is only present for
    # compressed signal arrays, hence omitted from _required.
    _fields = (
        'Identifier', 'NumVectors', 'NumSamples', 'SignalArrayByteOffset', 'PVPArrayByteOffset',
        'CompressedSignalSize')
    _required = (
        'Identifier', 'NumVectors', 'NumSamples', 'SignalArrayByteOffset', 'PVPArrayByteOffset')
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies the CPHD channel for which the data '
                  'applies.')  # type: str
    NumVectors = IntegerDescriptor(
        'NumVectors', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of vectors in the signal array.')  # type: int
    NumSamples = IntegerDescriptor(
        'NumSamples', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of samples per vector in the signal array.')  # type: int
    SignalArrayByteOffset = IntegerDescriptor(
        'SignalArrayByteOffset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Signal Array offset from the start of the Signal block (in bytes) to the '
                  'start of the Signal Array for the channel.')  # type: int
    PVPArrayByteOffset = IntegerDescriptor(
        'PVPArrayByteOffset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='PVP Array offset from the start of the PVP block (in bytes) to the '
                  'start of the PVP Array for the channel.')  # type: int
    CompressedSignalSize = IntegerDescriptor(
        'CompressedSignalSize', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Size (in bytes) of the compressed signal array byte sequence for the data channel. '
                  'Parameter included if and only if the signal arrays are stored in '
                  'compressed format.')  # type: int
    def __init__(self, Identifier=None, NumVectors=None, NumSamples=None, SignalArrayByteOffset=None,
                 PVPArrayByteOffset=None, CompressedSignalSize=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        NumVectors : int
        NumSamples : int
        SignalArrayByteOffset : int
        PVPArrayByteOffset : int
        CompressedSignalSize : int
        kwargs
        """
        # Capture XML namespace context before descriptor-backed assignments.
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.NumVectors = NumVectors
        self.NumSamples = NumSamples
        self.SignalArrayByteOffset = SignalArrayByteOffset
        self.PVPArrayByteOffset = PVPArrayByteOffset
        self.CompressedSignalSize = CompressedSignalSize
        super(ChannelSizeType, self).__init__(**kwargs)
class SupportArraySizeType(Serializable):
    """
    Size and placement parameters for a single binary support array.
    """

    _fields = ('Identifier', 'NumRows', 'NumCols', 'BytesPerElement', 'ArrayByteOffset')
    _required = _fields
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='Unique string that identifies this support array.')  # type: str
    NumRows = IntegerDescriptor(
        'NumRows', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of rows in the array.')  # type: int
    NumCols = IntegerDescriptor(
        'NumCols', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of columns per row in the array.')  # type: int
    BytesPerElement = IntegerDescriptor(
        'BytesPerElement', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Size in bytes of each data element in the support array. '
                  'Each element contains 1 or more binary-formatted '
                  'components.')  # type: int
    ArrayByteOffset = IntegerDescriptor(
        'ArrayByteOffset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Offset (in bytes) from the start of the Support block to '
                  'the start of this support array.')  # type: int

    def __init__(self, Identifier=None, NumRows=None, NumCols=None, BytesPerElement=None,
                 ArrayByteOffset=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        NumRows : int
        NumCols : int
        BytesPerElement : int
        ArrayByteOffset : int
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Identifier = Identifier
        self.NumRows = NumRows
        self.NumCols = NumCols
        self.BytesPerElement = BytesPerElement
        self.ArrayByteOffset = ArrayByteOffset
        super().__init__(**kwargs)

    def calculate_size(self):
        """
        The size of the support array in bytes, as implied by the contained fields.
        """

        return self.NumRows * self.NumCols * self.BytesPerElement
class DataType(Serializable):
    """
    Parameters that describe binary data components contained in the product.
    """

    _fields = (
        'SignalArrayFormat', 'NumBytesPVP', 'NumCPHDChannels',
        'SignalCompressionID', 'Channels', 'NumSupportArrays', 'SupportArrays')
    _required = ('SignalArrayFormat', 'NumBytesPVP', 'Channels')
    _collections_tags = {
        'Channels': {'array': False, 'child_tag': 'Channel'},
        'SupportArrays': {'array': False, 'child_tag': 'SupportArray'}}
    # descriptors
    SignalArrayFormat = StringEnumDescriptor(
        'SignalArrayFormat', ('CI2', 'CI4', 'CF8'), _required, strict=DEFAULT_STRICT,
        docstring='Signal Array sample binary format of the CPHD signal arrays in standard '
                  '(i.e. uncompressed) format, where `CI2` denotes a 1 byte signed integer '
                  "parameter, 2's complement format, and 2 Bytes Per Sample; `CI4` denotes "
                  "a 2 byte signed integer parameter, 2's complement format, and "
                  "4 Bytes Per Sample; `CF8` denotes a 4 byte floating point parameter, and "
                  "8 Bytes Per Sample.")  # type: str
    NumBytesPVP = IntegerDescriptor(
        'NumBytesPVP', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Number of bytes per set of Per Vector Parameters, where there is '
                  'one set of PVPs for each CPHD signal vector')  # type: int
    SignalCompressionID = StringDescriptor(
        'SignalCompressionID', _required, strict=DEFAULT_STRICT,
        docstring='Parameter that indicates the signal arrays are in compressed format. Value '
                  'identifies the method of decompression. Parameter included if and only if '
                  'the signal arrays are in compressed format.')  # type: str
    Channels = SerializableListDescriptor(
        'Channels', ChannelSizeType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that define the Channel signal array and PVP array size '
                  'and location.')  # type: List[ChannelSizeType]
    SupportArrays = SerializableListDescriptor(
        'SupportArrays', SupportArraySizeType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Support Array size parameters. Branch repeated for each binary support array. '
                  'Support Array referenced by its unique Support Array '
                  'identifier.')  # type: List[SupportArraySizeType]

    def __init__(self, SignalArrayFormat=None, NumBytesPVP=None,
                 SignalCompressionID=None, Channels=None, SupportArrays=None, **kwargs):
        """
        Parameters
        ----------
        SignalArrayFormat : str
        NumBytesPVP : int
        SignalCompressionID : None|str
        Channels : List[ChannelSizeType]
        SupportArrays : None|List[SupportArraySizeType]
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SignalArrayFormat = SignalArrayFormat
        self.NumBytesPVP = NumBytesPVP
        self.SignalCompressionID = SignalCompressionID
        self.Channels = Channels
        self.SupportArrays = SupportArrays
        super(DataType, self).__init__(**kwargs)

    @property
    def NumSupportArrays(self):
        """
        int: The number of support arrays.
        """

        if self.SupportArrays is None:
            return 0
        else:
            return len(self.SupportArrays)

    @property
    def NumCPHDChannels(self):
        """
        int: The number of CPHD channels.
        """

        if self.Channels is None:
            return 0
        else:
            return len(self.Channels)

    def calculate_support_block_size(self):
        """
        Calculates the size of the support block in bytes as described by the
        SupportArray fields. Returns 0 when there are no support arrays.
        """

        # SupportArrays is optional (not in _required) - guard against None,
        # consistent with the NumSupportArrays property
        if self.SupportArrays is None:
            return 0
        return sum(entry.calculate_size() for entry in self.SupportArrays)

    def calculate_pvp_block_size(self):
        """
        Calculates the size of the PVP block in bytes as described by the Data fields.
        """

        return self.NumBytesPVP * sum(entry.NumVectors for entry in self.Channels)

    def calculate_signal_block_size(self):
        """
        Calculates the size of the signal block in bytes as described by the Data fields.
        """

        if self.SignalCompressionID is not None:
            # compressed - each channel carries its own compressed size
            return sum(entry.CompressedSignalSize for entry in self.Channels)
        num_bytes_per_sample = binary_format_string_to_dtype(self.SignalArrayFormat).itemsize
        return num_bytes_per_sample * sum(entry.NumVectors * entry.NumSamples for entry in self.Channels)
| 10,417 | 41.008065 | 103 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/ErrorParameters.py | """
The error parameters type definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Tuple, Optional
from sarpy.io.xml.base import Serializable, ParametersCollection
from sarpy.io.xml.descriptors import FloatDescriptor, SerializableDescriptor, \
ParametersDescriptor
from sarpy.io.complex.sicd_elements.blocks import ErrorDecorrFuncType
from sarpy.io.complex.sicd_elements.ErrorStatistics import PosVelErrType, TropoErrorType
from .base import DEFAULT_STRICT, FLOAT_FORMAT
class RadarSensorType(Serializable):
    """
    Radar sensor error statistics.
    """

    _fields = ('RangeBias', 'ClockFreqSF', 'CollectionStartTime', 'RangeBiasDecorr')
    _required = ('RangeBias', )
    _numeric_format = {'RangeBias': FLOAT_FORMAT, 'ClockFreqSF': FLOAT_FORMAT, 'CollectionStartTime': FLOAT_FORMAT}
    # descriptors
    RangeBias = FloatDescriptor(
        'RangeBias', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Range bias error standard deviation.')  # type: float
    ClockFreqSF = FloatDescriptor(
        'ClockFreqSF', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Payload clock frequency scale factor standard deviation, '
                  r'where :math:`SF = (\Delta f)/f_0`.')  # type: float
    CollectionStartTime = FloatDescriptor(
        'CollectionStartTime', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Collection Start time error standard deviation, '
                  'in seconds.')  # type: float
    RangeBiasDecorr = SerializableDescriptor(
        'RangeBiasDecorr', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT,
        docstring='Range Bias error decorrelation function.')  # type: ErrorDecorrFuncType

    def __init__(self, RangeBias=None, ClockFreqSF=None, CollectionStartTime=None,
                 RangeBiasDecorr=None, **kwargs):
        """
        Parameters
        ----------
        RangeBias : float
        ClockFreqSF : float
        CollectionStartTime : float
        RangeBiasDecorr : ErrorDecorrFuncType
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.RangeBias = RangeBias
        self.ClockFreqSF = ClockFreqSF
        self.CollectionStartTime = CollectionStartTime
        self.RangeBiasDecorr = RangeBiasDecorr
        super().__init__(**kwargs)
class IonoErrorType(Serializable):
    """
    Ionosphere delay error statistics.
    """

    _fields = ('IonoRangeVertical', 'IonoRangeRateVertical', 'IonoRgRgRateCC', 'IonoRangeVertDecorr')
    _required = ('IonoRgRgRateCC', )
    _numeric_format = {'IonoRangeVertical': FLOAT_FORMAT, 'IonoRangeRateVertical': FLOAT_FORMAT, 'IonoRgRgRateCC': FLOAT_FORMAT}
    # descriptors
    IonoRangeVertical = FloatDescriptor(
        'IonoRangeVertical', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Ionosphere two-way delay error for normal incidence standard deviation. '
                  r'Expressed as a range error. :math:`(\Delta R) = (\Delta T) \cdot (c/2)`.')  # type: float
    IonoRangeRateVertical = FloatDescriptor(
        'IonoRangeRateVertical', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Ionosphere two-way delay rate of change error for normal incidence standard deviation. '
                  r'Expressed as a range rate error. :math:`\dot{R} = \Delta \dot{TD_Iono} \times c/2`.')  # type: float
    IonoRgRgRateCC = FloatDescriptor(
        'IonoRgRgRateCC', _required, strict=DEFAULT_STRICT, bounds=(-1, 1),
        docstring='Ionosphere range error and range rate error correlation coefficient.')  # type: float
    IonoRangeVertDecorr = SerializableDescriptor(
        'IonoRangeVertDecorr', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT,
        docstring='Ionosphere range error decorrelation function.')  # type: ErrorDecorrFuncType

    def __init__(self, IonoRangeVertical=None, IonoRangeRateVertical=None,
                 IonoRgRgRateCC=None, IonoRangeVertDecorr=None, **kwargs):
        """
        Parameters
        ----------
        IonoRangeVertical : float
        IonoRangeRateVertical : float
        IonoRgRgRateCC : float
        IonoRangeVertDecorr : ErrorDecorrFuncType
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.IonoRangeVertical = IonoRangeVertical
        self.IonoRangeRateVertical = IonoRangeRateVertical
        self.IonoRgRgRateCC = IonoRgRgRateCC
        self.IonoRangeVertDecorr = IonoRangeVertDecorr
        super().__init__(**kwargs)
class BistaticRadarSensorType(Serializable):
    """
    Error statistics for a single radar platform.
    """

    _fields = ('DelayBias', 'ClockFreqSF', 'CollectionStartTime')
    _required = ('CollectionStartTime', )
    _numeric_format = {
        'DelayBias': FLOAT_FORMAT, 'ClockFreqSF': FLOAT_FORMAT,
        'CollectionStartTime': FLOAT_FORMAT}
    # descriptors
    DelayBias = FloatDescriptor(
        'DelayBias', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[float]
    ClockFreqSF = FloatDescriptor(
        'ClockFreqSF', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Payload clock frequency scale factor standard deviation, '
                  r'where :math:`SF = (\Delta f)/f_0`.')  # type: Optional[float]
    CollectionStartTime = FloatDescriptor(
        'CollectionStartTime', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Collection Start time error standard deviation, '
                  'in seconds.')  # type: float

    def __init__(self, DelayBias=None, ClockFreqSF=None, CollectionStartTime=None, **kwargs):
        """
        Parameters
        ----------
        DelayBias : None|float
        ClockFreqSF : None|float
        CollectionStartTime : float
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.DelayBias = DelayBias
        self.ClockFreqSF = ClockFreqSF
        self.CollectionStartTime = CollectionStartTime
        super().__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required to express these fields -
        (1, 1, 0) when DelayBias is populated, (1, 0, 1) otherwise.
        """

        if self.DelayBias is None:
            return (1, 0, 1)
        return (1, 1, 0)
class MonostaticType(Serializable):
    """
    Error parameters for monostatic collection.
    """

    _fields = ('PosVelErr', 'RadarSensor', 'TropoError', 'IonoError', 'AddedParameters')
    _required = ('PosVelErr', 'RadarSensor')
    _collections_tags = {'AddedParameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    PosVelErr = SerializableDescriptor(
        'PosVelErr', PosVelErrType, _required, strict=DEFAULT_STRICT,
        docstring='Position and velocity error statistics for the sensor '
                  'platform.')  # type: PosVelErrType
    RadarSensor = SerializableDescriptor(
        'RadarSensor', RadarSensorType, _required, strict=DEFAULT_STRICT,
        docstring='Radar sensor error statistics.')  # type: RadarSensorType
    TropoError = SerializableDescriptor(
        'TropoError', TropoErrorType, _required, strict=DEFAULT_STRICT,
        docstring='Troposphere delay error statistics.')  # type: TropoErrorType
    IonoError = SerializableDescriptor(
        'IonoError', IonoErrorType, _required, strict=DEFAULT_STRICT,
        docstring='Ionosphere delay error statistics.')  # type: IonoErrorType
    AddedParameters = ParametersDescriptor(
        'AddedParameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Additional error parameters.')  # type: ParametersCollection

    def __init__(self, PosVelErr=None, RadarSensor=None, TropoError=None, IonoError=None,
                 AddedParameters=None, **kwargs):
        """
        Parameters
        ----------
        PosVelErr : PosVelErrType
        RadarSensor : RadarSensorType
        TropoError : None|TropoErrorType
        IonoError : None|IonoErrorType
        AddedParameters : None|ParametersCollection|dict
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.PosVelErr = PosVelErr
        self.RadarSensor = RadarSensor
        self.TropoError = TropoError
        self.IonoError = IonoError
        self.AddedParameters = AddedParameters
        # All fields are assigned above, so only the remaining kwargs are
        # forwarded - previously PosVelErr and RadarSensor were redundantly
        # passed to the base class as well, inconsistent with every sibling
        # class in this module.
        super(MonostaticType, self).__init__(**kwargs)
class PlatformType(Serializable):
    """
    Basic bistatic platform error type definition.
    """

    _fields = ('PosVelErr', 'RadarSensor')
    _required = _fields
    # descriptors
    PosVelErr = SerializableDescriptor(
        'PosVelErr', PosVelErrType, _required, strict=DEFAULT_STRICT,
        docstring='Position and velocity error statistics for the sensor '
                  'platform.')  # type: PosVelErrType
    RadarSensor = SerializableDescriptor(
        'RadarSensor', BistaticRadarSensorType, _required, strict=DEFAULT_STRICT,
        docstring='Platform sensor error statistics.')  # type: BistaticRadarSensorType

    def __init__(self, PosVelErr=None, RadarSensor=None, **kwargs):
        """
        Parameters
        ----------
        PosVelErr : PosVelErrType
        RadarSensor : BistaticRadarSensorType
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.PosVelErr = PosVelErr
        self.RadarSensor = RadarSensor
        super().__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required to express these fields, as
        determined by the contained RadarSensor.
        """

        sensor = self.RadarSensor
        if sensor is None:
            return (1, 0, 1)
        return max((1, 0, 1), sensor.version_required())
class BistaticType(Serializable):
    """
    Error parameters for bistatic parameters.
    """

    _fields = ('TxPlatform', 'RcvPlatform', 'AddedParameters')
    _required = ('TxPlatform', )
    _collections_tags = {'AddedParameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    TxPlatform = SerializableDescriptor(
        'TxPlatform', PlatformType, _required, strict=DEFAULT_STRICT,
        docstring='Error statistics for the transmit platform.')  # type: PlatformType
    RcvPlatform = SerializableDescriptor(
        'RcvPlatform', PlatformType, _required, strict=DEFAULT_STRICT,
        docstring='Error statistics for the receive platform.')  # type: PlatformType
    AddedParameters = ParametersDescriptor(
        'AddedParameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Additional error parameters.')  # type: ParametersCollection

    def __init__(self, TxPlatform=None, RcvPlatform=None, AddedParameters=None, **kwargs):
        """
        Parameters
        ----------
        TxPlatform : PlatformType
        RcvPlatform : PlatformType
        AddedParameters : None|ParametersCollection|dict
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.TxPlatform = TxPlatform
        self.RcvPlatform = RcvPlatform
        self.AddedParameters = AddedParameters
        super().__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required to express these fields, as
        determined by the contained platforms.
        """

        versions = [(1, 0, 1)]
        for platform in (self.TxPlatform, self.RcvPlatform):
            if platform is not None:
                versions.append(platform.version_required())
        return max(versions)
class ErrorParametersType(Serializable):
    """
    Parameters that describe the statistics of errors in measured or estimated
    parameters that describe the collection.
    """

    _fields = ('Monostatic', 'Bistatic')
    _required = ()
    _choice = ({'required': True, 'collection': _fields}, )
    # descriptors
    Monostatic = SerializableDescriptor(
        'Monostatic', MonostaticType, _required, strict=DEFAULT_STRICT,
        docstring='The monostatic parameters.')  # type: Union[None, MonostaticType]
    Bistatic = SerializableDescriptor(
        'Bistatic', BistaticType, _required, strict=DEFAULT_STRICT,
        docstring='The bistatic parameters.')  # type: Union[None, BistaticType]

    def __init__(self, Monostatic=None, Bistatic=None, **kwargs):
        """
        Parameters
        ----------
        Monostatic : None|MonostaticType
        Bistatic : None|BistaticType
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Monostatic = Monostatic
        self.Bistatic = Bistatic
        super().__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required to express these fields.
        """

        if self.Bistatic is None:
            return (1, 0, 1)
        return max((1, 0, 1), self.Bistatic.version_required())
| 13,623 | 38.375723 | 128 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/CollectionID.py | """
The CollectionIDType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from .base import DEFAULT_STRICT
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType
from sarpy.io.xml.descriptors import StringDescriptor
class CollectionIDType(CollectionInfoType):
    """
    The CollectionID type definition - extends the SICD CollectionInfo type
    with a required ReleaseInfo field.
    """

    _fields = (
        'CollectorName', 'IlluminatorName', 'CoreName', 'CollectType',
        'RadarMode', 'Classification', 'ReleaseInfo', 'Parameters', 'CountryCodes')
    _required = ('CollectorName', 'CoreName', 'CollectType', 'RadarMode', 'Classification', 'ReleaseInfo')
    # descriptors
    ReleaseInfo = StringDescriptor(
        'ReleaseInfo', _required, strict=DEFAULT_STRICT, default_value='UNRESTRICTED',
        docstring='The product release information.')  # type: str

    def __init__(self, CollectorName=None, IlluminatorName=None, CoreName=None, CollectType=None,
                 RadarMode=None, Classification="UNCLASSIFIED", ReleaseInfo='UNRESTRICTED',
                 CountryCodes=None, Parameters=None, **kwargs):
        """
        Parameters
        ----------
        CollectorName : str
        IlluminatorName : str
        CoreName : str
        CollectType : str
        RadarMode : RadarModeType
        Classification : str
        ReleaseInfo : str
        CountryCodes : list|str
        Parameters : ParametersCollection|dict
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.ReleaseInfo = ReleaseInfo
        super().__init__(
            CollectorName=CollectorName, IlluminatorName=IlluminatorName, CoreName=CoreName,
            CollectType=CollectType, RadarMode=RadarMode, Classification=Classification,
            CountryCodes=CountryCodes, Parameters=Parameters, **kwargs)
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/utils.py | """
Common utils for CPHD 1.0 functionality.
"""
import numpy
__classification__ = "UNCLASSIFIED"
__author__ = ("Thomas McCullough", "Daniel Pressler, Valkyrie")
#########
# Module variables
# Mapping from CPHD binary format string to the corresponding big-endian
# numpy dtype (all entries use the '>' byte-order prefix). The complex
# integer formats ("CI2" through "CI16") have no native numpy scalar type,
# so they map to structured dtypes with ('real', 'imag') component fields;
# "CF8"/"CF16" use the native complex float dtypes.
_DTYPE_LOOKUP = {
    "U1": numpy.dtype('>u1'),
    "U2": numpy.dtype('>u2'),
    "U4": numpy.dtype('>u4'),
    "U8": numpy.dtype('>u8'),
    "I1": numpy.dtype('>i1'),
    "I2": numpy.dtype('>i2'),
    "I4": numpy.dtype('>i4'),
    "I8": numpy.dtype('>i8'),
    "F4": numpy.dtype('>f4'),
    "F8": numpy.dtype('>f8'),
    "CI2": numpy.dtype([('real', '>i1'), ('imag', '>i1')]),
    "CI4": numpy.dtype([('real', '>i2'), ('imag', '>i2')]),
    "CI8": numpy.dtype([('real', '>i4'), ('imag', '>i4')]),
    "CI16": numpy.dtype([('real', '>i8'), ('imag', '>i8')]),
    "CF8": numpy.dtype('>c8'),
    "CF16": numpy.dtype('>c16')}
def _single_binary_format_string_to_dtype(form):
"""
Convert a CPHD datatype into a dtype.
Parameters
----------
form
Returns
-------
numpy.dtype
"""
if form.startswith('S'):
return numpy.dtype(form)
else:
return _DTYPE_LOOKUP[form]
def binary_format_string_to_dtype(format_string):
    """
    Return the numpy.dtype for CPHD Binary Format string (table 10-2).

    Parameters
    ----------
    format_string : str
        PVP type designator (e.g., :code:`'I1', 'I4', 'CF8'`, etc.), or a
        semi-colon terminated multiple-parameter description
        (e.g., :code:`'X=F8;Y=F8;Z=F8;'`).

    Returns
    -------
    numpy.dtype
        The equivalent `numpy.dtype` of the PVP format string
        (e.g., :code:`numpy.int8, numpy.int32, numpy.complex64`, etc.).

    Raises
    ------
    ValueError
        If a multiple-parameter format string is not terminated by a semi-colon.
    """

    components = format_string.split(';')
    if '=' not in components[0]:
        # a simple single-component format
        return _single_binary_format_string_to_dtype(components[0])

    # multiple named parameters - the string must end with a semi-colon, so the
    # final element of the split is empty. This was previously an `assert`,
    # which is silently stripped under the -O interpreter flag - raise an
    # explicit exception instead.
    if not format_string.endswith(';'):
        raise ValueError(
            'Format strings describing multiple parameters must end with a '
            'semi-colon, got `{}`'.format(format_string))
    comptypes = []
    for comp in components[:-1]:
        kvp = comp.split('=')
        comptypes.append((kvp[0], _single_binary_format_string_to_dtype(kvp[1])))
    keys, types = list(zip(*comptypes))
    if keys == ('X', 'Y', 'Z') and len(set(types)) == 1:
        # special handling of XYZ types - a homogeneous length-3 subarray
        dtype = numpy.dtype((comptypes[0][1], 3))
    elif keys == ('DCX', 'DCY') and len(set(types)) == 1:
        # special handling of DCX/DCY types - a homogeneous length-2 subarray
        dtype = numpy.dtype((comptypes[0][1], 2))
    else:
        dtype = numpy.dtype(comptypes)
    return dtype
def homogeneous_dtype(format_string, return_length=False):
    """
    Determine a numpy.dtype (including endianness) from a CPHD format string,
    requiring that any multiple parts are all identical.

    Parameters
    ----------
    format_string : str
    return_length : bool
        Also return the number of elements?

    Returns
    -------
    numpy.dtype|(numpy.dtype, int)
        Tuple of (`numpy.dtype`, # of elements) if `return_length`, `numpy.dtype` otherwise

    Raises
    ------
    ValueError
        If the format string describes components of differing dtypes.
    """

    raw_dtype = binary_format_string_to_dtype(format_string)
    if raw_dtype.names is None:
        # simple scalar or homogeneous subarray dtype
        base_dtype = raw_dtype.base
        count = max(1, sum(raw_dtype.shape))
    else:
        # structured dtype - all component dtypes must agree
        dtype_set = {entry[0] for entry in raw_dtype.fields.values()}
        if len(dtype_set) != 1:
            raise ValueError("Format string {} was heterogeneous (dtype_set={})".format(format_string, dtype_set))
        base_dtype = dtype_set.pop()
        count = len(raw_dtype.names)
    return (base_dtype, count) if return_length else base_dtype
| 3,556 | 27.456 | 118 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/PVP.py | """
The Per Vector parameters (PVP) definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List, Tuple, Optional
import numpy
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import StringDescriptor, IntegerDescriptor, \
SerializableDescriptor, SerializableListDescriptor
from .utils import binary_format_string_to_dtype, homogeneous_dtype
from .base import DEFAULT_STRICT
class PerVectorParameterI8(Serializable):
    """
    A per-vector parameter with fixed size 1 and binary format 'I8'.
    """

    _fields = ('Offset', 'Size', 'Format')
    _required = ('Offset', )
    # descriptors
    Offset = IntegerDescriptor(
        'Offset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The offset value.')  # type: int

    def __init__(self, Offset=None, **kwargs):
        """
        Parameters
        ----------
        Offset : int
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Offset = Offset
        super().__init__(**kwargs)

    @property
    def Size(self):
        """
        int: The size of the vector, constant value 1 here.
        """

        return 1

    @property
    def Format(self):
        """
        str: The format of the vector data, constant value 'I8' here.
        """

        return 'I8'
class PerVectorParameterF8(Serializable):
    """
    A per-vector parameter with fixed size 1 and binary format 'F8'.
    """

    _fields = ('Offset', 'Size', 'Format')
    _required = ('Offset', )
    # descriptors
    Offset = IntegerDescriptor(
        'Offset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The offset value.')  # type: int

    def __init__(self, Offset=None, **kwargs):
        """
        Parameters
        ----------
        Offset : int
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Offset = Offset
        super().__init__(**kwargs)

    @property
    def Size(self):
        """
        int: The size of the vector, constant value 1 here.
        """

        return 1

    @property
    def Format(self):
        """
        str: The format of the vector data, constant value 'F8' here.
        """

        return 'F8'
class PerVectorParameterXYZ(Serializable):
    """
    A per-vector parameter with fixed size 3 and binary format 'X=F8;Y=F8;Z=F8;'.
    """

    _fields = ('Offset', 'Size', 'Format')
    _required = ('Offset', )
    # descriptors
    Offset = IntegerDescriptor(
        'Offset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The offset value.')  # type: int

    def __init__(self, Offset=None, **kwargs):
        """
        Parameters
        ----------
        Offset : int
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Offset = Offset
        super().__init__(**kwargs)

    @property
    def Size(self):
        """
        int: The size of the vector, constant value 3 here.
        """

        return 3

    @property
    def Format(self):
        """
        str: The format of the vector data, constant value 'X=F8;Y=F8;Z=F8;' here.
        """

        return 'X=F8;Y=F8;Z=F8;'
class PerVectorParameterEB(Serializable):
    """
    A per-vector parameter with fixed size 2 and binary format 'DCX=F8;DCY=F8;'.
    """

    _fields = ('Offset', 'Size', 'Format')
    _required = ('Offset', )
    # descriptors
    Offset = IntegerDescriptor(
        'Offset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The offset value.')  # type: int

    def __init__(self, Offset=None, **kwargs):
        """
        Parameters
        ----------
        Offset : int
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Offset = Offset
        super().__init__(**kwargs)

    @property
    def Size(self):
        """
        int: The size of the vector, constant value 2 here.
        """

        return 2

    @property
    def Format(self):
        """
        str: The format of the vector data, constant value 'DCX=F8;DCY=F8;' here.
        """

        return 'DCX=F8;DCY=F8;'
class UserDefinedPVPType(Serializable):
    """
    A user defined PVP structure.
    """

    _fields = ('Name', 'Offset', 'Size', 'Format')
    _required = _fields
    # descriptors
    Name = StringDescriptor(
        'Name', _required, strict=DEFAULT_STRICT,
        docstring='The name of the user defined parameter.')  # type: str
    Offset = IntegerDescriptor(
        'Offset', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The offset value.')  # type: int
    Size = IntegerDescriptor(
        'Size', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='The size of the parameter.')  # type: int
    Format = StringDescriptor(
        'Format', _required, strict=DEFAULT_STRICT,
        docstring='The binary format of the parameter.')  # type: str

    def __init__(self, Name=None, Offset=None, Size=None, Format=None, **kwargs):
        """
        Parameters
        ----------
        Name : str
        Offset : int
        Size : int
        Format : str
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Name = Name
        self.Offset = Offset
        self.Size = Size
        self.Format = Format
        super().__init__(**kwargs)
class TxAntennaType(Serializable):
    """
    Transmit antenna per-vector parameter descriptions.
    """

    _fields = ('TxACX', 'TxACY', 'TxEB')
    _required = _fields
    TxACX = SerializableDescriptor(
        'TxACX', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: PerVectorParameterXYZ
    TxACY = SerializableDescriptor(
        'TxACY', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: PerVectorParameterXYZ
    TxEB = SerializableDescriptor(
        'TxEB', PerVectorParameterEB, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: PerVectorParameterEB

    def __init__(
            self,
            TxACX: PerVectorParameterXYZ = None,
            TxACY: PerVectorParameterXYZ = None,
            TxEB: PerVectorParameterEB = None,
            **kwargs):
        """
        Parameters
        ----------
        TxACX : PerVectorParameterXYZ
        TxACY : PerVectorParameterXYZ
        TxEB : PerVectorParameterEB
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.TxACX = TxACX
        self.TxACY = TxACY
        self.TxEB = TxEB
        super().__init__(**kwargs)
class RcvAntennaType(Serializable):
    """
    Receive antenna per-vector parameter descriptions.
    """

    _fields = ('RcvACX', 'RcvACY', 'RcvEB')
    _required = _fields
    RcvACX = SerializableDescriptor(
        'RcvACX', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: PerVectorParameterXYZ
    RcvACY = SerializableDescriptor(
        'RcvACY', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: PerVectorParameterXYZ
    RcvEB = SerializableDescriptor(
        'RcvEB', PerVectorParameterEB, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: PerVectorParameterEB

    def __init__(
            self,
            RcvACX: Optional[PerVectorParameterXYZ] = None,
            RcvACY: Optional[PerVectorParameterXYZ] = None,
            RcvEB: Optional[PerVectorParameterEB] = None,
            **kwargs):
        """
        Parameters
        ----------
        RcvACX : PerVectorParameterXYZ
        RcvACY : PerVectorParameterXYZ
        RcvEB : PerVectorParameterEB
        kwargs
        """

        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.RcvACX = RcvACX
        self.RcvACY = RcvACY
        self.RcvEB = RcvEB
        super().__init__(**kwargs)
class PVPType(Serializable):
_fields = (
'TxTime', 'TxPos', 'TxVel', 'RcvTime', 'RcvPos', 'RcvVel',
'SRPPos', 'AmpSF', 'aFDOP', 'aFRR1', 'aFRR2', 'FX1', 'FX2',
'FXN1', 'FXN2', 'TOA1', 'TOA2', 'TOAE1', 'TOAE2', 'TDTropoSRP',
'TDIonoSRP', 'SC0', 'SCSS', 'SIGNAL',
'TxAntenna', 'RcvAntenna', 'AddedPVP')
_required = (
'TxTime', 'TxPos', 'TxVel', 'RcvTime', 'RcvPos', 'RcvVel',
'SRPPos', 'aFDOP', 'aFRR1', 'aFRR2', 'FX1', 'FX2',
'TOA1', 'TOA2', 'TDTropoSRP', 'SC0', 'SCSS')
_collections_tags = {'AddedPVP': {'array': False, 'child_tag': 'AddedPVP'}}
# descriptors
TxTime = SerializableDescriptor(
'TxTime', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
TxPos = SerializableDescriptor(
'TxPos', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterXYZ
TxVel = SerializableDescriptor(
'TxVel', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterXYZ
RcvTime = SerializableDescriptor(
'RcvTime', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
RcvPos = SerializableDescriptor(
'RcvPos', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterXYZ
RcvVel = SerializableDescriptor(
'RcvVel', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterXYZ
SRPPos = SerializableDescriptor(
'SRPPos', PerVectorParameterXYZ, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterXYZ
AmpSF = SerializableDescriptor(
'AmpSF', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
aFDOP = SerializableDescriptor(
'aFDOP', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
aFRR1 = SerializableDescriptor(
'aFRR1', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
aFRR2 = SerializableDescriptor(
'aFRR2', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
FX1 = SerializableDescriptor(
'FX1', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
FX2 = SerializableDescriptor(
'FX2', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
FXN1 = SerializableDescriptor(
'FXN1', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
FXN2 = SerializableDescriptor(
'FXN2', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
TOA1 = SerializableDescriptor(
'TOA1', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
TOA2 = SerializableDescriptor(
'TOA2', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
TOAE1 = SerializableDescriptor(
'TOAE1', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
TOAE2 = SerializableDescriptor(
'TOAE2', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
TDTropoSRP = SerializableDescriptor(
'TDTropoSRP', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
TDIonoSRP = SerializableDescriptor(
'TDIonoSRP', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
SC0 = SerializableDescriptor(
'SC0', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
SCSS = SerializableDescriptor(
'SCSS', PerVectorParameterF8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterF8
SIGNAL = SerializableDescriptor(
'SIGNAL', PerVectorParameterI8, _required, strict=DEFAULT_STRICT,
docstring='') # type: PerVectorParameterI8
TxAntenna = SerializableDescriptor(
'TxAntenna', TxAntennaType, _required, strict=DEFAULT_STRICT,
docstring='') # type: Optional[TxAntennaType]
RcvAntenna = SerializableDescriptor(
'RcvAntenna', RcvAntennaType, _required, strict=DEFAULT_STRICT,
docstring='') # type: Optional[RcvAntennaType]
AddedPVP = SerializableListDescriptor(
'AddedPVP', UserDefinedPVPType, _collections_tags, _required, strict=DEFAULT_STRICT,
docstring='') # type: Union[None, List[UserDefinedPVPType]]
def __init__(
self,
TxTime: PerVectorParameterF8 = None,
TxPos: PerVectorParameterXYZ = None,
TxVel: PerVectorParameterXYZ = None,
RcvTime: PerVectorParameterF8 = None,
RcvPos: PerVectorParameterXYZ = None,
RcvVel: PerVectorParameterXYZ = None,
SRPPos: PerVectorParameterXYZ = None,
AmpSF: Optional[PerVectorParameterF8] = None,
aFDOP: PerVectorParameterF8 = None,
aFRR1: PerVectorParameterF8 = None,
aFRR2: PerVectorParameterF8 = None,
FX1: PerVectorParameterF8 = None,
FX2: PerVectorParameterF8 = None,
FXN1: Optional[PerVectorParameterF8] = None,
FXN2: Optional[PerVectorParameterF8] = None,
TOA1: PerVectorParameterF8 = None,
TOA2: PerVectorParameterF8 = None,
TOAE1: Optional[PerVectorParameterF8] = None,
TOAE2: Optional[PerVectorParameterF8] = None,
TDTropoSRP: PerVectorParameterF8 = None,
TDIonoSRP: Optional[PerVectorParameterF8] = None,
SC0: PerVectorParameterF8 = None,
SCSS: PerVectorParameterF8 = None,
SIGNAL: Optional[PerVectorParameterI8] = None,
TxAntenna: Optional[TxAntennaType] = None,
RcvAntenna: Optional[RcvAntennaType] = None,
AddedPVP: Optional[List[UserDefinedPVPType]] = None,
**kwargs):
"""
Parameters
----------
TxTime : PerVectorParameterF8
TxPos : PerVectorParameterXYZ
TxVel : PerVectorParameterXYZ
RcvTime : PerVectorParameterF8
RcvPos : PerVectorParameterXYZ
RcvVel : PerVectorParameterXYZ
SRPPos : PerVectorParameterXYZ
AmpSF : None|PerVectorParameterF8
aFDOP : PerVectorParameterF8
aFRR1 : PerVectorParameterF8
aFRR2 : PerVectorParameterF8
FX1 : PerVectorParameterF8
FX2 : PerVectorParameterF8
FXN1 : None|PerVectorParameterF8
FXN2 : None|PerVectorParameterF8
TOA1 : PerVectorParameterF8
TOA2 : PerVectorParameterF8
TOAE1 : None|PerVectorParameterF8
TOAE2 : None|PerVectorParameterF8
TDTropoSRP : PerVectorParameterF8
TDIonoSRP : None|PerVectorParameterF8
SC0 : PerVectorParameterF8
SCSS : PerVectorParameterF8
SIGNAL : None|PerVectorParameterI8
TxAntenna : None|TxAntennaType
RcvAntenna : None|RcvAntennaType
AddedPVP : None|List[UserDefinedPVPType]
kwargs
"""
if '_xml_ns' in kwargs:
self._xml_ns = kwargs['_xml_ns']
if '_xml_ns_key' in kwargs:
self._xml_ns_key = kwargs['_xml_ns_key']
self.TxTime = TxTime
self.TxPos = TxPos
self.TxVel = TxVel
self.RcvTime = RcvTime
self.RcvPos = RcvPos
self.RcvVel = RcvVel
self.SRPPos = SRPPos
self.AmpSF = AmpSF
self.aFDOP = aFDOP
self.aFRR1 = aFRR1
self.aFRR2 = aFRR2
self.FX1 = FX1
self.FX2 = FX2
self.FXN1 = FXN1
self.FXN2 = FXN2
self.TOA1 = TOA1
self.TOA2 = TOA2
self.TOAE1 = TOAE1
self.TOAE2 = TOAE2
self.TDTropoSRP = TDTropoSRP
self.TDIonoSRP = TDIonoSRP
self.SC0 = SC0
self.SCSS = SCSS
self.SIGNAL = SIGNAL
self.TxAntenna = TxAntenna
self.RcvAntenna = RcvAntenna
self.AddedPVP = AddedPVP
super(PVPType, self).__init__(**kwargs)
def get_size(self):
"""
Gets the size in bytes of each vector.
Returns
-------
int
"""
out = 0
for fld in self._fields[:-3]:
val = getattr(self, fld)
if val is not None:
out += val.Size*8
for fld in ['TxAntenna', 'RcvAntenna']:
val = getattr(self, fld)
assert(isinstance(val, TxAntennaType))
if val is not None:
out += (3 + 3 + 2)*8
if self.AddedPVP is not None:
for entry in self.AddedPVP:
out += entry.Size*8
return out
def get_offset_size_format(self, field):
"""
Get the Offset (in bytes), Size (in bytes) for the given field,
as well as the corresponding struct format string.
Parameters
----------
field : str
The desired field name.
Returns
-------
None|Tuple[int, int, str]
"""
def get_return(the_val) -> Union[None, Tuple[int, int, str]]:
if the_val is None:
return None
return the_val.Offset*8, the_val.Size*8, homogeneous_dtype(the_val.Format).char
if field in self._fields[:-3]:
return get_return(getattr(self, field))
elif field in ['TxACX', 'TxACY', 'TxEB']:
if self.TxAntenna is None:
return None
else:
return get_return(getattr(self.TxAntenna, field))
elif field in ['RcvACX', 'RcvACY', 'RcvEB']:
if self.RcvAntenna is None:
return None
else:
return get_return(getattr(self.RcvAntenna, field))
else:
if self.AddedPVP is None:
return None
for val in self.AddedPVP:
if field == val.Name:
return get_return(val)
return None
def get_vector_dtype(self):
"""
Gets the dtype for the corresponding structured array for the full PVP
array.
Returns
-------
numpy.dtype
This will be a compound dtype for a structured array.
"""
bytes_per_word = 8
names = []
formats = []
offsets = []
for fld in self._fields:
val = getattr(self, fld)
if val is None:
continue
elif fld == 'TxAntenna':
for t_fld in ['TxACX', 'TxACY', 'TxEB']:
t_val = getattr(val, t_fld)
names.append(t_fld)
formats.append(binary_format_string_to_dtype(t_val.Format))
offsets.append(t_val.Offset*bytes_per_word)
elif fld == 'RcvAntenna':
for t_fld in ['RcvACX', 'RcvACY', 'RcvEB']:
t_val = getattr(val, t_fld)
names.append(t_fld)
formats.append(binary_format_string_to_dtype(t_val.Format))
offsets.append(t_val.Offset*bytes_per_word)
elif fld == 'AddedPVP':
for entry in val:
assert isinstance(entry, UserDefinedPVPType)
names.append(entry.Name)
formats.append(binary_format_string_to_dtype(entry.Format))
offsets.append(entry.Offset*bytes_per_word)
else:
names.append(fld)
formats.append(binary_format_string_to_dtype(val.Format))
offsets.append(val.Offset*bytes_per_word)
return numpy.dtype({'names': names, 'formats': formats, 'offsets': offsets})
def version_required(self) -> Tuple[int, int, int]:
required = (1, 0, 1)
if self.TxAntenna is not None or self.RcvAntenna is not None:
required = max(required, (1, 1, 0))
return required
| 20,842 | 33.394389 | 92 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/Channel.py | """
The Channel definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List, Tuple, Optional
import numpy
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import POLARIZATION_TYPE, AreaType
from sarpy.io.xml.base import Serializable, SerializableArray, ParametersCollection, \
Arrayable
from sarpy.io.xml.descriptors import StringDescriptor, StringEnumDescriptor, StringListDescriptor, \
IntegerDescriptor, FloatDescriptor, BooleanDescriptor, ParametersDescriptor, \
SerializableDescriptor, SerializableListDescriptor, SerializableArrayDescriptor
class PolarizationRefType(Serializable, Arrayable):
    """
    Polarization reference type, expressible as the array
    `[AmpH, AmpV, PhaseV]`.
    """
    _fields = ('AmpH', 'AmpV', 'PhaseV')
    _required = _fields
    _numeric_format = {key: '0.17E' for key in _fields}
    AmpH = FloatDescriptor(
        'AmpH', _required, strict=DEFAULT_STRICT, bounds=(0.0, 1.0),
        docstring='E-field relative amplitude in H direction')  # type: float
    AmpV = FloatDescriptor(
        'AmpV', _required, strict=DEFAULT_STRICT, bounds=(0.0, 1.0),
        docstring='E-field relative amplitude in V direction')  # type: float
    PhaseV = FloatDescriptor(
        'PhaseV', _required, strict=DEFAULT_STRICT, bounds=(-0.5, 0.5),
        docstring='Relative phase of the V E-field '
                  'relative to the H E-field')  # type: float

    def __init__(
            self,
            AmpH: float = None,
            AmpV: float = None,
            PhaseV: float = None,
            **kwargs):
        """
        Parameters
        ----------
        AmpH : float
        AmpV : float
        PhaseV : float
        kwargs
        """

        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.AmpH, self.AmpV, self.PhaseV = AmpH, AmpV, PhaseV
        super(PolarizationRefType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form [AmpH, AmpV, PhaseV]
        """

        return numpy.array([self.AmpH, self.AmpV, self.PhaseV], dtype=dtype)

    @classmethod
    def from_array(cls, array: numpy.ndarray):
        """
        Construct from a iterable.

        Parameters
        ----------
        array : numpy.ndarray|list|tuple

        Returns
        -------
        PolarizationRefType
        """

        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(
                'Expected array to be numpy.ndarray, list, or tuple,\n\t'
                'got `{}`'.format(type(array)))
        if len(array) < 3:
            raise ValueError(
                'Expected array to be of length 3,\n\t'
                'and received `{}`'.format(array))
        return cls(AmpH=array[0], AmpV=array[1], PhaseV=array[2])
class PolarizationType(Serializable):
    """
    Polarization(s) of the signals that formed the signal array.
    """
    _fields = ('TxPol', 'RcvPol', 'TxPolRef', 'RcvPolRef')
    _required = ('TxPol', 'RcvPol')
    # descriptors
    TxPol = StringEnumDescriptor(
        'TxPol', POLARIZATION_TYPE, _required, strict=DEFAULT_STRICT,
        docstring='Transmitted signal polarization for the channel.')  # type: str
    RcvPol = StringEnumDescriptor(
        'RcvPol', POLARIZATION_TYPE, _required, strict=DEFAULT_STRICT,
        docstring='Receive polarization for the channel.')  # type: str
    TxPolRef = SerializableDescriptor(
        'TxPolRef', PolarizationRefType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[PolarizationRefType]
    RcvPolRef = SerializableDescriptor(
        'RcvPolRef', PolarizationRefType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[PolarizationRefType]

    def __init__(
            self,
            TxPol: str = None,
            RcvPol: str = None,
            TxPolRef: Union[None, PolarizationRefType, numpy.ndarray, tuple, list] = None,
            RcvPolRef: Union[None, PolarizationRefType, numpy.ndarray, tuple, list] = None,
            **kwargs):
        """
        Parameters
        ----------
        TxPol : str
        RcvPol : str
        TxPolRef : None|PolarizationRefType|numpy.ndarray|tuple|list
        RcvPolRef : None|PolarizationRefType|numpy.ndarray|tuple|list
        """

        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.TxPol, self.RcvPol = TxPol, RcvPol
        self.TxPolRef, self.RcvPolRef = TxPolRef, RcvPolRef
        super(PolarizationType, self).__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required for this node - the 'S'/'E'
        polarization values and the polarization reference entries only
        exist in CPHD 1.1.0 and beyond.
        """

        # note: `None in ('S', 'E')` is False, so absent values are skipped
        advanced = any(getattr(self, fld) in ('S', 'E') for fld in ('TxPol', 'RcvPol'))
        if self.TxPolRef is not None or self.RcvPolRef is not None:
            advanced = True
        return (1, 1, 0) if advanced else (1, 0, 1)
class LFMEclipseType(Serializable):
    """
    The LFM Eclipse definition - parameters that describe the FX domain signal
    content for partially eclipsed echoes when the collection is performed
    with a Linear FM waveform.
    """
    _fields = ('FxEarlyLow', 'FxEarlyHigh', 'FxLateLow', 'FxLateHigh')
    _required = _fields
    _numeric_format = {fld: FLOAT_FORMAT for fld in _fields}
    # descriptors
    FxEarlyLow = FloatDescriptor(
        'FxEarlyLow', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring=r'FX domain minimum frequency value for an echo at '
                  r':math:`\Delta TOA = \Delta TOAE1 < \Delta TOA1`, in Hz.')  # type: float
    FxEarlyHigh = FloatDescriptor(
        'FxEarlyHigh', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='FX domain maximum frequency value for an echo at '
                  r':math:`\Delta TOA = \Delta TOAE1 < \Delta TOA1`, in Hz.')  # type: float
    FxLateLow = FloatDescriptor(
        'FxLateLow', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='FX domain minimum frequency value for an echo at '
                  r':math:`\Delta TOA = \Delta TOAE2 < \Delta TOA2`, in Hz.')  # type: float
    FxLateHigh = FloatDescriptor(
        'FxLateHigh', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='FX domain maximum frequency value for echo at '
                  r':math:`\Delta TOA = \Delta TOAE2 < \Delta TOA2`, in Hz.')  # type: float

    def __init__(self, FxEarlyLow=None, FxEarlyHigh=None, FxLateLow=None, FxLateHigh=None,
                 **kwargs):
        """
        Parameters
        ----------
        FxEarlyLow : float
        FxEarlyHigh : float
        FxLateLow : float
        FxLateHigh : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.FxEarlyLow = FxEarlyLow
        self.FxEarlyHigh = FxEarlyHigh
        self.FxLateLow = FxLateLow
        self.FxLateHigh = FxLateHigh
        super(LFMEclipseType, self).__init__(**kwargs)
class TOAExtendedType(Serializable):
    """
    The time-of-arrival (TOA) extended swath information.
    """
    _fields = ('TOAExtSaved', 'LFMEclipse')
    _required = ('TOAExtSaved', )
    _numeric_format = {'TOAExtSaved': FLOAT_FORMAT}
    # descriptors
    TOAExtSaved = FloatDescriptor(
        'TOAExtSaved', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='TOA extended swath saved that includes both full and partially '
                  'eclipsed echoes.')  # type: float
    LFMEclipse = SerializableDescriptor(
        'LFMEclipse', LFMEclipseType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the FX domain signal content for partially '
                  'eclipsed echoes when the collection is performed with a Linear '
                  'FM waveform.')  # type: Union[None, LFMEclipseType]

    def __init__(self, TOAExtSaved=None, LFMEclipse=None, **kwargs):
        """
        Parameters
        ----------
        TOAExtSaved : float
        LFMEclipse : None|LFMEclipseType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TOAExtSaved = TOAExtSaved
        self.LFMEclipse = LFMEclipse
        super(TOAExtendedType, self).__init__(**kwargs)
class DwellTimesType(Serializable):
    """
    COD Time and Dwell Time polynomials over the image area.
    """
    _fields = ('CODId', 'DwellId', 'DTAId', 'UseDTA')
    _required = ('CODId', 'DwellId')
    # descriptors
    CODId = StringDescriptor(
        'CODId', _required, strict=DEFAULT_STRICT,
        docstring='Identifier of the Center of Dwell Time polynomial that maps '
                  'reference surface position to COD time.')  # type: str
    DwellId = StringDescriptor(
        'DwellId', _required, strict=DEFAULT_STRICT,
        docstring='Identifier of the Dwell Time polynomial that maps reference '
                  'surface position to dwell time.')  # type: str
    DTAId = StringDescriptor(
        'DTAId', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[str]
    UseDTA = BooleanDescriptor(
        'UseDTA', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Optional[bool]

    def __init__(self, CODId=None, DwellId=None, DTAId=None, UseDTA=None, **kwargs):
        """
        Parameters
        ----------
        CODId : str
        DwellId : str
        DTAId : None|str
        UseDTA : None|bool
        kwargs
        """

        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.CODId, self.DwellId = CODId, DwellId
        self.DTAId, self.UseDTA = DTAId, UseDTA
        super(DwellTimesType, self).__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required for this node - the DTA entries
        only exist in CPHD 1.1.0 and beyond.
        """

        if self.DTAId is None and self.UseDTA is None:
            return (1, 0, 1)
        return (1, 1, 0)
class AntennaType(Serializable):
    """
    Antenna Phase Center and Antenna Pattern identifiers for
    the antenna(s) used to collect and form the signal array data.
    """
    _fields = ('TxAPCId', 'TxAPATId', 'RcvAPCId', 'RcvAPATId')
    _required = _fields
    # descriptors
    TxAPCId = StringDescriptor(
        'TxAPCId', _required, strict=DEFAULT_STRICT,
        docstring='Identifier of Transmit APC to be used to compute the transmit '
                  'antenna pattern as a function of time for the channel.')  # type: str
    TxAPATId = StringDescriptor(
        'TxAPATId', _required, strict=DEFAULT_STRICT,
        docstring='Identifier of Transmit Antenna pattern used to form the channel '
                  'signal array.')  # type: str
    RcvAPCId = StringDescriptor(
        'RcvAPCId', _required, strict=DEFAULT_STRICT,
        docstring='Identifier of Receive APC to be used to compute the receive antenna '
                  'pattern as a function of time for the channel.')  # type: str
    RcvAPATId = StringDescriptor(
        'RcvAPATId', _required, strict=DEFAULT_STRICT,
        docstring='Identifier of Receive Antenna pattern used to form the '
                  'channel.')  # type: str

    def __init__(self, TxAPCId=None, TxAPATId=None, RcvAPCId=None, RcvAPATId=None, **kwargs):
        """
        Parameters
        ----------
        TxAPCId : str
        TxAPATId : str
        RcvAPCId : str
        RcvAPATId : str
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TxAPCId = TxAPCId
        self.TxAPATId = TxAPATId
        self.RcvAPCId = RcvAPCId
        self.RcvAPATId = RcvAPATId
        super(AntennaType, self).__init__(**kwargs)
class TxRcvType(Serializable):
    """
    Parameters to identify the Transmit and Receive parameter sets used to collect the signal array.
    """
    _fields = ('TxWFId', 'RcvId')
    _required = _fields
    # both child collections are serialized as repeated elements, not arrays
    _collections_tags = {
        'TxWFId': {'array': False, 'child_tag': 'TxWFId'},
        'RcvId': {'array': False, 'child_tag': 'RcvId'}}
    # descriptors
    TxWFId = StringListDescriptor(
        'TxWFId', _required, strict=DEFAULT_STRICT, minimum_length=1,
        docstring='Identifier of the Transmit Waveform parameter set(s) that '
                  'were used.')  # type: List[str]
    RcvId = StringListDescriptor(
        'RcvId', _required, strict=DEFAULT_STRICT, minimum_length=1,
        docstring='Identifier of the Receive Parameter set(s) that were '
                  'used.')  # type: List[str]

    def __init__(self, TxWFId=None, RcvId=None, **kwargs):
        """
        Parameters
        ----------
        TxWFId : List[str]
        RcvId : List[str]
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TxWFId = TxWFId
        self.RcvId = RcvId
        super(TxRcvType, self).__init__(**kwargs)
class TgtRefLevelType(Serializable):
    """
    Signal level for an ideal point scatterer located at the SRP for reference
    signal vector.
    """
    _fields = ('PTRef', )
    _required = _fields
    _numeric_format = {'PTRef': FLOAT_FORMAT}
    # descriptors
    PTRef = FloatDescriptor(
        'PTRef', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Target power level for a 1.0 square meter ideal point scatterer located '
                  'at the SRP. For FX Domain signal arrays, PTRef is the signal level at '
                  ':math:`fx = fx_C`. For TOA Domain, PTRef is the peak signal level at '
                  r':math:`\Delta TOA = 0`, and :math:`Power = |Signal|^2`.')  # type: float

    def __init__(self, PTRef=None, **kwargs):
        """
        Parameters
        ----------
        PTRef : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.PTRef = PTRef
        super(TgtRefLevelType, self).__init__(**kwargs)
class FxPNPointType(Serializable):
    """
    Points that describe the noise profile.
    """
    _fields = ('Fx', 'PN')
    _required = _fields
    # NOTE: keys here must match the field names exactly - this previously used
    # 'FX', which never matched the 'Fx' field, so its numeric format was never
    # applied on serialization (compare the sibling classes in this module)
    _numeric_format = {'Fx': FLOAT_FORMAT, 'PN': FLOAT_FORMAT}
    # descriptors
    Fx = FloatDescriptor(
        'Fx', _required, strict=DEFAULT_STRICT,
        docstring='Frequency value of this noise profile point, in Hz.')  # type: float
    PN = FloatDescriptor(
        'PN', _required, strict=DEFAULT_STRICT,
        docstring='Power level of this noise profile point.')  # type: float

    def __init__(self, Fx=None, PN=None, **kwargs):
        """
        Parameters
        ----------
        Fx : float
        PN : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Fx = Fx
        self.PN = PN
        super(FxPNPointType, self).__init__(**kwargs)
class FxNoiseProfileType(SerializableArray):
    """
    Array extension for the FX domain noise level profile - the size and index
    attributes are not set on serialization.
    """
    _set_size = False
    _set_index = False
class NoiseLevelType(Serializable):
    """
    Thermal noise level for the reference signal vector.
    """
    _fields = ('PNRef', 'BNRef', 'FxNoiseProfile')
    _required = ('PNRef', 'BNRef')
    _collections_tags = {
        'FxNoiseProfile': {'array': True, 'child_tag': 'Point'}}
    _numeric_format = {'PNRef': FLOAT_FORMAT, 'BNRef': FLOAT_FORMAT}
    # descriptors
    PNRef = FloatDescriptor(
        'PNRef', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Noise power level for thermal noise.')  # type: float
    BNRef = FloatDescriptor(
        'BNRef', _required, strict=DEFAULT_STRICT, bounds=(0, 1),
        docstring='Noise Equivalent BW for noise signal. Bandwidth BN is '
                  'expressed relative to the sample bandwidth.')  # type: float
    FxNoiseProfile = SerializableArrayDescriptor(
        'FxNoiseProfile', FxPNPointType, _collections_tags, _required, strict=DEFAULT_STRICT,
        minimum_length=2, array_extension=FxNoiseProfileType,
        docstring='FX Domain Noise Level Profile. Power level for thermal noise (PN) vs. FX '
                  'frequency values.')  # type: Union[None, FxNoiseProfileType, List[FxPNPointType]]

    def __init__(self, PNRef=None, BNRef=None, FxNoiseProfile=None, **kwargs):
        """
        Parameters
        ----------
        PNRef : float
        BNRef : float
        FxNoiseProfile : FxNoiseProfileType|List[FxPNPointType]
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.PNRef = PNRef
        self.BNRef = BNRef
        self.FxNoiseProfile = FxNoiseProfile
        super(NoiseLevelType, self).__init__(**kwargs)
class ChannelParametersType(Serializable):
    """
    Parameter set that describes a single CPHD data channel.
    """
    _fields = (
        'Identifier', 'RefVectorIndex', 'FXFixed', 'TOAFixed', 'SRPFixed',
        'SignalNormal', 'Polarization', 'FxC', 'FxBW', 'FxBWNoise', 'TOASaved',
        'TOAExtended', 'DwellTimes', 'ImageArea', 'Antenna', 'TxRcv',
        'TgtRefLevel', 'NoiseLevel')
    _required = (
        'Identifier', 'RefVectorIndex', 'FXFixed', 'TOAFixed', 'SRPFixed',
        'Polarization', 'FxC', 'FxBW', 'TOASaved', 'DwellTimes')
    _numeric_format = {
        'FxC': FLOAT_FORMAT, 'FxBW': FLOAT_FORMAT, 'FxBWNoise': FLOAT_FORMAT, 'TOASaved': FLOAT_FORMAT}
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='String that uniquely identifies this CPHD data channel.')  # type: str
    RefVectorIndex = IntegerDescriptor(
        'RefVectorIndex', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Index of the reference vector for the channel.')  # type: int
    FXFixed = BooleanDescriptor(
        'FXFixed', _required, strict=DEFAULT_STRICT,
        docstring='Flag to indicate when a constant FX band is saved for all signal '
                  'vectors of the channel.')  # type: bool
    TOAFixed = BooleanDescriptor(
        'TOAFixed', _required, strict=DEFAULT_STRICT,
        docstring='Flag to indicate when a constant TOA swath is saved for all '
                  'signal vectors of the channel.')  # type: bool
    SRPFixed = BooleanDescriptor(
        'SRPFixed', _required, strict=DEFAULT_STRICT,
        docstring='Flag to indicate when a constant SRP position is used all '
                  'signal vectors of the channel.')  # type: bool
    SignalNormal = BooleanDescriptor(
        'SignalNormal', _required, strict=DEFAULT_STRICT,
        docstring='Flag to indicate when all signal array vectors are normal. '
                  'Included if and only if the SIGNAL PVP is also included.')  # type: bool
    Polarization = SerializableDescriptor(
        'Polarization', PolarizationType, _required, strict=DEFAULT_STRICT,
        docstring='Polarization(s) of the signals that formed the signal '
                  'array.')  # type: PolarizationType
    FxC = FloatDescriptor(
        'FxC', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='FX center frequency value for saved bandwidth for the channel. '
                  'Computed from all vectors of the signal array.')  # type: float
    FxBW = FloatDescriptor(
        'FxBW', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='FX band spanned for the saved bandwidth for the channel. '
                  'Computed from all vectors of the signal array.')  # type: float
    FxBWNoise = FloatDescriptor(
        'FxBWNoise', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='FX signal bandwidth saved that includes noise signal below or '
                  'above the retained echo signal bandwidth.')  # type: float
    TOASaved = FloatDescriptor(
        'TOASaved', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='TOA swath saved for the full resolution echoes for the channel.')  # type: float
    TOAExtended = SerializableDescriptor(
        'TOAExtended', TOAExtendedType, _required, strict=DEFAULT_STRICT,
        docstring='TOA extended swath information.')  # type: Union[None, TOAExtendedType]
    DwellTimes = SerializableDescriptor(
        'DwellTimes', DwellTimesType, _required, strict=DEFAULT_STRICT,
        docstring='COD Time and Dwell Time polynomials over the image area.')  # type: DwellTimesType
    ImageArea = SerializableDescriptor(
        'ImageArea', AreaType, _required, strict=DEFAULT_STRICT,
        docstring='Image Area for the CPHD channel defined by a rectangle aligned with '
                  '(IAX, IAY). May be reduced by the optional '
                  'polygon.')  # type: Union[None, AreaType]
    Antenna = SerializableDescriptor(
        'Antenna', AntennaType, _required, strict=DEFAULT_STRICT,
        docstring='Antenna Phase Center and Antenna Pattern identifiers for the antenna(s) '
                  'used to collect and form the signal array data.')  # type: Union[None, AntennaType]
    TxRcv = SerializableDescriptor(
        'TxRcv', TxRcvType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters to identify the Transmit and Receive parameter sets '
                  'used to collect the signal array.')  # type: Union[None, TxRcvType]
    TgtRefLevel = SerializableDescriptor(
        'TgtRefLevel', TgtRefLevelType, _required, strict=DEFAULT_STRICT,
        docstring='Signal level for an ideal point scatterer located at the SRP for '
                  'reference signal vector.')  # type: Union[None, TgtRefLevelType]
    NoiseLevel = SerializableDescriptor(
        'NoiseLevel', NoiseLevelType, _required, strict=DEFAULT_STRICT,
        docstring='Thermal noise level for the reference signal '
                  'vector.')  # type: Union[None, NoiseLevelType]

    def __init__(self, Identifier=None, RefVectorIndex=None, FXFixed=None, TOAFixed=None,
                 SRPFixed=None, SignalNormal=None, Polarization=None, FxC=None, FxBW=None,
                 FxBWNoise=None, TOASaved=None, TOAExtended=None, DwellTimes=None,
                 ImageArea=None, Antenna=None, TxRcv=None, TgtRefLevel=None,
                 NoiseLevel=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        RefVectorIndex : int
        FXFixed : bool
        TOAFixed : bool
        SRPFixed : bool
        SignalNormal : None|bool
        Polarization : PolarizationType
        FxC : float
        FxBW : float
        FxBWNoise : None|float
        TOASaved : float
        TOAExtended : None|TOAExtendedType
        DwellTimes : DwellTimesType
        ImageArea : None|AreaType
        Antenna : None|AntennaType
        TxRcv : None|TxRcvType
        TgtRefLevel : None|TgtRefLevelType
        NoiseLevel : None|NoiseLevelType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.RefVectorIndex = RefVectorIndex
        self.FXFixed = FXFixed
        self.TOAFixed = TOAFixed
        self.SRPFixed = SRPFixed
        self.SignalNormal = SignalNormal
        self.Polarization = Polarization
        self.FxC = FxC
        self.FxBW = FxBW
        self.FxBWNoise = FxBWNoise
        self.TOASaved = TOASaved
        self.TOAExtended = TOAExtended
        self.DwellTimes = DwellTimes
        self.ImageArea = ImageArea
        self.Antenna = Antenna
        self.TxRcv = TxRcv
        self.TgtRefLevel = TgtRefLevel
        self.NoiseLevel = NoiseLevel
        super(ChannelParametersType, self).__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        # the required CPHD version is driven by the nested
        # Polarization and DwellTimes content
        required = (1, 0, 1)
        if self.Polarization is not None:
            required = max(required, self.Polarization.version_required())
        if self.DwellTimes is not None:
            required = max(required, self.DwellTimes.version_required())
        return required
class ChannelType(Serializable):
    """
    The channel definition.
    """
    _fields = (
        'RefChId', 'FXFixedCPHD', 'TOAFixedCPHD', 'SRPFixedCPHD',
        'Parameters', 'AddedParameters')
    _required = (
        'RefChId', 'FXFixedCPHD', 'TOAFixedCPHD', 'SRPFixedCPHD', 'Parameters')
    _collections_tags = {
        'Parameters': {'array': False, 'child_tag': 'Parameters'},
        'AddedParameters': {'array': False, 'child_tag': 'AddedParameters'}}
    # descriptors
    RefChId = StringDescriptor(
        'RefChId', _required, strict=DEFAULT_STRICT,
        docstring='Channel ID for the Reference Channel in the '
                  'product.')  # type: str
    FXFixedCPHD = BooleanDescriptor(
        'FXFixedCPHD', _required, strict=DEFAULT_STRICT,
        docstring='Flag to indicate when a constant FX band is saved for all '
                  'signal vectors of all channels.')  # type: bool
    TOAFixedCPHD = BooleanDescriptor(
        'TOAFixedCPHD', _required, strict=DEFAULT_STRICT,
        docstring='Flag to indicate when a constant TOA swath is saved for all '
                  'signal vectors of all channels.')  # type: bool
    SRPFixedCPHD = BooleanDescriptor(
        'SRPFixedCPHD', _required, strict=DEFAULT_STRICT,
        docstring='Flag to indicate when a constant SRP position is used all '
                  'signal vectors of all channels.')  # type: bool
    Parameters = SerializableListDescriptor(
        'Parameters', ChannelParametersType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameter Set that describes a CPHD data '
                  'channel.')  # type: List[ChannelParametersType]
    AddedParameters = ParametersDescriptor(
        'AddedParameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Additional free form parameters.')  # type: Union[None, ParametersCollection]

    def __init__(
            self,
            RefChId: str = None,
            FXFixedCPHD: bool = None,
            TOAFixedCPHD: bool = None,
            SRPFixedCPHD: bool = None,
            Parameters: List[ChannelParametersType] = None,
            AddedParameters: Optional[ParametersCollection] = None,
            **kwargs):
        """
        Parameters
        ----------
        RefChId : str
        FXFixedCPHD : bool
        TOAFixedCPHD : bool
        SRPFixedCPHD : bool
        Parameters : List[ChannelParametersType]
        AddedParameters : None|ParametersCollection
        kwargs
        """

        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.RefChId = RefChId
        self.FXFixedCPHD = FXFixedCPHD
        self.TOAFixedCPHD = TOAFixedCPHD
        self.SRPFixedCPHD = SRPFixedCPHD
        self.Parameters = Parameters
        self.AddedParameters = AddedParameters
        super(ChannelType, self).__init__(**kwargs)

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required - the maximum over the
        per-channel parameter requirements.
        """

        candidates = [(1, 0, 1)]
        if self.Parameters is not None:
            candidates.extend(entry.version_required() for entry in self.Parameters)
        return max(candidates)
| 28,120 | 37.73416 | 103 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/GeoInfo.py | """
The GeoInfo definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from collections import OrderedDict
from xml.etree import ElementTree
from typing import List, Dict
import numpy
from sarpy.io.xml.base import Serializable, ParametersCollection, \
find_children, create_new_node
from sarpy.io.xml.descriptors import StringDescriptor, \
ParametersDescriptor, SerializableListDescriptor
from sarpy.io.complex.sicd_elements.blocks import LatLonRestrictionType, LatLonArrayElementType
from .base import DEFAULT_STRICT
class LineType(Serializable):
    """
    A geographic line feature, serialized as an ordered collection of indexed
    `EndPoint` children with a `size` attribute on the parent node.
    """
    _fields = ('EndPoint', 'size')
    _required = ('EndPoint', 'size')

    def __init__(self, EndPoint=None, **kwargs):
        """
        Parameters
        ----------
        EndPoint : List[LatLonArrayElementType]|numpy.ndarray|list|tuple
        kwargs
        """
        self._array = None  # backing storage for the EndPoint property
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.EndPoint = EndPoint
        super(LineType, self).__init__(**kwargs)

    @property
    def size(self):
        """
        int: The size attribute
        """
        return 0 if self._array is None else self._array.size

    @property
    def EndPoint(self):
        """
        numpy.ndarray: The array of points.
        """
        return numpy.array([], dtype='object') if self._array is None else self._array

    @EndPoint.setter
    def EndPoint(self, value):
        # None clears the backing array
        if value is None:
            self._array = None
            return
        # an ndarray is accepted directly only if every entry is already the proper type
        if isinstance(value, numpy.ndarray):
            is_type = True
            for entry in value:
                is_type &= isinstance(entry, LatLonArrayElementType)
            if is_type:
                self._array = value
                return
        # otherwise, coerce each entry and (re)assign one-based index values
        if isinstance(value, (numpy.ndarray, list, tuple)):
            use_value = []
            for i, entry in enumerate(value):
                if isinstance(entry, LatLonArrayElementType):
                    entry.index = i+1
                    use_value.append(entry)
                elif isinstance(entry, dict):
                    e_val = LatLonArrayElementType.from_dict(entry)
                    e_val.index = i+1
                    use_value.append(e_val)
                elif isinstance(entry, (numpy.ndarray, list, tuple)):
                    use_value.append(LatLonArrayElementType.from_array(entry, index=i+1))
                else:
                    raise TypeError('Got unexpected type for element of EndPoint array `{}`'.format(type(entry)))
            self._array = numpy.array(use_value, dtype='object')
        else:
            raise TypeError('Got unexpected type for EndPoint array `{}`'.format(type(value)))

    def __getitem__(self, item):
        # delegate subscripting to the backing array
        return self._array.__getitem__(item)

    def __setitem__(self, key, value):
        # delegate item assignment to the backing array
        self._array.__setitem__(key, value)

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        """
        Parameters
        ----------
        node
        xml_ns : None|dict
        ns_key : None|str
        kwargs : dict

        Returns
        -------
        LineType
        """
        # NOTE(review): the `kwargs` parameter is accepted for signature
        # compatibility but not used here
        # presumably `_child_xml_ns_key` is provided by the Serializable machinery
        end_point_key = cls._child_xml_ns_key.get('EndPoint', ns_key)
        end_points = []
        for cnode in find_children(node, 'EndPoint', xml_ns, end_point_key):
            end_points.append(LatLonArrayElementType.from_node(cnode, xml_ns, ns_key=end_point_key))
        return cls(EndPoint=end_points)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        # serialize as a parent node carrying a `size` attribute with one
        # child node per end point
        if parent is None:
            parent = doc.getroot()
        if ns_key is None:
            node = create_new_node(doc, tag, parent=parent)
        else:
            node = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)
        node.attrib['size'] = str(self.size)
        end_point_key = self._child_xml_ns_key.get('EndPoint', ns_key)
        for entry in self.EndPoint:
            entry.to_node(doc, 'EndPoint', ns_key=end_point_key, parent=node,
                          check_validity=check_validity, strict=strict)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        # dictionary form mirrors the XML structure
        return OrderedDict([
            ('EndPoint', [entry.to_dict() for entry in self.EndPoint]),
            ('size', self.size)])
class PolygonType(Serializable):
    """
    A geographic polygon, stored as a sized array of `Vertex` elements with
    WGS-84 coordinates.
    """

    _fields = ('Vertex', 'size')
    _required = ('Vertex', 'size')

    def __init__(self, Vertex=None, **kwargs):
        """
        Parameters
        ----------
        Vertex : List[LatLonArrayElementType]|numpy.ndarray|list|tuple
        kwargs
        """

        self._array = None
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Vertex = Vertex
        super(PolygonType, self).__init__(**kwargs)

    @property
    def size(self):
        """
        int: The size attribute (number of vertices; 0 when unset).
        """

        if self._array is None:
            return 0
        else:
            return self._array.size

    @property
    def Vertex(self):
        """
        numpy.ndarray: The array of vertices.
        """

        if self._array is None:
            # return a size-0 object array, so that iterating an unset Vertex
            # is a no-op (numpy.array((0,), ...) would yield the integer 0)
            return numpy.empty((0,), dtype='object')
        else:
            return self._array

    @Vertex.setter
    def Vertex(self, value):
        if value is None:
            self._array = None
            return
        # an ndarray already populated with the proper element type is adopted as-is
        if isinstance(value, numpy.ndarray) and \
                all(isinstance(entry, LatLonArrayElementType) for entry in value):
            self._array = value
            return
        if isinstance(value, (numpy.ndarray, list, tuple)):
            # coerce each entry to LatLonArrayElementType, assigning 1-based indices
            use_value = []
            for i, entry in enumerate(value):
                if isinstance(entry, LatLonArrayElementType):
                    entry.index = i + 1
                    use_value.append(entry)
                elif isinstance(entry, dict):
                    e_val = LatLonArrayElementType.from_dict(entry)
                    e_val.index = i + 1
                    use_value.append(e_val)
                elif isinstance(entry, (numpy.ndarray, list, tuple)):
                    use_value.append(LatLonArrayElementType.from_array(entry, index=i + 1))
                else:
                    raise TypeError('Got unexpected type for element of Vertex array `{}`'.format(type(entry)))
            self._array = numpy.array(use_value, dtype='object')
        else:
            raise TypeError('Got unexpected type for Vertex array `{}`'.format(type(value)))

    def __getitem__(self, item):
        return self._array.__getitem__(item)

    def __setitem__(self, key, value):
        self._array.__setitem__(key, value)

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        """
        Construct from an XML node by deserializing each `Vertex` child.

        Parameters
        ----------
        node
        xml_ns : None|dict
        ns_key : None|str
        kwargs : dict

        Returns
        -------
        PolygonType
        """

        vertex_key = cls._child_xml_ns_key.get('Vertex', ns_key)
        vertices = []
        for cnode in find_children(node, 'Vertex', xml_ns, vertex_key):
            vertices.append(LatLonArrayElementType.from_node(cnode, xml_ns, ns_key=vertex_key))
        return cls(Vertex=vertices)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        if parent is None:
            parent = doc.getroot()
        if ns_key is None:
            node = create_new_node(doc, tag, parent=parent)
        else:
            node = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)
        node.attrib['size'] = str(self.size)
        vertex_key = self._child_xml_ns_key.get('Vertex', ns_key)
        for entry in self.Vertex:
            entry.to_node(doc, 'Vertex', ns_key=vertex_key, parent=node,
                          check_validity=check_validity, strict=strict)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        return OrderedDict([
            ('Vertex', [entry.to_dict() for entry in self.Vertex]),
            ('size', self.size)])
class GeoInfoType(Serializable):
    """
    A geographic feature, which may recursively contain nested GeoInfo features.
    """

    _fields = ('name', 'Descriptions', 'Point', 'Line', 'Polygon', 'GeoInfo')
    _required = ('name', )
    _set_as_attribute = ('name', )
    _collections_tags = {
        'Descriptions': {'array': False, 'child_tag': 'Desc'},
        'Point': {'array': False, 'child_tag': 'Point'},
        'Line': {'array': False, 'child_tag': 'Line'},
        'Polygon': {'array': False, 'child_tag': 'Polygon'},
        'GeoInfo': {'array': False, 'child_tag': 'GeoInfo'}
    }
    # descriptors
    name = StringDescriptor(
        'name', _required, strict=DEFAULT_STRICT,
        docstring='The name.')  # type: str
    Descriptions = ParametersDescriptor(
        'Descriptions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Descriptions of the geographic feature.')  # type: ParametersCollection
    Point = SerializableListDescriptor(
        'Point', LatLonRestrictionType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Geographic points with WGS-84 coordinates.'
    )  # type: List[LatLonRestrictionType]
    Line = SerializableListDescriptor(
        'Line', LineType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Geographic lines (array) with WGS-84 coordinates.'
    )  # type: List[LineType]
    Polygon = SerializableListDescriptor(
        'Polygon', PolygonType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Geographic polygons (array) with WGS-84 coordinates.'
    )  # type: List[PolygonType]

    def __init__(self, name=None, Descriptions=None, Point=None, Line=None,
                 Polygon=None, GeoInfo=None, **kwargs):
        """
        Parameters
        ----------
        name : str
        Descriptions : ParametersCollection|dict
        Point : List[LatLonRestrictionType]
        Line : List[LineType]
        Polygon : List[PolygonType]
        GeoInfo : None|GeoInfoType|List[GeoInfoType]
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.name = name
        self.Descriptions = Descriptions
        self.Point = Point
        self.Line = Line
        self.Polygon = Polygon
        # nested GeoInfo children are managed as a plain list, outside the
        # descriptor framework, to support recursion
        self._GeoInfo = []
        if GeoInfo is None:
            pass
        elif isinstance(GeoInfo, GeoInfoType):
            self.addGeoInfo(GeoInfo)
        elif isinstance(GeoInfo, (list, tuple)):
            for el in GeoInfo:
                self.addGeoInfo(el)
        else:
            raise ValueError('GeoInfo got unexpected type {}'.format(type(GeoInfo)))
        super(GeoInfoType, self).__init__(**kwargs)

    @property
    def GeoInfo(self):
        """
        List[GeoInfoType]: list of GeoInfo objects.
        """

        return self._GeoInfo

    def getGeoInfo(self, key):
        """
        Get GeoInfo(s) with name attribute == `key`.

        Parameters
        ----------
        key : str

        Returns
        -------
        List[GeoInfoType]
        """

        return [entry for entry in self._GeoInfo if entry.name == key]

    def addGeoInfo(self, value):
        """
        Add the given GeoInfo to the GeoInfo list.

        Parameters
        ----------
        value : GeoInfoType

        Returns
        -------
        None
        """

        if isinstance(value, ElementTree.Element):
            gi_key = self._child_xml_ns_key.get('GeoInfo', self._xml_ns_key)
            value = GeoInfoType.from_node(value, self._xml_ns, ns_key=gi_key)
        elif isinstance(value, dict):
            value = GeoInfoType.from_dict(value)

        if isinstance(value, GeoInfoType):
            self._GeoInfo.append(value)
        else:
            raise TypeError('Trying to set GeoInfo element with unexpected type {}'.format(type(value)))

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        if kwargs is None:
            kwargs = OrderedDict()
        gi_key = cls._child_xml_ns_key.get('GeoInfo', ns_key)
        kwargs['GeoInfo'] = find_children(node, 'GeoInfo', xml_ns, gi_key)
        return super(GeoInfoType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        node = super(GeoInfoType, self).to_node(
            doc, tag, ns_key=ns_key, parent=parent, check_validity=check_validity,
            strict=strict, exclude=exclude+('GeoInfo', ))
        # serialize the GeoInfo children under the 'GeoInfo' tag (matching
        # from_node), rather than echoing this element's own tag
        if self._GeoInfo is not None and len(self._GeoInfo) > 0:
            for entry in self._GeoInfo:
                entry.to_node(doc, 'GeoInfo', ns_key=ns_key, parent=node, strict=strict)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = super(GeoInfoType, self).to_dict(
            check_validity=check_validity, strict=strict, exclude=exclude+('GeoInfo', ))
        # slap on the GeoInfo children
        if self.GeoInfo is not None and len(self.GeoInfo) > 0:
            out['GeoInfo'] = [entry.to_dict(check_validity=check_validity, strict=strict) for entry in self._GeoInfo]
        return out
| 13,747 | 32.531707 | 117 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/SupportArray.py | """
The Support Array parameters definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from xml.etree import ElementTree
from typing import Union, List, Tuple, Optional
import numpy
from sarpy.io.xml.base import Serializable, ParametersCollection, get_node_value
from sarpy.io.xml.descriptors import FloatDescriptor, StringDescriptor, StringEnumDescriptor, \
ParametersDescriptor, SerializableListDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .utils import homogeneous_dtype
class SupportArrayCore(Serializable):
    """
    The support array base case.

    Defines the identifier, element format, and the uniformly-sampled
    two-dimensional grid (origin `X0`/`Y0` and sample spacings `XSS`/`YSS`)
    shared by all CPHD support array descriptions.
    """

    _fields = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS', 'NODATA')
    _required = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS')
    _numeric_format = {'X0': FLOAT_FORMAT, 'Y0': FLOAT_FORMAT, 'XSS': FLOAT_FORMAT, 'YSS': FLOAT_FORMAT}
    # descriptors
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='The support array identifier.')  # type: str
    ElementFormat = StringDescriptor(
        'ElementFormat', _required, strict=DEFAULT_STRICT,
        docstring='The data element format.')  # type: str
    X0 = FloatDescriptor(
        'X0', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: float
    Y0 = FloatDescriptor(
        'Y0', _required, strict=DEFAULT_STRICT,
        docstring='')  # type: float
    # sample spacings are constrained to be positive
    XSS = FloatDescriptor(
        'XSS', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='')  # type: float
    YSS = FloatDescriptor(
        'YSS', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='')  # type: float

    def __init__(
            self,
            Identifier: str = None,
            ElementFormat: str = None,
            X0: float = None,
            Y0: float = None,
            XSS: float = None,
            YSS: float = None,
            NODATA: str = None,
            **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        ElementFormat : str
        X0 : float
        Y0 : float
        XSS : float
        YSS : float
        NODATA : None|str
        kwargs
        """

        # NODATA is handled via property rather than a descriptor
        self._NODATA = None
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Identifier = Identifier
        self.ElementFormat = ElementFormat
        self.X0 = X0
        self.Y0 = Y0
        self.XSS = XSS
        self.YSS = YSS
        self.NODATA = NODATA
        super(SupportArrayCore, self).__init__(**kwargs)

    @property
    def NODATA(self) -> Optional[str]:
        """
        None|str: The no data hex string value.
        """

        return self._NODATA

    @NODATA.setter
    def NODATA(self, value: Optional[str]):
        if value is None:
            self._NODATA = None
            return

        if isinstance(value, ElementTree.Element):
            # extract the text content of an XML node
            value = get_node_value(value)

        if isinstance(value, str):
            self._NODATA = value
        elif isinstance(value, bytes):
            self._NODATA = value.decode('utf-8')
        elif isinstance(value, int):
            # conversion from numeric no-data values is not yet supported
            raise NotImplementedError
        elif isinstance(value, float):
            raise NotImplementedError
        else:
            raise TypeError('Got unexpected type {}'.format(type(value)))

    def get_nodata_as_int(self) -> Optional[int]:
        """
        Get the no data value as an integer value.

        Returns
        -------
        None|int

        Raises
        ------
        NotImplementedError
            Hex-string to integer conversion is not yet implemented for a
            populated NODATA value.
        """

        if self._NODATA is None:
            return None

        raise NotImplementedError

    def get_nodata_as_float(self) -> Optional[float]:
        """
        Gets the no data value as a floating point value.

        Returns
        -------
        None|float

        Raises
        ------
        NotImplementedError
            Hex-string to float conversion is not yet implemented for a
            populated NODATA value.
        """

        if self._NODATA is None:
            return None

        raise NotImplementedError

    def get_numpy_format(self) -> Tuple[numpy.dtype, int]:
        """
        Convert the element format to a numpy dtype (including endianness) and depth.

        Returns
        -------
        data: numpy.dtype
        depth: int
        """

        return homogeneous_dtype(self.ElementFormat, return_length=True)
class IAZArrayType(SupportArrayCore):
    """
    Array of scene surface heights expressed in image coordinate IAZ values (meters).
    Grid coordinates are image area coordinates (IAX, IAY).
    """

    _fields = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS', 'NODATA')
    _required = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS')
    # descriptors
    # ElementFormat is restricted to the single value permitted for this array type
    ElementFormat = StringEnumDescriptor(
        'ElementFormat', ('IAZ=F4;', ), _required, strict=DEFAULT_STRICT, default_value='IAZ=F4;',
        docstring='The data element format.')  # type: str

    def __init__(
            self,
            Identifier: str = None,
            ElementFormat: str = 'IAZ=F4;',
            X0: float = None,
            Y0: float = None,
            XSS: float = None,
            YSS: float = None,
            NODATA: str = None,
            **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        ElementFormat : str
        X0 : float
        Y0 : float
        XSS : float
        YSS : float
        NODATA : str
        kwargs
        """

        super(IAZArrayType, self).__init__(
            Identifier=Identifier, ElementFormat=ElementFormat, X0=X0, Y0=Y0,
            XSS=XSS, YSS=YSS, NODATA=NODATA, **kwargs)
class AntGainPhaseType(SupportArrayCore):
    """
    Antenna array with values are antenna gain and phase expressed in dB and
    cycles. Array coordinates are direction cosines with respect to the
    ACF (DCX, DCY).
    """

    _fields = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS', 'NODATA')
    _required = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS')
    # descriptors
    # ElementFormat is restricted to the single value permitted for this array type
    ElementFormat = StringEnumDescriptor(
        'ElementFormat', ('Gain=F4;Phase=F4;', ), _required, strict=DEFAULT_STRICT, default_value='Gain=F4;Phase=F4;',
        docstring='The data element format.')  # type: str

    def __init__(
            self,
            Identifier: str = None,
            ElementFormat: str = 'Gain=F4;Phase=F4;',
            X0: float = None,
            Y0: float = None,
            XSS: float = None,
            YSS: float = None,
            NODATA: str = None,
            **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        ElementFormat : str
        X0 : float
        Y0 : float
        XSS : float
        YSS : float
        NODATA : str
        kwargs
        """

        super(AntGainPhaseType, self).__init__(
            Identifier=Identifier, ElementFormat=ElementFormat, X0=X0, Y0=Y0,
            XSS=XSS, YSS=YSS, NODATA=NODATA, **kwargs)
class DwellTimeArrayType(SupportArrayCore):
    """
    Array of COD times (sec) and Dwell Times (sec) for points on reference surface.
    Array coordinates are image area coordinates (IAX, IAY)
    """

    _fields = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS', 'NODATA')
    _required = ('Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS')
    # descriptors
    # ElementFormat is restricted to the single value permitted for this array type
    ElementFormat = StringEnumDescriptor(
        'ElementFormat', ('COD=F4;DT=F4;', ), _required, strict=DEFAULT_STRICT,
        default_value='COD=F4;DT=F4;',
        docstring='The data element format.')  # type: str

    def __init__(
            self,
            Identifier: str = None,
            ElementFormat: str = 'COD=F4;DT=F4;',
            X0: float = None,
            Y0: float = None,
            XSS: float = None,
            YSS: float = None,
            NODATA: str = None,
            **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        ElementFormat : str
        X0 : float
        Y0 : float
        XSS : float
        YSS : float
        NODATA : str
        kwargs
        """

        super(DwellTimeArrayType, self).__init__(
            Identifier=Identifier, ElementFormat=ElementFormat, X0=X0, Y0=Y0,
            XSS=XSS, YSS=YSS, NODATA=NODATA, **kwargs)
class AddedSupportArrayType(SupportArrayCore):
    """
    Additional arrays (two-dimensional), where the content and format and units
    of each element are user defined.
    """

    _fields = (
        'Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS', 'NODATA',
        'XUnits', 'YUnits', 'ZUnits', 'Parameters')
    _required = (
        'Identifier', 'ElementFormat', 'X0', 'Y0', 'XSS', 'YSS',
        'XUnits', 'YUnits', 'ZUnits')
    _collections_tags = {
        'Parameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    XUnits = StringDescriptor(
        'XUnits', _required, strict=DEFAULT_STRICT,
        docstring='The X units.')  # type: str
    YUnits = StringDescriptor(
        'YUnits', _required, strict=DEFAULT_STRICT,
        docstring='The Y units.')  # type: str
    ZUnits = StringDescriptor(
        'ZUnits', _required, strict=DEFAULT_STRICT,
        docstring='The Z units.')  # type: str
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Other necessary free-form parameters.')  # type: Union[None, ParametersCollection]

    def __init__(self, Identifier=None, ElementFormat=None,
                 X0=None, Y0=None, XSS=None, YSS=None, NODATA=None,
                 XUnits=None, YUnits=None, ZUnits=None, Parameters=None, **kwargs):
        """
        Parameters
        ----------
        Identifier : str
        ElementFormat : str
        X0 : float
        Y0 : float
        XSS : float
        YSS : float
        NODATA : str
        XUnits : str
        YUnits : str
        ZUnits : str
        Parameters : ParametersCollection|dict
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # set the local (units/parameters) attributes before deferring the
        # grid definition to the base class
        self.XUnits = XUnits
        self.YUnits = YUnits
        self.ZUnits = ZUnits
        self.Parameters = Parameters
        super(AddedSupportArrayType, self).__init__(
            Identifier=Identifier, ElementFormat=ElementFormat, X0=X0, Y0=Y0,
            XSS=XSS, YSS=YSS, NODATA=NODATA, **kwargs)
class SupportArrayType(Serializable):
    """
    Parameters that describe the binary support array(s) content and
    grid coordinates.
    """

    _fields = ('IAZArray', 'AntGainPhase', 'DwellTimeArray', 'AddedSupportArray')
    _required = ()
    _collections_tags = {
        'IAZArray': {'array': False, 'child_tag': 'IAZArray'},
        'AntGainPhase': {'array': False, 'child_tag': 'AntGainPhase'},
        'DwellTimeArray': {'array': False, 'child_tag': 'DwellTimeArray'},
        'AddedSupportArray': {'array': False, 'child_tag': 'AddedSupportArray'}}
    # descriptors
    IAZArray = SerializableListDescriptor(
        'IAZArray', IAZArrayType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Arrays of scene surface heights expressed in image coordinate IAZ '
                  'values (meters). Grid coordinates are image area coordinates '
                  '(IAX, IAY).')  # type: Union[None, List[IAZArrayType]]
    AntGainPhase = SerializableListDescriptor(
        'AntGainPhase', AntGainPhaseType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Antenna arrays with values are antenna gain and phase expressed in dB '
                  'and cycles. Array coordinates are direction cosines with respect to '
                  'the ACF (DCX, DCY).')  # type: Union[None, List[AntGainPhaseType]]
    DwellTimeArray = SerializableListDescriptor(
        'DwellTimeArray', DwellTimeArrayType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Array of COD times (sec) and Dwell Times (sec) for points on '
                  'reference surface. Array coordinates are image area '
                  'coordinates (IAX, IAY).')  # type: Union[None, List[DwellTimeArrayType]]
    AddedSupportArray = SerializableListDescriptor(
        'AddedSupportArray', AddedSupportArrayType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Additional arrays (two-dimensional), where the content and format and units of each '
                  'element are user defined.')  # type: Union[None, List[AddedSupportArrayType]]

    def __init__(
            self,
            IAZArray: Optional[List[IAZArrayType]] = None,
            AntGainPhase: Optional[List[AntGainPhaseType]] = None,
            DwellTimeArray: Optional[List[DwellTimeArrayType]] = None,
            AddedSupportArray: Optional[List[AddedSupportArrayType]] = None,
            **kwargs):
        """
        Parameters
        ----------
        IAZArray : None|List[IAZArrayType]
        AntGainPhase : None|List[AntGainPhaseType]
        DwellTimeArray : None|List[DwellTimeArrayType]
        AddedSupportArray : None|List[AddedSupportArrayType]
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.IAZArray = IAZArray
        self.AntGainPhase = AntGainPhase
        self.DwellTimeArray = DwellTimeArray
        self.AddedSupportArray = AddedSupportArray
        super(SupportArrayType, self).__init__(**kwargs)

    def find_support_array(
            self,
            identifier: str) -> Union[IAZArrayType, AntGainPhaseType, DwellTimeArrayType, AddedSupportArrayType]:
        """
        Find and return the details for support array associated with the given identifier.

        Parameters
        ----------
        identifier : str

        Returns
        -------
        IAZArrayType|AntGainPhaseType|DwellTimeArrayType|AddedSupportArrayType

        Raises
        ------
        KeyError
            If no support array of any kind has the given identifier.
        """

        # search all support array collections, in declaration order
        for collection in (self.IAZArray, self.AntGainPhase, self.DwellTimeArray, self.AddedSupportArray):
            if collection is None:
                continue
            for entry in collection:
                if entry.Identifier == identifier:
                    return entry
        raise KeyError('Identifier {} not associated with a support array.'.format(identifier))

    def version_required(self) -> Tuple[int, int, int]:
        """
        The minimum CPHD version required to support the populated fields.

        Returns
        -------
        (int, int, int)
        """

        required = (1, 0, 1)
        # DwellTimeArray support arrays were introduced in CPHD 1.1.0
        if self.DwellTimeArray is not None:
            required = max(required, (1, 1, 0))
        return required
| 14,977 | 32.433036 | 118 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/CPHD.py | """
The Compensated Phase History Data 1.0.1 definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = ("Thomas McCullough", "Daniel Pressler, Valkyrie")
from xml.etree import ElementTree
from collections import OrderedDict
from typing import Union
import numpy
from sarpy.io.xml.base import Serializable, find_children, parse_xml_from_file, \
parse_xml_from_string
from sarpy.io.xml.descriptors import SerializableDescriptor, IntegerDescriptor, \
StringDescriptor
from sarpy.io.complex.sicd_elements.MatchInfo import MatchInfoType
from .base import DEFAULT_STRICT
from .CollectionID import CollectionIDType
from .Global import GlobalType
from .SceneCoordinates import SceneCoordinatesType
from .Data import DataType
from .Channel import ChannelType
from .PVP import PVPType
from .SupportArray import SupportArrayType
from .Dwell import DwellType
from .ReferenceGeometry import ReferenceGeometryType
from .Antenna import AntennaType
from .TxRcv import TxRcvType
from .ErrorParameters import ErrorParametersType
from .ProductInfo import ProductInfoType
from .GeoInfo import GeoInfoType
from sarpy.io.phase_history.cphd_schema import get_urn_details, WRITABLE_VERSIONS, \
get_namespace, get_default_tuple
#########
# Module variables
# mapping of each writable CPHD version string to its XML namespace and urn details
_CPHD_SPEC_DETAILS = {
    key: {'namespace': get_namespace(key), 'details': get_urn_details(key)}
    for key in WRITABLE_VERSIONS}
# the default CPHD version, as a (major, minor, patch) tuple and 'X.Y.Z' string
_CPHD_DEFAULT_TUPLE = get_default_tuple()
_CPHD_DEFAULT_VERSION = '{}.{}.{}'.format(*_CPHD_DEFAULT_TUPLE)
# form-feed + newline sequence that terminates each CPHD file header section
CPHD_SECTION_TERMINATOR = b'\f\n'
#########
# CPHD header object
def _parse_cphd_header_field(line):
    """
    Parse the CPHD header field, or return `None` as a termination signal.

    Parameters
    ----------
    line : bytes

    Returns
    -------
    None|(str, str)
    """

    if line.startswith(CPHD_SECTION_TERMINATOR):
        return None

    pieces = line.split(b' := ')
    if len(pieces) != 2:
        raise ValueError('Cannot extract CPHD header value from line {}'.format(line))
    raw_field, raw_value = pieces
    return raw_field.strip().decode('utf-8'), raw_value.strip().decode('utf-8')
class CPHDHeaderBase(object):
    """
    Abstract base for CPHD file header objects - subclasses declare the
    permitted header fields.
    """

    _fields = ()
    _required = ()

    def __init__(self, **kwargs):
        # intended as an abstract object
        pass

    @classmethod
    def from_file_object(cls, fi):
        """
        Extract the CPHD header object from a file opened in byte mode.
        This file object is assumed to be at the correct location for the
        CPHD header.

        Parameters
        ----------
        fi
            The open file object, which will be progressively read.

        Returns
        -------
        CPHDHeaderBase
        """

        header_values = {}
        while True:
            line = fi.readline()
            parsed = _parse_cphd_header_field(line)
            if parsed is None:
                # hit the section terminator
                break
            field_name, field_value = parsed
            if field_name not in cls._fields:
                raise ValueError('Cannot extract CPHD header value from line {}'.format(line))
            header_values[field_name] = field_value
        return cls(**header_values)
class CPHDHeader(CPHDHeaderBase):
    """
    The CPHD file header, describing the location and size of the file's
    XML, support, PVP and signal blocks.
    """

    _fields = (
        'XML_BLOCK_SIZE', 'XML_BLOCK_BYTE_OFFSET', 'SUPPORT_BLOCK_SIZE', 'SUPPORT_BLOCK_BYTE_OFFSET',
        'PVP_BLOCK_SIZE', 'PVP_BLOCK_BYTE_OFFSET', 'SIGNAL_BLOCK_SIZE', 'SIGNAL_BLOCK_BYTE_OFFSET',
        'CLASSIFICATION', 'RELEASE_INFO')
    _required = (
        'XML_BLOCK_SIZE', 'XML_BLOCK_BYTE_OFFSET', 'PVP_BLOCK_SIZE', 'PVP_BLOCK_BYTE_OFFSET',
        'SIGNAL_BLOCK_SIZE', 'SIGNAL_BLOCK_BYTE_OFFSET', 'CLASSIFICATION', 'RELEASE_INFO')
    # descriptor
    XML_BLOCK_SIZE = IntegerDescriptor(
        'XML_BLOCK_SIZE', _required, strict=True,
        docstring='Size of the XML instance that describes the product in bytes. '
                  'Size does NOT include the 2 bytes of the section terminator.')  # type: int
    XML_BLOCK_BYTE_OFFSET = IntegerDescriptor(
        'XML_BLOCK_BYTE_OFFSET', _required, strict=True,
        docstring='Offset to the first byte of the XML block in bytes.')  # type: int
    SUPPORT_BLOCK_SIZE = IntegerDescriptor(
        'SUPPORT_BLOCK_SIZE', _required, strict=True,
        docstring='Size of the Support block in bytes. Note - If the Support block is omitted, this '
                  'is not included.')  # type: int
    SUPPORT_BLOCK_BYTE_OFFSET = IntegerDescriptor(
        'SUPPORT_BLOCK_BYTE_OFFSET', _required, strict=True,
        docstring='Offset to the first byte of the Support block in bytes. Note - If the Support '
                  'block is omitted, this is not included.')  # type: int
    PVP_BLOCK_SIZE = IntegerDescriptor(
        'PVP_BLOCK_SIZE', _required, strict=True,
        docstring='Size of the PVP block in bytes.')  # type: int
    PVP_BLOCK_BYTE_OFFSET = IntegerDescriptor(
        'PVP_BLOCK_BYTE_OFFSET', _required, strict=True,
        docstring='Offset to the first byte of the PVP block in bytes.')  # type: int
    SIGNAL_BLOCK_SIZE = IntegerDescriptor(
        'SIGNAL_BLOCK_SIZE', _required, strict=True,
        docstring='Size of the Signal block in bytes.')  # type: int
    SIGNAL_BLOCK_BYTE_OFFSET = IntegerDescriptor(
        'SIGNAL_BLOCK_BYTE_OFFSET', _required, strict=True,
        docstring='Offset to the first byte of the Signal block in bytes.')  # type: int
    CLASSIFICATION = StringDescriptor(
        'CLASSIFICATION', _required, strict=True, default_value='UNCLASSIFIED',
        docstring='Product classification information that is human-readable.')  # type: str
    RELEASE_INFO = StringDescriptor(
        'RELEASE_INFO', _required, strict=True, default_value='UNRESTRICTED',
        docstring='Product release information that is human-readable.')  # type: str

    def __init__(self, XML_BLOCK_SIZE=None, XML_BLOCK_BYTE_OFFSET=None,
                 SUPPORT_BLOCK_SIZE=None, SUPPORT_BLOCK_BYTE_OFFSET=None,
                 PVP_BLOCK_SIZE=None, PVP_BLOCK_BYTE_OFFSET=None,
                 SIGNAL_BLOCK_SIZE=None, SIGNAL_BLOCK_BYTE_OFFSET=None,
                 CLASSIFICATION='UNCLASSIFIED', RELEASE_INFO='UNRESTRICTED',
                 use_version=None):
        self.XML_BLOCK_SIZE = XML_BLOCK_SIZE
        self.XML_BLOCK_BYTE_OFFSET = XML_BLOCK_BYTE_OFFSET
        self.SUPPORT_BLOCK_SIZE = SUPPORT_BLOCK_SIZE
        self.SUPPORT_BLOCK_BYTE_OFFSET = SUPPORT_BLOCK_BYTE_OFFSET
        self.PVP_BLOCK_SIZE = PVP_BLOCK_SIZE
        self.PVP_BLOCK_BYTE_OFFSET = PVP_BLOCK_BYTE_OFFSET
        self.SIGNAL_BLOCK_SIZE = SIGNAL_BLOCK_SIZE
        self.SIGNAL_BLOCK_BYTE_OFFSET = SIGNAL_BLOCK_BYTE_OFFSET
        self.CLASSIFICATION = CLASSIFICATION
        self.RELEASE_INFO = RELEASE_INFO
        # fall back to the module's default CPHD version when unspecified
        self._use_version = _CPHD_DEFAULT_VERSION if use_version is None else use_version
        super(CPHDHeader, self).__init__()

    @property
    def use_version(self) -> str:
        """
        str: The CPHD version string used when forming the file header.
        """

        return self._use_version

    def to_string(self):
        """
        Forms a CPHD file header string (not including the section terminator) from populated attributes.
        """

        return ('CPHD/{}\n'.format(self.use_version)
                + ''.join(["{} := {}\n".format(f, getattr(self, f))
                           for f in self._fields if getattr(self, f) is not None]))
class CPHDType(Serializable):
    """
    The Compensated Phase History Data definition.
    """

    _fields = (
        'CollectionID', 'Global', 'SceneCoordinates', 'Data', 'Channel', 'PVP',
        'SupportArray', 'Dwell', 'ReferenceGeometry', 'Antenna', 'TxRcv',
        'ErrorParameters', 'ProductInfo', 'GeoInfo', 'MatchInfo')
    _required = (
        'CollectionID', 'Global', 'SceneCoordinates', 'Data', 'Channel', 'PVP',
        'Dwell', 'ReferenceGeometry')
    # GeoInfo is declared here, but the list of children is managed manually
    # (see the GeoInfo property/addGeoInfo), not by a descriptor
    _collections_tags = {'GeoInfo': {'array': 'False', 'child_tag': 'GeoInfo'}}
    # descriptors
    CollectionID = SerializableDescriptor(
        'CollectionID', CollectionIDType, _required, strict=DEFAULT_STRICT,
        docstring='General information about the collection.')  # type: CollectionIDType
    Global = SerializableDescriptor(
        'Global', GlobalType, _required, strict=DEFAULT_STRICT,
        docstring='Global parameters that apply to metadata components and CPHD '
                  'signal arrays.')  # type: GlobalType
    SceneCoordinates = SerializableDescriptor(
        'SceneCoordinates', SceneCoordinatesType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that define geographic coordinates for in the imaged '
                  'scene.')  # type: SceneCoordinatesType
    Data = SerializableDescriptor(
        'Data', DataType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe binary data components contained in '
                  'the product.')  # type: DataType
    Channel = SerializableDescriptor(
        'Channel', ChannelType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the data channels contained in the '
                  'product.')  # type: ChannelType
    PVP = SerializableDescriptor(
        'PVP', PVPType, _required, strict=DEFAULT_STRICT,
        docstring='Structure specifying the Per Vector parameters provided for '
                  'each channel of a given product.')  # type: PVPType
    SupportArray = SerializableDescriptor(
        'SupportArray', SupportArrayType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the binary support array(s) content and '
                  'grid coordinates.')  # type: Union[None, SupportArrayType]
    Dwell = SerializableDescriptor(
        'Dwell', DwellType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that specify the dwell time supported by the signal '
                  'arrays contained in the CPHD product.')  # type: DwellType
    ReferenceGeometry = SerializableDescriptor(
        'ReferenceGeometry', ReferenceGeometryType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the collection geometry for the reference '
                  'vector of the reference channel.')  # type: ReferenceGeometryType
    Antenna = SerializableDescriptor(
        'Antenna', AntennaType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the transmit and receive antennas used '
                  'to collect the signal array(s).')  # type: Union[None, AntennaType]
    TxRcv = SerializableDescriptor(
        'TxRcv', TxRcvType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the transmitted waveform(s) and receiver configurations '
                  'used in the collection.')  # type: Union[None, TxRcvType]
    ErrorParameters = SerializableDescriptor(
        'ErrorParameters', ErrorParametersType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the statistics of errors in measured or estimated parameters '
                  'that describe the collection.')  # type: Union[None, ErrorParametersType]
    ProductInfo = SerializableDescriptor(
        'ProductInfo', ProductInfoType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that provide general information about the CPHD product '
                  'and/or the derived products that may be created '
                  'from it.')  # type: Union[None, ProductInfoType]
    MatchInfo = SerializableDescriptor(
        'MatchInfo', MatchInfoType, _required, strict=DEFAULT_STRICT,
        docstring='Information about other collections that are matched to the collection from which '
                  'this CPHD product was generated.')  # type: Union[None, MatchInfoType]
    def __init__(self, CollectionID=None, Global=None, SceneCoordinates=None, Data=None,
                 Channel=None, PVP=None, SupportArray=None, Dwell=None, ReferenceGeometry=None,
                 Antenna=None, TxRcv=None, ErrorParameters=None, ProductInfo=None,
                 GeoInfo=None, MatchInfo=None, **kwargs):
        """
        Parameters
        ----------
        CollectionID : CollectionIDType
        Global : GlobalType
        SceneCoordinates : SceneCoordinatesType
        Data : DataType
        Channel : ChannelType
        PVP : PVPType
        SupportArray : None|SupportArrayType
        Dwell : DwellType
        ReferenceGeometry : ReferenceGeometryType
        Antenna : None|AntennaType
        TxRcv : None|TxRcvType
        ErrorParameters : None|ErrorParametersType
        ProductInfo : None|ProductInfoType
        GeoInfo : None|List[GeoInfoType]|GeoInfoType
        MatchInfo : None|MatchInfoType
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CollectionID = CollectionID
        self.Global = Global
        self.SceneCoordinates = SceneCoordinates
        self.Data = Data
        self.Channel = Channel
        self.PVP = PVP
        self.SupportArray = SupportArray
        self.Dwell = Dwell
        self.ReferenceGeometry = ReferenceGeometry
        self.Antenna = Antenna
        self.TxRcv = TxRcv
        self.ErrorParameters = ErrorParameters
        self.ProductInfo = ProductInfo
        self.MatchInfo = MatchInfo
        # GeoInfo children are managed as a plain list (a single instance,
        # a sequence, or None are all accepted)
        self._GeoInfo = []
        if GeoInfo is None:
            pass
        elif isinstance(GeoInfo, GeoInfoType):
            self.addGeoInfo(GeoInfo)
        elif isinstance(GeoInfo, (list, tuple)):
            for el in GeoInfo:
                self.addGeoInfo(el)
        else:
            raise ValueError('GeoInfo got unexpected type {}'.format(type(GeoInfo)))
        super(CPHDType, self).__init__(**kwargs)
    @property
    def GeoInfo(self):
        """
        List[GeoInfoType]: Parameters that describe a geographic feature.
        """

        return self._GeoInfo
def getGeoInfo(self, key):
"""
Get GeoInfo(s) with name attribute == `key`.
Parameters
----------
key : str
Returns
-------
List[GeoInfoType]
"""
return [entry for entry in self._GeoInfo if entry.name == key]
def addGeoInfo(self, value):
"""
Add the given GeoInfo to the GeoInfo list.
Parameters
----------
value : GeoInfoType
Returns
-------
None
"""
if isinstance(value, ElementTree.Element):
gi_key = self._child_xml_ns_key.get('GeoInfo', self._xml_ns_key)
value = GeoInfoType.from_node(value, self._xml_ns, ns_key=gi_key)
elif isinstance(value, dict):
value = GeoInfoType.from_dict(value)
if isinstance(value, GeoInfoType):
self._GeoInfo.append(value)
else:
raise TypeError('Trying to set GeoInfo element with unexpected type {}'.format(type(value)))
@classmethod
def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
    # collect the (possibly repeated) GeoInfo children explicitly, since the
    # generic deserialization machinery does not handle repeated elements
    effective = OrderedDict() if kwargs is None else kwargs
    geo_ns_key = cls._child_xml_ns_key.get('GeoInfo', ns_key)
    effective['GeoInfo'] = find_children(node, 'GeoInfo', xml_ns, geo_ns_key)
    return super(CPHDType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=effective)
def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
    # serialize everything except GeoInfo, then append those children manually
    node = super(CPHDType, self).to_node(
        doc, tag, ns_key=ns_key, parent=parent, check_validity=check_validity,
        strict=strict, exclude=exclude+('GeoInfo', ))
    geo_entries = self._GeoInfo
    if geo_entries:
        for geo_entry in geo_entries:
            geo_entry.to_node(doc, 'GeoInfo', ns_key=ns_key, parent=node, strict=strict)
    return node
def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
    # serialize everything except GeoInfo, then attach those entries manually
    out = super(CPHDType, self).to_dict(
        check_validity=check_validity, strict=strict, exclude=exclude+('GeoInfo', ))
    if len(self.GeoInfo) > 0:
        serialized = []
        for geo_entry in self._GeoInfo:
            serialized.append(geo_entry.to_dict(check_validity=check_validity, strict=strict))
        out['GeoInfo'] = serialized
    return out
def to_xml_bytes(self, urn=None, tag='CPHD', check_validity=False, strict=DEFAULT_STRICT):
    # default to the namespace urn of the default CPHD version
    effective_urn = get_namespace(_CPHD_DEFAULT_VERSION) if urn is None else urn
    return super(CPHDType, self).to_xml_bytes(
        urn=effective_urn, tag=tag, check_validity=check_validity, strict=strict)
def to_xml_string(self, urn=None, tag='CPHD', check_validity=False, strict=DEFAULT_STRICT):
    raw = self.to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict)
    return raw.decode('utf-8')
def version_required(self):
    """
    Determine the minimal CPHD version required for valid support of this structure.

    Returns
    -------
    tuple
    """

    # baseline is CPHD 1.0.1; any child node may demand something later
    minimum = (1, 0, 1)
    for field_name in self._fields:
        value = getattr(self, field_name)
        if value is None or not hasattr(value, 'version_required'):
            continue
        minimum = max(minimum, value.version_required())
    return minimum
def make_file_header(self, xml_offset=1024, use_version=None):
    """
    Forms a CPHD file header consistent with the information in the Data and CollectionID nodes.

    Parameters
    ----------
    xml_offset : int, optional
        Offset in bytes to the first byte of the XML block. If the provided value is not large enough to account for
        the length of the file header string, a larger value is chosen.
    use_version : None|str
        What version to use?

    Returns
    -------
    header : CPHDHeader
    """

    kwargs = OrderedDict()
    kwargs['use_version'] = _CPHD_DEFAULT_VERSION if use_version is None else use_version

    def _align(val):
        # round the byte offset up to the next multiple of 64
        align_to = 64
        return int(numpy.ceil(float(val)/align_to)*align_to)

    # lay the blocks out in order: XML, (optional) support, PVP, signal
    kwargs['XML_BLOCK_SIZE'] = len(self.to_xml_string())
    kwargs['XML_BLOCK_BYTE_OFFSET'] = xml_offset
    block_end = kwargs['XML_BLOCK_BYTE_OFFSET'] + kwargs['XML_BLOCK_SIZE'] + len(CPHD_SECTION_TERMINATOR)
    if self.Data.NumSupportArrays > 0:
        # the support block is only present when support arrays are defined
        kwargs['SUPPORT_BLOCK_SIZE'] = self.Data.calculate_support_block_size()
        kwargs['SUPPORT_BLOCK_BYTE_OFFSET'] = _align(block_end)
        block_end = kwargs['SUPPORT_BLOCK_BYTE_OFFSET'] + kwargs['SUPPORT_BLOCK_SIZE']
    kwargs['PVP_BLOCK_SIZE'] = self.Data.calculate_pvp_block_size()
    kwargs['PVP_BLOCK_BYTE_OFFSET'] = _align(block_end)
    block_end = kwargs['PVP_BLOCK_BYTE_OFFSET'] + kwargs['PVP_BLOCK_SIZE']
    kwargs['SIGNAL_BLOCK_SIZE'] = self.Data.calculate_signal_block_size()
    kwargs['SIGNAL_BLOCK_BYTE_OFFSET'] = _align(block_end)
    kwargs['CLASSIFICATION'] = self.CollectionID.Classification
    kwargs['RELEASE_INFO'] = self.CollectionID.ReleaseInfo
    header = CPHDHeader(**kwargs)
    header_str = header.to_string()
    min_xml_offset = len(header_str) + len(CPHD_SECTION_TERMINATOR)
    if kwargs['XML_BLOCK_BYTE_OFFSET'] < min_xml_offset:
        # the rendered header itself would overlap the XML block;
        # recurse once with a padded offset so everything fits
        header = self.make_file_header(xml_offset=_align(min_xml_offset + 32), use_version=use_version)
    return header
def get_pvp_dtype(self):
    """
    Gets the (necessarily homogeneous) compound dtype for the corresponding
    PVP structured array.

    Returns
    -------
    numpy.dtype
        A compound dtype for a structured array.

    Raises
    ------
    ValueError
        If no PVP node is defined.
    """

    pvp = self.PVP
    if pvp is None:
        raise ValueError('No PVP defined.')
    return pvp.get_vector_dtype()
@classmethod
def from_xml_file(cls, file_path):
    """
    Construct the cphd object from a stand-alone xml file path.

    Parameters
    ----------
    file_path : str

    Returns
    -------
    CPHDType
    """

    root_node, xml_ns = parse_xml_from_file(file_path)
    namespace_key = 'default' if 'default' in xml_ns else None
    return cls.from_node(root_node, xml_ns=xml_ns, ns_key=namespace_key)
@classmethod
def from_xml_string(cls, xml_string):
    """
    Construct the cphd object from an xml string.

    Parameters
    ----------
    xml_string : str|bytes

    Returns
    -------
    CPHDType
    """

    root_node, xml_ns = parse_xml_from_string(xml_string)
    namespace_key = 'default' if 'default' in xml_ns else None
    return cls.from_node(root_node, xml_ns=xml_ns, ns_key=namespace_key)
| 20,628 | 38.978682 | 120 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/__init__.py | """
**This sub-package is a work in progress to encapsulate pythonic object-oriented CPHD structure 1.0.1
"""
__classification__ = "UNCLASSIFIED"
| 147 | 23.666667 | 101 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/ReferenceGeometry.py | """
The reference geometry parameters definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union
import numpy
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import FloatDescriptor, StringEnumDescriptor, \
SerializableDescriptor
from sarpy.io.complex.sicd_elements.blocks import XYZType
from .base import DEFAULT_STRICT, FLOAT_FORMAT
class SRPType(Serializable):
    """
    The SRP position for the reference vector of the reference channel.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = ('ECF', 'IAC')
    _required = _fields
    # descriptors
    ECF = SerializableDescriptor(
        'ECF', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='SRP position in ECF coordinates.')  # type: XYZType
    IAC = SerializableDescriptor(
        'IAC', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='SRP position in Image Area Coordinates.')  # type: XYZType

    def __init__(self, ECF=None, IAC=None, **kwargs):
        """
        Parameters
        ----------
        ECF : XYZType|numpy.ndarray|list|tuple
        IAC : XYZType|numpy.ndarray|list|tuple
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.ECF = ECF
        self.IAC = IAC
        super(SRPType, self).__init__(**kwargs)
class ReferenceGeometryCore(Serializable):
    """
    The base reference geometry implementation.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = (
        'SideOfTrack', 'SlantRange', 'GroundRange', 'DopplerConeAngle',
        'GrazeAngle', 'IncidenceAngle', 'AzimuthAngle')
    _required = _fields
    _numeric_format = {
        'SlantRange': FLOAT_FORMAT, 'GroundRange': FLOAT_FORMAT, 'DopplerConeAngle': FLOAT_FORMAT,
        'GrazeAngle': FLOAT_FORMAT, 'IncidenceAngle': FLOAT_FORMAT, 'AzimuthAngle': FLOAT_FORMAT}
    # descriptors
    SideOfTrack = StringEnumDescriptor(
        'SideOfTrack', ('L', 'R'), _required, strict=DEFAULT_STRICT,
        docstring='Side of Track parameter for the collection.')  # type: str
    SlantRange = FloatDescriptor(
        'SlantRange', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Slant range from the ARP to the SRP.')  # type: float
    GroundRange = FloatDescriptor(
        'GroundRange', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Ground range from the ARP to the SRP.')  # type: float
    DopplerConeAngle = FloatDescriptor(
        'DopplerConeAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 180),
        docstring='Doppler Cone Angle between ARP velocity and deg SRP Line of '
                  'Sight (LOS).')  # type: float
    GrazeAngle = FloatDescriptor(
        'GrazeAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 90),
        docstring='Grazing angle for the ARP to SRP LOS and the deg Earth Tangent '
                  'Plane (ETP) at the SRP.')  # type: float
    IncidenceAngle = FloatDescriptor(
        'IncidenceAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 90),
        docstring='Incidence angle for the ARP to SRP LOS and the Earth Tangent '
                  'Plane (ETP) at the SRP.')  # type: float
    AzimuthAngle = FloatDescriptor(
        'AzimuthAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 360),
        docstring='Angle from north to the line from the SRP to the ARP ETP '
                  'Nadir (i.e. North to +GPX). Measured clockwise from North '
                  'toward East.')  # type: float

    def __init__(self, SideOfTrack=None, SlantRange=None, GroundRange=None,
                 DopplerConeAngle=None, GrazeAngle=None, IncidenceAngle=None,
                 AzimuthAngle=None, **kwargs):
        """
        Parameters
        ----------
        SideOfTrack : str
        SlantRange : float
        GroundRange : float
        DopplerConeAngle : float
        GrazeAngle : float
        IncidenceAngle : float
        AzimuthAngle : float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SideOfTrack = SideOfTrack
        self.SlantRange = SlantRange
        self.GroundRange = GroundRange
        self.DopplerConeAngle = DopplerConeAngle
        self.GrazeAngle = GrazeAngle
        self.IncidenceAngle = IncidenceAngle
        self.AzimuthAngle = AzimuthAngle
        super(ReferenceGeometryCore, self).__init__(**kwargs)

    @property
    def look(self):
        """
        int: An integer version of `SideOfTrack`:

        * None if `SideOfTrack` is not defined

        * -1 if SideOfTrack == 'R'

        * 1 if SideOfTrack == 'L'
        """

        if self.SideOfTrack is None:
            return None
        return -1 if self.SideOfTrack == 'R' else 1
class MonostaticType(ReferenceGeometryCore):
    """
    Parameters for monostatic collection.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = (
        'ARPPos', 'ARPVel',
        'SideOfTrack', 'SlantRange', 'GroundRange', 'DopplerConeAngle',
        'GrazeAngle', 'IncidenceAngle', 'AzimuthAngle',
        'TwistAngle', 'SlopeAngle', 'LayoverAngle')
    _required = _fields
    _numeric_format = {
        'SlantRange': FLOAT_FORMAT, 'GroundRange': FLOAT_FORMAT, 'DopplerConeAngle': FLOAT_FORMAT,
        'GrazeAngle': FLOAT_FORMAT, 'IncidenceAngle': FLOAT_FORMAT, 'AzimuthAngle': FLOAT_FORMAT,
        'TwistAngle': FLOAT_FORMAT, 'SlopeAngle': FLOAT_FORMAT, 'LayoverAngle': FLOAT_FORMAT}
    # descriptors
    ARPPos = SerializableDescriptor(
        'ARPPos', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='ARP position in ECF coordinates.')  # type: XYZType
    ARPVel = SerializableDescriptor(
        'ARPVel', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='ARP velocity in ECF coordinates.')  # type: XYZType
    TwistAngle = FloatDescriptor(
        'TwistAngle', _required, strict=DEFAULT_STRICT, bounds=(-90, 90),
        docstring='Twist angle between cross range in the ETP and cross range in '
                  'the slant plane at the SRP.')  # type: float
    SlopeAngle = FloatDescriptor(
        'SlopeAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 90),
        docstring='Angle between the ETP normal (uUP) and the slant plane normal '
                  '(uSPN) at the SRP.')  # type: float
    LayoverAngle = FloatDescriptor(
        'LayoverAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 360),
        docstring='Angle from north to the layover direction in the ETP. Measured '
                  'clockwise from +North toward +East.')  # type: float

    def __init__(self, ARPPos=None, ARPVel=None,
                 SideOfTrack=None, SlantRange=None, GroundRange=None, DopplerConeAngle=None,
                 GrazeAngle=None, IncidenceAngle=None, AzimuthAngle=None,
                 TwistAngle=None, SlopeAngle=None, LayoverAngle=None, **kwargs):
        """
        Parameters
        ----------
        ARPPos : XYZType|numpy.ndarray|list|tuple
        ARPVel : XYZType|numpy.ndarray|list|tuple
        SideOfTrack : str
        SlantRange : float
        GroundRange : float
        DopplerConeAngle : float
        GrazeAngle : float
        IncidenceAngle : float
        AzimuthAngle : float
        TwistAngle : float
        SlopeAngle : float
        LayoverAngle : float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.ARPPos = ARPPos
        self.ARPVel = ARPVel
        self.TwistAngle = TwistAngle
        self.SlopeAngle = SlopeAngle
        self.LayoverAngle = LayoverAngle
        # the shared core angles/ranges are handled by the parent class
        super(MonostaticType, self).__init__(
            SideOfTrack=SideOfTrack, SlantRange=SlantRange, GroundRange=GroundRange,
            DopplerConeAngle=DopplerConeAngle, GrazeAngle=GrazeAngle, IncidenceAngle=IncidenceAngle,
            AzimuthAngle=AzimuthAngle, **kwargs)

    @property
    def MultipathGround(self):
        """
        float: The anticipated angle of multipath features on the ground in degrees.
        Computed as -arctan(tan(TwistAngle)*sin(GrazeAngle)), in degrees.
        """

        if self.TwistAngle is None:
            return None
        else:
            return numpy.rad2deg(
                -numpy.arctan(numpy.tan(numpy.deg2rad(self.TwistAngle)) *
                              numpy.sin(numpy.deg2rad(self.GrazeAngle))))

    @property
    def Multipath(self):
        """
        float: The anticipated angle of multipath features in degrees.
        """

        if self.MultipathGround is None:
            return None
        else:
            return numpy.mod(self.AzimuthAngle - 180 + self.MultipathGround, 360)

    @property
    def Shadow(self):
        """
        float: The anticipated angle of shadow features in degrees.
        """

        return numpy.mod(self.AzimuthAngle - 180, 360)
class BistaticTxRcvType(ReferenceGeometryCore):
    """
    Parameters that describe the Transmit/Receive platforms.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = (
        'Time', 'Pos', 'Vel',
        'SideOfTrack', 'SlantRange', 'GroundRange', 'DopplerConeAngle',
        'GrazeAngle', 'IncidenceAngle', 'AzimuthAngle')
    _required = _fields
    _numeric_format = {
        'Time': FLOAT_FORMAT, 'SlantRange': FLOAT_FORMAT, 'GroundRange': FLOAT_FORMAT, 'DopplerConeAngle': FLOAT_FORMAT,
        'GrazeAngle': FLOAT_FORMAT, 'IncidenceAngle': FLOAT_FORMAT, 'AzimuthAngle': FLOAT_FORMAT}
    # descriptors
    Time = FloatDescriptor(
        'Time', _required, strict=DEFAULT_STRICT,
        docstring='The transmit or receive time for the vector.')  # type: float
    Pos = SerializableDescriptor(
        'Pos', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='APC position in ECF coordinates.')  # type: XYZType
    Vel = SerializableDescriptor(
        'Vel', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='APC velocity in ECF coordinates.')  # type: XYZType

    def __init__(self, Time=None, Pos=None, Vel=None,
                 SideOfTrack=None, SlantRange=None, GroundRange=None, DopplerConeAngle=None,
                 GrazeAngle=None, IncidenceAngle=None, AzimuthAngle=None, **kwargs):
        """
        Parameters
        ----------
        Time : float
        Pos : XYZType|numpy.ndarray|list|tuple
        Vel : XYZType|numpy.ndarray|list|tuple
        SideOfTrack : str
        SlantRange : float
        GroundRange : float
        DopplerConeAngle : float
        GrazeAngle : float
        IncidenceAngle : float
        AzimuthAngle : float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Time = Time
        self.Pos = Pos
        self.Vel = Vel
        # the shared core angles/ranges are handled by the parent class
        super(BistaticTxRcvType, self).__init__(
            SideOfTrack=SideOfTrack, SlantRange=SlantRange, GroundRange=GroundRange,
            DopplerConeAngle=DopplerConeAngle, GrazeAngle=GrazeAngle, IncidenceAngle=IncidenceAngle,
            AzimuthAngle=AzimuthAngle, **kwargs)
class BistaticType(Serializable):
    """
    Parameters for bistatic collection.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = (
        'AzimuthAngle', 'AzimuthAngleRate', 'BistaticAngle', 'BistaticAngleRate',
        'GrazeAngle', 'TwistAngle', 'SlopeAngle', 'LayoverAngle',
        'TxPlatform', 'RcvPlatform')
    _required = _fields
    _numeric_format = {
        'AzimuthAngle': FLOAT_FORMAT, 'AzimuthAngleRate': FLOAT_FORMAT, 'BistaticAngle': FLOAT_FORMAT,
        'BistaticAngleRate': FLOAT_FORMAT, 'GrazeAngle': FLOAT_FORMAT, 'TwistAngle': FLOAT_FORMAT,
        'SlopeAngle': FLOAT_FORMAT, 'LayoverAngle': FLOAT_FORMAT}
    # descriptors
    AzimuthAngle = FloatDescriptor(
        'AzimuthAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 360),
        docstring='Angle from north to the projection of the Bistatic pointing vector '
                  '(bP) into the ETP. Measured clockwise from +North toward '
                  '+East.')  # type: float
    AzimuthAngleRate = FloatDescriptor(
        'AzimuthAngleRate', _required, strict=DEFAULT_STRICT,
        docstring='Instantaneous rate of change of the Azimuth Angle '
                  ':math:`d(AZIM)/dt`.')  # type: float
    BistaticAngle = FloatDescriptor(
        'BistaticAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 180),
        docstring='Bistatic angle (Beta) between unit vector from SRP to transmit APC '
                  '(uXmt) and the unit vector from the SRP to the receive '
                  'APC (uRcv).')  # type: float
    BistaticAngleRate = FloatDescriptor(
        'BistaticAngleRate', _required, strict=DEFAULT_STRICT,
        docstring='Instantaneous rate of change of the bistatic angle '
                  ':math:`d(Beta)/dt)`.')  # type: float
    GrazeAngle = FloatDescriptor(
        'GrazeAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 90),
        docstring='Angle between the bistatic pointing vector and the ETP at the '
                  'SRP.')  # type: float
    TwistAngle = FloatDescriptor(
        'TwistAngle', _required, strict=DEFAULT_STRICT, bounds=(-90, 90),
        docstring='Angle between cross range in the ETP at the SRP and cross range '
                  'in the instantaneous plane of maximum bistatic resolution. '
                  'Note - For monostatic imaging, the plane of maximum resolution is '
                  'the instantaneous slant plane.')  # type: float
    SlopeAngle = FloatDescriptor(
        'SlopeAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 90),
        docstring='Angle between the ETP normal and the normal to the instantaneous '
                  'plane of maximum bistatic resolution.')  # type: float
    LayoverAngle = FloatDescriptor(
        'LayoverAngle', _required, strict=DEFAULT_STRICT, bounds=(0, 360),
        docstring='Angle from north to the bistatic layover direction in the ETP. '
                  'Measured clockwise from +North toward +East.')  # type: float
    TxPlatform = SerializableDescriptor(
        'TxPlatform', BistaticTxRcvType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the Transmit platform.')  # type: BistaticTxRcvType
    RcvPlatform = SerializableDescriptor(
        'RcvPlatform', BistaticTxRcvType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the Receive platform.')  # type: BistaticTxRcvType

    def __init__(self, AzimuthAngle=None, AzimuthAngleRate=None, BistaticAngle=None,
                 BistaticAngleRate=None, GrazeAngle=None, TwistAngle=None,
                 SlopeAngle=None, LayoverAngle=None, TxPlatform=None,
                 RcvPlatform=None, **kwargs):
        """
        Parameters
        ----------
        AzimuthAngle : float
        AzimuthAngleRate : float
        BistaticAngle : float
        BistaticAngleRate : float
        GrazeAngle : float
        TwistAngle : float
        SlopeAngle : float
        LayoverAngle : float
        TxPlatform : BistaticTxRcvType
        RcvPlatform : BistaticTxRcvType
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.AzimuthAngle = AzimuthAngle
        self.AzimuthAngleRate = AzimuthAngleRate
        self.BistaticAngle = BistaticAngle
        self.BistaticAngleRate = BistaticAngleRate
        self.GrazeAngle = GrazeAngle
        self.TwistAngle = TwistAngle
        self.SlopeAngle = SlopeAngle
        self.LayoverAngle = LayoverAngle
        self.TxPlatform = TxPlatform
        self.RcvPlatform = RcvPlatform
        super(BistaticType, self).__init__(**kwargs)
class ReferenceGeometryType(Serializable):
    """
    Parameters that describe the collection geometry for the reference vector
    of the reference channel.
    """

    # serialization metadata consumed by the Serializable base class;
    # exactly one of Monostatic/Bistatic must be populated
    _fields = ('SRP', 'ReferenceTime', 'SRPCODTime', 'SRPDwellTime', 'Monostatic', 'Bistatic')
    _required = ('SRP', 'ReferenceTime', 'SRPCODTime', 'SRPDwellTime')
    _choice = ({'required': True, 'collection': ('Monostatic', 'Bistatic')}, )
    _numeric_format = {'ReferenceTime': FLOAT_FORMAT, 'SRPCODTime': FLOAT_FORMAT, 'SRPDwellTime': FLOAT_FORMAT}
    # descriptors
    SRP = SerializableDescriptor(
        'SRP', SRPType, _required, strict=DEFAULT_STRICT,
        docstring='The SRP position for the reference vector of the reference '
                  'channel.')  # type: SRPType
    ReferenceTime = FloatDescriptor(
        'ReferenceTime', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Reference time for the selected reference vector, in '
                  'seconds.')  # type: float
    SRPCODTime = FloatDescriptor(
        'SRPCODTime', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The COD Time for point on the reference surface, in '
                  'seconds.')  # type: float
    SRPDwellTime = FloatDescriptor(
        'SRPDwellTime', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The Dwell Time for point on the reference surface, in '
                  'seconds.')  # type: float
    Monostatic = SerializableDescriptor(
        'Monostatic', MonostaticType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters for monostatic collection.')  # type: Union[None, MonostaticType]
    Bistatic = SerializableDescriptor(
        'Bistatic', BistaticType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters for bistatic collection.')  # type: Union[None, BistaticType]

    def __init__(self, SRP=None, ReferenceTime=None, SRPCODTime=None, SRPDwellTime=None,
                 Monostatic=None, Bistatic=None, **kwargs):
        """
        Parameters
        ----------
        SRP : SRPType
        ReferenceTime : float
        SRPCODTime : float
        SRPDwellTime : float
        Monostatic : None|MonostaticType
        Bistatic : None|BistaticType
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SRP = SRP
        self.ReferenceTime = ReferenceTime
        self.SRPCODTime = SRPCODTime
        self.SRPDwellTime = SRPDwellTime
        self.Monostatic = Monostatic
        self.Bistatic = Bistatic
        super(ReferenceGeometryType, self).__init__(**kwargs)
| 17,797 | 40.779343 | 120 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/Global.py | """
The Global type definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union
import numpy
from sarpy.io.xml.base import Serializable, Arrayable
from sarpy.io.xml.descriptors import FloatDescriptor, DateTimeDescriptor, \
StringEnumDescriptor, IntegerEnumDescriptor, SerializableDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
class TimelineType(Serializable):
    """
    Parameters that describe the collection times for the data contained in the product.
    """

    # serialization metadata consumed by the Serializable base class;
    # RcvCollectionStart is optional (bistatic receive-only platforms)
    _fields = ('CollectionStart', 'RcvCollectionStart', 'TxTime1', 'TxTime2')
    _required = ('CollectionStart', 'TxTime1', 'TxTime2')
    _numeric_format = {'TxTime1': FLOAT_FORMAT, 'TxTime2': FLOAT_FORMAT}
    # descriptors
    CollectionStart = DateTimeDescriptor(
        'CollectionStart', _required, strict=DEFAULT_STRICT, numpy_datetime_units='us',
        docstring='Collection Start date and time (UTC). Time reference used for times '
                  'measured from collection start (i.e. slow time t = 0). For bistatic '
                  'collections, the time is the transmit platform collection '
                  'start time. The default display precision is microseconds, but this '
                  'does not imply that accuracy in value.')  # type: numpy.datetime64
    RcvCollectionStart = DateTimeDescriptor(
        'RcvCollectionStart', _required, strict=DEFAULT_STRICT, numpy_datetime_units='us',
        docstring='Receive only platform collection date and start time.')  # type: numpy.datetime64
    TxTime1 = FloatDescriptor(
        'TxTime1', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Earliest TxTime value for any signal vector in the product. '
                  'Time relative to Collection Start in seconds.')  # type: float
    TxTime2 = FloatDescriptor(
        'TxTime2', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Latest TxTime value for any signal vector in the product. '
                  'Time relative to Collection Start in seconds.')  # type: float

    def __init__(self, CollectionStart=None, RcvCollectionStart=None, TxTime1=None, TxTime2=None, **kwargs):
        """
        Parameters
        ----------
        CollectionStart : numpy.datetime64|datetime|date|str
        RcvCollectionStart : None|numpy.datetime64|datetime|date|str
        TxTime1 : float
        TxTime2 : float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CollectionStart = CollectionStart
        self.RcvCollectionStart = RcvCollectionStart
        self.TxTime1 = TxTime1
        self.TxTime2 = TxTime2
        super(TimelineType, self).__init__(**kwargs)
class FxBandType(Serializable, Arrayable):
    """
    Parameters that describe the FX frequency limits for the signal array(s)
    contained in the product.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = ('FxMin', 'FxMax')
    _required = _fields
    _numeric_format = {fld: FLOAT_FORMAT for fld in _fields}
    # descriptors
    FxMin = FloatDescriptor(
        'FxMin', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Minimum fx value for any signal vector in the product in '
                  'Hz.')  # type: float
    FxMax = FloatDescriptor(
        'FxMax', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Maximum fx value for any signal vector in the product in '
                  'Hz.')  # type: float

    def __init__(self, FxMin=None, FxMax=None, **kwargs):
        """
        Parameters
        ----------
        FxMin : float
        FxMax : float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.FxMin = FxMin
        self.FxMax = FxMax
        super(FxBandType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.float64):
        """
        Gets an array representation, of the form [FxMin, FxMax].
        """

        return numpy.array([self.FxMin, self.FxMax], dtype=dtype)

    @classmethod
    def from_array(cls, array):
        """
        Construct from an iterable of the form [FxMin, FxMax].
        """

        # type: (Union[numpy.ndarray, list, tuple]) -> FxBandType
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError('Expected array to be of length 2, and received {}'.format(array))
            return cls(FxMin=array[0], FxMax=array[1])
        raise ValueError('Expected array to be numpy.ndarray, list, or tuple, got {}'.format(type(array)))
class TOASwathType(Serializable, Arrayable):
    """
    Parameters that describe the time-of-arrival (TOA) swath limits for the signal
    array(s) contained in the product.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = ('TOAMin', 'TOAMax')
    _required = _fields
    _numeric_format = {fld: FLOAT_FORMAT for fld in _fields}
    # descriptors
    TOAMin = FloatDescriptor(
        'TOAMin', _required, strict=DEFAULT_STRICT,
        docstring=r'Minimum :math:`\Delta TOA` value for any signal vector in '
                  'the product, in seconds.')  # type: float
    TOAMax = FloatDescriptor(
        'TOAMax', _required, strict=DEFAULT_STRICT,
        docstring=r'Maximum :math:`\Delta TOA` value for any signal vector in '
                  'the product, in seconds.')  # type: float

    def __init__(self, TOAMin=None, TOAMax=None, **kwargs):
        """
        Parameters
        ----------
        TOAMin : float
        TOAMax : float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TOAMin = TOAMin
        self.TOAMax = TOAMax
        super(TOASwathType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.float64):
        """
        Gets an array representation, of the form [TOAMin, TOAMax].
        """

        return numpy.array([self.TOAMin, self.TOAMax], dtype=dtype)

    @classmethod
    def from_array(cls, array):
        """
        Construct from an iterable of the form [TOAMin, TOAMax].
        """

        # type: (Union[numpy.ndarray, list, tuple]) -> TOASwathType
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError('Expected array to be of length 2, and received {}'.format(array))
            return cls(TOAMin=array[0], TOAMax=array[1])
        raise ValueError('Expected array to be numpy.ndarray, list, or tuple, got {}'.format(type(array)))
class TropoParametersType(Serializable):
    """
    Parameters used to compute the propagation delay due to the troposphere.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = ('N0', 'RefHeight')
    _required = _fields
    _numeric_format = {'N0': FLOAT_FORMAT}
    # descriptors
    N0 = FloatDescriptor(
        'N0', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Refractivity value of the troposphere for the imaged scene used '
                  'to form the product (dimensionless). Value at the IARP '
                  'location.')  # type: float
    RefHeight = StringEnumDescriptor(
        'RefHeight', ('IARP', 'ZERO'), _required, strict=DEFAULT_STRICT,
        docstring='Reference Height for the `N0` value.')  # type: str

    def __init__(self, N0=None, RefHeight=None, **kwargs):
        """
        Parameters
        ----------
        N0 : float
        RefHeight : str
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.N0 = N0
        self.RefHeight = RefHeight
        super(TropoParametersType, self).__init__(**kwargs)
class IonoParametersType(Serializable):
    """
    Parameters used to compute propagation effects due to the ionosphere.
    """

    # serialization metadata consumed by the Serializable base class;
    # F2Height is optional
    _fields = ('TECV', 'F2Height')
    _required = ('TECV', )
    _numeric_format = {fld: FLOAT_FORMAT for fld in _fields}
    # descriptor
    TECV = FloatDescriptor(
        'TECV', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Total Electron Content (TEC) integrated along TECU the Vertical (V), '
                  r'in units where :math:`1 TECU = 10^{16} e^{-}/m^{2}`')  # type: float
    F2Height = FloatDescriptor(
        'F2Height', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='The F2 height of the ionosphere, in '
                  'meters.')  # type: Union[None, float]

    def __init__(self, TECV=None, F2Height=None, **kwargs):
        """
        Parameters
        ----------
        TECV : float
        F2Height : None|float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TECV = TECV
        self.F2Height = F2Height
        super(IonoParametersType, self).__init__(**kwargs)
class GlobalType(Serializable):
    """
    The Global type definition.
    """

    # serialization metadata consumed by the Serializable base class;
    # TropoParameters and IonoParameters are optional
    _fields = (
        'DomainType', 'SGN', 'Timeline', 'FxBand', 'TOASwath', 'TropoParameters', 'IonoParameters')
    _required = ('DomainType', 'SGN', 'Timeline', 'FxBand', 'TOASwath')
    # descriptors
    DomainType = StringEnumDescriptor(
        'DomainType', ('FX', 'TOA'), _required, strict=DEFAULT_STRICT,
        docstring='Indicates the domain represented by the sample dimension of the '
                  'CPHD signal array(s), where "FX" denotes Transmit Frequency, and '
                  '"TOA" denotes Difference in Time of Arrival')  # type: str
    SGN = IntegerEnumDescriptor(
        'SGN', (-1, 1), _required, strict=DEFAULT_STRICT,
        docstring='Phase SGN applied to compute target signal phase as a function of '
                  r'target :math:`\Delta TOA^{TGT}`. Target phase in cycles. '
                  r'For simple phase model :math:`Phase(fx) = SGN \times fx \times \Delta TOA^{TGT}` '
                  r'In TOA domain, phase of the mainlobe peak '
                  r':math:`Phase(\Delta TOA^{TGT}) = SGN \times fx_C \times \Delta TOA^{TGT}`'
                  '.')  # type: int
    Timeline = SerializableDescriptor(
        'Timeline', TimelineType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the collection times for the data contained '
                  'in the product')  # type: TimelineType
    FxBand = SerializableDescriptor(
        'FxBand', FxBandType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the FX frequency limits for the signal array(s) '
                  'contained in the product.')  # type: FxBandType
    TOASwath = SerializableDescriptor(
        'TOASwath', TOASwathType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe the time-of-arrival (TOA) swath limits for the '
                  'signal array(s) contained in the product.')  # type: TOASwathType
    TropoParameters = SerializableDescriptor(
        'TropoParameters', TropoParametersType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters used to compute the propagation delay due to the '
                  'troposphere.')  # type: Union[None, TropoParametersType]
    IonoParameters = SerializableDescriptor(
        'IonoParameters', IonoParametersType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters used to compute propagation effects due to the '
                  'ionosphere.')  # type: Union[None, IonoParametersType]

    def __init__(self, DomainType=None, SGN=None, Timeline=None, FxBand=None, TOASwath=None,
                 TropoParameters=None, IonoParameters=None, **kwargs):
        """
        Parameters
        ----------
        DomainType : str
        SGN : int
        Timeline : TimelineType
        FxBand : FxBandType|numpy.ndarray|list|tuple
        TOASwath : TOASwathType|numpy.ndarray|list|tuple
        TropoParameters : None|TropoParametersType
        IonoParameters : None|IonoParametersType
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.DomainType = DomainType
        self.SGN = SGN
        self.Timeline = Timeline
        self.FxBand = FxBand
        self.TOASwath = TOASwath
        self.TropoParameters = TropoParameters
        self.IonoParameters = IonoParameters
        super(GlobalType, self).__init__(**kwargs)
| 12,529 | 38.402516 | 108 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd1_elements/blocks.py | """
Basic building blocks for CPHD standard - mostly overlap with SICD elements
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
import numpy
from sarpy.io.xml.base import Serializable, Arrayable, SerializableArray
from sarpy.io.xml.descriptors import SerializableDescriptor, SerializableArrayDescriptor, \
IntegerDescriptor, FloatDescriptor
from .base import DEFAULT_STRICT
# error message templates shared by the from_array implementations in this module
_len2_array_text = 'Expected array to be of length 2,\n\t' \
                   'and received `{}`'
_array_type_text = 'Expected array to be numpy.ndarray, list, or tuple,\n\tgot `{}`'


###################
# module variables
# allowed polarization identifiers
POLARIZATION_TYPE = ('H', 'V', 'X', 'Y', 'S', 'E', 'RHC', 'LHC', 'UNSPECIFIED')
class LSType(Serializable, Arrayable):
    """
    Represents line and sample.
    """

    # serialization metadata consumed by the Serializable base class
    _fields = ('Line', 'Sample')
    _required = _fields
    _numeric_format = {'Line': '0.17G', 'Sample': '0.17G'}
    # Descriptor
    Line = FloatDescriptor(
        'Line', _required, strict=DEFAULT_STRICT,
        docstring='The Line.')  # type: float
    Sample = FloatDescriptor(
        'Sample', _required, strict=DEFAULT_STRICT,
        docstring='The Sample.')  # type: float

    def __init__(self, Line=None, Sample=None, **kwargs):
        """
        Parameters
        ----------
        Line : float
        Sample : float
        kwargs
        """

        # carry through the xml namespace context, if supplied by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Line = Line
        self.Sample = Sample
        super(LSType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.float64):
        """
        Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form [Line, Sample]
        """

        return numpy.array([self.Line, self.Sample], dtype=dtype)

    @classmethod
    def from_array(cls, array):
        """
        Construct from a iterable.

        Parameters
        ----------
        array : numpy.ndarray|list|tuple

        Returns
        -------
        LSType
        """

        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError(_len2_array_text.format(array))
            return cls(Line=array[0], Sample=array[1])
        raise ValueError(_array_type_text.format(type(array)))
class LSVertexType(LSType):
    """
    A single indexed vertex element of :class:`LSType`, for use in
    line/sample polygon arrays.
    """
    # NOTE(review): unlike XYVertexType in this module, this class does not
    # declare `_set_as_attribute = ('index', )`, so `index` is presumably
    # serialized as a child element rather than an XML attribute - confirm
    # against the CPHD schema.
    _fields = ('Line', 'Sample', 'index')
    _required = _fields
    # descriptors
    index = IntegerDescriptor(
        'index', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='The array index.')  # type: int

    def __init__(self, Line=None, Sample=None, index=None, **kwargs):
        """
        Parameters
        ----------
        Line : float
        Sample : float
        index : int
            The (1-based) array index.
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.index = index
        super(LSVertexType, self).__init__(Line=Line, Sample=Sample, **kwargs)

    @classmethod
    def from_array(cls, array, index=1):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Line, Sample]
        index : int
            array index

        Returns
        -------
        LSVertexType
        """

        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError(_len2_array_text.format(array))
            return cls(Line=array[0], Sample=array[1], index=index)
        raise ValueError(_array_type_text.format(type(array)))
class XYType(Serializable, Arrayable):
    """
    A point expressed in two-dimensional spatial coordinates.
    """

    _fields = ('X', 'Y')
    _required = _fields
    _numeric_format = {'X': '0.17G', 'Y': '0.17G'}
    # descriptors
    X = FloatDescriptor(
        'X', _required, strict=True,
        docstring='The X attribute. Assumed to ECF or other, similar '
                  'coordinates.')  # type: float
    Y = FloatDescriptor(
        'Y', _required, strict=True,
        docstring='The Y attribute. Assumed to ECF or other, similar '
                  'coordinates.')  # type: float

    def __init__(self, X=None, Y=None, **kwargs):
        """
        Parameters
        ----------
        X : float
        Y : float
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.X = X
        self.Y = Y
        super(XYType, self).__init__(**kwargs)

    @classmethod
    def from_array(cls, array):
        """
        Create from an array type entry of the form [X, Y].

        Parameters
        ----------
        array: numpy.ndarray|list|tuple

        Returns
        -------
        XYType
        """

        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(_array_type_text.format(type(array)))
        if len(array) < 2:
            raise ValueError(_len2_array_text.format(array))
        return cls(X=array[0], Y=array[1])

    def get_array(self, dtype=numpy.float64):
        """
        Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form [X, Y]
        """

        return numpy.array((self.X, self.Y), dtype=dtype)
class XYVertexType(XYType):
    """
    An indexed vertex element of :class:`XYType`, for use in polygon arrays.
    """

    _fields = ('X', 'Y', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # descriptors
    index = IntegerDescriptor(
        'index', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='The array index.')  # type: int

    def __init__(self, X=None, Y=None, index=None, **kwargs):
        """
        Parameters
        ----------
        X : float
        Y : float
        index : int
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.index = index
        super(XYVertexType, self).__init__(X=X, Y=Y, **kwargs)

    @classmethod
    def from_array(cls, array, index=1):
        """
        Create from an array type entry of the form [X, Y].

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
        index : int
            The (1-based) array index.

        Returns
        -------
        XYVertexType
        """

        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(_array_type_text.format(type(array)))
        if len(array) < 2:
            raise ValueError(_len2_array_text.format(array))
        return cls(X=array[0], Y=array[1], index=index)
class AreaType(Serializable):
    """
    An area defined by a bounding rectangle and an optional refining polygon,
    expressed in image coordinates.
    """

    _fields = ('X1Y1', 'X2Y2', 'Polygon')
    _required = _fields
    _collections_tags = {'Polygon': {'array': True, 'child_tag': 'Vertex'}}
    # descriptors
    X1Y1 = SerializableDescriptor(
        'X1Y1', XYType, _required, strict=DEFAULT_STRICT,
        docstring='*"Minimum"* corner of the rectangle in Image '
                  'coordinates.')  # type: XYType
    X2Y2 = SerializableDescriptor(
        'X2Y2', XYType, _required, strict=DEFAULT_STRICT,
        docstring='*"Maximum"* corner of the rectangle in Image '
                  'coordinates.')  # type: XYType
    Polygon = SerializableArrayDescriptor(
        'Polygon', XYVertexType, _collections_tags, _required, strict=DEFAULT_STRICT, minimum_length=3,
        docstring='Polygon further reducing the bounding box, in Image '
                  'coordinates.')  # type: Union[SerializableArray, List[XYVertexType]]

    def __init__(self, X1Y1=None, X2Y2=None, Polygon=None, **kwargs):
        """
        Parameters
        ----------
        X1Y1 : XYType|numpy.ndarray|list|tuple
        X2Y2 : XYType|numpy.ndarray|list|tuple
        Polygon : SerializableArray|List[XYVertexType]|numpy.ndarray|list|tuple
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.X1Y1, self.X2Y2 = X1Y1, X2Y2
        self.Polygon = Polygon
        super(AreaType, self).__init__(**kwargs)
| 9,153 | 26.739394 | 103 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/Antenna.py | """
The Antenna definition for CPHD 0.3.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT, FLOAT_FORMAT
from sarpy.io.complex.sicd_elements.blocks import XYZPolyType
from sarpy.io.complex.sicd_elements.Antenna import AntParamType as AntParamTypeBase
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import SerializableDescriptor, SerializableListDescriptor, \
FloatDescriptor
class HPBWType(Serializable):
    """
    Half power beamwidth parameters, in direction cosine space.
    """

    _fields = ('DCX', 'DCY')
    _required = _fields
    _numeric_format = {'DCX': FLOAT_FORMAT, 'DCY': FLOAT_FORMAT}
    # descriptors
    DCX = FloatDescriptor(
        'DCX', _required, strict=DEFAULT_STRICT,
        docstring='Half power beamwidth in the X-axis direction cosine '
                  '(DCX).')  # type: float
    DCY = FloatDescriptor(
        'DCY', _required, strict=DEFAULT_STRICT,
        docstring='Half power beamwidth in the Y -axis direction cosine '
                  '(DCY).')  # type: float

    def __init__(self, DCX=None, DCY=None, **kwargs):
        """
        Parameters
        ----------
        DCX : float
        DCY : float
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.DCX, self.DCY = DCX, DCY
        super(HPBWType, self).__init__(**kwargs)
class AntParamType(AntParamTypeBase):
    """
    The antenna parameters container for CPHD 0.3, extending the common SICD
    antenna parameters (`AntParamTypeBase`) with the half power beamwidth
    (HPBW) element.
    """
    _fields = (
        'XAxisPoly', 'YAxisPoly', 'FreqZero', 'EB', 'HPBW', 'Array', 'Elem',
        'GainBSPoly', 'EBFreqShift', 'MLFreqDilation')
    _required = ('XAxisPoly', 'YAxisPoly', 'FreqZero', )
    _numeric_format = {'FreqZero': FLOAT_FORMAT}
    # descriptors
    # only HPBW is declared here - the remaining fields are inherited from
    # sarpy.io.complex.sicd_elements.Antenna.AntParamType
    HPBW = SerializableDescriptor(
        'HPBW', HPBWType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, HPBWType]

    def __init__(self, XAxisPoly=None, YAxisPoly=None, FreqZero=None, EB=None,
                 HPBW=None, Array=None, Elem=None, GainBSPoly=None, EBFreqShift=None,
                 MLFreqDilation=None, **kwargs):
        """
        Parameters
        ----------
        XAxisPoly : XYZPolyType
        YAxisPoly : XYZPolyType
        FreqZero : float
        EB : None|EBType
        HPBW : None|HPBWType
        Array : None|GainPhasePolyType
        Elem : None|GainPhasePolyType
        GainBSPoly : None|Poly1DType|numpy.ndarray|list|tuple
        EBFreqShift : None|bool
        MLFreqDilation : None|bool
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # HPBW is set directly, since the base __init__ handles only the
        # inherited fields (HPBW is intentionally not forwarded below)
        self.HPBW = HPBW
        super(AntParamType, self).__init__(
            XAxisPoly=XAxisPoly, YAxisPoly=YAxisPoly, FreqZero=FreqZero, EB=EB,
            Array=Array, Elem=Elem, GainBSPoly=GainBSPoly, EBFreqShift=EBFreqShift,
            MLFreqDilation=MLFreqDilation, **kwargs)
class AntennaType(Serializable):
    """
    Antenna parameters describing antenna orientation, mainlobe steering and
    gain patterns versus time, for transmit, receive, and two-way patterns.
    """

    _fields = ('NumTxAnt', 'NumRcvAnt', 'NumTWAnt', 'Tx', 'Rcv', 'TwoWay')
    _required = ()
    _collections_tags = {
        'Tx': {'array': False, 'child_tag': 'Tx'},
        'Rcv': {'array': False, 'child_tag': 'Rcv'},
        'TwoWay': {'array': False, 'child_tag': 'TwoWay'}}
    # descriptors
    Tx = SerializableListDescriptor(
        'Tx', AntParamType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Transmit antenna pattern parameters.'
    )  # type: Union[None, List[AntParamType]]
    Rcv = SerializableListDescriptor(
        'Rcv', AntParamType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Receive antenna pattern parameters.'
    )  # type: Union[None, List[AntParamType]]
    TwoWay = SerializableListDescriptor(
        'TwoWay', AntParamType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Two-way antenna pattern parameters.'
    )  # type: Union[None, List[AntParamType]]

    def __init__(self, Tx=None, Rcv=None, TwoWay=None, **kwargs):
        """
        Parameters
        ----------
        Tx : None|List[AntParamType]
        Rcv : None|List[AntParamType]
        TwoWay : None|List[AntParamType]
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Tx, self.Rcv, self.TwoWay = Tx, Rcv, TwoWay
        super(AntennaType, self).__init__(**kwargs)

    @property
    def NumTxAnt(self):
        """
        int: The number of transmit elements.
        """

        return 0 if self.Tx is None else len(self.Tx)

    @property
    def NumRcvAnt(self):
        """
        int: The number of receive elements.
        """

        return 0 if self.Rcv is None else len(self.Rcv)

    @property
    def NumTWAnt(self):
        """
        int: The number of two way elements.
        """

        return 0 if self.TwoWay is None else len(self.TwoWay)
| 5,442 | 29.926136 | 90 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/Data.py | """
The DataType definition for CPHD 0.3.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import StringEnumDescriptor, IntegerDescriptor, SerializableListDescriptor
class ArraySizeType(Serializable):
    """
    Parameters defining the PHD array dimensions for a single channel.
    """

    _fields = ('NumVectors', 'NumSamples')
    _required = ('NumVectors', 'NumSamples')
    # descriptors
    NumVectors = IntegerDescriptor(
        'NumVectors', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of slow time vectors in the PHD array in this channel.')  # type: int
    NumSamples = IntegerDescriptor(
        'NumSamples', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of samples per vector in the PHD array in this channel.')  # type: int

    def __init__(self, NumVectors=None, NumSamples=None, **kwargs):
        """
        Parameters
        ----------
        NumVectors : int
        NumSamples : int
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.NumVectors, self.NumSamples = NumVectors, NumSamples
        super(ArraySizeType, self).__init__(**kwargs)
class DataType(Serializable):
    """
    Parameters describing the binary data components contained in the product.
    """

    _fields = ('SampleType', 'NumCPHDChannels', 'NumBytesVBP', 'ArraySize')
    _required = ('SampleType', 'NumBytesVBP', 'ArraySize')
    _collections_tags = {'ArraySize': {'array': False, 'child_tag': 'ArraySize'}}
    # descriptors
    SampleType = StringEnumDescriptor(
        'SampleType', ("RE32F_IM32F", "RE16I_IM16I", "RE08I_IM08I"), _required, strict=True,
        docstring="Indicates the PHD sample format of the PHD array(s). All arrays "
                  "have the sample type. Real and imaginary components stored in adjacent "
                  "bytes, real component stored first.")  # type: str
    NumBytesVBP = IntegerDescriptor(
        'NumBytesVBP', _required, strict=DEFAULT_STRICT, bounds=(1, None),
        docstring='Number of bytes per set of Vector Based Parameters.')  # type: int
    ArraySize = SerializableListDescriptor(
        'ArraySize', ArraySizeType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='CPHD array size parameters.')  # type: List[ArraySizeType]

    def __init__(self, SampleType=None, NumBytesVBP=None, ArraySize=None, **kwargs):
        """
        Parameters
        ----------
        SampleType : str
        NumBytesVBP : int
        ArraySize : List[ArraySizeType]
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.SampleType = SampleType
        self.NumBytesVBP = NumBytesVBP
        self.ArraySize = ArraySize
        super(DataType, self).__init__(**kwargs)

    @property
    def NumCPHDChannels(self):
        """
        int: The number of CPHD channels, derived from the length of ArraySize.
        """

        return 0 if self.ArraySize is None else len(self.ArraySize)
| 3,405 | 33.40404 | 104 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/Channel.py | """
The Channel definition for CPHD 0.3.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT, FLOAT_FORMAT
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import IntegerDescriptor, FloatDescriptor, \
SerializableListDescriptor
class ParametersType(Serializable):
    """
    Channel dependent parameters.
    """

    _fields = (
        'SRP_Index', 'NomTOARateSF', 'FxCtrNom', 'BWSavedNom', 'TOASavedNom',
        'TxAnt_Index', 'RcvAnt_Index', 'TWAnt_Index')
    _required = (
        'SRP_Index', 'NomTOARateSF', 'FxCtrNom', 'BWSavedNom', 'TOASavedNom')
    _numeric_format = {
        'NomTOARateSF': FLOAT_FORMAT, 'FxCtrNom': FLOAT_FORMAT, 'BWSavedNom': FLOAT_FORMAT,
        'TOASavedNom': FLOAT_FORMAT}
    # descriptors
    SRP_Index = IntegerDescriptor(
        'SRP_Index', _required, strict=DEFAULT_STRICT,
        docstring='Index to identify the SRP position function used for the '
                  'channel.')  # type: int
    NomTOARateSF = FloatDescriptor(
        'NomTOARateSF', _required, strict=DEFAULT_STRICT,
        docstring='Scale factor to indicate the fraction of the Doppler spectrum '
                  'that is clear.')  # type: float
    FxCtrNom = FloatDescriptor(
        'FxCtrNom', _required, strict=DEFAULT_STRICT,
        docstring='Nominal center transmit frequency associated with the channel (Hz). '
                  'For DomainType = TOA, FxCtrNom is the center frequency for all '
                  'vectors.')  # type: float
    BWSavedNom = FloatDescriptor(
        'BWSavedNom', _required, strict=DEFAULT_STRICT,
        docstring='Nominal transmit bandwidth associated with the channel (Hz). '
                  'For DomainType = TOA, BWSavedNom is the bandwidth saved for all '
                  'vectors.')  # type: float
    TOASavedNom = FloatDescriptor(
        'TOASavedNom', _required, strict=DEFAULT_STRICT,
        docstring='Nominal span in TOA saved for the channel. For DomainType = FX, '
                  'TOASavedNom is the bandwidth saved for all '
                  'vectors.')  # type: float
    TxAnt_Index = IntegerDescriptor(
        'TxAnt_Index', _required, strict=DEFAULT_STRICT,
        docstring='Indicates the Transmit Antenna pattern for data collected to form '
                  'the CPHD channel.')  # type: Union[None, int]
    RcvAnt_Index = IntegerDescriptor(
        'RcvAnt_Index', _required, strict=DEFAULT_STRICT,
        docstring='Indicates the Receive Antenna pattern for data collected to form '
                  'the CPHD channel.')  # type: Union[None, int]
    TWAnt_Index = IntegerDescriptor(
        'TWAnt_Index', _required, strict=DEFAULT_STRICT,
        # fixed typo in the documentation string (was 'T wo-way')
        docstring='Indicates the Two-way Antenna pattern for data collected to form '
                  'the CPHD channel.')  # type: Union[None, int]

    def __init__(self, SRP_Index=None, NomTOARateSF=None, FxCtrNom=None, BWSavedNom=None,
                 TOASavedNom=None, TxAnt_Index=None, RcvAnt_Index=None, TWAnt_Index=None,
                 **kwargs):
        """
        Parameters
        ----------
        SRP_Index : int
        NomTOARateSF : float
        FxCtrNom : float
        BWSavedNom : float
        TOASavedNom : float
        TxAnt_Index : None|int
        RcvAnt_Index : None|int
        TWAnt_Index : None|int
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SRP_Index = SRP_Index
        self.NomTOARateSF = NomTOARateSF
        self.FxCtrNom = FxCtrNom
        self.BWSavedNom = BWSavedNom
        self.TOASavedNom = TOASavedNom
        self.TxAnt_Index = TxAnt_Index
        self.RcvAnt_Index = RcvAnt_Index
        self.TWAnt_Index = TWAnt_Index
        super(ParametersType, self).__init__(**kwargs)
class ChannelType(Serializable):
    """
    Channel specific parameters for CPHD.
    """

    _fields = ('Parameters', )
    _required = ('Parameters', )
    _collections_tags = {'Parameters': {'array': False, 'child_tag': 'Parameters'}}
    # descriptors
    Parameters = SerializableListDescriptor(
        'Parameters', ParametersType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Channel dependent parameter list.')  # type: List[ParametersType]

    def __init__(self, Parameters=None, **kwargs):
        """
        Parameters
        ----------
        Parameters : List[ParametersType]
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Parameters = Parameters
        super(ChannelType, self).__init__(**kwargs)
| 4,906 | 37.03876 | 91 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/VectorParameters.py | """
The SRP definition for CPHD 0.3.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union
import numpy
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import SerializableDescriptor, IntegerEnumDescriptor
class FxParametersType(Serializable):
    """
    The FX vector parameters - the byte sizes of the four FX-domain fields
    within each per-vector parameter set.
    """
    _fields = ('Fx0', 'Fx_SS', 'Fx1', 'Fx2')
    _required = _fields
    # descriptors
    Fx0 = IntegerEnumDescriptor(
        'Fx0', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx0 field')  # type: int
    Fx_SS = IntegerEnumDescriptor(
        'Fx_SS', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx_SS field')  # type: int
    Fx1 = IntegerEnumDescriptor(
        'Fx1', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx1 field')  # type: int
    Fx2 = IntegerEnumDescriptor(
        'Fx2', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx2 field')  # type: int

    def __init__(self, Fx0=8, Fx_SS=8, Fx1=8, Fx2=8, **kwargs):
        """
        Parameters
        ----------
        Fx0 : int
        Fx_SS : int
        Fx1 : int
        Fx2 : int
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Fx0 = Fx0
        self.Fx1 = Fx1
        self.Fx2 = Fx2
        self.Fx_SS = Fx_SS
        super(FxParametersType, self).__init__(**kwargs)

    @staticmethod
    def get_size():
        """
        The size in bytes of this component of the vector (four 8-byte fields).

        Returns
        -------
        int
        """

        return 32

    def get_position_offset_and_size(self, field):
        """
        Get the offset and size of the given field from the beginning of the vector.

        Parameters
        ----------
        field : str

        Returns
        -------
        None|(int, int)
            The (offset, size) pair, or None if `field` is not recognized.
        """

        if field not in self._fields:
            return None
        out = 0
        for fld in self._fields:
            val = getattr(self, fld)
            if fld == field:
                return out, val
            else:
                out += val
        return None

    def get_dtype_components(self):
        """
        Gets the dtype components - one big-endian float64 entry per field.

        Returns
        -------
        List[Tuple]
        """

        return [(entry, '>f8') for entry in self._fields]
class TOAParametersType(Serializable):
    """
    The TOA vector parameters - the byte sizes of the two TOA-domain fields
    within each per-vector parameter set.
    """

    _fields = ('DeltaTOA0', 'TOA_SS')
    _required = _fields
    # descriptors
    DeltaTOA0 = IntegerEnumDescriptor(
        'DeltaTOA0', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the DeltaTOA0 field')  # type: int
    TOA_SS = IntegerEnumDescriptor(
        'TOA_SS', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the TOA_SS field')  # type: int

    def __init__(self, DeltaTOA0=8, TOA_SS=8, **kwargs):
        """
        Parameters
        ----------
        DeltaTOA0 : int
        TOA_SS : int
        kwargs
        """

        # preserve any xml namespace bookkeeping passed through by the parser
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.DeltaTOA0, self.TOA_SS = DeltaTOA0, TOA_SS
        super(TOAParametersType, self).__init__(**kwargs)

    @staticmethod
    def get_size():
        """
        The size in bytes of this component of the vector (two 8-byte fields).

        Returns
        -------
        int
        """

        return 16

    def get_position_offset_and_size(self, field):
        """
        Get the offset and size of the given field from the beginning of the vector.

        Parameters
        ----------
        field : str

        Returns
        -------
        None|(int, int)
        """

        if field not in self._fields:
            return None
        offset = 0
        for name in self._fields:
            width = getattr(self, name)
            if name == field:
                return offset, width
            offset += width
        return None

    def get_dtype_components(self):
        """
        Gets the dtype components - one big-endian float64 entry per field.

        Returns
        -------
        List[Tuple]
        """

        return [(name, '>f8') for name in self._fields]
class VectorParametersType(Serializable):
    """
    The vector parameters sizes object - describes the byte layout of the
    per-vector parameter set for each channel.
    """

    _fields = (
        'TxTime', 'TxPos', 'RcvTime', 'RcvPos', 'SRPTime', 'SRPPos', 'AmpSF', 'TropoSRP',
        'FxParameters', 'TOAParameters')
    _required = (
        'TxTime', 'TxPos', 'RcvTime', 'RcvPos', 'SRPPos')
    _choice = ({'required': False, 'collection': ('FxParameters', 'TOAParameters')}, )
    # descriptors
    TxTime = IntegerEnumDescriptor(
        'TxTime', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the TxTime field')  # type: int
    # NB: default_value for the position fields corrected from 8 to 24 to
    # agree with the only permitted enum value and the __init__ defaults
    TxPos = IntegerEnumDescriptor(
        'TxPos', (24, ), _required, strict=DEFAULT_STRICT, default_value=24,
        docstring='The size of the TxPos field')  # type: int
    RcvTime = IntegerEnumDescriptor(
        'RcvTime', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the RcvTime field')  # type: int
    RcvPos = IntegerEnumDescriptor(
        'RcvPos', (24, ), _required, strict=DEFAULT_STRICT, default_value=24,
        docstring='The size of the RcvPos field')  # type: int
    SRPTime = IntegerEnumDescriptor(
        'SRPTime', (8, ), _required, strict=DEFAULT_STRICT, default_value=None,
        docstring='The size of the SRPTime field')  # type: int
    SRPPos = IntegerEnumDescriptor(
        'SRPPos', (24, ), _required, strict=DEFAULT_STRICT, default_value=24,
        docstring='The size of the SRPPos field')  # type: int
    AmpSF = IntegerEnumDescriptor(
        'AmpSF', (8, ), _required, strict=DEFAULT_STRICT, default_value=None,
        docstring='The size of the AmpSF field')  # type: int
    TropoSRP = IntegerEnumDescriptor(
        'TropoSRP', (8, ), _required, strict=DEFAULT_STRICT, default_value=None,
        docstring='The size of the TropoSRP field')  # type: int
    FxParameters = SerializableDescriptor(
        'FxParameters', FxParametersType, _required, strict=DEFAULT_STRICT,
        docstring='The frequency parameters, only present when DomainType is '
                  'FX.')  # type: Union[None, FxParametersType]
    # NB: corrected to use TOAParametersType (was erroneously FxParametersType)
    TOAParameters = SerializableDescriptor(
        'TOAParameters', TOAParametersType, _required, strict=DEFAULT_STRICT,
        docstring='The TOA parameters, only present when DomainType is '
                  'TOA.')  # type: Union[None, TOAParametersType]

    def __init__(self, TxTime=8, TxPos=24, RcvTime=8, RcvPos=24, SRPTime=None, SRPPos=24,
                 AmpSF=None, TropoSRP=None, FxParameters=None, TOAParameters=None, **kwargs):
        """
        Parameters
        ----------
        TxTime : int
        TxPos : int
        RcvTime : int
        RcvPos : int
        SRPTime : None|int
        SRPPos : int
        AmpSF : None|int
        TropoSRP : None|int
        FxParameters : None|FxParametersType
        TOAParameters : None|TOAParametersType
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TxTime = TxTime
        self.TxPos = TxPos
        self.RcvTime = RcvTime
        self.RcvPos = RcvPos
        self.SRPTime = SRPTime
        self.SRPPos = SRPPos
        self.AmpSF = AmpSF
        self.TropoSRP = TropoSRP
        self.FxParameters = FxParameters
        self.TOAParameters = TOAParameters
        super(VectorParametersType, self).__init__(**kwargs)

    def get_size(self):
        """
        The total size in bytes of the per-vector parameter set, summing all
        populated fields.

        Returns
        -------
        int
        """

        out = 0
        for fld in self._fields:
            val = getattr(self, fld)
            if val is None:
                pass
            elif isinstance(val, int):
                out += val
            elif isinstance(val, (FxParametersType, TOAParametersType)):
                out += val.get_size()
            else:
                raise TypeError('Got unhandled type {}'.format(type(val)))
        return out

    def get_position_offset_and_size(self, field):
        """
        Get the offset and size of the given field from the beginning of the vector.

        Parameters
        ----------
        field : str

        Returns
        -------
        None|(int, int)
            The (offset, size) pair, or None if `field` is not populated.
        """

        out = 0
        for fld in self._fields:
            val = getattr(self, fld)
            if fld == field:
                if val is not None:
                    return out, val
                else:
                    return None
            if val is None:
                pass
            elif isinstance(val, int):
                out += val
            elif isinstance(val, (FxParametersType, TOAParametersType)):
                # recurse into the domain sub-structure for its fields
                res = val.get_position_offset_and_size(field)
                if res is not None:
                    return out+res[0], res[1]
                else:
                    out += val.get_size()
            else:
                raise TypeError('Got unhandled type {}'.format(type(val)))
        return None

    def get_vector_dtype(self):
        """
        Gets the dtype for the corresponding structured array for the full PVP array.

        Returns
        -------
        numpy.dtype
            This will be a compound dtype for a structured array.
        """

        the_type_info = []
        for fld in self._fields:
            val = getattr(self, fld)
            if val is None:
                continue
            if fld in ['FxParameters', 'TOAParameters']:
                the_type_info.extend(val.get_dtype_components())
            else:
                assert isinstance(val, int), 'CPHD 0.3 PVP field {} ' \
                                             'should be an integer, got {}'.format(fld, val)
                if val == 8:
                    the_type_info.append((fld, '>f8'))
                elif val == 24:
                    # 24 bytes encodes a 3-element (X, Y, Z) float64 vector
                    the_type_info.append((fld, '>f8', (3, )))
                else:
                    raise ValueError('Got unhandled value {} for CPHD 0.3 PVP field {}'.format(val, fld))
        return numpy.dtype(the_type_info)
| 10,466 | 30.244776 | 105 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/CPHD.py | """
The Compensated Phase History Data 0.3 definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType
from sarpy.io.phase_history.cphd1_elements.CPHD import CPHDHeaderBase
from sarpy.io.phase_history.cphd0_3_elements.Data import DataType
from sarpy.io.phase_history.cphd0_3_elements.Global import GlobalType
from sarpy.io.phase_history.cphd0_3_elements.Channel import ChannelType
from sarpy.io.phase_history.cphd0_3_elements.SRP import SRPTyp
from sarpy.io.phase_history.cphd0_3_elements.Antenna import AntennaType
from sarpy.io.phase_history.cphd0_3_elements.VectorParameters import VectorParametersType
from sarpy.io.xml.base import Serializable, parse_xml_from_string, parse_xml_from_file
from sarpy.io.xml.descriptors import SerializableDescriptor, IntegerDescriptor, StringDescriptor
#########
# Module variables
_CPHD_SPECIFICATION_VERSION = '0.3'
_CPHD_SPECIFICATION_DATE = '2011-04-15T00:00:00Z'
_CPHD_SPECIFICATION_NAMESPACE = 'urn:CPHD:0.3'
#########
# CPHD header object
class CPHDHeader(CPHDHeaderBase):
    """
    The CPHD 0.3 file header fields.
    """

    _fields = (
        'XML_DATA_SIZE', 'XML_BYTE_OFFSET', 'VB_DATA_SIZE', 'VB_BYTE_OFFSET',
        'CPHD_DATA_SIZE', 'CPHD_BYTE_OFFSET', 'CLASSIFICATION', 'RELEASE_INFO')
    _required = (
        'XML_DATA_SIZE', 'XML_BYTE_OFFSET', 'VB_DATA_SIZE', 'VB_BYTE_OFFSET',
        'CPHD_DATA_SIZE', 'CPHD_BYTE_OFFSET')
    # descriptor
    XML_DATA_SIZE = IntegerDescriptor(
        'XML_DATA_SIZE', _required, strict=True,
        docstring='Size of the XML Metadata in bytes. Does not include the 2 bytes '
                  'of the section terminator.')  # type: int
    XML_BYTE_OFFSET = IntegerDescriptor(
        'XML_BYTE_OFFSET', _required, strict=True,
        docstring='Offset to the first byte of the XML Metadata in bytes.')  # type: int
    VB_DATA_SIZE = IntegerDescriptor(
        'VB_DATA_SIZE', _required, strict=True,
        docstring='Size of the Vector Based Metadata in bytes.')  # type: int
    VB_BYTE_OFFSET = IntegerDescriptor(
        'VB_BYTE_OFFSET', _required, strict=True,
        docstring='Offset to the first byte of the Vector Based Metadata in bytes.')  # type: int
    CPHD_DATA_SIZE = IntegerDescriptor(
        'CPHD_DATA_SIZE', _required, strict=True,
        docstring='Size of the Compensated PHD arrays in bytes.')  # type: int
    CPHD_BYTE_OFFSET = IntegerDescriptor(
        'CPHD_BYTE_OFFSET', _required, strict=True,
        docstring='Offset to the first byte of the CPHD data in bytes.')  # type: int
    CLASSIFICATION = StringDescriptor(
        'CLASSIFICATION', _required, strict=True, default_value='UNCLASSIFIED',
        docstring='Product classification information that is the human-readable banner.')  # type: str
    RELEASE_INFO = StringDescriptor(
        'RELEASE_INFO', _required, strict=True, default_value='UNRESTRICTED',
        docstring='Product release information.')  # type: str

    def __init__(self, XML_DATA_SIZE=None, XML_BYTE_OFFSET=None,
                 VB_DATA_SIZE=None, VB_BYTE_OFFSET=None,
                 CPHD_DATA_SIZE=None, CPHD_BYTE_OFFSET=None,
                 CLASSIFICATION='UNCLASSIFIED', RELEASE_INFO='UNRESTRICTED'):
        """
        Parameters
        ----------
        XML_DATA_SIZE : int
        XML_BYTE_OFFSET : int
        VB_DATA_SIZE : int
        VB_BYTE_OFFSET : int
        CPHD_DATA_SIZE : int
        CPHD_BYTE_OFFSET : int
        CLASSIFICATION : str
        RELEASE_INFO : str
        """

        # assign every header field, in field-declaration order
        provided = locals()
        for fld in self._fields:
            setattr(self, fld, provided[fld])
        super(CPHDHeader, self).__init__()
class CPHDType(Serializable):
    """
    The Compensated Phase History Data (CPHD) version 0.3 metadata structure.
    """
    _fields = (
        'CollectionInfo', 'Data', 'Global', 'Channel', 'SRP', 'RadarCollection', 'Antenna',
        'VectorParameters')
    _required = (
        'CollectionInfo', 'Data', 'Global', 'Channel', 'SRP', 'VectorParameters')
    # descriptors
    CollectionInfo = SerializableDescriptor(
        'CollectionInfo', CollectionInfoType, _required, strict=DEFAULT_STRICT,
        docstring='General information about the collection.')  # type: CollectionInfoType
    Data = SerializableDescriptor(
        'Data', DataType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters that describe binary data components contained in the '
                  'product.')  # type: DataType
    Global = SerializableDescriptor(
        'Global', GlobalType, _required, strict=DEFAULT_STRICT,
        docstring='Global parameters that apply to metadata components and CPHD '
                  'signal arrays.')  # type: GlobalType
    Channel = SerializableDescriptor(
        'Channel', ChannelType, _required, strict=DEFAULT_STRICT,
        docstring='Channel specific parameters for CPHD channels.')  # type: ChannelType
    SRP = SerializableDescriptor(
        'SRP', SRPTyp, _required, strict=DEFAULT_STRICT,
        docstring='The Stabilization Reference Point (SRP) parameters.')  # type: SRPTyp
    RadarCollection = SerializableDescriptor(
        'RadarCollection', RadarCollectionType, _required, strict=DEFAULT_STRICT,
        docstring='')  # type: Union[None, RadarCollectionType]
    Antenna = SerializableDescriptor(
        'Antenna', AntennaType, _required, strict=DEFAULT_STRICT,
        docstring='Antenna parameters that describe antenna orientation, mainlobe '
                  'steering and gain patterns vs. '
                  'time.')  # type: Union[None, AntennaType]
    VectorParameters = SerializableDescriptor(
        'VectorParameters', VectorParametersType, _required, strict=DEFAULT_STRICT,
        docstring='Structure specifying the Vector parameters provided for '
                  'each channel of a given product.')  # type: VectorParametersType

    def __init__(self, CollectionInfo=None, Data=None, Global=None, Channel=None,
                 SRP=None, RadarCollection=None, Antenna=None, VectorParameters=None, **kwargs):
        """
        Parameters
        ----------
        CollectionInfo : CollectionInfoType
        Data : DataType
        Global : GlobalType
        Channel : ChannelType
        SRP : SRPTyp
        RadarCollection : None|RadarCollectionType
        Antenna : None|AntennaType
        VectorParameters : VectorParametersType
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CollectionInfo = CollectionInfo
        self.Data = Data
        self.Global = Global
        self.Channel = Channel
        self.SRP = SRP
        self.RadarCollection = RadarCollection
        self.Antenna = Antenna
        self.VectorParameters = VectorParameters
        super(CPHDType, self).__init__(**kwargs)

    def to_xml_bytes(self, urn=None, tag='CPHD', check_validity=False, strict=DEFAULT_STRICT):
        # default to the CPHD 0.3 namespace when no urn is supplied
        if urn is None:
            urn = _CPHD_SPECIFICATION_NAMESPACE
        return super(CPHDType, self).to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict)

    def to_xml_string(self, urn=None, tag='CPHD', check_validity=False, strict=DEFAULT_STRICT):
        # utf-8 decode of the bytes serialization
        return self.to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict).decode('utf-8')

    def get_pvp_dtype(self):
        """
        Gets the dtype for the corresponding PVP structured array. Note that they
        must all have homogeneous dtype.

        Returns
        -------
        numpy.dtype
            This will be a compound dtype for a structured array.

        Raises
        ------
        ValueError
            If VectorParameters is not populated.
        """

        if self.VectorParameters is None:
            raise ValueError('No VectorParameters defined.')
        return self.VectorParameters.get_vector_dtype()

    @classmethod
    def from_xml_file(cls, file_path):
        """
        Construct the cphd object from a stand-alone xml file path.

        Parameters
        ----------
        file_path : str

        Returns
        -------
        CPHDType
        """

        root_node, xml_ns = parse_xml_from_file(file_path)
        ns_key = 'default' if 'default' in xml_ns else None
        return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)

    @classmethod
    def from_xml_string(cls, xml_string):
        """
        Construct the cphd object from an xml string.

        Parameters
        ----------
        xml_string : str|bytes

        Returns
        -------
        CPHDType
        """

        root_node, xml_ns = parse_xml_from_string(xml_string)
        ns_key = 'default' if 'default' in xml_ns else None
        return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)
| 8,844 | 40.525822 | 113 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/__init__.py | """
**This sub-package is a work in progress to encapsulate pythonic object-oriented CPHD structure 0.3
"""
__classification__ = "UNCLASSIFIED"
| 145 | 23.333333 | 99 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/SRP.py | """
The SRP definition for CPHD 0.3.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union, List
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT
from sarpy.io.complex.sicd_elements.blocks import XYZType, XYZPolyType
from sarpy.io.xml.base import Serializable, SerializableArray, parse_str, parse_int
from sarpy.io.xml.descriptors import SerializableArrayDescriptor
logger = logging.getLogger(__name__)
class PlainArrayType(SerializableArray):
    # array variant which writes no size/index attributes on the parent xml node
    _set_index = False
    _set_size = False
class SRPTyp(Serializable):
    """
    The Stabilization Reference Point (SRP) definition for CPHD 0.3.

    At most one of the `FIXEDPT`, `PVTPOLY` or `PVVPOLY` collections should be
    populated; the `SRPType` and `NumSRPs` fields are derived from whichever
    collection is populated, and are only directly settable when none is.
    """

    _fields = ('SRPType', 'NumSRPs', 'FIXEDPT', 'PVTPOLY', 'PVVPOLY')
    _required = ('SRPType', 'NumSRPs')
    _collections_tags = {
        'FIXEDPT': {'array': True, 'child_tag': 'SRPPT'},
        'PVTPOLY': {'array': True, 'child_tag': 'SRPPVTPoly'},
        'PVVPOLY': {'array': True, 'child_tag': 'SRPPVVPoly'}}
    _choice = ({'required': False, 'collection': ('FIXEDPT', 'PVTPOLY', 'PVVPOLY')}, )
    # descriptors
    FIXEDPT = SerializableArrayDescriptor(
        'FIXEDPT', XYZType, _collections_tags, _required, strict=DEFAULT_STRICT, array_extension=PlainArrayType,
        docstring='The fixed SRP position array.')  # type: Union[None, PlainArrayType, List[XYZType]]
    PVTPOLY = SerializableArrayDescriptor(
        'PVTPOLY', XYZPolyType, _collections_tags, _required, strict=DEFAULT_STRICT, array_extension=PlainArrayType,
        docstring='The SRP position versus time polynomial array.')  # type: Union[None, PlainArrayType, List[XYZPolyType]]
    PVVPOLY = SerializableArrayDescriptor(
        'PVVPOLY', XYZPolyType, _collections_tags, _required, strict=DEFAULT_STRICT, array_extension=PlainArrayType,
        docstring='The SRP position versus vector polynomial array.')  # type: Union[None, PlainArrayType, List[XYZPolyType]]

    def __init__(self, SRPType=None, NumSRPs=None, FIXEDPT=None, PVTPOLY=None, PVVPOLY=None,
                 **kwargs):
        """
        Parameters
        ----------
        SRPType : str
        NumSRPs : int
        FIXEDPT : None|PlainArrayType|List[XYZType]
        PVTPOLY : None|PlainArrayType|List[XYZPolyType]
        PVVPOLY : None|PlainArrayType|List[XYZPolyType]
        kwargs
        """
        self._SRPType = None
        self._NumSRPs = None
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # NB: the collections must be assigned before SRPType/NumSRPs, because
        # those setters inspect the collections to decide whether the values
        # are derived or cached
        self.FIXEDPT = FIXEDPT
        self.PVTPOLY = PVTPOLY
        self.PVVPOLY = PVVPOLY
        self.SRPType = SRPType
        self.NumSRPs = NumSRPs
        super(SRPTyp, self).__init__(**kwargs)

    @property
    def SRPType(self):
        """
        str: The type of SRP - derived from whichever collection is populated,
        otherwise the cached value.
        """
        if self.FIXEDPT is not None:
            return 'FIXEDPT'
        elif self.PVTPOLY is not None:
            return 'PVTPOLY'
        elif self.PVVPOLY is not None:
            return 'PVVPOLY'
        else:
            return self._SRPType

    @SRPType.setter
    def SRPType(self, value):
        if self.FIXEDPT is not None or self.PVTPOLY is not None or self.PVVPOLY is not None:
            # derived from the populated collection - nothing to cache
            self._SRPType = None
            return

        value = parse_str(value, 'SRPType', self)
        if value is None:
            # fix: previously a None value here fell through to `.upper()` and
            # raised AttributeError instead of simply leaving the field unset
            self._SRPType = None
            return

        value = value.upper()
        if value in ('FIXEDPT', 'PVTPOLY', 'PVVPOLY', 'STEPPED'):
            self._SRPType = value
        else:
            logger.warning(
                'Got {} for the SRPType field of class SRPTyp.\n\t'
                'It is required to be one of {}.\n\t'
                'Setting to None, which is required to be fixed.'.format(
                    value, ('FIXEDPT', 'PVTPOLY', 'PVVPOLY', 'STEPPED')))
            self._SRPType = None

    @property
    def NumSRPs(self):
        """
        None|int: The number of SRPs - derived from whichever collection is
        populated, otherwise the cached value.
        """
        if self.FIXEDPT is not None:
            return self.FIXEDPT.size
        elif self.PVTPOLY is not None:
            return self.PVTPOLY.size
        elif self.PVVPOLY is not None:
            return self.PVVPOLY.size
        else:
            return self._NumSRPs

    @NumSRPs.setter
    def NumSRPs(self, value):
        if self.FIXEDPT is not None or self.PVTPOLY is not None or self.PVVPOLY is not None:
            # derived from the populated collection - nothing to cache
            self._NumSRPs = None
        else:
            self._NumSRPs = parse_int(value, 'NumSRPs', self)
| 4,290 | 33.328 | 116 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd0_3_elements/Global.py | """
The Global type definition for CPHD 0.3.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, List
import numpy
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT, FLOAT_FORMAT
from sarpy.io.complex.sicd_elements.base import SerializableCPArrayDescriptor, SerializableCPArray
from sarpy.io.complex.sicd_elements.blocks import LatLonHAECornerRestrictionType, Poly2DType
from sarpy.io.complex.sicd_elements.RadarCollection import ReferencePointType, XDirectionType, \
YDirectionType
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import FloatDescriptor, DateTimeDescriptor, \
StringEnumDescriptor, IntegerDescriptor, IntegerEnumDescriptor, \
SerializableDescriptor
class DwellTimeType(Serializable):
    """
    The dwell time object.
    """
    _fields = ('DwellTimePoly', 'CODTimePoly')
    _required = _fields
    # descriptors
    DwellTimePoly = SerializableDescriptor(
        'DwellTimePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='The dwell time polynomial.')  # type: Poly2DType
    CODTimePoly = SerializableDescriptor(
        'CODTimePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='The cod time polynomial.')  # type: Poly2DType
    def __init__(self, DwellTimePoly=None, CODTimePoly=None, **kwargs):
        """
        Parameters
        ----------
        DwellTimePoly : Poly2DType|numpy.ndarray|list|tuple
        CODTimePoly : Poly2DType|numpy.ndarray|list|tuple
        kwargs
        """
        # stash any namespace details supplied by the xml deserialization machinery
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.DwellTimePoly = DwellTimePoly
        self.CODTimePoly = CODTimePoly
        super(DwellTimeType, self).__init__(**kwargs)
class PlaneType(Serializable):
    """
    The reference plane.
    """
    _fields = ('RefPt', 'XDir', 'YDir', 'DwellTime')
    _required = ('RefPt', 'XDir', 'YDir')
    # other class variable
    # descriptors
    RefPt = SerializableDescriptor(
        'RefPt', ReferencePointType, _required, strict=DEFAULT_STRICT,
        docstring='The reference point.')  # type: ReferencePointType
    XDir = SerializableDescriptor(
        'XDir', XDirectionType, _required, strict=DEFAULT_STRICT,
        docstring='The X direction collection plane parameters.')  # type: XDirectionType
    YDir = SerializableDescriptor(
        'YDir', YDirectionType, _required, strict=DEFAULT_STRICT,
        docstring='The Y direction collection plane parameters.')  # type: YDirectionType
    DwellTime = SerializableDescriptor(
        'DwellTime', DwellTimeType, _required, strict=DEFAULT_STRICT,
        docstring='The dwell time parameters.')  # type: DwellTimeType
    def __init__(self, RefPt=None, XDir=None, YDir=None, DwellTime=None, **kwargs):
        """
        Parameters
        ----------
        RefPt : ReferencePointType
        XDir : XDirectionType
        YDir : YDirectionType
        DwellTime : DwellTimeType
        kwargs
        """
        # stash any namespace details supplied by the xml deserialization machinery
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.RefPt = RefPt
        self.XDir = XDir
        self.YDir = YDir
        self.DwellTime = DwellTime
        super(PlaneType, self).__init__(**kwargs)
class ImageAreaType(Serializable):
    """
    The collection area.
    """
    _fields = ('Corner', 'Plane')
    _required = ('Corner', )
    _collections_tags = {
        'Corner': {'array': True, 'child_tag': 'ACP'}, }
    # descriptors
    Corner = SerializableCPArrayDescriptor(
        'Corner', LatLonHAECornerRestrictionType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The collection area corner point definition array.'
    )  # type: Union[SerializableCPArray, List[LatLonHAECornerRestrictionType]]
    Plane = SerializableDescriptor(
        'Plane', PlaneType, _required, strict=DEFAULT_STRICT,
        docstring='A rectangular area in a geo-located display plane.')  # type: PlaneType
    def __init__(self, Corner=None, Plane=None, **kwargs):
        """
        Parameters
        ----------
        Corner : SerializableCPArray|List[LatLonHAECornerRestrictionType]|numpy.ndarray|list|tuple
        Plane : PlaneType
        kwargs
        """
        # stash any namespace details supplied by the xml deserialization machinery
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Corner = Corner
        self.Plane = Plane
        super(ImageAreaType, self).__init__(**kwargs)
class GlobalType(Serializable):
    """
    The Global type definition.
    """
    _fields = (
        'DomainType', 'PhaseSGN', 'RefFreqIndex', 'CollectStart',
        'CollectDuration', 'TxTime1', 'TxTime2', 'ImageArea')
    _required = (
        'DomainType', 'PhaseSGN', 'CollectStart', 'CollectDuration',
        'TxTime1', 'TxTime2', 'ImageArea')
    _numeric_format = {
        'CollectDuration': FLOAT_FORMAT, 'TxTime1': FLOAT_FORMAT, 'TxTime2': FLOAT_FORMAT}
    # descriptors
    DomainType = StringEnumDescriptor(
        'DomainType', ('FX', 'TOA'), _required, strict=DEFAULT_STRICT,
        docstring='Indicates the domain represented by the sample dimension of the '
                  'CPHD signal array(s), where "FX" denotes Transmit Frequency, and '
                  '"TOA" denotes Difference in Time of Arrival')  # type: str
    PhaseSGN = IntegerEnumDescriptor(
        'PhaseSGN', (-1, 1), _required, strict=DEFAULT_STRICT,
        docstring='Phase SGN applied to compute target signal phase as a function of '
                  r'target :math:`\Delta TOA^{TGT}`. Target phase in cycles. '
                  r'For simple phase model :math:`Phase(fx) = SGN \times fx \times \Delta TOA^{TGT}` '
                  r'In TOA domain, phase of the mainlobe peak '
                  r':math:`Phase(\Delta TOA^{TGT}) = SGN \times fx_C \times \Delta TOA^{TGT}`'
                  '.')  # type: int
    RefFreqIndex = IntegerDescriptor(
        'RefFreqIndex', _required, strict=DEFAULT_STRICT,
        docstring='Indicates if the RF frequency values are expressed as offsets from '
                  'a reference frequency (RefFreq).')  # type: Union[None, int]
    # NOTE(review): the trailing sentence of the CollectStart docstring below
    # appears garbled in the original source
    CollectStart = DateTimeDescriptor(
        'CollectStart', _required, strict=DEFAULT_STRICT, numpy_datetime_units='us',
        docstring='Collection Start date and time (UTC). Time reference used for times '
                  'measured from collection start (i.e. slow time t = 0). For bistatic '
                  'collections, the time is the transmit platform collection '
                  'start time. The default display precision is microseconds, but this '
                  'does not that accuracy in value.')  # type: numpy.datetime64
    CollectDuration = FloatDescriptor(
        'CollectDuration', _required, strict=DEFAULT_STRICT,
        docstring='The duration of the collection, in seconds.')  # type: float
    TxTime1 = FloatDescriptor(
        'TxTime1', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Earliest TxTime value for any signal vector in the product. '
                  'Time relative to Collection Start in seconds.')  # type: float
    TxTime2 = FloatDescriptor(
        'TxTime2', _required, strict=DEFAULT_STRICT, bounds=(0, None),
        docstring='Latest TxTime value for any signal vector in the product. '
                  'Time relative to Collection Start in seconds.')  # type: float
    ImageArea = SerializableDescriptor(
        'ImageArea', ImageAreaType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters describing the ground area covered by this '
                  'product.')  # type: ImageAreaType
    def __init__(self, DomainType=None, PhaseSGN=None, RefFreqIndex=None, CollectStart=None,
                 CollectDuration=None, TxTime1=None, TxTime2=None, ImageArea=None, **kwargs):
        """
        Parameters
        ----------
        DomainType : str
        PhaseSGN : int
        RefFreqIndex : None|int
        CollectStart : numpy.datetime64|datetime.datetime|str
        CollectDuration : float
        TxTime1 : float
        TxTime2 : float
        ImageArea : ImageAreaType
        kwargs
        """
        # stash any namespace details supplied by the xml deserialization machinery
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.DomainType = DomainType
        self.PhaseSGN = PhaseSGN
        self.RefFreqIndex = RefFreqIndex
        self.CollectStart = CollectStart
        self.CollectDuration = CollectDuration
        self.TxTime1 = TxTime1
        self.TxTime2 = TxTime2
        self.ImageArea = ImageArea
        super(GlobalType, self).__init__(**kwargs)
| 8,931 | 39.234234 | 102 | py |
sarpy | sarpy-master/sarpy/io/phase_history/cphd_schema/__init__.py | """
This package contains the CPHD schema
"""
__classification__ = 'UNCLASSIFIED'
__author__ = "Thomas McCullough"
import os
import re
from typing import List, Dict, Tuple, Union
# the CPHD version tuple used when none is explicitly requested
_CPHD_DEFAULT_TUPLE = (1, 1, 0)
_the_directory = os.path.split(__file__)[0]
# mapping of recognized CPHD urns to version metadata; note that the
# 0.3.0 entry intentionally defines no 'schema' key
urn_mapping = {
    'urn:CPHD:0.3.0': {
        'tuple': (0, 3, 0),
        'version': '0.3.0',
        'release': '0.3.0',
        'date': ''},
    'urn:CPHD:1.0.1': {
        'tuple': (1, 0, 1),
        'version': '1.0.1',
        'release': '1.0.1',
        'date': '2018-05-21T00:00:00Z',
        'schema': os.path.join(_the_directory, 'CPHD_schema_V1.0.1_2018_05_21.xsd')},
    'urn:CPHD:1.1.0': {
        'tuple': (1, 1, 0),
        'version': '1.1.0',
        'release': '1.1.0',
        'date': '2021-11-30T00:00:00Z',
        'schema': os.path.join(_the_directory, 'CPHD_schema_V1.1.0_2021_11_30_FINAL.xsd')},
}
# the CPHD versions to which sarpy can write
WRITABLE_VERSIONS = ('1.0.1', '1.1.0')
# validate the defined paths
for key, entry in urn_mapping.items():
    schema_path = entry.get('schema', None)
    if schema_path is not None and not os.path.exists(schema_path):
        raise ValueError('`{}` has nonexistent schema path {}'.format(key, schema_path))
def get_default_tuple() -> Tuple[int, int, int]:
    """
    Get the default CPHD version tuple.

    Returns
    -------
    Tuple[int, int, int]
        The `(major, minor, release)` version tuple.
    """
    return _CPHD_DEFAULT_TUPLE
def get_default_version_string() -> str:
    """
    Get the default CPHD version string, e.g. ``'1.1.0'``.

    Returns
    -------
    str
    """
    return '.'.join(str(entry) for entry in _CPHD_DEFAULT_TUPLE)
def get_namespace(version: Union[str, Tuple[int, int, int]]) -> str:
    """
    Construct the CPHD xml namespace string for the given version.

    Parameters
    ----------
    version : str|Tuple[int, int, int]
        The version, either as a dotted string or a `(major, minor, release)` tuple.

    Returns
    -------
    str
    """
    if isinstance(version, (list, tuple)):
        major, minor, release = version[0], version[1], version[2]
        version = '{}.{}.{}'.format(major, minor, release)
    return 'http://api.nsgreg.nga.mil/schema/cphd/{}'.format(version)
def check_urn(urn_string: str) -> str:
    """
    Checks that the urn string follows the correct pattern, and normalizes a
    bare version string (e.g. ``'1.0.1'``) to the full ``'urn:CPHD:1.0.1'`` form.

    Parameters
    ----------
    urn_string : str

    Returns
    -------
    str
        The normalized urn string.

    Raises
    ------
    TypeError
        If the input is not a string.
    ValueError
        This raises an exception for a poorly formed or unmapped CPHD urn.
    """
    if not isinstance(urn_string, str):
        raise TypeError(
            'Expected a urn input of string type, got type {}'.format(type(urn_string)))

    # fix: the original patterns used unescaped dots (`\d.\d.\d`), so `.`
    # matched any character and malformed strings like '1a2b3' were accepted;
    # multi-digit version components are now also permitted
    if re.match(r'^\d+\.\d+\.\d+$', urn_string) is not None:
        urn_string = 'urn:CPHD:{}'.format(urn_string)

    if re.match(r'^urn:CPHD:\d+\.\d+\.\d+$', urn_string) is None:
        raise ValueError(
            'Input provided as `{}`,\nbut should be of the form '
            '`urn:CPHD:<major>.<minor>.<release>'.format(urn_string))
    return urn_string
def get_urn_details(urn_string: str) -> Dict[str, str]:
    """
    Gets the associated details for the given CPHD urn, or raise an exception for
    poorly formatted or unrecognized urn.

    Parameters
    ----------
    urn_string : str

    Returns
    -------
    Dict[str, str]
    """
    the_urn = check_urn(urn_string)
    details = urn_mapping.get(the_urn, None)
    if details is None:
        raise KeyError(
            'Got correctly formatted, but unmapped CPHD urn {}.'.format(the_urn))
    return details
def get_schema_path(the_urn: str) -> Union[str, None]:
    """
    Gets the path to the proper schema file for the given urn, if any.

    Parameters
    ----------
    the_urn : str

    Returns
    -------
    None|str
        The schema file path, or None when no schema is registered for the urn
        (the 'urn:CPHD:0.3.0' mapping entry defines no schema).
    """
    result = get_urn_details(the_urn)
    # fix: the return annotation previously claimed `str`, but the 0.3.0 entry
    # has no 'schema' key, so None is a possible return
    return result.get('schema', None)
def get_versions() -> List[str]:
    """
    Gets a list of recognized CPHD urns, ordered by version tuple.

    Returns
    -------
    List[str]
    """
    def _version_key(urn):
        return urn_mapping[urn]['tuple']
    return sorted(urn_mapping.keys(), key=_version_key)
| 3,687 | 22.793548 | 91 | py |
sarpy | sarpy-master/sarpy/io/complex/base.py | """
Base common features for complex readers
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Tuple, Sequence, Callable
import numpy
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.utils import is_general_match
from sarpy.io.general.base import BaseReader, FlatReader
from sarpy.io.general.data_segment import DataSegment, SubsetSegment
from sarpy.io.general.format_function import FormatFunction
class SICDTypeReader(BaseReader):
    """
    A class for ensuring common SICD reading functionality.

    **Changed in version 1.3.0** for reading changes.
    """
    def __init__(self,
                 data_segment: Union[None, DataSegment, Sequence[DataSegment]],
                 sicd_meta: Union[None, SICDType, Sequence[SICDType]],
                 close_segments: bool = True,
                 delete_files: Union[None, str, Sequence[str]] = None):
        """
        Parameters
        ----------
        data_segment : None|DataSegment|Sequence[DataSegment]
        sicd_meta : None|SICDType|Sequence[SICDType]
            The SICD metadata object(s).
        close_segments : bool
            Call segment.close() for each data segment on reader.close()?
        delete_files : None|Sequence[str]
            Any temp files which should be cleaned up on reader.close()?
            This will occur after closing segments.
        """
        if sicd_meta is None:
            self._sicd_meta = None
        elif isinstance(sicd_meta, SICDType):
            self._sicd_meta = sicd_meta
        else:
            # a collection was provided - validate each entry and store as a tuple
            temp_list = []
            for el in sicd_meta:
                if not isinstance(el, SICDType):
                    raise TypeError(
                        'Got a collection for sicd_meta, and all elements are required '
                        'to be instances of SICDType.')
                temp_list.append(el)
            self._sicd_meta = tuple(temp_list)
        BaseReader.__init__(
            self, data_segment, reader_type='SICD', close_segments=close_segments, delete_files=delete_files)
    def _check_sizes(self) -> None:
        # verify that each data segment size agrees with the size declared by
        # the corresponding sicd structure, raising with a combined message
        data_sizes = self.get_data_size_as_tuple()
        sicds = self.get_sicds_as_tuple()
        agree = True
        msg = ''
        for i, (data_size, sicd) in enumerate(zip(data_sizes, sicds)):
            expected_size = (sicd.ImageData.NumRows, sicd.ImageData.NumCols)
            if data_size != expected_size:
                agree = False
                msg += 'data segment at index {} has data size {}\n\t' \
                       'and expected size (from the sicd) {}\n'.format(i, data_size, expected_size)
        if not agree:
            raise ValueError(msg)
    @property
    def sicd_meta(self) -> Union[None, SICDType, Tuple[SICDType, ...]]:
        """
        None|SICDType|Tuple[SICDType, ...]: the sicd meta_data or meta_data collection.
        """
        return self._sicd_meta
    def get_sicds_as_tuple(self) -> Union[None, Tuple[SICDType, ...]]:
        """
        Get the sicd or sicd collection as a tuple - for simplicity and consistency of use.

        Returns
        -------
        None|Tuple[SICDType, ...]
        """
        if self.sicd_meta is None:
            return None
        elif isinstance(self.sicd_meta, tuple):
            return self.sicd_meta
        else:
            # a single sicd - wrap it in a one-element tuple
            # noinspection PyRedundantParentheses
            return (self.sicd_meta, )
    def get_sicd_partitions(self, match_function: Callable = is_general_match) -> Tuple[Tuple[int, ...], ...]:
        """
        Partition the sicd collection into sub-collections according to `match_function`,
        which is assumed to establish an equivalence relation.

        Parameters
        ----------
        match_function : callable
            This match function must have call signature `(SICDType, SICDType) -> bool`, and
            defaults to :func:`sarpy.io.complex.sicd_elements.utils.is_general_match`.
            This function is assumed reflexive, symmetric, and transitive.

        Returns
        -------
        Tuple[Tuple[int, ...], ...]
        """
        sicds = self.get_sicds_as_tuple()
        # set up our state workspace
        count = len(sicds)
        matched = numpy.zeros((count,), dtype='bool')
        matches = []
        # assemble our match collections - each unmatched index seeds a new
        # partition, greedily absorbing all later equivalent entries
        for i in range(count):
            if matched[i]:
                # it's already matched somewhere
                continue
            matched[i] = True  # shouldn't access backwards, but just to be thorough
            this_match = [i, ]
            for j in range(i + 1, count):
                if not matched[j] and match_function(sicds[i], sicds[j]):
                    matched[j] = True
                    this_match.append(j)
            matches.append(tuple(this_match))
        return tuple(matches)
    def get_sicd_bands(self) -> Tuple[str, ...]:
        """
        Gets the list of bands for each sicd.

        Returns
        -------
        Tuple[str, ...]
        """
        return tuple(sicd.get_transmit_band_name() for sicd in self.get_sicds_as_tuple())
    def get_sicd_polarizations(self) -> Tuple[str, ...]:
        """
        Gets the list of polarizations for each sicd.

        Returns
        -------
        Tuple[str]
        """
        return tuple(sicd.get_processed_polarization() for sicd in self.get_sicds_as_tuple())
class FlatSICDReader(FlatReader, SICDTypeReader):
    """
    Create a sicd type reader directly from an array.

    **Changed in version 1.3.0** for reading changes.
    """
    def __init__(self,
                 sicd_meta,
                 underlying_array,
                 formatted_dtype: Union[None, str, numpy.dtype] = None,
                 formatted_shape: Union[None, Tuple[int, ...]] = None,
                 reverse_axes: Union[None, int, Sequence[int]] = None,
                 transpose_axes: Union[None, Tuple[int, ...]] = None,
                 format_function: Union[None, FormatFunction] = None,
                 close_segments: bool = True):
        """
        Parameters
        ----------
        sicd_meta : None|SICDType
            `None`, or the SICD metadata object
        underlying_array : numpy.ndarray
        formatted_dtype : None|str|numpy.dtype
        formatted_shape : None|Tuple[int, ...]
        reverse_axes : None|Sequence[int]
        transpose_axes : None|Tuple[int, ...]
        format_function : None|FormatFunction
        close_segments : bool
        """
        FlatReader.__init__(
            self, underlying_array,
            formatted_dtype=formatted_dtype, formatted_shape=formatted_shape,
            reverse_axes=reverse_axes, transpose_axes=transpose_axes,
            format_function=format_function, close_segments=close_segments)
        SICDTypeReader.__init__(self, None, sicd_meta)
        # verify the array size agrees with the sicd structure
        self._check_sizes()
    def write_to_file(self, output_file, check_older_version=False, check_existence=False):
        """
        Write a file for the given in-memory reader.

        Parameters
        ----------
        output_file : str
        check_older_version : bool
            Try to use a less recent version of SICD (1.1), for possible application compliance issues?
        check_existence : bool
            Should we check if the given file already exists, and raise an exception if so?
        """
        if not isinstance(output_file, str):
            raise TypeError(
                'output_file is expected to a be a string, got type {}'.format(type(output_file)))
        # local import to avoid a circular import at module load time
        from sarpy.io.complex.sicd import SICDWriter
        with SICDWriter(
                output_file, self.sicd_meta,
                check_older_version=check_older_version, check_existence=check_existence) as writer:
            writer.write_chip(self[:, :], start_indices=(0, 0))
class SubsetSICDReader(SICDTypeReader):
    """
    Create a reader based on a specific subset of a given SICDTypeReader.

    **Changed in version 1.3.0** for reading changes.
    """
    def __init__(self, reader, row_bounds, column_bounds, index=0, close_parent=False):
        """
        Parameters
        ----------
        reader : SICDTypeReader
            The base reader.
        row_bounds : None|Tuple[int, int]
            Of the form `(min row, max row)`.
        column_bounds : None|Tuple[int, int]
            Of the form `(min column, max column)`.
        index : int
            The image index.
        close_parent : bool
        """
        # derive the subset sicd structure (which also validates/normalizes the bounds)
        sicd, row_bounds, column_bounds = reader.get_sicds_as_tuple()[index].create_subset_structure(
            row_bounds, column_bounds)
        parent_segment = reader.get_data_segment_as_tuple()[index]
        subset_definition = (slice(*row_bounds), slice(*column_bounds))
        data_segment = SubsetSegment(
            parent_segment, subset_definition, coordinate_basis='formatted', close_parent=close_parent)
        SICDTypeReader.__init__(self, data_segment, sicd)
    @property
    def file_name(self) -> None:
        # a subset reader has no backing file of its own
        return None
| 9,108 | 34.034615 | 110 | py |
sarpy | sarpy-master/sarpy/io/complex/palsar2.py | """
Functionality for reading PALSAR ALOS 2 data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import os
import struct
from typing import Union, Tuple, List, Optional
import numpy
from numpy.polynomial import polynomial
from scipy.constants import speed_of_light
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.data_segment import DataSegment, NumpyMemmapSegment, \
SubsetSegment, BandAggregateSegment
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.utils import get_seconds, parse_timestring, is_file_like
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import Poly1DType, Poly2DType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
WaveformParametersType, ChanParametersType, TxStepType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Position import PositionType, XYZPolyType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, \
RcvChanProcType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType
from sarpy.io.complex.sicd_elements.ErrorStatistics import ErrorStatisticsType, \
ErrorComponentsType, RadarSensorErrorType, PosVelErrType
from sarpy.io.complex.utils import two_dim_poly_fit, fit_position_xvalidation
logger = logging.getLogger(__name__)
##########
# basic helper functions for file parsing and interpretation
def _determine_file_type(file_name):
"""
This checks the initial bit of the header to determine file type.
Parameters
----------
file_name : str
Returns
-------
None|str
"""
with open(file_name, 'rb') as fi:
# read the first 12 bytes of the file
header = fi.read(12)
parts = struct.unpack('>IBBBBI', header)
if parts == (1, 192, 192, 18, 18, 360):
return 'VOL'
elif parts == (1, 11, 192, 18, 18, 720):
return 'LED'
elif parts == (1, 50, 192, 18, 18, 720):
return 'IMG'
elif parts == (1, 63, 192, 18, 18, 720):
return 'TRL'
else:
return None
def _make_float(bytes_in):
"""
Try to parse as a float.
Parameters
----------
bytes_in : bytes
Returns
-------
float
"""
if len(bytes_in.strip()) == 0:
return numpy.nan
else:
return float(bytes_in)
##########
# helper classes that contains common record elements
class _BaseElements(object):
    """
    Common header element
    """
    __slots__ = (
        'rec_num', 'rec_subtype1', 'rec_type', 'rec_subtype2', 'rec_subtype3', 'rec_length')
    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object.
        """
        # parse the 12-byte big-endian record preamble in place
        the_bytes = fi.read(12)
        self.rec_num, self.rec_subtype1, self.rec_type, self.rec_subtype2, self.rec_subtype3, \
            self.rec_length = struct.unpack('>IBBBBI', the_bytes)  # type: int, int, int, int, int, int
# Elements common to most individual record types?
class _BaseElements2(_BaseElements):
    """Record preamble followed by the ascii/ebcdic flag field."""
    __slots__ = ('ascii_ebcdic', )
    def __init__(self, fi):
        super(_BaseElements2, self).__init__(fi)
        self.ascii_ebcdic = fi.read(2).decode('utf-8')  # type: str
        fi.seek(2, os.SEEK_CUR)  # skip reserved field
# Elements common to IMG, LED, TRL, and VOL
class _CommonElements(_BaseElements2):
    """
    Parser and interpreter for the elements common to IMG, LED, TRL, and VOL files
    """
    __slots__ = (
        'doc_id', 'doc_rev', 'rec_rev', 'soft_rel_rev')
    def __init__(self, fi):
        super(_CommonElements, self).__init__(fi)
        # fixed-width ascii identification fields, read in file order
        self.doc_id = fi.read(12).decode('utf-8')  # type: str
        self.doc_rev = fi.read(2).decode('utf-8')  # type: str
        self.rec_rev = fi.read(2).decode('utf-8')  # type: str
        self.soft_rel_rev = fi.read(12).decode('utf-8')  # type: str
# Elements common to IMG, LED, and TRL
class _CommonElements2(_CommonElements):
    """
    Parser and interpreter for the elements common to IMG, LED, TRL files
    """
    __slots__ = (
        'file_num', 'file_id', 'rec_seq_loc_type_flag', 'seq_num_loc', 'fld_len_seq',
        'rec_code_loc_type_flag', 'loc_rec_code', 'fld_len_code', 'rec_len_loc_type_flag',
        'loc_rec_len', 'len_rec_len', 'num_data_rec', 'data_len')
    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The open file object.
        """
        super(_CommonElements2, self).__init__(fi)
        # fixed-width ascii descriptor fields, read in file order
        self.file_num = fi.read(4).decode('utf-8')  # type: str
        self.file_id = fi.read(16).decode('utf-8')  # type: str
        self.rec_seq_loc_type_flag = fi.read(4).decode('utf-8')  # type: str
        self.seq_num_loc = fi.read(8).decode('utf-8')  # type: str
        self.fld_len_seq = fi.read(4).decode('utf-8')  # type: str
        self.rec_code_loc_type_flag = fi.read(4).decode('utf-8')  # type: str
        self.loc_rec_code = fi.read(8).decode('utf-8')  # type: str
        self.fld_len_code = fi.read(4).decode('utf-8')  # type: str
        self.rec_len_loc_type_flag = fi.read(4).decode('utf-8')  # type: str
        self.loc_rec_len = fi.read(8).decode('utf-8')  # type: str
        self.len_rec_len = fi.read(4).decode('utf-8')  # type: str
        fi.seek(68, os.SEEK_CUR)  # skip reserved field
        # ascii-encoded integer counts for the data records
        self.num_data_rec = int(struct.unpack('6s', fi.read(6))[0])  # type: int
        self.data_len = int(struct.unpack('6s', fi.read(6))[0])  # type: int
# Elements common to LED and TRL
class _CommonElements3(_CommonElements2):
    """
    Parser and interpreter for the elements common to LED and TRL files
    """
    __slots__ = (
        'num_map_rec', 'map_len', 'num_pos_rec', 'pos_len', 'num_att_rec', 'att_len',
        'num_rad_rec', 'rad_len', 'num_rad_comp_rec', 'rad_comp_len',
        'num_data_qual_rec', 'data_qual_len', 'num_hist_rec', 'hist_len',
        'num_rng_spect_rec', 'rng_spect_len', 'num_dem_rec', 'dem_len',
        'num_radar_rec', 'radar_len', 'num_annot_rec', 'annot_len',
        'num_proc_rec', 'proc_len', 'num_cal_rec', 'cal_len',
        'num_gcp_rec', 'gcp_len', 'num_fac_data_rec', 'fac_data_len')
    def __init__(self, fi, facility_count: int):
        super(_CommonElements3, self).__init__(fi)
        # each of the following is a (record count, record length) pair of
        # ascii-encoded integers, read in file order
        self.num_map_rec = int(fi.read(6))  # type: int
        self.map_len = int(fi.read(6))  # type: int
        self.num_pos_rec = int(fi.read(6))  # type: int
        self.pos_len = int(fi.read(6))  # type: int
        self.num_att_rec = int(fi.read(6))  # type: int
        self.att_len = int(fi.read(6))  # type: int
        self.num_rad_rec = int(fi.read(6))  # type: int
        self.rad_len = int(fi.read(6))  # type: int
        self.num_rad_comp_rec = int(fi.read(6))  # type: int
        self.rad_comp_len = int(fi.read(6))  # type: int
        self.num_data_qual_rec = int(fi.read(6))  # type: int
        self.data_qual_len = int(fi.read(6))  # type: int
        self.num_hist_rec = int(fi.read(6))  # type: int
        self.hist_len = int(fi.read(6))  # type: int
        self.num_rng_spect_rec = int(fi.read(6))  # type: int
        self.rng_spect_len = int(fi.read(6))  # type: int
        self.num_dem_rec = int(fi.read(6))  # type: int
        self.dem_len = int(fi.read(6))  # type: int
        self.num_radar_rec = int(fi.read(6))  # type: int
        self.radar_len = int(fi.read(6))  # type: int
        self.num_annot_rec = int(fi.read(6))  # type: int
        self.annot_len = int(fi.read(6))  # type: int
        self.num_proc_rec = int(fi.read(6))  # type: int
        self.proc_len = int(fi.read(6))  # type: int
        self.num_cal_rec = int(fi.read(6))  # type: int
        self.cal_len = int(fi.read(6))  # type: int
        self.num_gcp_rec = int(fi.read(6))  # type: int
        self.gcp_len = int(fi.read(6))  # type: int
        fi.seek(60, os.SEEK_CUR)  # skip reserved fields
        # the data facility records - `facility_count` pairs of (count, length)
        num_fac_data_rec = []
        fac_data_len = []
        for i in range(facility_count):
            num_fac_data_rec.append(int(fi.read(6)))
            fac_data_len.append(int(fi.read(8)))
        self.num_fac_data_rec = tuple(num_fac_data_rec)  # type: Tuple[int]
        self.fac_data_len = tuple(fac_data_len)  # type: Tuple[int]
##########
# IMG file interpretation
class _IMG_SignalElements(_BaseElements):
    """
    Parser and interpreter for the signal header part of an IMG file.

    Consumes the fixed-layout, big-endian prefix data of a CEOS signal data
    record. The file object must already be positioned at the start of the
    record; after construction the file position is just past the frame
    number field (remaining prefix fields are deliberately not parsed).

    NOTE(review): 'line_num' appears twice in ``__slots__`` (once in the
    general info group and once in the scansar group), so the value parsed
    in the general information section is overwritten by the ScanSAR line
    number read near the end of ``__init__`` - confirm this is intentional.
    """

    __slots__ = (
        # general info
        'line_num', 'sar_rec_ind', 'left_fill', 'num_pixels', 'right_fill',
        # sensor parameters
        'update_flg', 'year', 'day', 'msec', 'chan_id', 'chan_code',
        'tx_pol', 'rcv_pol', 'prf', 'scan_id', 'rng_comp_flg', 'chirp_type',
        'chirp_length', 'chirp_const', 'chirp_lin', 'chirp_quad', 'usec',
        'gain', 'invalid_flg', 'elec_ele', 'mech_ele', 'elec_squint',
        'mech_squint', 'slant_rng', 'window_position',
        # platform reference
        'pos_update_flg', 'plat_lat', 'plat_lon', 'plat_alt', 'grnd_spd',
        'vel_x', 'vel_y', 'vel_z', 'acc_x', 'acc_y', 'acc_z', 'track',
        'true_track', 'pitch', 'roll', 'yaw',
        # sensor/facility specific auxiliary data
        'lat_first', 'lat_center', 'lat_last', 'lon_first', 'lon_center',
        'lon_last',
        # scansar parameters
        'burst_num', 'line_num',
        # general
        'frame_num')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, which has been advanced to the start
            of the record.
        """

        # define the initial common header
        super(_IMG_SignalElements, self).__init__(fi)
        # prefix data - general information
        self.line_num, self.sar_rec_ind, self.left_fill, self.num_pixels, self.right_fill = \
            struct.unpack('>iiiii', fi.read(5*4))  # type: int, int, int, int, int
        # prefix data - sensor parameters
        self.update_flg, self.year, self.day, self.msec = \
            struct.unpack('>iiii', fi.read(4*4))  # type: int, int, int, int
        self.chan_id, self.chan_code, self.tx_pol, self.rcv_pol = \
            struct.unpack('>hhhh', fi.read(4*2))  # type: int, int, int, int
        self.prf, self.scan_id = struct.unpack('>ii', fi.read(2*4))  # type: int, int
        self.rng_comp_flg, self.chirp_type = struct.unpack('>hh', fi.read(2*2))  # type: int, int
        self.chirp_length, self.chirp_const, self.chirp_lin, self.chirp_quad = \
            struct.unpack('>iiii', fi.read(4*4))  # type: int, int, int, int
        # microseconds field is an unsigned 64-bit integer
        self.usec = struct.unpack('>Q', fi.read(8))[0]  # type: int
        self.gain, self.invalid_flg, self.elec_ele, self.mech_ele, \
            self.elec_squint, self.mech_squint, self.slant_rng, self.window_position = \
            struct.unpack('>iiiiiiii', fi.read(8*4))  # type: int, int, int, int, int, int, int, int
        fi.seek(4, os.SEEK_CUR)  # skip reserved fields
        # prefix data - platform reference information
        self.pos_update_flg, self.plat_lat, self.plat_lon, self.plat_alt, self.grnd_spd = \
            struct.unpack('>iiiii', fi.read(5*4))  # type: int, int, int, int, int
        self.vel_x, self.vel_y, self.vel_z, self.acc_x, self.acc_y, self.acc_z = \
            struct.unpack('>iiiiii', fi.read(6*4))  # type: int, int, int, int, int, int
        self.track, self.true_track, self.pitch, self.roll, self.yaw = \
            struct.unpack('>iiiii', fi.read(5*4))  # type: int, int, int, int, int
        # prefix data - sensor/facility auxiliary data
        self.lat_first, self.lat_center, self.lat_last = \
            struct.unpack('>iii', fi.read(3*4))  # type: int, int, int
        self.lon_first, self.lon_center, self.lon_last = \
            struct.unpack('>iii', fi.read(3*4))  # type: int, int, int
        # scan sar - NB: this overwrites the line_num parsed above
        self.burst_num, self.line_num = struct.unpack('>ii', fi.read(2*4))  # type: int, int
        fi.seek(60, os.SEEK_CUR)  # reserved field
        self.frame_num = struct.unpack('>i', fi.read(4))[0]  # type: int
        # NB: there are remaining unparsed fields of no interest before data
class _IMG_Elements(_CommonElements2):
    """
    IMG file header parsing and interpretation.

    Parses the IMG file descriptor record (fixed-width ASCII fields), then
    the prefixes of the first and last signal data records, and can build a
    lazily-evaluated complex data segment over the raw sample data.
    """

    __slots__ = (
        # sample group data
        'sample_len', 'num_samples', 'num_bytes', 'just_order',
        # SAR related data
        'num_chan', 'num_lines', 'num_left', 'num_pixels', 'num_right', 'num_top',
        'num_bottom', 'interleave',
        # record data
        'phys_rec_line', 'phys_rec_multi_chan', 'prefix_bytes', 'sar_data_bytes',
        'suffix_bytes', 'pre_suf_rpt_flg',
        # prefix/suffix data locations
        'loc_sar_data', 'loc_sar_chan_num', 'loc_time', 'loc_leftfill', 'loc_rightfill',
        'pad_pixels', 'loc_data_qual', 'loc_cal_info', 'loc_gain', 'loc_bias',
        'sar_datatype', 'sar_datatype_code', 'num_leftfill', 'num_rightfill',
        'max_data_range', 'scansar_num_bursts', 'scansar_num_lines',
        'scansar_num_overlap',
        # some reserved fields for class metadata
        '_file_name', 'signal_elements'
    )

    def __init__(self, file_name):
        """
        Parameters
        ----------
        file_name : str

        Raises
        ------
        SarpyIOError
            If the file is not recognized as an IMG file.
        """

        if _determine_file_type(file_name) != 'IMG':
            raise SarpyIOError('file {} does not appear to be an IMG file'.format(file_name))
        self._file_name = file_name  # type: str
        self.signal_elements = None  # type: Union[None, Tuple[_IMG_SignalElements]]
        with open(self._file_name, 'rb') as fi:
            # parse the common header, then the IMG-specific descriptor
            # fields, then the first/last signal record prefixes
            super(_IMG_Elements, self).__init__(fi)
            self._parse_fields(fi)
            self._basic_signal(fi)

    def _parse_fields(self, fi):
        """
        Parses all the field data.

        The file object must be positioned just past the ``data_len`` field
        of the file descriptor record. All fields are fixed-width ASCII.

        Parameters
        ----------
        fi
            The file object.

        Returns
        -------
        None
        """

        # this has advanced past the data_len field
        fi.seek(24, os.SEEK_CUR)  # skip reserved field
        # sample group data
        self.sample_len = int(fi.read(4))  # type: int
        self.num_samples = int(fi.read(4))  # type: int
        self.num_bytes = int(fi.read(4))  # type: int
        self.just_order = fi.read(4).decode('utf-8')  # type: str
        # SAR related data
        self.num_chan = int(fi.read(4))  # type: int
        self.num_lines = int(fi.read(8))  # type: int
        self.num_left = int(fi.read(4))  # type: int
        self.num_pixels = int(fi.read(8))  # type: int
        self.num_right = int(fi.read(4))  # type: int
        self.num_top = int(fi.read(4))  # type: int
        self.num_bottom = int(fi.read(4))  # type: int
        self.interleave = fi.read(4).decode('utf-8')  # type: str
        # record data
        self.phys_rec_line = int(fi.read(2))  # type: int
        self.phys_rec_multi_chan = int(fi.read(2))  # type: int
        self.prefix_bytes = int(fi.read(4))  # type: int
        self.sar_data_bytes = int(fi.read(8))  # type: int
        self.suffix_bytes = int(fi.read(4))  # type: int
        self.pre_suf_rpt_flg = fi.read(4).decode('utf-8')  # type: str
        # prefix/suffix data locations
        self.loc_sar_data = fi.read(8).decode('utf-8')  # type: str
        self.loc_sar_chan_num = fi.read(8).decode('utf-8')  # type: str
        self.loc_time = fi.read(8).decode('utf-8')  # type: str
        self.loc_leftfill = fi.read(8).decode('utf-8')  # type: str
        self.loc_rightfill = fi.read(8).decode('utf-8')  # type: str
        self.pad_pixels = fi.read(4).decode('utf-8')  # type: str
        fi.seek(28, os.SEEK_CUR)  # skip reserved fields
        self.loc_data_qual = fi.read(8).decode('utf-8')  # type: str
        self.loc_cal_info = fi.read(8).decode('utf-8')  # type: str
        self.loc_gain = fi.read(8).decode('utf-8')  # type: str
        self.loc_bias = fi.read(8).decode('utf-8')  # type: str
        self.sar_datatype = fi.read(28).decode('utf-8')  # type: str
        self.sar_datatype_code = fi.read(4).decode('utf-8')  # type: str
        self.num_leftfill = fi.read(4).decode('utf-8')  # type: str
        self.num_rightfill = fi.read(4).decode('utf-8')  # type: str
        self.max_data_range = fi.read(8).decode('utf-8')  # type: str
        # scansar fields are blank for non-ScanSAR collects (see is_scansar)
        self.scansar_num_bursts = fi.read(4).decode('utf-8')  # type: str
        self.scansar_num_lines = fi.read(4).decode('utf-8')  # type: str
        self.scansar_num_overlap = fi.read(4).decode('utf-8')  # type: str
        fi.seek(260, os.SEEK_CUR)  # skip reserved fields

    def _parse_signal(self, fi, index):
        """
        Parse the signal element at the given index.

        Parameters
        ----------
        fi
            The open file object.
        index : int
            Signal record index, in the range ``[0, num_data_rec)``.

        Returns
        -------
        _IMG_SignalElements

        Raises
        ------
        KeyError
            If `index` is out of bounds.
        """

        index = int(index)
        if not (0 <= index < self.num_data_rec):
            raise KeyError('index {} must be in range [0, {})'.format(index, self.num_data_rec))
        # find offset for the given record, and traverse to it -
        # each record is prefix + pixel data + suffix bytes long, and the
        # records start after the file descriptor record (rec_length bytes)
        record_offset = self.rec_length + \
            (self.prefix_bytes + self.num_pixels*self.num_bytes + self.suffix_bytes)*index
        # go to the start of the given record
        fi.seek(record_offset, os.SEEK_SET)
        return _IMG_SignalElements(fi)

    def _basic_signal(self, fi):
        """
        Parse the signal portion of the IMG file.

        Only the first and last signal record prefixes are parsed; level 1.1
        data is required (file_id code 'B').

        Parameters
        ----------
        fi
            The file object.

        Returns
        -------
        None

        Raises
        ------
        ValueError
            If the file is a product image (codes 'C'/'D') or unrecognized.
        """

        # the 8th character of file_id encodes the processing level
        file_id = self.file_id[7]
        if file_id == 'B':
            # signal data records
            # we only need the first and potentially last record (for now?)
            self.signal_elements = (
                self._parse_signal(fi, 0),
                self._parse_signal(fi, self.num_data_rec-1))
        elif file_id in ['C', 'D']:
            raise ValueError(
                'IMG file {} appears to be a product image,\n\t'
                'not a level 1.1 product'.format(self._file_name))
        else:
            raise ValueError(
                'Got unhandled file_id {} in IMG file {}'.format(
                    self.file_id, self._file_name))

    @property
    def file_name(self):
        """
        str: The file name.
        """

        return self._file_name

    @property
    def is_scansar(self):
        """
        bool: Does this correspond to part of a ScanSAR collect?
        """

        # any non-blank scansar descriptor field marks a ScanSAR collect
        return len(self.scansar_num_bursts.strip()) != 0 or \
            len(self.scansar_num_lines.strip()) != 0 or \
            len(self.scansar_num_overlap.strip()) != 0

    def get_polarizations(self):
        """
        Gets the transmit and receive polarization components.

        Returns
        -------
        (str, str)
            The (transmit, receive) polarizations, each 'H' or 'V', or
            ``(None, None)`` when no signal elements were parsed.
        """

        if self.signal_elements is None or len(self.signal_elements) < 1:
            return None, None
        signal = self.signal_elements[0]
        # polarization code 0 maps to 'H', anything else to 'V'
        tx_pol = 'H' if signal.tx_pol == 0 else 'V'
        rcv_pol = 'H' if signal.rcv_pol == 0 else 'V'
        return tx_pol, rcv_pol

    def construct_data_segment(self, flip_pixels):
        """
        Construct the data segment associated with the IMG file.

        The raw file stores each line as interleaved (real, imaginary)
        entries; the segment presents this as complex64 data, transposed
        so pixels form the first formatted dimension.

        Parameters
        ----------
        flip_pixels : bool
            Should we flip in the pixel (2nd raw) dimension?

        Returns
        -------
        DataSegment

        Raises
        ------
        ValueError
            If the pixel size or data type code is unsupported.
        """

        reverse_axes = (1, ) if flip_pixels else None
        pixel_size = self.num_bytes
        prefix_bytes = self.prefix_bytes
        suffix_bytes = self.suffix_bytes
        if pixel_size == 8:
            # complex float32 pairs (big-endian)
            sar_datatype_code = self.sar_datatype_code.strip()
            if sar_datatype_code != 'C*8':
                raise ValueError(
                    'sar_datatype_code is expected to be "C*8", got "{}"'.format(sar_datatype_code))
            raw_dtype = numpy.dtype('>f4')
        elif pixel_size == 4:
            # complex int16 pairs (big-endian)
            raw_dtype = numpy.dtype('>i2')
        else:
            raise ValueError('Got unhandled pixel size = {}'.format(pixel_size))
        # each complex pixel consists of two entries of half the pixel size
        entry_pixel_size = int(pixel_size/2)
        if (prefix_bytes % entry_pixel_size) != 0:
            raise ValueError(
                'prefix size ({}) is not compatible with pixel size ({})'.format(prefix_bytes, pixel_size))
        if (suffix_bytes % entry_pixel_size) != 0:
            raise ValueError(
                'suffix size ({}) is not compatible with pixel size ({})'.format(suffix_bytes, pixel_size))
        # view the whole record (prefix + data + suffix) as raw entries
        pref_cols = int(prefix_bytes/entry_pixel_size)
        suf_cols = int(suffix_bytes/entry_pixel_size)
        raw_shape = (self.num_lines, pref_cols + 2*self.num_pixels + suf_cols)
        parent_data_segment = NumpyMemmapSegment(
            self._file_name, self.rec_length, raw_dtype, raw_shape, mode='r', close_file=True)
        # real entries at even offsets, imaginary at odd, past the prefix
        real_subset_def = (slice(0, self.num_lines, 1), slice(pref_cols, pref_cols + 2*self.num_pixels, 2))
        imag_subset_def = (slice(0, self.num_lines, 1), slice(pref_cols+1, pref_cols + 2*self.num_pixels, 2))
        real_subset = SubsetSegment(parent_data_segment, real_subset_def, coordinate_basis='raw', close_parent=True)
        imag_subset = SubsetSegment(parent_data_segment, imag_subset_def, coordinate_basis='raw', close_parent=True)
        # transpose so the formatted shape is (pixels, lines, ...)
        formatted_shape = real_subset.raw_shape[::-1]
        return BandAggregateSegment(
            (real_subset, imag_subset), 2,
            formatted_dtype='complex64', formatted_shape=formatted_shape,
            reverse_axes=reverse_axes, transpose_axes=(1, 0, 2),
            format_function=ComplexFormatFunction(raw_dtype, order='IQ', band_dimension=2),
            close_children=True)
###########
# LED file interpretation
class _LED_Data(_BaseElements):
    """
    The data set summary record in the LED file.

    Fields are fixed-width ASCII and are consumed strictly sequentially from
    the file object, which must be positioned at the start of the record; on
    completion the position is advanced to the end of the record.
    """

    __slots__ = (
        'rec_seq', 'sar_id', 'scene_id', 'num_scene_ref', 'scene_ctr_time',
        'geo_lat', 'geo_long', 'heading', 'ellips', 'semimajor', 'semiminor',
        'earth_mass', 'grav_const', 'J2', 'J3', 'J4', 'avg_terr', 'ctr_line',
        'ctr_pixel', 'proc_length', 'proc_width', 'sar_chan', 'platform',
        'sensor_id_mode', 'orbit', 'sensor_lat', 'sensor_lon', 'sensor_heading',
        'clock_angle', 'incidence', 'wavelength', 'mocomp', 'range_pulse_code',
        'range_pulse_amp_coef', 'range_pulse_phs_coef', 'chirp_index',
        'sampling_rate', 'range_gate', 'pulse_width', 'baseband_flg',
        'range_compressed_flg', 'rec_gain_like_pol', 'rec_gain_cross_pol',
        'quant_bit', 'quant_desc', 'dc_bias_i', 'dc_bias_q', 'gain_imbalance',
        'elec_bores', 'mech_bores', 'echo_tracker', 'prf', 'ant_beam_2way_el',
        'ant_beam_2way_az', 'sat_time', 'sat_clock', 'sat_clock_inc',
        'proc_fac', 'proc_sys', 'proc_ver', 'proc_fac_code', 'proc_lvl_code',
        'prod_type', 'proc_alg', 'num_look_az', 'num_look_rng', 'bw_per_look_az',
        'bw_per_look_rng', 'bw_az', 'bw_rng', 'wgt_az', 'wgt_rng',
        'data_input_src', 'res_grnd_rng', 'res_az', 'rad_bias', 'rad_gain',
        'at_dop', 'xt_dop', 'time_dir_pixel', 'time_dir_line',
        'at_dop_rate', 'xt_dop_rate', 'line_constant', 'clutter_lock_flg',
        'autofocus_flg', 'line_spacing', 'pixel_spacing', 'rng_comp_des', 'dop_freq',
        'cal_mode_loc_flag', 'start_line_cal_start', 'end_line_cal_start',
        'start_line_cal_end', 'end_line_cal_end',
        'prf_switch', 'prf_switch_line', 'beam_ctr_dir', 'yaw_steer_flag',
        'param_table_num', 'off_nadir', 'ant_beam_num', 'incidence_ang',
        'num_annot')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the data set
            summary record.
        """

        start_loc = fi.tell()
        super(_LED_Data, self).__init__(fi)
        # scene identification
        self.rec_seq = fi.read(4).decode('utf-8')  # type: str
        self.sar_id = fi.read(4).decode('utf-8')  # type: str
        self.scene_id = fi.read(32).decode('utf-8')  # type: str
        self.num_scene_ref = fi.read(16).decode('utf-8')  # type: str
        self.scene_ctr_time = fi.read(32).decode('utf-8')  # type: str
        fi.seek(16, os.SEEK_CUR)  # skip reserved fields
        self.geo_lat = fi.read(16).decode('utf-8')  # type: str
        self.geo_long = fi.read(16).decode('utf-8')  # type: str
        self.heading = fi.read(16).decode('utf-8')  # type: str
        # earth/ellipsoid model parameters
        self.ellips = fi.read(16).decode('utf-8')  # type: str
        self.semimajor = fi.read(16).decode('utf-8')  # type: str
        self.semiminor = fi.read(16).decode('utf-8')  # type: str
        self.earth_mass = fi.read(16).decode('utf-8')  # type: str
        self.grav_const = fi.read(16).decode('utf-8')  # type: str
        self.J2 = fi.read(16).decode('utf-8')  # type: str
        self.J3 = fi.read(16).decode('utf-8')  # type: str
        self.J4 = fi.read(16).decode('utf-8')  # type: str
        fi.seek(16, os.SEEK_CUR)  # skip reserved fields
        self.avg_terr = fi.read(16).decode('utf-8')  # type: str
        self.ctr_line = int(fi.read(8))  # type: int
        self.ctr_pixel = int(fi.read(8))  # type: int
        self.proc_length = fi.read(16).decode('utf-8')  # type: str
        self.proc_width = fi.read(16).decode('utf-8')  # type: str
        fi.seek(16, os.SEEK_CUR)  # skip reserved fields
        self.sar_chan = fi.read(4).decode('utf-8')  # type: str
        fi.seek(4, os.SEEK_CUR)  # skip reserved fields
        # platform/sensor parameters
        self.platform = fi.read(16).decode('utf-8')  # type: str
        self.sensor_id_mode = fi.read(32).decode('utf-8')  # type: str
        self.orbit = fi.read(8).decode('utf-8')  # type: str
        self.sensor_lat = fi.read(8).decode('utf-8')  # type: str
        self.sensor_lon = fi.read(8).decode('utf-8')  # type: str
        self.sensor_heading = fi.read(8).decode('utf-8')  # type: str
        self.clock_angle = float(fi.read(8))  # type: float
        self.incidence = float(fi.read(8))  # type: float
        fi.seek(8, os.SEEK_CUR)  # skip reserved fields
        self.wavelength = float(fi.read(16))  # type: float
        self.mocomp = fi.read(2).decode('utf-8')  # type: str
        self.range_pulse_code = fi.read(16).decode('utf-8')  # type: str
        self.range_pulse_amp_coef = [fi.read(16).decode('utf-8') for _ in range(5)]  # type: List[str]
        self.range_pulse_phs_coef = [fi.read(16).decode('utf-8') for _ in range(5)]  # type: List[str]
        self.chirp_index = fi.read(8).decode('utf-8')  # type: str
        fi.seek(8, os.SEEK_CUR)  # skip reserved fields
        self.sampling_rate = float(fi.read(16))  # type: float
        self.range_gate = float(fi.read(16))  # type: float
        self.pulse_width = float(fi.read(16))  # type: float
        self.baseband_flg = fi.read(4).decode('utf-8')  # type: str
        self.range_compressed_flg = fi.read(4).decode('utf-8')  # type: str
        self.rec_gain_like_pol = float(fi.read(16))  # type: float
        self.rec_gain_cross_pol = float(fi.read(16))  # type: float
        self.quant_bit = fi.read(8).decode('utf-8')  # type: str
        self.quant_desc = fi.read(12).decode('utf-8')  # type: str
        self.dc_bias_i = float(fi.read(16))  # type: float
        self.dc_bias_q = float(fi.read(16))  # type: float
        self.gain_imbalance = float(fi.read(16))  # type: float
        fi.seek(32, os.SEEK_CUR)  # skip reserved fields
        self.elec_bores = float(fi.read(16))  # type: float
        self.mech_bores = float(fi.read(16))  # type: float
        self.echo_tracker = fi.read(4).decode('utf-8')  # type: str
        self.prf = float(fi.read(16))  # type: float
        self.ant_beam_2way_el = float(fi.read(16))  # type: float
        self.ant_beam_2way_az = float(fi.read(16))  # type: float
        self.sat_time = fi.read(16).decode('utf-8')  # type: str
        self.sat_clock = fi.read(32).decode('utf-8')  # type: str
        self.sat_clock_inc = fi.read(16).decode('utf-8')  # type: str
        # processing parameters
        self.proc_fac = fi.read(16).decode('utf-8')  # type: str
        self.proc_sys = fi.read(8).decode('utf-8')  # type: str
        self.proc_ver = fi.read(8).decode('utf-8')  # type: str
        self.proc_fac_code = fi.read(16).decode('utf-8')  # type: str
        self.proc_lvl_code = fi.read(16).decode('utf-8')  # type: str
        self.prod_type = fi.read(32).decode('utf-8')  # type: str
        self.proc_alg = fi.read(32).decode('utf-8')  # type: str
        self.num_look_az = float(fi.read(16))  # type: float
        self.num_look_rng = float(fi.read(16))  # type: float
        self.bw_per_look_az = float(fi.read(16))  # type: float
        self.bw_per_look_rng = float(fi.read(16))  # type: float
        self.bw_az = float(fi.read(16))  # type: float
        self.bw_rng = float(fi.read(16))  # type: float
        self.wgt_az = fi.read(32).decode('utf-8')  # type: str
        self.wgt_rng = fi.read(32).decode('utf-8')  # type: str
        self.data_input_src = fi.read(16).decode('utf-8')  # type: str
        self.res_grnd_rng = fi.read(16).decode('utf-8')  # type: str
        # per the CEOS data set summary layout, the azimuth resolution field
        # precedes the radiometric bias/gain fields (read order was
        # previously swapped, putting the bias bytes into res_az and vice versa)
        self.res_az = fi.read(16).decode('utf-8')  # type: str
        self.rad_bias = fi.read(16).decode('utf-8')  # type: str
        self.rad_gain = fi.read(16).decode('utf-8')  # type: str
        # doppler parameters
        self.at_dop = [float(fi.read(16)) for _ in range(3)]  # type: List[float]
        fi.seek(16, os.SEEK_CUR)  # skip reserved fields
        self.xt_dop = [float(fi.read(16)) for _ in range(3)]  # type: List[float]
        self.time_dir_pixel = fi.read(8).decode('utf-8')  # type: str
        self.time_dir_line = fi.read(8).decode('utf-8')  # type: str
        self.at_dop_rate = [float(fi.read(16)) for _ in range(3)]  # type: List[float]
        fi.seek(16, os.SEEK_CUR)  # skip reserved fields
        self.xt_dop_rate = [float(fi.read(16)) for _ in range(3)]  # type: List[float]
        fi.seek(16, os.SEEK_CUR)  # skip reserved fields
        self.line_constant = fi.read(8).decode('utf-8')  # type: str
        self.clutter_lock_flg = fi.read(4).decode('utf-8')  # type: str
        self.autofocus_flg = fi.read(4).decode('utf-8')  # type: str
        self.line_spacing = float(fi.read(16))  # type: float
        self.pixel_spacing = float(fi.read(16))  # type: float
        self.rng_comp_des = fi.read(16).decode('utf-8')  # type: str
        self.dop_freq = [float(fi.read(16)) for _ in range(2)]  # type: List[float]
        # calibration data location
        self.cal_mode_loc_flag = fi.read(4).decode('utf-8')  # type: str
        self.start_line_cal_start = fi.read(8).decode('utf-8')  # type: str
        self.end_line_cal_start = fi.read(8).decode('utf-8')  # type: str
        self.start_line_cal_end = fi.read(8).decode('utf-8')  # type: str
        self.end_line_cal_end = fi.read(8).decode('utf-8')  # type: str
        self.prf_switch = fi.read(4).decode('utf-8')  # type: str
        self.prf_switch_line = fi.read(8).decode('utf-8')  # type: str
        self.beam_ctr_dir = float(fi.read(16))  # type: float
        self.yaw_steer_flag = fi.read(4).decode('utf-8')  # type: str
        self.param_table_num = fi.read(4).decode('utf-8')  # type: str
        self.off_nadir = float(fi.read(16))  # type: float
        self.ant_beam_num = fi.read(4).decode('utf-8')  # type: str
        fi.seek(28, os.SEEK_CUR)  # skip reserved fields
        self.incidence_ang = [float(fi.read(20)) for _ in range(6)]  # type: List[float]
        self.num_annot = float(fi.read(8))  # type: float
        fi.seek(start_loc + self.rec_length, os.SEEK_SET)  # skip reserved fields, for map projection?
class _LED_Position(_BaseElements):
    """
    The position summary record in the LED file.

    Parses the state vector collection (positions and velocities at a fixed
    time interval) together with its reference epoch and error estimates.
    The file object must be positioned at the start of the record; it is
    advanced to the end of the record on completion.
    """

    __slots__ = (
        'orb_elem', 'pos', 'vel', 'num_pts', 'year', 'month', 'day', 'day_in_year',
        'sec', 'int', 'ref_coord_sys', 'greenwich_mean_hr_ang',
        'at_pos_err', 'ct_pos_err', 'rad_pos_err',
        'at_vel_err', 'ct_vel_err', 'rad_vel_err',
        'pts_pos', 'pts_vel', 'leap_sec')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the position record.
        """

        start_pos = fi.tell()
        super(_LED_Position, self).__init__(fi)
        self.orb_elem = fi.read(32).decode('utf-8')  # type: str
        # initial state vector
        self.pos = numpy.array([float(fi.read(16)) for _ in range(3)], dtype='float64')  # type: numpy.ndarray
        self.vel = numpy.array([float(fi.read(16)) for _ in range(3)], dtype='float64')  # type: numpy.ndarray
        # reference epoch and sample interval for the data points below
        self.num_pts = int(fi.read(4))  # type: int
        self.year = int(fi.read(4))  # type: int
        self.month = int(fi.read(4))  # type: int
        self.day = int(fi.read(4))  # type: int
        self.day_in_year = int(fi.read(4))  # type: int
        self.sec = float(fi.read(22))  # type: float
        self.int = float(fi.read(22))  # type: float
        self.ref_coord_sys = fi.read(64).decode('utf-8')  # type: str
        self.greenwich_mean_hr_ang = fi.read(22).decode('utf-8')  # type: str
        # position/velocity error estimates (along-track, cross-track, radial)
        self.at_pos_err = float(fi.read(16))  # type: float
        self.ct_pos_err = float(fi.read(16))  # type: float
        self.rad_pos_err = float(fi.read(16))  # type: float
        self.at_vel_err = float(fi.read(16))  # type: float
        self.ct_vel_err = float(fi.read(16))  # type: float
        self.rad_vel_err = float(fi.read(16))  # type: float
        # state vectors - position/velocity entries are interleaved per point
        self.pts_pos = numpy.zeros((self.num_pts, 3), dtype='float64')  # type: numpy.ndarray
        self.pts_vel = numpy.zeros((self.num_pts, 3), dtype='float64')  # type: numpy.ndarray
        for i in range(self.num_pts):
            self.pts_pos[i, :] = [float(fi.read(22)) for _ in range(3)]
            self.pts_vel[i, :] = [float(fi.read(22)) for _ in range(3)]
        fi.seek(18, os.SEEK_CUR)  # skip reserved fields
        self.leap_sec = fi.read(1).decode('utf-8')  # type: str
        fi.seek(start_pos + self.rec_length, os.SEEK_SET)  # skip reserved fields
class _LED_AttitudePoint(object):
"""
An attitude point.
"""
__slots__ = (
'day_year', 'msec_day',
'pitch_flag', 'roll_flag', 'yaw_flag',
'pitch', 'roll', 'yaw',
'pitch_rate_flag', 'roll_rate_flag', 'yaw_rate_flag',
'pitch_rate', 'roll_rate', 'yaw_rate')
def __init__(self, fi):
self.day_year = int(fi.read(4)) # type: int
self.msec_day = int(fi.read(8)) # type: int
self.pitch_flag = fi.read(4).decode('utf-8') # type: str
self.roll_flag = fi.read(4).decode('utf-8') # type: str
self.yaw_flag = fi.read(4).decode('utf-8') # type: str
self.pitch = float(fi.read(14)) # type: float
self.roll = float(fi.read(14)) # type: float
self.yaw = float(fi.read(14)) # type: float
self.pitch_rate_flag = fi.read(4).decode('utf-8') # type: str
self.roll_rate_flag = fi.read(4).decode('utf-8') # type: str
self.yaw_rate_flag = fi.read(4).decode('utf-8') # type: str
self.pitch_rate = float(fi.read(14)) # type: float
self.roll_rate = float(fi.read(14)) # type: float
self.yaw_rate = float(fi.read(14)) # type: float
class _LED_Attitude(_BaseElements):
    """
    The attitude summary record in the LED file.

    Holds the parsed attitude data points; the file object must be
    positioned at the start of the record, and is advanced to the end of
    the record on completion.
    """

    __slots__ = (
        'num_pts', 'pts')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the attitude record.
        """

        record_start = fi.tell()
        super(_LED_Attitude, self).__init__(fi)
        self.num_pts = int(fi.read(4))  # type: int
        points = []
        for _ in range(self.num_pts):
            points.append(_LED_AttitudePoint(fi))
        self.pts = points
        # jump past any remaining reserved bytes of the record
        fi.seek(record_start + self.rec_length, os.SEEK_SET)
class _LED_Radiometric(_BaseElements):
    """
    The radiometric summary record in the LED file.

    Parses the calibration factor together with the 2x2 complex transmit and
    receive distortion matrices. The file object must be positioned at the
    start of the record; it is advanced to the end of the record on completion.
    """

    __slots__ = (
        'seq_num', 'num_pts', 'cal_factor', 'tx_distortion', 'rcv_distortion')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the radiometric record.
        """

        start_loc = fi.tell()
        super(_LED_Radiometric, self).__init__(fi)
        self.seq_num = int(fi.read(4))  # type: int
        self.num_pts = int(fi.read(4))  # type: int
        self.cal_factor = float(fi.read(16))  # type: float
        # distortion matrices stored row-major as (real, imaginary) ASCII pairs
        self.tx_distortion = numpy.zeros((2, 2), dtype='complex128')  # type: numpy.ndarray
        self.rcv_distortion = numpy.zeros((2, 2), dtype='complex128')  # type: numpy.ndarray
        for i in range(2):
            for j in range(2):
                self.tx_distortion[i, j] = complex(real=float(fi.read(16)), imag=float(fi.read(16)))
        for i in range(2):
            for j in range(2):
                self.rcv_distortion[i, j] = complex(real=float(fi.read(16)), imag=float(fi.read(16)))
        fi.seek(start_loc + self.rec_length, os.SEEK_SET)  # skip remaining reserved
class _LED_DataQuality(_BaseElements):
    """
    The data quality summary record in the LED file.

    Fields are fixed-width ASCII, consumed sequentially; the file object must
    be positioned at the start of the record, and is advanced to the end of
    the record on completion.
    """

    __slots__ = (
        'dq_rec_num', 'chan_id', 'date', 'num_chans', 'islr', 'pslr',
        'aar', 'rar', 'snr', 'ber', 'sr_res', 'az_res', 'rad_res', 'dyn_rng',
        'abs_cal_mag', 'abs_cal_phs', 'rel_cal_mag', 'rel_cal_phs',
        'abs_err_at', 'abs_err_ct', 'distort_line', 'distort_pixel',
        'distort_skew', 'orient_err', 'at_misreg_err', 'ct_misreg_err')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the data quality record.
        """

        start_loc = fi.tell()
        super(_LED_DataQuality, self).__init__(fi)
        self.dq_rec_num = fi.read(4).decode('utf-8')  # type: str
        self.chan_id = fi.read(4).decode('utf-8')  # type: str
        self.date = fi.read(6).decode('utf-8')  # type: str
        self.num_chans = int(fi.read(4))  # type: int
        # radiometric quality measures
        self.islr = _make_float(fi.read(16))  # type: float
        self.pslr = _make_float(fi.read(16))  # type: float
        self.aar = _make_float(fi.read(16))  # type: float
        self.rar = _make_float(fi.read(16))  # type: float
        self.snr = _make_float(fi.read(16))  # type: float
        self.ber = fi.read(16).decode('utf-8')  # type: str
        self.sr_res = _make_float(fi.read(16))  # type: float
        self.az_res = _make_float(fi.read(16))  # type: float
        self.rad_res = fi.read(16).decode('utf-8')  # type: str
        self.dyn_rng = fi.read(16).decode('utf-8')  # type: str
        self.abs_cal_mag = fi.read(16).decode('utf-8')  # type: str
        self.abs_cal_phs = fi.read(16).decode('utf-8')  # type: str
        # per-channel relative calibration values
        self.rel_cal_mag = numpy.zeros((self.num_chans, ), dtype='float64')  # type: numpy.ndarray
        self.rel_cal_phs = numpy.zeros((self.num_chans, ), dtype='float64')  # type: numpy.ndarray
        for i in range(self.num_chans):
            self.rel_cal_mag[i] = _make_float(fi.read(16))
            self.rel_cal_phs[i] = _make_float(fi.read(16))
        # the relative calibration block is a fixed 512 bytes in total
        fi.seek(512 - self.num_chans*32, os.SEEK_CUR)  # skip reserved
        # geometric quality measures
        self.abs_err_at = fi.read(16).decode('utf-8')  # type: str
        self.abs_err_ct = fi.read(16).decode('utf-8')  # type: str
        self.distort_line = fi.read(16).decode('utf-8')  # type: str
        # the pixel-direction distortion field was previously skipped
        # entirely, leaving the distort_pixel slot unset and shifting the
        # remaining geometric fields by 16 bytes
        self.distort_pixel = fi.read(16).decode('utf-8')  # type: str
        self.distort_skew = fi.read(16).decode('utf-8')  # type: str
        self.orient_err = fi.read(16).decode('utf-8')  # type: str
        # per-channel misregistration errors (along-track, cross-track)
        self.at_misreg_err = numpy.zeros((self.num_chans, ), dtype='float64')  # type: numpy.ndarray
        self.ct_misreg_err = numpy.zeros((self.num_chans, ), dtype='float64')  # type: numpy.ndarray
        for i in range(self.num_chans):
            self.at_misreg_err[i] = _make_float(fi.read(16))
            self.ct_misreg_err[i] = _make_float(fi.read(16))
        fi.seek(start_loc + self.rec_length, os.SEEK_SET)  # skip reserved
class _LED_Facility(_BaseElements):
    """
    The facility related record in the LED file.

    By default only the sequence number is parsed and the remainder of the
    record is skipped, because full parsing of this record is too failure
    prone; pass ``parse_all=True`` to attempt the full field layout,
    including the pixel/line <-> lat/lon polynomial mappings.
    """

    __slots__ = (
        'fac_seq_num', 'mapproj2pix', 'mapproj2line', 'cal_mode_data_loc_flg',
        'start_line_upper', 'end_line_upper',
        'start_line_bottom', 'end_line_bottom',
        'prf_switch_flag', 'prf_switch_line',
        'num_loss_lines_10', 'num_loss_lines_11',
        'pixelline2lat', 'pixelline2lon', 'origin_pixel', 'origin_line',
        'latlon2pixel', 'latlon2line', 'origin_lat', 'origin_lon')

    def __init__(self, fi, parse_all=False):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the facility record.
        parse_all : bool
            If False (the default), only the sequence number is read and the
            rest of the record is skipped.
        """

        start_loc = fi.tell()
        super(_LED_Facility, self).__init__(fi)
        self.fac_seq_num = int(fi.read(4))  # type: int
        if not parse_all:
            # skip straight to the end of the record
            fi.seek(start_loc + self.rec_length, os.SEEK_SET)
            return
        # map projection to pixel/line polynomial coefficients
        self.mapproj2pix = numpy.zeros((10, ), dtype='float64')  # type: numpy.ndarray
        for i in range(10):
            self.mapproj2pix[i] = _make_float(fi.read(20))
        self.mapproj2line = numpy.zeros((10, ), dtype='float64')  # type: numpy.ndarray
        for i in range(10):
            self.mapproj2line[i] = _make_float(fi.read(20))
        self.cal_mode_data_loc_flg = fi.read(4).decode('utf-8')  # type: str
        self.start_line_upper = fi.read(8).decode('utf-8')  # type: str
        self.end_line_upper = fi.read(8).decode('utf-8')  # type: str
        self.start_line_bottom = fi.read(8).decode('utf-8')  # type: str
        self.end_line_bottom = fi.read(8).decode('utf-8')  # type: str
        self.prf_switch_flag = fi.read(4).decode('utf-8')  # type: str
        self.prf_switch_line = fi.read(8).decode('utf-8')  # type: str
        fi.seek(8, os.SEEK_CUR)  # skip reserved field
        self.num_loss_lines_10 = fi.read(8).decode('utf-8')  # type: str
        self.num_loss_lines_11 = fi.read(8).decode('utf-8')  # type: str
        fi.seek(312, os.SEEK_CUR)  # skip empty fields
        fi.seek(224, os.SEEK_CUR)  # skip reserved fields
        # (pixel, line) -> latitude/longitude polynomial coefficients
        self.pixelline2lat = numpy.zeros((25, ), dtype='float64')  # type: numpy.ndarray
        self.pixelline2lon = numpy.zeros((25, ), dtype='float64')  # type: numpy.ndarray
        for i in range(25):
            self.pixelline2lat[i] = _make_float(fi.read(20))
        for i in range(25):
            self.pixelline2lon[i] = _make_float(fi.read(20))
        self.origin_pixel = float(fi.read(20))  # type: float
        self.origin_line = float(fi.read(20))  # type: float
        # (latitude, longitude) -> pixel/line polynomial coefficients
        self.latlon2pixel = numpy.zeros((25, ), dtype='float64')  # type: numpy.ndarray
        self.latlon2line = numpy.zeros((25, ), dtype='float64')  # type: numpy.ndarray
        for i in range(25):
            self.latlon2pixel[i] = _make_float(fi.read(20))
        for i in range(25):
            self.latlon2line[i] = _make_float(fi.read(20))
        self.origin_lat = float(fi.read(20))  # type: float
        self.origin_lon = float(fi.read(20))  # type: float
        fi.seek(start_loc + self.rec_length, os.SEEK_SET)  # skip empty fields
class _LED_Elements(_CommonElements3):
    """
    LED file header parsing and interpretation.

    Parses the common header followed by the data set summary, position,
    attitude, radiometric, data quality, and facility records of a level 1.1
    LED (leader) file.
    """

    __slots__ = (
        '_file_name', 'data', 'position', 'attitude', 'radiometric',
        'data_quality', 'facility')

    def __init__(self, file_name):
        """
        Parameters
        ----------
        file_name : str

        Raises
        ------
        SarpyIOError
            If the file is not recognized as an LED file.
        """

        if _determine_file_type(file_name) != 'LED':
            raise SarpyIOError('file {} does not appear to be an LED file'.format(file_name))
        self._file_name = file_name  # type: str
        with open(self._file_name, 'rb') as fi:
            # common header declares 5 facility-related records
            _CommonElements3.__init__(self, fi, 5)
            fi.seek(230, os.SEEK_CUR)  # skip reserved fields
            self.data = _LED_Data(fi)  # type: _LED_Data
            if self.num_map_rec > 0:
                # skip any map projection
                # should not be present for level 1.1
                fi.seek(self.map_len, os.SEEK_CUR)
            self.position = _LED_Position(fi)  # type: _LED_Position
            self.attitude = _LED_Attitude(fi)  # type: _LED_Attitude
            self.radiometric = _LED_Radiometric(fi)  # type: _LED_Radiometric
            self.data_quality = _LED_DataQuality(fi)  # type: _LED_DataQuality
            # facility records are only skimmed (parse_all=False)
            facilities = []
            for i in range(5):
                facilities.append(_LED_Facility(fi, parse_all=False))
            self.facility = facilities

    @property
    def file_name(self):
        """
        str: The file name.
        """

        return self._file_name
############
# TRL file interpretation
class _TRL_LowResRecord(object):
"""
Low resolution record in TRL file.
"""
__slots__ = ('length', 'pixels', 'lines', 'bytes')
def __init__(self, fi):
self.length = int(fi.read(8))
self.pixels = int(fi.read(6))
self.lines = int(fi.read(6))
self.bytes = int(fi.read(6))
class _TRL_Elements(_CommonElements3):
    """
    TRL file header parsing and interpretation.
    """

    __slots__ = (
        '_file_name', 'num_low_res_rec', 'low_res')

    def __init__(self, file_name):
        """
        Parameters
        ----------
        file_name : str

        Raises
        ------
        SarpyIOError
            If the file is not recognized as a TRL file.
        """

        if _determine_file_type(file_name) != 'TRL':
            # corrected copy/paste error - this parser handles TRL (trailer) files
            raise SarpyIOError('file {} does not appear to be a TRL file'.format(file_name))
        self._file_name = file_name  # type: str
        with open(self._file_name, 'rb') as fi:
            # common header declares 5 facility-related records
            _CommonElements3.__init__(self, fi, 5)
            self.num_low_res_rec = int(fi.read(6))  # type: int
            self.low_res = tuple([_TRL_LowResRecord(fi) for _ in range(self.num_low_res_rec)])
            fi.seek(720, os.SEEK_CUR)  # skip reserved data
            # comment carried over from matlab -
            # There seems to be an array the size of the low resolution image on
            # the end of this file, but it doesn't seem to contain any data

    @property
    def file_name(self):
        """
        str: The file name.
        """

        return self._file_name
############
# VOL file interpretation
class _VOL_File(_BaseElements2):
    """
    A file pointer record in the VOL file, describing one of the files in
    the product (name, class, type, and record extents).

    Fields are fixed-width ASCII, consumed sequentially from the file
    object, which must be positioned at the start of the record.
    """

    __slots__ = (
        'num', 'name', 'clas', 'clas_code', 'typ', 'typ_code', 'num_recs',
        'len_first_rec', 'max_rec_len', 'rec_len_type', 'rec_len_type_code',
        'phys_vol_first', 'phys_vol_last', 'rec_num_first', 'rec_num_last')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the file pointer record.
        """

        super(_VOL_File, self).__init__(fi)
        self.num = fi.read(4).decode('utf-8')  # type: str
        self.name = fi.read(16).decode('utf-8')  # type: str
        self.clas = fi.read(28).decode('utf-8')  # type: str
        self.clas_code = fi.read(4).decode('utf-8')  # type: str
        self.typ = fi.read(28).decode('utf-8')  # type: str
        self.typ_code = fi.read(4).decode('utf-8')  # type: str
        self.num_recs = int(fi.read(8))  # type: int
        self.len_first_rec = int(fi.read(8))  # type: int
        self.max_rec_len = int(fi.read(8))  # type: int
        self.rec_len_type = fi.read(12).decode('utf-8')  # type: str
        self.rec_len_type_code = fi.read(4).decode('utf-8')  # type: str
        # physical volume and record number extents of the referenced file
        self.phys_vol_first = int(fi.read(2))  # type: int
        self.phys_vol_last = int(fi.read(2))  # type: int
        self.rec_num_first = int(fi.read(8))  # type: int
        self.rec_num_last = int(fi.read(8))  # type: int
        fi.seek(200, os.SEEK_CUR)  # skipping reserved fields
class _VOL_Text(_BaseElements2):
    """
    A text record in the VOL file, carrying product/scene identification.
    """

    __slots__ = (
        'prod_id', 'location', 'phys_id', 'scene_id', 'scene_loc_id')

    def __init__(self, fi):
        """
        Parameters
        ----------
        fi
            The file object, positioned at the start of the text record.
        """

        super(_VOL_Text, self).__init__(fi)
        # fixed-width ASCII text fields, consumed in order
        for attribute, width in (
                ('prod_id', 40), ('location', 60), ('phys_id', 40),
                ('scene_id', 40), ('scene_loc_id', 40)):
            setattr(self, attribute, fi.read(width).decode('utf-8'))
        fi.seek(124, os.SEEK_CUR)  # skip reserved fields
class _VOL_Elements(_CommonElements):
    """
    VOL file header parsing and interpretation.

    Parses the volume descriptor record followed by the collection of file
    pointer records and text records.
    """

    __slots__ = (
        '_file_name', 'phys_vol_id', 'log_vol_id', 'vol_set_id', 'num_phys_vol',
        'phys_seq_first', 'phys_seq_last', 'phys_seq_cur', 'file_num', 'log_vol',
        'log_vol_num', 'log_vol_create_date', 'log_vol_create_time', 'log_vol_co',
        'log_vol_agency', 'log_vol_facility', 'num_file_ptr', 'num_text_rec',
        'files', 'texts')

    def __init__(self, file_name):
        """
        Parameters
        ----------
        file_name : str

        Raises
        ------
        SarpyIOError
            If the file is not recognized as a VOL file.
        """

        if _determine_file_type(file_name) != 'VOL':
            # corrected copy/paste error - this parser handles VOL (volume) files
            raise SarpyIOError('file {} does not appear to be a VOL file'.format(file_name))
        self._file_name = file_name  # type: str
        with open(self._file_name, 'rb') as fi:
            super(_VOL_Elements, self).__init__(fi)
            self.phys_vol_id = fi.read(16).decode('utf-8')  # type: str
            self.log_vol_id = fi.read(16).decode('utf-8')  # type: str
            self.vol_set_id = fi.read(16).decode('utf-8')  # type: str
            self.num_phys_vol = fi.read(2).decode('utf-8')  # type: str
            self.phys_seq_first = fi.read(2).decode('utf-8')  # type: str
            self.phys_seq_last = fi.read(2).decode('utf-8')  # type: str
            self.phys_seq_cur = fi.read(2).decode('utf-8')  # type: str
            self.file_num = fi.read(4).decode('utf-8')  # type: str
            self.log_vol = fi.read(4).decode('utf-8')  # type: str
            self.log_vol_num = fi.read(4).decode('utf-8')  # type: str
            self.log_vol_create_date = fi.read(8).decode('utf-8')  # type: str
            self.log_vol_create_time = fi.read(8).decode('utf-8')  # type: str
            self.log_vol_co = fi.read(12).decode('utf-8')  # type: str
            self.log_vol_agency = fi.read(8).decode('utf-8')  # type: str
            self.log_vol_facility = fi.read(12).decode('utf-8')  # type: str
            self.num_file_ptr = int(fi.read(4))  # type: int
            self.num_text_rec = int(fi.read(4))  # type: int
            fi.seek(192, os.SEEK_CUR)  # skip reserved fields
            # file pointer records precede text records
            self.files = tuple([_VOL_File(fi) for _ in range(self.num_file_ptr)])  # type: Tuple[_VOL_File]
            self.texts = tuple([_VOL_Text(fi) for _ in range(self.num_text_rec)])  # type: Tuple[_VOL_Text]

    @property
    def file_name(self):
        """
        str: The file name.
        """

        return self._file_name
#############
# The reader implementation
class PALSARDetails(object):
    """
    Parses and converts the PALSAR meta-data from the IMG/LED/TRL/VOL file
    package into SICD structures.
    """

    __slots__ = (
        '_file_name', '_img_elements', '_led_element', '_trl_element', '_vol_element')

    def __init__(self, file_name: str):
        """
        Parameters
        ----------
        file_name : str
            Path to the package directory, or to any file inside it.

        Raises
        ------
        SarpyIOError
            If the required IMG/LED files cannot be located.
        ValueError
            If the package corresponds to an (unsupported) ScanSAR collect.
        """

        self._file_name = None
        self._img_elements = None  # type: Union[None, Tuple[_IMG_Elements, ...]]
        self._led_element = None  # type: Union[None, _LED_Elements]
        self._trl_element = None  # type: Union[None, _TRL_Elements]
        self._vol_element = None  # type: Union[None, _VOL_Elements]
        self._validate_filename(file_name)
        for entry in self._img_elements:
            if entry.is_scansar:
                raise ValueError(
                    'image file {} corresponds to part of a ScanSAR collect, '
                    'which is currently unsupported'.format(entry.file_name))

    def _validate_filename(self, file_name: str) -> None:
        """
        Validate the input path, and find the associated files.

        Parameters
        ----------
        file_name : str

        Returns
        -------
        None
        """

        if not os.path.exists(file_name):
            raise SarpyIOError('path {} does not exists'.format(file_name))
        file_name = os.path.abspath(file_name)
        if os.path.isfile(file_name):
            the_dir = os.path.split(file_name)[0]
        elif os.path.isdir(file_name):
            the_dir = file_name
        else:
            raise ValueError('path {} is neither a directory or file'.format(file_name))
        self._file_name = the_dir

        # find the files of each type, identified by both name prefix and content
        img_files = []
        led_files = []
        trl_files = []
        vol_files = []
        for fil in os.listdir(the_dir):
            full_file = os.path.join(the_dir, fil)
            if fil.startswith('IMG-') and _determine_file_type(full_file) == 'IMG':
                img_files.append(full_file)
            elif fil.startswith('LED-') and _determine_file_type(full_file) == 'LED':
                led_files.append(full_file)
            elif fil.startswith('TRL-') and _determine_file_type(full_file) == 'TRL':
                trl_files.append(full_file)
            elif fil.startswith('VOL-') and _determine_file_type(full_file) == 'VOL':
                vol_files.append(full_file)
        if len(img_files) == 0:
            raise SarpyIOError('No IMG files found in directory {}'.format(the_dir))
        if len(led_files) == 0:
            raise ValueError('IMG files found, but no LED files found in directory {}'.format(the_dir))
        if len(led_files) > 1 or len(trl_files) > 1 or len(vol_files) > 1:
            raise ValueError('Multiple LED, TRL, or VOL files found in directory {}'.format(the_dir))
        self._img_elements = tuple([_IMG_Elements(entry) for entry in img_files])
        self._led_element = _LED_Elements(led_files[0])
        # TRL and VOL files are optional
        self._trl_element = _TRL_Elements(trl_files[0]) if len(trl_files) > 0 else None
        self._vol_element = _VOL_Elements(vol_files[0]) if len(vol_files) > 0 else None

    @property
    def file_name(self) -> str:
        """
        str: The parent directory.
        """

        return self._file_name

    @property
    def img_elements(self) -> Tuple[_IMG_Elements, ...]:
        """
        Tuple[_IMG_Elements, ...]: The img elements
        """

        return self._img_elements

    def _get_sicd(self,
                  index: int,
                  tx_pols: List[str],
                  tx_rcv_pols: List[str]) -> SICDType:
        """
        Gets the SICD structure for image at `index`.

        Parameters
        ----------
        index : int
        tx_pols : List[str]
        tx_rcv_pols : List[str]

        Returns
        -------
        SICDType
        """

        def get_collection_info() -> CollectionInfoType:
            # collector name is inferred from the scene id prefix
            if self._led_element.data.scene_id.startswith('ALOS2'):
                collector_name = 'ALOS2'
            elif self._led_element.data.scene_id.startswith('STRIX'):
                collector_name = self._led_element.data.scene_id[:6]
            else:
                collector_name = None
            core_name = self._led_element.data.scene_id.strip()
            mode_id = self._vol_element.texts[-1].prod_id[8:11]
            mode_type = 'SPOTLIGHT' if mode_id == 'SBS' else 'STRIPMAP'
            return CollectionInfoType(
                CollectorName=collector_name,
                CoreName=core_name,
                CollectType='MONOSTATIC',
                Classification='UNCLASSIFIED',
                RadarMode=RadarModeType(ModeID=mode_id, ModeType=mode_type))

        def get_image_creation() -> ImageCreationType:
            from sarpy.__about__ import __version__
            the_date = self._vol_element.log_vol_create_date
            the_time = self._vol_element.log_vol_create_time
            # reassemble YYYYMMDD/HHMMSSff fields into an iso-format timestring
            create_time = '{}-{}-{}T{}:{}:{}'.format(
                the_date[:4], the_date[4:6], the_date[6:8],
                the_time[:2], the_time[2:4], the_time[4:6]+'.'+the_time[6:].strip())
            site = self._vol_element.log_vol_facility.strip()
            application = '{} {}, Doc Rev {}, Rec Rev {}'.format(
                self._vol_element.doc_id.strip(),
                self._vol_element.soft_rel_rev.strip(),
                self._vol_element.doc_rev.strip(),
                self._vol_element.rec_rev.strip())
            return ImageCreationType(Application=application,
                                     DateTime=parse_timestring(create_time, precision='us'),
                                     Site=site,
                                     Profile='sarpy {}'.format(__version__))

        def get_image_data() -> ImageDataType:
            rows = img_element.num_pixels
            cols = img_element.num_lines
            if img_element.num_bytes == 8:
                pixel_type = 'RE32F_IM32F'
            elif img_element.num_bytes == 4:
                pixel_type = 'RE16I_IM16I'
            else:
                raise ValueError('Unsupported pixel size {}'.format(img_element.num_bytes))
            scp_row = led_element.data.ctr_pixel
            if led_element.data.clock_angle > 0:
                # right looking
                scp_col = led_element.data.ctr_line
            else:
                # left looking - the PALSAR azimuth coordinate is reversed wrt SICD cols
                scp_col = cols - led_element.data.ctr_line - 1
            return ImageDataType(
                PixelType=pixel_type,
                NumRows=rows,
                NumCols=cols,
                FirstRow=0,
                FirstCol=0,
                FullImage=(rows, cols),
                SCPPixel=(scp_row, scp_col))

        def get_geo_data() -> GeoDataType:
            # NB: lat/lon are expressed in 10-6 degrees, so this averages the
            # start/end values and converts to degrees in one step
            scp_lat = 5e-7*(start_signal.lat_center + end_signal.lat_center)
            scp_lon = 5e-7*(start_signal.lon_center + end_signal.lon_center)
            return GeoDataType(SCP=SCPType(LLH=[scp_lat, scp_lon, 0.0]))

        def get_timeline() -> TimelineType:
            starting_usec = start_signal.usec if start_signal.usec != 0 else 1000*start_signal.msec
            ending_usec = end_signal.usec if end_signal.usec != 0 else 1000*end_signal.msec
            start_time = numpy.datetime64('{0:04d}-01-01'.format(start_signal.year), 'us') + \
                numpy.timedelta64((start_signal.day-1)*86400*1000000 + starting_usec, 'us')
            end_time = numpy.datetime64('{0:04d}-01-01'.format(end_signal.year), 'us') + \
                numpy.timedelta64((end_signal.day-1)*86400*1000000 + ending_usec, 'us')
            duration = get_seconds(end_time, start_time, precision='us')
            # NB: I opt to calculate duration this way instead of subtracting usec directly,
            # just in case midnight UTC occurs during the collect
            prf = start_signal.prf*1e-3
            return TimelineType(
                CollectStart=start_time,
                CollectDuration=duration,
                IPP=[IPPSetType(TStart=0,
                                TEnd=duration,
                                IPPStart=0,
                                IPPEnd=round(prf*duration) - 1,
                                IPPPoly=[0, prf]), ])

        def get_position() -> PositionType:
            pos_element = led_element.position
            position_start = numpy.datetime64(
                '{0:04d}-{1:02d}-{2:02d}'.format(pos_element.year, pos_element.month, pos_element.day), 'us') + \
                numpy.timedelta64(int(pos_element.sec*1000000), 'us')
            arp_pos = pos_element.pts_pos
            arp_vel = pos_element.pts_vel
            diff_time = get_seconds(position_start, timeline.CollectStart, precision='us')
            times_s = diff_time + numpy.arange(pos_element.num_pts)*pos_element.int
            # only fit using state vectors within 5.5 minutes of the collect
            mask = (times_s >= -5.5*60) & (times_s <= 5.5*60)
            P_x, P_y, P_z = fit_position_xvalidation(
                times_s[mask], arp_pos[mask, :], arp_vel[mask, :], max_degree=8)
            return PositionType(ARPPoly=XYZPolyType(X=P_x, Y=P_y, Z=P_z))

        def get_radar_collection() -> RadarCollectionType:
            data = led_element.data
            bw = data.bw_rng*1e3  # NB: bandwidth is given in strange units?
            tx_freq_min = center_frequency - bw*0.5  # NB: bandwidth is given in milliHz
            tx_freq_max = center_frequency + bw*0.5
            if data.range_pulse_code.strip() == 'LINEAR FM CHIRP':
                waveform = [WaveformParametersType(
                    TxPulseLength=data.pulse_width*1e-6,
                    TxRFBandwidth=bw,
                    TxFreqStart=tx_freq_min,
                    TxFMRate=float(data.range_pulse_amp_coef[1]),
                    RcvDemodType='CHIRP',
                    ADCSampleRate=data.sampling_rate*1e6), ]
            else:
                logger.error(
                    'Got unexpected range_pulse_code "{}", no waveform details '
                    'populated'.format(data.range_pulse_code))
                waveform = None

            txps = list(set(tx_pols))
            if len(txps) == 1:
                txp = tx_pol
                tx_sequence = None
            else:
                txp = 'SEQUENCE'
                tx_sequence = [TxStepType(TxPolarization=tx_p, index=j+1) for j, tx_p in enumerate(txps)]
            return RadarCollectionType(
                TxPolarization=txp,
                TxSequence=tx_sequence,
                TxFrequency=(tx_freq_min, tx_freq_max),
                Waveform=waveform,
                RcvChannels=[
                    ChanParametersType(TxRcvPolarization=tx_rcv_p, index=j+1)
                    for j, tx_rcv_p in enumerate(tx_rcv_pols)])

        def get_image_formation() -> ImageFormationType:
            az_autofocus = 'GLOBAL' if led_element.data.autofocus_flg.strip() == 'YES' else 'NO'
            tx_min_freq = radar_collection.TxFrequency.Min
            tx_max_freq = radar_collection.TxFrequency.Max
            return ImageFormationType(
                TxRcvPolarizationProc=tx_rcv_pol,
                TxFrequencyProc=(tx_min_freq, tx_max_freq),
                TStartProc=0,
                TEndProc=timeline.CollectDuration,
                ImageFormAlgo='RMA',
                STBeamComp='NO',
                ImageBeamComp='NO',
                AzAutofocus=az_autofocus,
                RgAutofocus='NO',
                RcvChanProc=RcvChanProcType(NumChanProc=1,
                                            ChanIndices=[index+1, ]))

        def get_radiometric() -> RadiometricType:
            # calibration factor is given in dB, with a 32 dB offset
            sigma_zero = 10**(0.1*(led_element.radiometric.cal_factor - 32))
            return RadiometricType(SigmaZeroSFPoly=[[sigma_zero, ]])

        def get_error_stats() -> ErrorStatisticsType:
            pos_element = led_element.position
            range_bias = 1e-2
            # NB: there is a comment in the matlab code for range bias error:
            #  "Don't know this. Just put a small number."
            return ErrorStatisticsType(
                Components=ErrorComponentsType(
                    PosVelErr=PosVelErrType(
                        Frame='RIC_ECF',
                        P1=pos_element.rad_pos_err if numpy.isfinite(pos_element.rad_pos_err) else None,
                        P2=pos_element.at_pos_err if numpy.isfinite(pos_element.at_pos_err) else None,
                        P3=pos_element.ct_pos_err if numpy.isfinite(pos_element.ct_pos_err) else None,
                        V1=pos_element.rad_vel_err if numpy.isfinite(pos_element.rad_vel_err) else None,
                        V2=pos_element.at_vel_err if numpy.isfinite(pos_element.at_vel_err) else None,
                        V3=pos_element.ct_vel_err if numpy.isfinite(pos_element.ct_vel_err) else None),
                    RadarSensor=RadarSensorErrorType(RangeBias=range_bias)))

        def get_grid_and_rma() -> Tuple[GridType, RMAType]:
            data = led_element.data
            dop_bw = data.bw_az
            ss_zd_s = 1000.0/data.prf
            scp_row = image_data.SCPPixel.Row
            scp_col = image_data.SCPPixel.Col
            pal_scp_col = led_element.data.ctr_line
            if data.clock_angle < 0:
                # left looking, so the palsar azimuth coordinate is inverse of the SICD col
                ss_zd_s *= -1
            time_ca_poly = numpy.array([scp_col*ss_zd_s, ss_zd_s/data.line_spacing], dtype='float64')

            # construct the DopCentroidPoly
            dop_poly_az = Poly1DType(Coefs=data.at_dop)
            dop_poly_rng = Poly1DType(Coefs=data.xt_dop)
            dop_centroid = numpy.zeros((3, 3), dtype='float64')
            dop_centroid[0, 0] = dop_poly_rng(scp_row) + dop_poly_az(pal_scp_col) - \
                0.5*(dop_poly_rng.Coefs[0] + dop_poly_az.Coefs[0])
            col_scale = data.line_spacing if data.clock_angle > 0 else -data.line_spacing
            dop_poly_az_shifted = dop_poly_az.shift(pal_scp_col, alpha=col_scale, return_poly=False)
            dop_poly_rng_shifted = dop_poly_rng.shift(scp_row, alpha=data.pixel_spacing, return_poly=False)
            dop_centroid[1:, 0] = dop_poly_rng_shifted[1:]
            dop_centroid[0, 1:] = dop_poly_az_shifted[1:]

            # construct the DRateSFPoly
            r_ca_scp = start_signal.slant_rng + data.pixel_spacing*scp_row
            dop_rate_poly_rng = Poly1DType(Coefs=data.xt_dop_rate)
            dop_rate_poly_rng_scaled = dop_rate_poly_rng.shift(scp_row, alpha=data.pixel_spacing)
            # NB: changes in velocity or doppler rate over the azimuth dimension
            # are small, and will be neglected
            vel_ca = position.ARPPoly.derivative_eval(time_ca_poly[0], der_order=1)
            vm_ca_sq = numpy.sum(vel_ca*vel_ca)
            r_ca = numpy.array([r_ca_scp, 1], dtype='float64')
            drate_sf_poly = -polynomial.polymul(
                dop_rate_poly_rng_scaled, r_ca)*speed_of_light/(2*center_frequency*vm_ca_sq)

            # construct the TimeCOAPoly
            poly_order = 2
            samples = 20
            coords_az_m = (numpy.linspace(0, image_data.NumCols-1, samples) - image_data.SCPPixel.Col)*data.line_spacing
            # BUGFIX: the range coordinate samples must span the rows
            # (this previously used NumCols, which is wrong for non-square images)
            coords_rng_m = (numpy.linspace(0, image_data.NumRows-1, samples) - scp_row)*data.pixel_spacing
            coords_az_2d, coords_rng_2d = numpy.meshgrid(coords_az_m, coords_rng_m)
            timeca_sampled = polynomial.polyval(coords_az_2d, time_ca_poly)
            dop_centroid_sampled = polynomial.polyval2d(coords_rng_2d, coords_az_2d, dop_centroid)
            dop_rate_sampled = polynomial.polyval(coords_rng_2d, dop_rate_poly_rng_scaled)
            time_coa_sampled = timeca_sampled + (dop_centroid_sampled/dop_rate_sampled)
            time_coa_poly, residuals, rank, sing_values = two_dim_poly_fit(
                coords_rng_2d, coords_az_2d, time_coa_sampled,
                x_order=poly_order, y_order=poly_order, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
            logger.info(
                'The TimeCOAPoly fit details:\n\t'
                'root mean square residuals = {}\n\t'
                'rank = {}\n\t'
                'singular values = {}'.format(residuals, rank, sing_values))

            row_wgt = WgtTypeType(WindowName='UNIFORM') if data.wgt_az.strip() == '1' else None
            col_wgt = WgtTypeType(WindowName='UNIFORM') if data.wgt_rng.strip() == '1' else None
            row = DirParamType(
                SS=data.pixel_spacing,
                Sgn=-1,
                KCtr=2.0/data.wavelength,
                ImpRespBW=2e3*data.bw_rng/speed_of_light,
                ImpRespWid=led_element.data_quality.sr_res if numpy.isfinite(led_element.data_quality.sr_res) else None,
                DeltaKCOAPoly=Poly2DType(Coefs=[[0, ], ]),
                WgtType=row_wgt)
            col = DirParamType(
                SS=data.line_spacing,
                Sgn=-1,
                KCtr=0,
                ImpRespBW=dop_bw*ss_zd_s/data.line_spacing,
                ImpRespWid=led_element.data_quality.az_res if numpy.isfinite(led_element.data_quality.az_res) else None,
                DeltaKCOAPoly=Poly2DType(Coefs=dop_centroid*ss_zd_s/data.line_spacing),
                WgtType=col_wgt)
            t_grid = GridType(
                Type='RGZERO',
                ImagePlane='SLANT',
                Row=row,
                Col=col,
                TimeCOAPoly=Poly2DType(Coefs=time_coa_poly))

            inca = INCAType(
                FreqZero=center_frequency,
                TimeCAPoly=time_ca_poly,
                R_CA_SCP=r_ca_scp,
                DRateSFPoly=Poly2DType(Coefs=numpy.reshape(drate_sf_poly, (-1, 1))),
                DopCentroidPoly=Poly2DType(Coefs=dop_centroid),
                DopCentroidCOA=True)
            t_rma = RMAType(
                RMAlgoType='OMEGA_K',
                INCA=inca)
            return t_grid, t_rma

        def adjust_scp() -> None:
            # refine the SCP by projecting the SCP pixel to the ground
            scp_pixel = sicd.ImageData.SCPPixel.get_array()
            scp_ecf = sicd.project_image_to_ground(scp_pixel)
            sicd.update_scp(scp_ecf, coord_system='ECF')

        tx_pol = tx_pols[index]
        tx_rcv_pol = tx_rcv_pols[index]
        img_element = self._img_elements[index]
        if img_element.signal_elements is None:
            raise ValueError(
                'Cannot extract required data from IMG file with no signal elements.')
        start_signal = img_element.signal_elements[0]
        end_signal = img_element.signal_elements[1]
        led_element = self._led_element
        center_frequency = speed_of_light/led_element.data.wavelength

        collect_info = get_collection_info()
        image_creation = get_image_creation()
        image_data = get_image_data()
        geo_data = get_geo_data()
        timeline = get_timeline()
        position = get_position()
        radar_collection = get_radar_collection()
        image_formation = get_image_formation()
        radiometric = get_radiometric()
        error_stats = get_error_stats()
        grid, rma = get_grid_and_rma()

        sicd = SICDType(
            CollectionInfo=collect_info,
            ImageCreation=image_creation,
            ImageData=image_data,
            GeoData=geo_data,
            Timeline=timeline,
            Position=position,
            RadarCollection=radar_collection,
            ImageFormation=image_formation,
            Radiometric=radiometric,
            ErrorStatistics=error_stats,
            Grid=grid,
            RMA=rma)
        adjust_scp()
        sicd.derive()
        return sicd

    def get_sicd_collection(self) -> List[SICDType]:
        """
        Gets the sicd structure collection.

        Returns
        -------
        List[SICDType]
        """

        # get the polarizations
        tx_pols = []
        tx_rcv_pols = []
        for entry in self._img_elements:
            txp, rcvp = entry.get_polarizations()
            if txp is None:
                raise ValueError('Got no polarization from IMG file {}'.format(entry.file_name))
            tx_pols.append(txp)
            tx_rcv_pols.append('{}:{}'.format(txp, rcvp))
        return [self._get_sicd(index, tx_pols, tx_rcv_pols) for index, _ in enumerate(self._img_elements)]
class PALSARReader(SICDTypeReader):
    """
    A PALSAR ALOS2 SLC file package reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = (
        '_palsar_details', )

    def __init__(self, palsar_details):
        """
        Parameters
        ----------
        palsar_details : str|PALSARDetails
            Path name to file package or palsar details object.

        Raises
        ------
        TypeError
            If `palsar_details` is neither a str nor a PALSARDetails instance.
        """

        if isinstance(palsar_details, str):
            palsar_details = PALSARDetails(palsar_details)
        if not isinstance(palsar_details, PALSARDetails):
            raise TypeError(
                'The input argument for PALSARReader must be a '
                'filename or PALSARDetails object')
        self._palsar_details = palsar_details  # type: PALSARDetails
        sicds = self._palsar_details.get_sicd_collection()
        # NB: removed the unused `data_sizes` accumulation - it was dead code
        data_segments = []
        for sicd, img_details in zip(sicds, self._palsar_details.img_elements):
            data_segments.append(img_details.construct_data_segment(sicd.SCPCOA.SideOfTrack == 'L'))

        SICDTypeReader.__init__(self, data_segments, sicds, close_segments=True)
        self._check_sizes()

    @property
    def file_name(self) -> str:
        return self._palsar_details.file_name
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: str) -> Optional[PALSARReader]:
    """
    Tests whether a given file_name corresponds to a PALSAR ALOS2 file. Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    PALSARReader|None
        `PALSARReader` instance if PALSAR file, `None` otherwise
    """

    # file-like objects cannot be a PALSAR package directory
    if is_file_like(file_name):
        return None

    try:
        the_details = PALSARDetails(file_name)
        logger.info('File {} is determined to be a PALSAR ALOS2 file.'.format(file_name))
        return PALSARReader(the_details)
    except (ImportError, SarpyIOError):
        return None
| 73,012 | 42.38265 | 120 | py |
sarpy | sarpy-master/sarpy/io/complex/csk.py | """
Functionality for reading Cosmo Skymed data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = ("Thomas McCullough", "Jarred Barber", "Wade Schwartzkopf")
import logging
from collections import OrderedDict
import os
import re
from typing import Tuple, Dict, BinaryIO, Union, Optional
from datetime import datetime
import numpy
from numpy.polynomial import polynomial
from scipy.constants import speed_of_light
from sarpy.compliance import bytes_to_string
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import Poly1DType, Poly2DType, RowColType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
WaveformParametersType, ChanParametersType, TxStepType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.SCPCOA import SCPCOAType
from sarpy.io.complex.sicd_elements.Position import PositionType, XYZPolyType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, \
RcvChanProcType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.data_segment import HDF5DatasetSegment
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.utils import get_seconds, parse_timestring, is_file_like, is_hdf5, h5py
from sarpy.io.complex.utils import fit_time_coa_polynomial, fit_position_xvalidation
try:
from sarpy.io.complex import csk_addin
except ImportError:
csk_addin = None
logger = logging.getLogger(__name__)
_unhandled_id_text = 'Unhandled mission id `{}`'
##########
# helper functions
def _extract_attrs(h5_element, out=None):
if out is None:
out = OrderedDict()
for the_key in h5_element.attrs:
val = h5_element.attrs[the_key]
out[the_key] = bytes_to_string(val) if isinstance(val, bytes) else val
return out
###########
# parser and interpreter for hdf5 attributes
class CSKDetails(object):
"""
Parses and converts the Cosmo Skymed metadata
"""
__slots__ = ('_file_name', '_mission_id', '_product_type')
def __init__(self, file_name: str):
    """
    Parameters
    ----------
    file_name : str
        Path to the Cosmo Skymed hdf5 file.

    Raises
    ------
    ImportError
        If the optional h5py dependency is not installed.
    SarpyIOError
        If `file_name` is not a file, or lacks the required attributes.
    ValueError
        If the mission id or product type is not supported.
    """

    if h5py is None:
        raise ImportError("Can't read Cosmo Skymed files, because the h5py dependency is missing.")

    if not os.path.isfile(file_name):
        raise SarpyIOError('Path {} is not a file'.format(file_name))

    with h5py.File(file_name, 'r') as hf:
        def fetch(attribute):
            # required top-level attribute, decoded from bytes
            try:
                return hf.attrs[attribute].decode('utf-8')
            except KeyError:
                raise SarpyIOError(
                    'The hdf file does not have the top level attribute "{}"'.format(attribute))

        self._mission_id = fetch('Mission ID')
        self._product_type = fetch('Product Type')

    if self._mission_id not in ['CSK', 'CSG', 'KMPS']:
        raise ValueError('Expected hdf5 attribute `Mission ID` should be one of "CSK", "CSG", or "KMPS"). '
                         'Got Mission ID = {}.'.format(self._mission_id))
    if 'SCS' not in self._product_type:
        raise ValueError('Expected hdf to contain complex products '
                         '(attribute `Product Type` which contains "SCS"). '
                         'Got Product Type = {}'.format(self._product_type))

    self._file_name = file_name
@property
def file_name(self) -> str:
    """
    str: the path of the hdf5 file, as validated at construction
    """

    return self._file_name
@property
def mission_id(self) -> str:
    """
    str: the mission id - one of 'CSK', 'CSG', or 'KMPS' (validated at construction)
    """

    return self._mission_id
@property
def product_type(self) -> str:
    """
    str: the product type - always contains 'SCS' (validated at construction)
    """

    return self._product_type
def _get_hdf_dicts(self) -> Tuple[dict, dict, Dict[str, Tuple[int, ...]], Dict[str, numpy.dtype], Dict[str, str]]:
    """
    Read the hdf5 file structure.

    Returns
    -------
    h5_dict : dict
        The top-level attributes.
    band_dict : dict
        Per-band attributes, merged from the group, its 'B0001' beam info
        (if present), and its image dataset.
    shape_dict : Dict[str, Tuple[int, ...]]
        Per-band dataset shape (first two dimensions).
    dtype_dict : Dict[str, numpy.dtype]
        Per-band dataset dtype.
    pixeltype_dict : Dict[str, str]
        Per-band SICD pixel type string.
    """

    with h5py.File(self._file_name, 'r') as hf:
        h5_dict = _extract_attrs(hf)
        band_dict = OrderedDict()
        shape_dict = OrderedDict()
        dtype_dict = OrderedDict()
        pixeltype_dict = OrderedDict()

        for gp_name in sorted(hf.keys()):
            if self._mission_id == 'CSG' and gp_name == 'LRHM':
                continue  # this is extraneous
            gp = hf[gp_name]
            band_dict[gp_name] = OrderedDict()
            _extract_attrs(gp, out=band_dict[gp_name])
            if 'B0001' in gp:
                beam_info = gp['B0001']
                _extract_attrs(beam_info, out=band_dict[gp_name])
            # the complex image dataset name differs by generation
            if self._mission_id in ['CSK', 'KMPS']:
                the_dataset = gp['SBI']
            elif self._mission_id == 'CSG':
                the_dataset = gp['IMG']
            else:
                raise ValueError(_unhandled_id_text.format(self._mission_id))
            _extract_attrs(the_dataset, out=band_dict[gp_name])
            shape_dict[gp_name] = the_dataset.shape[:2]
            dtype_dict[gp_name] = the_dataset.dtype
            if the_dataset.dtype.name == 'float32':
                pixeltype_dict[gp_name] = 'RE32F_IM32F'
            elif the_dataset.dtype.name == 'int16':
                pixeltype_dict[gp_name] = 'RE16I_IM16I'
            else:
                raise ValueError(
                    'Got unexpected data type {}, name {}'.format(
                        the_dataset.dtype, the_dataset.dtype.name))
    return h5_dict, band_dict, shape_dict, dtype_dict, pixeltype_dict
@staticmethod
def _parse_pol(str_in: str) -> str:
return '{}:{}'.format(str_in[0], str_in[1])
def _get_polarization(self, h5_dict: dict, band_dict: dict, band_name: str) -> str:
if 'Polarisation' in band_dict[band_name]:
return band_dict[band_name]['Polarisation']
elif 'Polarization' in h5_dict:
return h5_dict['Polarization']
else:
raise ValueError(
'Failed finding polarization for file {}\n\t'
'mission id {} and band name {}'.format(self.file_name, self.mission_id, band_name))
def _get_base_sicd(self, h5_dict: dict, band_dict: dict) -> SICDType:
    """
    Build the band-independent portion of the SICD structure from the
    top-level and band attribute dictionaries.

    Parameters
    ----------
    h5_dict : dict
        The top-level hdf5 attributes.
    band_dict : dict
        The per-band attributes.

    Returns
    -------
    SICDType
    """

    def get_collection_info() -> CollectionInfoType:
        # NOTE: annotation previously claimed a (dict, CollectionInfoType)
        # tuple return; only the CollectionInfoType is returned
        acq_mode = h5_dict['Acquisition Mode'].upper()
        if self.mission_id == 'CSK':
            if acq_mode in ['HIMAGE', 'PINGPONG']:
                mode_type = 'STRIPMAP'
            elif acq_mode in ['WIDEREGION', 'HUGEREGION']:
                # scansar, processed as stripmap
                mode_type = 'STRIPMAP'
            elif acq_mode in ['ENHANCED SPOTLIGHT', 'SMART']:
                mode_type = 'DYNAMIC STRIPMAP'
            else:
                logger.warning('Got unexpected acquisition mode {}'.format(acq_mode))
                mode_type = 'DYNAMIC STRIPMAP'
        elif self.mission_id == 'KMPS':
            if acq_mode in ['STANDARD', 'ENHANCED STANDARD']:
                mode_type = 'STRIPMAP'
            elif acq_mode in ['WIDE SWATH', 'ENHANCED WIDE SWATH']:
                # scansar, processed as stripmap
                mode_type = 'STRIPMAP'
            elif acq_mode in ['HIGH RESOLUTION', 'ENHANCED HIGH RESOLUTION', 'ULTRA HIGH RESOLUTION']:
                # "spotlight"
                mode_type = 'DYNAMIC STRIPMAP'
            else:
                logger.warning('Got unexpected acquisition mode {}'.format(acq_mode))
                mode_type = 'DYNAMIC STRIPMAP'
        elif self.mission_id == 'CSG':
            if acq_mode.startswith('SPOTLIGHT'):
                mode_type = 'DYNAMIC STRIPMAP'
            elif acq_mode in ['STRIPMAP', 'QUADPOL']:
                mode_type = "STRIPMAP"
            else:
                logger.warning(
                    'Got unhandled acquisition mode {},\n\t'
                    'setting to DYNAMIC STRIPMAP'.format(acq_mode))
                mode_type = 'DYNAMIC STRIPMAP'
        else:
            raise ValueError(_unhandled_id_text.format(self._mission_id))

        # core name is assembled from the collect start date/time and satellite id
        start_time_dt = collect_start.astype('datetime64[s]').astype(datetime)
        date_str = start_time_dt.strftime('%d%b%y').upper()
        time_str = start_time_dt.strftime('%H%M%S') + 'Z'
        core_name = '{}_{}_{}'.format(date_str, h5_dict['Satellite ID'], time_str)
        collect_info = CollectionInfoType(
            Classification='UNCLASSIFIED',
            CollectorName=h5_dict['Satellite ID'],
            CoreName=core_name,
            CollectType='MONOSTATIC',
            RadarMode=RadarModeType(ModeID=h5_dict['Multi-Beam ID'],
                                    ModeType=mode_type))
        return collect_info

    def get_image_creation() -> ImageCreationType:
        from sarpy.__about__ import __version__
        return ImageCreationType(
            DateTime=parse_timestring(h5_dict['Product Generation UTC'], precision='ns'),
            Site=h5_dict['Processing Centre'],
            Application='L0: `{}`, L1: `{}`'.format(
                h5_dict.get('L0 Software Version', 'NONE'),
                h5_dict.get('L1A Software Version', 'NONE')),
            Profile='sarpy {}'.format(__version__))

    def get_grid() -> GridType:

        def get_wgt_type(weight_name, coefficient, direction):
            # normalize the weighting metadata into a SICD WgtType
            if weight_name == 'GENERAL_COSINE':
                # probably only for kompsat?
                weight_name = 'HAMMING'
                coefficient = 1-coefficient
            if coefficient is None:
                params = None
            else:
                params = {'COEFFICIENT': '{0:0.17E}'.format(coefficient)}
            out = WgtTypeType(WindowName=weight_name, Parameters=params)
            if weight_name != 'HAMMING':
                logger.warning(
                    'Got unexpected weight scheme {} for {}.\n\t'
                    'The weighting will not be properly populated.'.format(weight_name, direction))
            return out

        if re.sub(' ', '', h5_dict['Projection ID']).upper() == 'SLANTRANGE/AZIMUTH':
            image_plane = 'SLANT'
            gr_type = 'RGZERO'
        else:
            image_plane = 'GROUND'
            gr_type = 'PLANE'
        # Row
        row_window_name = h5_dict['Range Focusing Weighting Function'].rstrip().upper()
        row_coefficient = h5_dict.get('Range Focusing Weighting Coefficient', None)
        row_weight = get_wgt_type(row_window_name, row_coefficient, 'Row')
        row = DirParamType(Sgn=-1,
                           KCtr=2*center_frequency/speed_of_light,
                           DeltaKCOAPoly=Poly2DType(Coefs=[[0, ], ]),
                           WgtType=row_weight)
        # Col
        col_window_name = h5_dict['Azimuth Focusing Weighting Function'].rstrip().upper()
        col_coefficient = h5_dict.get('Azimuth Focusing Weighting Coefficient', None)
        col_weight = get_wgt_type(col_window_name, col_coefficient, 'Col')
        col = DirParamType(Sgn=-1, KCtr=0, WgtType=col_weight)
        return GridType(ImagePlane=image_plane, Type=gr_type, Row=row, Col=col)

    def get_timeline() -> TimelineType:
        # NB: IPPEnd must be set, but will be replaced
        return TimelineType(CollectStart=collect_start,
                            CollectDuration=duration,
                            IPP=[IPPSetType(index=0, TStart=0, TEnd=0, IPPStart=0, IPPEnd=0), ])

    def get_position() -> PositionType:
        T = h5_dict['State Vectors Times']  # in seconds relative to ref time
        T += ref_time_offset
        Pos = h5_dict['ECEF Satellite Position']
        Vel = h5_dict['ECEF Satellite Velocity']
        P_x, P_y, P_z = fit_position_xvalidation(T, Pos, Vel, max_degree=8)
        return PositionType(ARPPoly=XYZPolyType(X=P_x, Y=P_y, Z=P_z))

    def get_radar_collection() -> RadarCollectionType:
        tx_pols = []
        chan_params = []
        if self.mission_id == 'CSG' and len(band_dict) == 1 and \
                h5_dict['Acquisition Mode'].upper() == 'QUADPOL':
            # it seems like 2nd generation files only contain one polarization
            pols = ['HH', 'HV', 'VH', 'VV']
            tx_pols.extend([pol[0] for pol in pols])
            chan_params.extend([
                ChanParametersType(TxRcvPolarization=self._parse_pol(pol), index=i+1)
                for i, pol in enumerate(pols)])
        else:
            for i, bdname in enumerate(band_dict):
                pol = self._get_polarization(h5_dict, band_dict, bdname)
                tx_pols.append(pol[0])
                chan_params.append(ChanParametersType(TxRcvPolarization=self._parse_pol(pol), index=i+1))
        if len(tx_pols) == 1:
            return RadarCollectionType(RcvChannels=chan_params, TxPolarization=tx_pols[0])
        else:
            return RadarCollectionType(RcvChannels=chan_params,
                                       TxPolarization='SEQUENCE',
                                       TxSequence=[TxStepType(TxPolarization=pol,
                                                              index=i+1) for i, pol in enumerate(tx_pols)])

    def get_image_formation() -> ImageFormationType:
        return ImageFormationType(ImageFormAlgo='RMA',
                                  TStartProc=0,
                                  TEndProc=duration,
                                  STBeamComp='NO',
                                  ImageBeamComp='SV',
                                  AzAutofocus='NO',
                                  RgAutofocus='NO',
                                  RcvChanProc=RcvChanProcType(NumChanProc=1,
                                                              PRFScaleFactor=1))

    def get_rma() -> RMAType:
        inca = INCAType(FreqZero=center_frequency)
        return RMAType(RMAlgoType='OMEGA_K',
                       INCA=inca)

    def get_scpcoa() -> SCPCOAType:
        return SCPCOAType(SideOfTrack=h5_dict['Look Side'][0:1].upper())

    # some common use parameters
    center_frequency = h5_dict['Radar Frequency']
    # relative times in csk are wrt some reference time - for sicd they should be relative to start time
    collect_start = parse_timestring(h5_dict['Scene Sensing Start UTC'], precision='ns')
    collect_end = parse_timestring(h5_dict['Scene Sensing Stop UTC'], precision='ns')
    duration = get_seconds(collect_end, collect_start, precision='ns')
    ref_time = parse_timestring(h5_dict['Reference UTC'], precision='ns')
    ref_time_offset = get_seconds(ref_time, collect_start, precision='ns')

    # assemble our pieces
    collection_info = get_collection_info()
    image_creation = get_image_creation()
    grid = get_grid()
    timeline = get_timeline()
    position = get_position()
    radar_collection = get_radar_collection()
    image_formation = get_image_formation()
    rma = get_rma()
    scpcoa = get_scpcoa()
    sicd = SICDType(
        CollectionInfo=collection_info,
        ImageCreation=image_creation,
        Grid=grid,
        Timeline=timeline,
        Position=position,
        RadarCollection=radar_collection,
        ImageFormation=image_formation,
        RMA=rma,
        SCPCOA=scpcoa)
    return sicd
def _get_dop_poly_details(self,
                          h5_dict: dict,
                          band_dict: dict,
                          band_name: str) -> (float, float, numpy.ndarray, numpy.ndarray, numpy.ndarray):
    """
    Extract the Doppler centroid and Doppler rate polynomial details for the
    given band, handling the mission-generation differences.

    Parameters
    ----------
    h5_dict : dict
        The root-level hdf5 attributes dictionary.
    band_dict : dict
        The per-band hdf5 attributes dictionary.
    band_name : str
        The name of the band under consideration.

    Returns
    -------
    (float, float, numpy.ndarray, numpy.ndarray, numpy.ndarray)
        The azimuth polynomial reference time, range polynomial reference time,
        Doppler centroid versus azimuth time polynomial coefficients, Doppler
        centroid versus range time polynomial coefficients, and Doppler rate
        versus range time polynomial coefficients.

    Raises
    ------
    ValueError
        If a required polynomial is missing, or the mission id is unhandled.
    """

    def strip_poly(arr: numpy.ndarray) -> numpy.ndarray:
        # strip worthless (all zero) highest order terms
        # find last non-zero index
        last_ind = arr.size
        for i in range(arr.size-1, -1, -1):
            if arr[i] != 0:
                break
            last_ind = i
        if last_ind == 0:
            return numpy.array([0, ], dtype=arr.dtype)
        return arr[:last_ind]

    # the doppler rate polynomial may live at the root or the band level
    dop_rate_poly_rg = h5_dict.get('Doppler Rate vs Range Time Polynomial', None)
    if dop_rate_poly_rg is None:
        dop_rate_poly_rg = band_dict[band_name].get('Doppler Rate vs Range Time Polynomial', None)
    if dop_rate_poly_rg is None:
        raise ValueError('No Doppler Rate Range Time polynomial found')

    dop_rate_poly_rg = strip_poly(dop_rate_poly_rg)

    if self._mission_id in ['CSK', 'KMPS']:
        # first generation - the polynomials are provided directly
        az_ref_time = h5_dict['Azimuth Polynomial Reference Time']  # seconds
        rg_ref_time = h5_dict['Range Polynomial Reference Time']
        dop_poly_az = strip_poly(h5_dict['Centroid vs Azimuth Time Polynomial'])
        dop_poly_rg = strip_poly(h5_dict['Centroid vs Range Time Polynomial'])
    elif self._mission_id == 'CSG':
        # second generation - the azimuth centroid polynomial is given in raw
        # (non-zero-doppler) time, so refit it in zero doppler time
        az_ref_time_nozd = band_dict[band_name]['Azimuth Polynomial Reference Time']
        first_time = band_dict[band_name]['Azimuth First Time']
        last_time = band_dict[band_name]['Azimuth Last Time']
        az_fit_times = numpy.linspace(first_time, last_time, num=11)
        geom_dop_cent_poly = band_dict[band_name]['Doppler Centroid vs Azimuth Time Polynomial - RAW']
        dop_rate_poly = h5_dict.get('Doppler Rate vs Azimuth Time Polynomial', None)
        if dop_rate_poly is None:
            dop_rate_poly = band_dict[band_name].get('Doppler Rate vs Azimuth Time Polynomial', None)
        if dop_rate_poly is None:
            raise ValueError('No Doppler Rate Range Time polynomial found')
        centroid_values = polynomial.polyval(az_fit_times - az_ref_time_nozd, geom_dop_cent_poly)
        rate_values = polynomial.polyval(az_fit_times - az_ref_time_nozd, dop_rate_poly)
        # convert raw times to zero doppler times, and fit in those coordinates
        zd_times = az_fit_times - centroid_values / rate_values
        az_ref_time = band_dict[band_name]['Azimuth Polynomial Reference Time - ZD']
        dop_poly_az = strip_poly(polynomial.polyfit(zd_times - az_ref_time, centroid_values, 4))

        rg_ref_time = band_dict[band_name]['Range Polynomial Reference Time']
        dop_poly_rg = strip_poly(band_dict[band_name]['Doppler Centroid vs Range Time Polynomial'])
    else:
        raise ValueError(_unhandled_id_text.format(self._mission_id))

    return az_ref_time, rg_ref_time, dop_poly_az, dop_poly_rg, dop_rate_poly_rg
def _get_band_specific_sicds(self,
                             base_sicd: SICDType,
                             h5_dict: dict,
                             band_dict: dict,
                             shape_dict: dict,
                             pixeltype_dict: dict) -> Dict[str, SICDType]:
    """
    Populate the band-dependent portions of the base sicd structure, yielding
    one fully populated sicd per band.

    Parameters
    ----------
    base_sicd : SICDType
        The common (band independent) sicd structure, copied per band.
    h5_dict : dict
        The root-level hdf5 attributes dictionary.
    band_dict : dict
        The per-band hdf5 attributes dictionary.
    shape_dict : dict
        The per-band data shape dictionary.
    pixeltype_dict : dict
        The per-band sicd PixelType dictionary.

    Returns
    -------
    Dict[str, SICDType]
        Mapping of band name to the populated sicd structure.
    """

    def update_scp_prelim(sicd: SICDType, band_name: str) -> None:
        # set a preliminary SCP from the metadata scene center - this gets
        # refined via projection in update_geodata()
        if self._mission_id in ['CSK', 'KMPS']:
            LLH = band_dict[band_name]['Centre Geodetic Coordinates']
        elif self._mission_id == 'CSG':
            LLH = h5_dict['Scene Centre Geodetic Coordinates']
        else:
            raise ValueError(_unhandled_id_text.format(self._mission_id))
        sicd.GeoData = GeoDataType(SCP=SCPType(LLH=LLH))  # EarthModel & ECF will be populated

    def update_image_data(sicd: SICDType, band_name: str) -> (float, float, float, float, int):
        # populate ImageData, and return the raw timing parameters needed below
        cols, rows = shape_dict[band_name]
        # zero doppler time of first/last columns
        t_az_first_time = band_dict[band_name]['Zero Doppler Azimuth First Time']
        t_az_last_time = band_dict[band_name]['Zero Doppler Azimuth Last Time']
        t_ss_az_s = band_dict[band_name]['Line Time Interval']
        t_use_sign2 = 1
        if h5_dict['Look Side'].upper() == 'LEFT':
            # left-looking collects sweep azimuth time in the opposite sense
            t_use_sign2 = -1
            t_az_first_time, t_az_last_time = t_az_last_time, t_az_first_time
        # zero doppler time of first row
        t_rg_first_time = band_dict[band_name]['Zero Doppler Range First Time']
        # row spacing in range time (seconds)
        t_ss_rg_s = band_dict[band_name]['Column Time Interval']
        sicd.ImageData = ImageDataType(NumRows=rows,
                                       NumCols=cols,
                                       FirstRow=0,
                                       FirstCol=0,
                                       FullImage=(rows, cols),
                                       PixelType=pixeltype_dict[band_name],
                                       SCPPixel=RowColType(Row=int(rows/2),
                                                           Col=int(cols/2)))
        return t_rg_first_time, t_ss_rg_s, t_az_first_time, t_ss_az_s, t_use_sign2

    def check_switch_state() -> (int, Poly1DType):
        # normalize the sign convention of the doppler rate polynomial
        use_sign = 1 if t_dop_rate_poly_rg[0] < 0 else -1
        return use_sign, Poly1DType(Coefs=use_sign*t_dop_rate_poly_rg)

    def update_timeline(sicd: SICDType, band_name: str) -> None:
        # fill in the IPP details from the band PRF
        prf = band_dict[band_name]['PRF']
        duration = sicd.Timeline.CollectDuration
        ipp_el = sicd.Timeline.IPP[0]
        ipp_el.IPPEnd = round(duration*prf) - 1
        ipp_el.TEnd = duration
        ipp_el.IPPPoly = Poly1DType(Coefs=(0, prf))

    def update_radar_collection(sicd: SICDType, band_name: str) -> None:
        # populate the waveform and transmit frequency details
        ind = None
        for the_chan_index, chan in enumerate(sicd.RadarCollection.RcvChannels):
            if chan.TxRcvPolarization == polarization:
                ind = the_chan_index
                break
        if ind is None:
            raise ValueError('Failed to find receive channel for polarization {}'.format(polarization))
        chirp_length = band_dict[band_name]['Range Chirp Length']
        chirp_rate = abs(band_dict[band_name]['Range Chirp Rate'])
        sample_rate = band_dict[band_name]['Sampling Rate']
        ref_dechirp_time = band_dict[band_name].get('Reference Dechirping Time', 0)  # TODO: is this right?
        win_length = band_dict[band_name]['Echo Sampling Window Length']
        rcv_fm_rate = 0 if numpy.isnan(ref_dechirp_time) else chirp_rate
        band_width = chirp_length*chirp_rate
        fr_min = center_frequency - 0.5*band_width
        fr_max = center_frequency + 0.5*band_width
        sicd.RadarCollection.TxFrequency = (fr_min, fr_max)
        sicd.RadarCollection.Waveform = [
            WaveformParametersType(index=0,
                                   TxPulseLength=chirp_length,
                                   TxRFBandwidth=band_width,
                                   TxFreqStart=fr_min,
                                   TxFMRate=chirp_rate,
                                   ADCSampleRate=sample_rate,
                                   RcvFMRate=rcv_fm_rate,
                                   RcvWindowLength=win_length/sample_rate), ]
        sicd.ImageFormation.RcvChanProc.ChanIndices = [ind+1, ]
        sicd.ImageFormation.TxFrequencyProc = (fr_min, fr_max)

    def update_rma_and_grid(sicd: SICDType, band_name: str) -> None:
        # populate the RMA/INCA and Grid parameters from the doppler polynomials
        rg_scp_time = rg_first_time + (ss_rg_s*sicd.ImageData.SCPPixel.Row)
        az_scp_time = az_first_time + (use_sign2*ss_az_s*sicd.ImageData.SCPPixel.Col)
        r_ca_scp = rg_scp_time*speed_of_light/2
        sicd.RMA.INCA.R_CA_SCP = r_ca_scp
        # compute DRateSFPoly
        scp_ca_time = az_scp_time + ref_time_offset
        vel_poly = sicd.Position.ARPPoly.derivative(der_order=1, return_poly=True)
        vel_ca_vec = vel_poly(scp_ca_time)
        vel_ca_sq = numpy.sum(vel_ca_vec*vel_ca_vec)
        vel_ca = numpy.sqrt(vel_ca_sq)
        r_ca = numpy.array([r_ca_scp, 1.], dtype=numpy.float64)
        # recenter the doppler rate polynomial at the SCP, in pixel units
        dop_rate_poly_rg_shifted = dop_rate_poly_rg.shift(
            rg_ref_time-rg_scp_time, alpha=ss_rg_s/row_ss, return_poly=False)
        drate_sf_poly = -(polynomial.polymul(dop_rate_poly_rg_shifted, r_ca) *
                          speed_of_light/(2*center_frequency*vel_ca_sq))
        # update grid.row
        sicd.Grid.Row.SS = row_ss
        sicd.Grid.Row.ImpRespBW = row_bw
        sicd.Grid.Row.DeltaK1 = -0.5 * row_bw
        sicd.Grid.Row.DeltaK2 = 0.5 * row_bw
        # update grid.col
        col_ss = abs(vel_ca*ss_az_s*drate_sf_poly[0])
        sicd.Grid.Col.SS = col_ss
        # the azimuth focusing bandwidth attribute name differs by generation
        if self.mission_id == 'CSK':
            col_bw = min(band_dict[band_name]['Azimuth Focusing Transition Bandwidth']*ss_az_s, 1) / col_ss
        elif self.mission_id in ['CSG', 'KMPS']:
            col_bw = min(band_dict[band_name]['Azimuth Focusing Bandwidth']*ss_az_s, 1) / col_ss
        else:
            raise ValueError('Got unhandled mission_id {}'.format(self.mission_id))
        sicd.Grid.Col.ImpRespBW = col_bw
        # update inca
        sicd.RMA.INCA.DRateSFPoly = Poly2DType(Coefs=numpy.reshape(drate_sf_poly, (-1, 1)))
        sicd.RMA.INCA.TimeCAPoly = Poly1DType(Coefs=[scp_ca_time, use_sign2*ss_az_s/col_ss])
        # compute DopCentroidPoly & DeltaKCOAPoly
        dop_centroid_poly = numpy.zeros((dop_poly_rg.order1+1, dop_poly_az.order1+1), dtype=numpy.float64)
        dop_centroid_poly[0, 0] = dop_poly_rg(rg_scp_time-rg_ref_time) + \
            dop_poly_az(az_scp_time-az_ref_time) - \
            0.5*(dop_poly_rg[0] + dop_poly_az[0])
        dop_poly_rg_shifted = dop_poly_rg.shift(rg_ref_time-rg_scp_time, alpha=ss_rg_s/row_ss)
        dop_poly_az_shifted = dop_poly_az.shift(az_ref_time-az_scp_time, alpha=ss_az_s/col_ss)
        dop_centroid_poly[1:, 0] = dop_poly_rg_shifted[1:]
        dop_centroid_poly[0, 1:] = dop_poly_az_shifted[1:]
        sicd.RMA.INCA.DopCentroidPoly = Poly2DType(Coefs=dop_centroid_poly)
        sicd.RMA.INCA.DopCentroidCOA = True
        sicd.Grid.Col.DeltaKCOAPoly = Poly2DType(Coefs=use_sign*dop_centroid_poly*ss_az_s/col_ss)
        # fit TimeCOAPoly
        sicd.Grid.TimeCOAPoly = fit_time_coa_polynomial(
            sicd.RMA.INCA, sicd.ImageData, sicd.Grid, dop_rate_poly_rg_shifted, poly_order=2)
        if csk_addin is not None:
            # apply any optional plugin adjustments
            csk_addin.check_sicd(sicd, self.mission_id, h5_dict)

    def update_radiometric(sicd: SICDType, band_name: str) -> None:
        # populate the radiometric scale factor, where the metadata permits
        if self.mission_id in ['KMPS', 'CSG']:
            # TODO: skipping for now - strange results for flag == 77. Awaiting guidance - see Wade.
            return
        if h5_dict['Range Spreading Loss Compensation Geometry'] != 'NONE':
            slant_range = h5_dict['Reference Slant Range']
            exp = h5_dict['Reference Slant Range Exponent']
            sf = slant_range**(2*exp)
            rsf = h5_dict['Rescaling Factor']
            sf /= rsf * rsf
            if h5_dict.get('Calibration Constant Compensation Flag', None) == 0:
                cal = band_dict[band_name]['Calibration Constant']
                sf /= cal
            sicd.Radiometric = RadiometricType(BetaZeroSFPoly=Poly2DType(Coefs=[[sf, ], ]))

    def update_geodata(sicd: SICDType) -> None:
        # refine the SCP location by projecting the SCP pixel to the ground,
        # then recompute the closest approach range accordingly
        scp_pixel = [sicd.ImageData.SCPPixel.Row, sicd.ImageData.SCPPixel.Col]
        ecf = sicd.project_image_to_ground(scp_pixel, projection_type='HAE')
        sicd.update_scp(ecf, coord_system='ECF')

        SCP = sicd.GeoData.SCP.ECF.get_array(dtype='float64')
        scp_time = sicd.RMA.INCA.TimeCAPoly[0]
        ca_pos = sicd.Position.ARPPoly(scp_time)
        RG = SCP - ca_pos
        sicd.RMA.INCA.R_CA_SCP = numpy.linalg.norm(RG)

    out = {}
    center_frequency = h5_dict['Radar Frequency']
    # relative times in csk are wrt some reference time - for sicd they should be relative to start time
    collect_start = parse_timestring(h5_dict['Scene Sensing Start UTC'], precision='ns')
    ref_time = parse_timestring(h5_dict['Reference UTC'], precision='ns')
    ref_time_offset = get_seconds(ref_time, collect_start, precision='ns')

    for bd_name in band_dict:
        polarization = self._parse_pol(self._get_polarization(h5_dict, band_dict, bd_name))
        az_ref_time, rg_ref_time, t_dop_poly_az, t_dop_poly_rg, t_dop_rate_poly_rg = \
            self._get_dop_poly_details(h5_dict, band_dict, bd_name)
        dop_poly_az = Poly1DType(Coefs=t_dop_poly_az)
        dop_poly_rg = Poly1DType(Coefs=t_dop_poly_rg)

        t_sicd = base_sicd.copy()
        t_sicd.ImageFormation.TxRcvPolarizationProc = polarization

        row_bw = band_dict[bd_name]['Range Focusing Bandwidth']*2/speed_of_light
        row_ss = band_dict[bd_name]['Column Spacing']
        # NB: ordering matters below - later updates consume values produced
        # by the earlier ones via closure
        rg_first_time, ss_rg_s, az_first_time, ss_az_s, use_sign2 = update_image_data(t_sicd, bd_name)
        use_sign, dop_rate_poly_rg = check_switch_state()
        update_timeline(t_sicd, bd_name)
        update_radar_collection(t_sicd, bd_name)
        update_rma_and_grid(t_sicd, bd_name)
        update_radiometric(t_sicd, bd_name)
        update_scp_prelim(t_sicd, bd_name)  # set preliminary value for SCP (required for projection)
        update_geodata(t_sicd)
        t_sicd.derive()
        # t_sicd.populate_rniirs(override=False)
        out[bd_name] = t_sicd
    return out
@staticmethod
def _get_symmetry(h5_dict: dict) -> (Optional[Tuple[int, ...]], Tuple[int, ...]):
    """
    Determine the axis reversal and transpose operations required to bring
    the raw hdf5 data into the expected sicd orientation.

    Parameters
    ----------
    h5_dict : dict
        The root-level hdf5 attributes dictionary.

    Returns
    -------
    (Tuple[int, ...], Tuple[int, ...])
        The axes to reverse, and the transpose specification.
    """

    flipped_axes = []
    # axis 0 must be reversed when line ordering and look side disagree
    if (h5_dict['Lines Order'].upper() == 'EARLY-LATE') != (h5_dict['Look Side'].upper() == 'RIGHT'):
        flipped_axes.append(0)
    # axis 1 must be reversed when columns are not ordered near-to-far
    if h5_dict['Columns Order'].upper() != 'NEAR-FAR':
        flipped_axes.append(1)
    return tuple(flipped_axes), (1, 0, 2)
def get_sicd_collection(self) -> (
        Dict[str, SICDType], Dict[str, Tuple[int, ...]], Optional[Tuple[int, ...]], Tuple[int, ...]):
    """
    Get the sicd collection for the bands.

    Returns
    -------
    sicd_dict : Dict[str, SICDType]
        Of the form {band_name: sicd}
    shape_dict : Dict[str, Tuple[int, ...]]
        Of the form {band_name: shape}
    dtype_dict : Dict[str, numpy.dtype]
        Of the form {band_name: data type string}
    reverse_axes : Optional[Tuple[int, ...]]
    transpose_axes : Tuple[int, ...]
    """

    h5_dict, band_dict, shape_dict, dtype_dict, pixeltype_dict = self._get_hdf_dicts()
    base_sicd = self._get_base_sicd(h5_dict, band_dict)
    sicd_dict = self._get_band_specific_sicds(
        base_sicd, h5_dict, band_dict, shape_dict, pixeltype_dict)
    reverse_axes, transpose_axes = self._get_symmetry(h5_dict)
    # noinspection PyTypeChecker
    return sicd_dict, shape_dict, dtype_dict, reverse_axes, transpose_axes
################
# The CSK reader
class CSKReader(SICDTypeReader):
    """
    A Cosmo SkyMed 1st or 2nd generation SLC reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_csk_details', )

    def __init__(self, csk_details):
        """
        Parameters
        ----------
        csk_details : str|CSKDetails
            file name or CSKDetails object

        Raises
        ------
        TypeError
            If `csk_details` is neither a file name nor a CSKDetails object.
        """

        if isinstance(csk_details, str):
            csk_details = CSKDetails(csk_details)
        if not isinstance(csk_details, CSKDetails):
            raise TypeError('The input argument for a CSKReader must be a '
                            'filename or CSKDetails object')
        self._csk_details = csk_details
        sicd_data, shape_dict, dtype_dict, reverse_axes, transpose_axes = csk_details.get_sicd_collection()
        data_segments = []
        sicds = []
        for band_name in sicd_data:
            # the complex image dataset location depends on the mission generation
            if self._csk_details.mission_id in ['CSK', 'KMPS']:
                the_band = '{}/SBI'.format(band_name)
            elif self._csk_details.mission_id == 'CSG':
                the_band = '{}/IMG'.format(band_name)
            else:
                raise ValueError(_unhandled_id_text.format(self._csk_details.mission_id))
            sicds.append(sicd_data[band_name])
            basic_shape = shape_dict[band_name]
            # NB: the formatted shape is transposed relative to the raw hdf5 shape
            data_segments.append(
                HDF5DatasetSegment(
                    csk_details.file_name, the_band,
                    formatted_dtype='complex64', formatted_shape=(basic_shape[1], basic_shape[0]),
                    reverse_axes=reverse_axes, transpose_axes=transpose_axes,
                    format_function=ComplexFormatFunction(raw_dtype=dtype_dict[band_name], order='IQ', band_dimension=2),
                    close_file=True))

        SICDTypeReader.__init__(self, data_segments, sicds, close_segments=True)
        self._check_sizes()

    @property
    def csk_details(self) -> CSKDetails:
        """
        CSKDetails: The details object.
        """

        return self._csk_details

    @property
    def file_name(self) -> str:
        """
        str: The hdf5 file name.
        """

        return self.csk_details.file_name
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: Union[str, BinaryIO]) -> Union[None, CSKReader]:
    """
    Tests whether a given file_name corresponds to a Cosmo Skymed file. Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str|BinaryIO
        the file_name to check

    Returns
    -------
    CSKReader|None
        `CSKReader` instance if Cosmo Skymed file, `None` otherwise
    """

    # file-like objects are not supported here, and an hdf5 container
    # (with h5py available) is required
    if is_file_like(file_name):
        return None
    if not is_hdf5(file_name):
        return None
    if h5py is None:
        return None

    try:
        the_details = CSKDetails(file_name)
        logger.info('File {} is determined to be a Cosmo Skymed file.'.format(file_name))
        return CSKReader(the_details)
    except SarpyIOError:
        return None
| 35,415 | 45.174707 | 159 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd.py | """
Module for reading and writing SICD files
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import re
import logging
from datetime import datetime
from typing import BinaryIO, Union, Optional, Dict, Tuple, Sequence
import numpy
from sarpy.__about__ import __title__, __version__
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.format_function import FormatFunction, ComplexFormatFunction
from sarpy.io.general.nitf import NITFDetails, NITFReader, NITFWriter, \
interpolate_corner_points_string, default_image_segmentation, \
ImageSubheaderManager, TextSubheaderManager, DESSubheaderManager, \
RESSubheaderManager, NITFWritingDetails
from sarpy.io.general.nitf_elements.nitf_head import NITFHeader
from sarpy.io.general.nitf_elements.des import DataExtensionHeader, XMLDESSubheader
from sarpy.io.general.nitf_elements.security import NITFSecurityTags
from sarpy.io.general.nitf_elements.image import ImageSegmentHeader, \
ImageSegmentHeader0, ImageBands, ImageBand
from sarpy.io.general.utils import is_file_like
from sarpy.io.xml.base import parse_xml_from_string
logger = logging.getLogger(__name__)
#########
# Helper object for initially parses NITF header
class AmpLookupFunction(ComplexFormatFunction):
    """
    Format function for SICD `AMP8I_PHS8I` pixel data - the raw uint8 amplitude
    values are mapped through the provided magnitude lookup table before the
    magnitude/phase pair is interpreted as complex data.
    """

    __slots__ = ('_magnitude_lookup_table', )
    # magnitude/phase is the only sensible ordering for this representation
    _allowed_ordering = ('MP', )

    def __init__(
            self,
            raw_dtype: Union[str, numpy.dtype],
            magnitude_lookup_table: numpy.ndarray,
            raw_shape: Optional[Tuple[int, ...]] = None,
            formatted_shape: Optional[Tuple[int, ...]] = None,
            reverse_axes: Optional[Tuple[int, ...]] = None,
            transpose_axes: Optional[Tuple[int, ...]] = None,
            band_dimension: int = -1):
        """
        Parameters
        ----------
        raw_dtype : str|numpy.dtype
            The raw datatype. Must be `uint8` up to endianness.
        magnitude_lookup_table : numpy.ndarray
            The 256 element magnitude lookup table.
        raw_shape : None|Tuple[int, ...]
        formatted_shape : None|Tuple[int, ...]
        reverse_axes : None|Tuple[int, ...]
        transpose_axes : None|Tuple[int, ...]
        band_dimension : int
            Which band is the complex dimension, **after** the transpose operation.
        """

        ComplexFormatFunction.__init__(
            self, raw_dtype, 'MP', raw_shape=raw_shape, formatted_shape=formatted_shape,
            reverse_axes=reverse_axes, transpose_axes=transpose_axes, band_dimension=band_dimension)
        self._magnitude_lookup_table = None
        self.set_magnitude_lookup(magnitude_lookup_table)

    @property
    def magnitude_lookup_table(self) -> numpy.ndarray:
        """
        The magnitude lookup table, for SICD usage with `AMP8I_PHS8I` pixel type.

        Returns
        -------
        numpy.ndarray
        """

        return self._magnitude_lookup_table

    def set_magnitude_lookup(self, lookup_table: numpy.ndarray) -> None:
        """
        Validate and set the magnitude lookup table.

        Parameters
        ----------
        lookup_table : numpy.ndarray
            One-dimensional array of 256 float32/float64 elements - converted
            to float32, if necessary.

        Raises
        ------
        ValueError
            For a malformed lookup table, or a raw datatype other than `uint8`.
        """

        if not isinstance(lookup_table, numpy.ndarray):
            raise ValueError('requires a numpy.ndarray, got {}'.format(type(lookup_table)))
        if lookup_table.dtype.name not in ['float32', 'float64']:
            raise ValueError('requires a numpy.ndarray of float32 or 64 dtype, got {}'.format(lookup_table.dtype))
        if lookup_table.dtype.name != 'float32':
            # NB: `numpy.cast` was removed in numpy 2.0 - use astype directly
            lookup_table = lookup_table.astype(numpy.float32)
        if lookup_table.shape != (256,):
            raise ValueError('Requires a one-dimensional numpy.ndarray with 256 elements, '
                             'got shape {}'.format(lookup_table.shape))
        if self._raw_dtype.name != 'uint8':
            raise ValueError(
                'A magnitude lookup table has been supplied,\n\t'
                'but the raw datatype is not `uint8`.')
        self._magnitude_lookup_table = lookup_table

    def _forward_magnitude_theta(
            self,
            data: numpy.ndarray,
            out: numpy.ndarray,
            magnitude: numpy.ndarray,
            theta: numpy.ndarray,
            subscript: Tuple[slice, ...]) -> None:
        # map the raw uint8 amplitude through the lookup table, then defer to
        # the standard magnitude/phase handling
        magnitude = self.magnitude_lookup_table[magnitude]
        ComplexFormatFunction._forward_magnitude_theta(
            self, data, out, magnitude, theta, subscript)

    def _reverse_magnitude_theta(
            self,
            data: numpy.ndarray,
            out: numpy.ndarray,
            magnitude: numpy.ndarray,
            theta: numpy.ndarray,
            slice0: Tuple[slice, ...],
            slice1: Tuple[slice, ...]) -> None:
        # quantize the magnitude back to the appropriate lookup table bin index
        magnitude = numpy.digitize(
            numpy.round(magnitude.ravel()), self.magnitude_lookup_table, right=False).reshape(data.shape)
        ComplexFormatFunction._reverse_magnitude_theta(self, data, out, magnitude, theta, slice0, slice1)
class SICDDetails(NITFDetails):
    """
    SICD are stored in NITF 2.1 files.
    """

    __slots__ = (
        '_des_index', '_des_header', '_is_sicd', '_sicd_meta')

    def __init__(self, file_object: Union[str, BinaryIO]):
        """
        Parameters
        ----------
        file_object : str|BinaryIO
            file name or file like object for a NITF 2.1 or 2.0 containing a SICD.

        Raises
        ------
        SarpyIOError
            For a NITF structure which cannot contain a SICD.
        """

        self._des_index = None
        self._des_header = None
        self._img_headers = None
        self._is_sicd = False
        self._sicd_meta = None
        NITFDetails.__init__(self, file_object)
        # a SICD requires image data, forbids graphics, and requires the xml des
        if self._nitf_header.ImageSegments.subhead_sizes.size == 0:
            raise SarpyIOError('There are no image segments defined.')
        if self._nitf_header.GraphicsSegments.item_sizes.size > 0:
            raise SarpyIOError('A SICD file does not allow for graphics segments.')
        if self._nitf_header.DataExtensions.subhead_sizes.size == 0:
            raise SarpyIOError(
                'A SICD file requires at least one data extension, containing the '
                'SICD xml structure.')
        # define the sicd metadata
        self._find_sicd()
        if not self.is_sicd:
            raise SarpyIOError('Could not find the SICD XML des.')

    @property
    def is_sicd(self) -> bool:
        """
        bool: whether file name corresponds to a SICD file, or not.
        """

        return self._is_sicd

    @property
    def sicd_meta(self) -> SICDType:
        """
        SICDType: the sicd meta-data structure.
        """

        return self._sicd_meta

    @property
    def des_header(self) -> Optional[DataExtensionHeader]:
        """
        The DES subheader object associated with the SICD.

        Returns
        -------
        DataExtensionHeader
        """

        return self._des_header

    def _find_sicd(self) -> None:
        # Locate the SICD xml data extension segment, and parse the sicd
        # structure from it. Sets `_is_sicd`, `_des_index`, `_des_header` and
        # `_sicd_meta` accordingly.
        self._is_sicd = False
        self._sicd_meta = None
        if self.des_subheader_offsets is None:
            return

        for i in range(self.des_subheader_offsets.size):
            subhead_bytes = self.get_des_subheader_bytes(i)
            if subhead_bytes.startswith(b'DEXML_DATA_CONTENT'):
                # modern style xml des - parse and inspect the root tag
                des_header = DataExtensionHeader.from_bytes(subhead_bytes, start=0)
                des_bytes = self.get_des_bytes(i)
                # noinspection PyBroadException
                try:
                    root_node, xml_ns = parse_xml_from_string(des_bytes.decode('utf-8').strip().encode())
                    if 'SIDD' in root_node.tag:  # namespace makes this ugly
                        # NOTE that SIDD files are supposed to have the corresponding
                        # SICD xml as one of the DES AFTER the SIDD xml.
                        # The same basic format is used for both headers.
                        # So, abandon if we find a SIDD xml
                        self._des_index = None
                        self._des_header = None
                        self._is_sicd = False
                        break
                    elif 'SICD' in root_node.tag:  # namespace makes this ugly
                        self._des_index = i
                        self._des_header = des_header
                        self._is_sicd = True
                        if xml_ns is None:
                            self._sicd_meta = SICDType.from_node(root_node, xml_ns, ns_key=None)
                        else:
                            self._sicd_meta = SICDType.from_node(root_node, xml_ns, ns_key='default')
                        break
                except Exception:
                    continue
            elif subhead_bytes.startswith(b'DESIDD_XML'):
                # This is an old format SIDD and can't be a SICD
                self._des_index = None
                self._des_header = None
                self._is_sicd = False
                break
            elif subhead_bytes.startswith(b'DESICD_XML'):
                # This is an old format SICD
                des_bytes = self.get_des_bytes(i)
                try:
                    root_node, xml_ns = parse_xml_from_string(des_bytes.decode('utf-8').strip().encode())
                    if 'SICD' in root_node.tag:  # namespace makes this ugly
                        self._des_index = i
                        self._des_header = None
                        self._is_sicd = True
                        if xml_ns is None:
                            self._sicd_meta = SICDType.from_node(root_node, xml_ns, ns_key=None)
                        else:
                            self._sicd_meta = SICDType.from_node(root_node, xml_ns, ns_key='default')
                        break
                except Exception as e:
                    logger.error(
                        'We found an apparent old-style SICD DES header,\n\t'
                        'but failed parsing with error {}'.format(e))
                    continue

        if not self._is_sicd:
            return

        # best-effort population of derived sicd fields
        # noinspection PyBroadException
        try:
            self._sicd_meta.derive()
        except Exception:
            pass
# TODO: account for the reference frequency offset situation
#######
# The actual reading implementation
class SICDReader(NITFReader, SICDTypeReader):
    """
    A SICD reader implementation - file is NITF container following specified rules.

    **Changed in version 1.3.0** for reading changes.
    """

    # a SICD NITF contains exactly one (possibly multi-segment) image
    _maximum_number_of_images = 1

    def __init__(self, nitf_details):
        """
        Parameters
        ----------
        nitf_details : : str|BinaryIO|SICDDetails
            filename, file-like object, or SICDDetails object

        Raises
        ------
        TypeError
            If `nitf_details` is not one of the permitted input types.
        """

        if isinstance(nitf_details, str) or is_file_like(nitf_details):
            nitf_details = SICDDetails(nitf_details)
        if not isinstance(nitf_details, SICDDetails):
            raise TypeError(
                'The input argument for SICDReader must be a filename, file-like object, '
                'or SICDDetails object.')

        SICDTypeReader.__init__(self, None, nitf_details.sicd_meta)
        NITFReader.__init__(self, nitf_details, reader_type='SICD')
        self._check_sizes()

    @property
    def nitf_details(self) -> SICDDetails:
        """
        SICDDetails: The SICD NITF details object.
        """

        # noinspection PyTypeChecker
        return self._nitf_details

    def get_nitf_dict(self) -> Dict:
        """
        Populate a dictionary with the pertinent NITF header information. This
        is for use in more faithful preservation of NITF header information
        in copying or rewriting sicd files.

        Returns
        -------
        dict
        """

        out = {}
        security = {}
        security_obj = self.nitf_details.nitf_header.Security
        # only retain non-trivial security fields
        # noinspection PyProtectedMember
        for field in NITFSecurityTags._ordering:
            value = getattr(security_obj, field).strip()
            if value != '':
                security[field] = value
        if len(security) > 0:
            out['Security'] = security

        out['OSTAID'] = self.nitf_details.nitf_header.OSTAID
        out['FTITLE'] = self.nitf_details.nitf_header.FTITLE
        out['ISORCE'] = self.nitf_details.img_headers[0].ISORCE
        out['IID2'] = self.nitf_details.img_headers[0].IID2
        return out

    def populate_nitf_information_into_sicd(self):
        """
        Populate some pertinent NITF header information into the SICD structure.
        This provides more faithful copying or rewriting options.
        """

        self._sicd_meta.NITF = self.get_nitf_dict()

    def depopulate_nitf_information(self):
        """
        Eliminates the NITF information dict from the SICD structure.
        """

        self._sicd_meta.NITF = {}

    def get_format_function(
            self,
            raw_dtype: numpy.dtype,
            complex_order: Optional[str],
            lut: Optional[numpy.ndarray],
            band_dimension: int,
            image_segment_index: Optional[int] = None,
            **kwargs) -> Optional[FormatFunction]:
        # the AMP8I_PHS8I pixel type requires the amplitude lookup treatment;
        # everything else falls through to the standard NITF handling
        if complex_order is not None and complex_order != 'IQ':
            if complex_order != 'MP' or raw_dtype.name != 'uint8' or band_dimension != 2:
                raise ValueError('Got unsupported SICD band type definition')
            if self.sicd_meta.ImageData.PixelType != 'AMP8I_PHS8I' or self.sicd_meta.ImageData.AmpTable is None:
                raise ValueError('Expected AMP8I_PHS8I')
            return AmpLookupFunction(raw_dtype, self.sicd_meta.ImageData.AmpTable)
        return NITFReader.get_format_function(
            self, raw_dtype, complex_order, lut, band_dimension, image_segment_index, **kwargs)

    def _check_image_segment_for_compliance(
            self,
            index: int,
            img_header: Union[ImageSegmentHeader, ImageSegmentHeader0]) -> bool:
        # verify that the image segment data representation agrees with the
        # sicd PixelType declaration
        out = NITFReader._check_image_segment_for_compliance(self, index, img_header)
        if not out:
            return out

        raw_dtype, formatted_dtype, formatted_bands, complex_order, lut = self._get_dtypes(index)
        if complex_order is None or complex_order not in ['IQ', 'MP']:
            logger.error(
                'Image segment at index {} is not of appropriate type for a SICD Image Segment'.format(index))
            return False

        if formatted_bands != 1:
            logger.error(
                'Image segment at index {} has multiple complex bands'.format(index))
            return False

        raw_name = raw_dtype.name
        pixel_type = self.sicd_meta.ImageData.PixelType
        if pixel_type == 'RE32F_IM32F':
            if complex_order != 'IQ' or raw_name != 'float32':
                logger.error(
                    'Image segment at index {} required to be compatible\n\t'
                    'with PIXEL_TYPE {}'.format(index, pixel_type))
                return False
        elif pixel_type == 'RE16I_IM16I':
            if complex_order != 'IQ' or raw_name != 'int16':
                logger.error(
                    'Image segment at index {} required to be compatible\n\t'
                    'with PIXEL_TYPE {}'.format(index, pixel_type))
                return False
        elif pixel_type == 'AMP8I_PHS8I':
            if complex_order != 'MP' or raw_name != 'uint8':
                logger.error(
                    'Image segment at index {} required to be compatible\n\t'
                    'with PIXEL_TYPE {}'.format(index, pixel_type))
                return False
        else:
            raise ValueError('Unhandled PIXEL_TYPE {}'.format(pixel_type))
        return True

    def find_image_segment_collections(self) -> Tuple[Tuple[int, ...]]:
        # the single sicd image is composed of all supported image segments
        return (
            tuple(index for index in range(len(self.nitf_details.img_headers))
                  if index not in self.unsupported_segments), )
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: Union[str, BinaryIO]) -> Optional[SICDReader]:
    """
    Tests whether a given file_name corresponds to a SICD file, and returns
    a reader instance, if so.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    SICDReader|None
    """

    try:
        the_details = SICDDetails(file_name)
        if not the_details.is_sicd:
            return None
        logger.info('File {} is determined to be a SICD (NITF format) file.'.format(file_name))
        return SICDReader(the_details)
    except SarpyIOError:
        return None
#######
# The actual writing implementation
def validate_sicd_for_writing(sicd_meta: SICDType) -> SICDType:
    """
    Helper method which ensures the provided SICD structure provides enough
    information to support file writing, as well as ensures a few basic items
    are populated as appropriate.

    Parameters
    ----------
    sicd_meta : SICDType

    Returns
    -------
    SICDType
        This returns a deep copy of the provided SICD structure, with any
        necessary modifications. The input structure is not modified.

    Raises
    ------
    ValueError
        If `sicd_meta` is not a SICDType, or its ImageData is too sparsely
        populated to support writing.
    """

    if not isinstance(sicd_meta, SICDType):
        raise ValueError('sicd_meta is required to be an instance of SICDType, got {}'.format(type(sicd_meta)))
    if sicd_meta.ImageData is None:
        raise ValueError('The sicd_meta has un-populated ImageData, and nothing useful can be inferred.')
    if sicd_meta.ImageData.NumCols is None or sicd_meta.ImageData.NumRows is None:
        raise ValueError('The sicd_meta has ImageData with unpopulated NumRows or NumCols, '
                         'and nothing useful can be inferred.')

    # copy before making any modifications, so the caller's structure is
    # never mutated in place
    sicd_meta = sicd_meta.copy()
    if sicd_meta.ImageData.PixelType is None:
        logger.warning('The PixelType for sicd_meta is unset, so defaulting to RE32F_IM32F.')
        sicd_meta.ImageData.PixelType = 'RE32F_IM32F'

    # stamp the creation details with this library's profile
    profile = '{} {}'.format(__title__, __version__)
    if sicd_meta.ImageCreation is None:
        sicd_meta.ImageCreation = ImageCreationType(
            Application=profile,
            DateTime=numpy.datetime64(datetime.now()),
            Profile=profile)
    else:
        sicd_meta.ImageCreation.Profile = profile
        if sicd_meta.ImageCreation.DateTime is None:
            sicd_meta.ImageCreation.DateTime = numpy.datetime64(datetime.now())
    return sicd_meta
def extract_clas(sicd: SICDType) -> str:
    """
    Extract the classification string from a SICD as appropriate for NITF Security
    tags CLAS attribute.

    Parameters
    ----------
    sicd : SICDType

    Returns
    -------
    str
    """

    info = sicd.CollectionInfo
    if info is None or info.Classification is None:
        return 'U'

    c_str = info.Classification.upper().strip()
    # check in decreasing specificity; note c_str is already upper-cased
    if 'UNCLASS' in c_str or c_str == 'U':
        return 'U'
    if 'CONFIDENTIAL' in c_str or c_str == 'C' or c_str.startswith('C/'):
        return 'C'
    if 'TOP SECRET' in c_str or c_str == 'TS' or c_str.startswith('TS/'):
        return 'T'
    if 'SECRET' in c_str or c_str == 'S' or c_str.startswith('S/'):
        return 'S'
    if 'FOUO' in c_str or 'RESTRICTED' in c_str:
        return 'R'

    logger.error(
        'Unclear how to extract CLAS for classification string {}.\n\t'
        'Should be set appropriately.'.format(c_str))
    return 'U'
def create_security_tags_from_sicd(sicd_meta: SICDType) -> NITFSecurityTags:
    """
    Create a NITF security tags object, seeded from any stashed
    `sicd_meta.NITF['Security']` fields and otherwise populated from the
    SICD CollectionInfo.Classification value.

    Parameters
    ----------
    sicd_meta : SICDType

    Returns
    -------
    NITFSecurityTags
    """

    def get_basic_args():
        # seed arguments from any previously stashed NITF security fields
        out = {}
        sec_tags = sicd_meta.NITF.get('Security', {})
        # noinspection PyProtectedMember
        for fld in NITFSecurityTags._ordering:
            if fld in sec_tags:
                out[fld] = sec_tags[fld]
        return out

    def get_clas():
        # populate the classification level, unless already specified
        if 'CLAS' in args:
            return
        args['CLAS'] = extract_clas(sicd_meta)

    def get_code(in_str):
        # populate the classification code from the text after the first
        # slash, unless already specified
        if 'CODE' in args:
            return
        # TODO: this is pretty terrible...
        code = re.search('(?<=/)[^/].*', in_str)
        if code is not None:
            args['CODE'] = code.group()

    def get_clsy():
        # default the classification system to US, if empty
        if args.get('CLSY', '').strip() == '':
            args['CLSY'] = 'US'

    args = get_basic_args()
    if sicd_meta.CollectionInfo is not None:
        get_clas()
        get_code(sicd_meta.CollectionInfo.Classification)
        get_clsy()
    return NITFSecurityTags(**args)
class SICDWritingDetails(NITFWritingDetails):
"""
Manager for all the NITF subheader information associated with the SICD.
Introduced in version 1.3.0.
"""
__slots__ = (
'_sicd_meta', '_security_tags', '_row_limit', '_check_older_version',
'_required_version')
def __init__(
        self,
        sicd_meta: SICDType,
        row_limit: Optional[int] = None,
        additional_des: Optional[Sequence[DESSubheaderManager]] = None,
        text_managers: Optional[Tuple[TextSubheaderManager, ...]] = None,
        res_managers: Optional[Tuple[RESSubheaderManager, ...]] = None,
        check_older_version: bool = False):
    """
    Parameters
    ----------
    sicd_meta : SICDType
    row_limit : None|int
        Desired row limit for the sicd image segments. Non-positive values
        or values > 99999 will be ignored.
    additional_des : None|Sequence[DESSubheaderManager]
    text_managers: Optional[Tuple[TextSubheaderManager, ...]]
    res_managers: Optional[Tuple[RESSubheaderManager, ...]]
    check_older_version : bool
        Try to create an older version sicd, for compliance
    """

    self._check_older_version = bool(check_older_version)
    self._security_tags = None
    self._sicd_meta = None
    self._set_sicd_meta(sicd_meta)  # validates and copies the sicd structure
    self._required_version = self.sicd_meta.version_required()
    self._create_security_tags()
    self._row_limit = None
    self._set_row_limit(row_limit)

    # construct the NITF pieces from the sicd structure
    header = self._create_header()
    image_managers, image_segment_collections, image_segment_coordinates = self._create_image_segments()
    des_managers = self._create_des_segments(additional_des)
    # NB: graphics not permitted in sicd
    NITFWritingDetails.__init__(
        self,
        header,
        image_managers=image_managers,
        image_segment_collections=image_segment_collections,
        image_segment_coordinates=image_segment_coordinates,
        text_managers=text_managers,
        des_managers=des_managers,
        res_managers=res_managers)
@property
def sicd_meta(self) -> SICDType:
"""
SICDType: The sicd metadata
"""
return self._sicd_meta
def _set_sicd_meta(self, value):
if self._sicd_meta is not None:
raise ValueError('sicd_meta is read only')
self._sicd_meta = validate_sicd_for_writing(value)
@property
def requires_version(self) -> Tuple[int, int, int]:
"""
Tuple[int, int, int]: What is the required (at minimum) sicd version?
"""
return self._required_version
@property
def row_limit(self) -> int:
return self._row_limit
def _set_row_limit(self, value):
if value is not None:
if not isinstance(value, int):
raise TypeError('row_bounds must be an integer')
if value < 1:
value = None
if value is None or value > 99999:
value = 99999
im_seg_limit = 10**10 - 2 # allowable image segment size
row_memory_size = self.sicd_meta.ImageData.NumCols*self.sicd_meta.ImageData.get_pixel_size()
memory_limit = int(numpy.floor(im_seg_limit/row_memory_size))
self._row_limit = min(value, memory_limit)
@property
def security_tags(self) -> NITFSecurityTags:
"""
NITFSecurityTags: The default NITF security tags for use.
"""
return self._security_tags
def _create_security_tags(self) -> None:
"""
Creates a NITF security tags object with `CLAS` and `CODE` attributes in
the sicd_meta.NITF property and/or extracted from the
SICD.CollectionInfo.Classification value.
Returns
-------
None
"""
self._security_tags = create_security_tags_from_sicd(self.sicd_meta)
def _get_ftitle(self) -> str:
ftitle = self.sicd_meta.NITF.get('FTITLE', None)
if ftitle is None:
ftitle = self.sicd_meta.NITF.get('SUGGESTED_NAME', None)
if ftitle is None:
ftitle = self.sicd_meta.get_suggested_name(1)
if ftitle is None and self.sicd_meta.CollectionInfo is not None and \
self.sicd_meta.CollectionInfo.CoreName is not None:
ftitle = 'SICD: {}'.format(self.sicd_meta.CollectionInfo.CoreName)
if ftitle is None:
ftitle = 'SICD: Unknown'
if self._check_older_version and self._required_version < (1, 2, 0) and \
not ftitle.startswith('SICD:'):
ftitle = 'SICD:' + ftitle
return ftitle
def _get_fdt(self):
return re.sub(r'[^0-9]', '', str(self.sicd_meta.ImageCreation.DateTime.astype('datetime64[s]')))
def _get_idatim(self) -> str:
idatim = ' '
if self.sicd_meta.Timeline is not None and self.sicd_meta.Timeline.CollectStart is not None:
idatim = re.sub(r'[^0-9]', '', str(self.sicd_meta.Timeline.CollectStart.astype('datetime64[s]')))
return idatim
def _get_ostaid(self) -> str:
ostaid = self.sicd_meta.NITF.get('OSTAID', 'Unknown')
return ostaid
def _get_isorce(self) -> str:
isorce = self.sicd_meta.NITF.get('ISORCE', None)
if isorce is None and \
self.sicd_meta.CollectionInfo is not None and \
self.sicd_meta.CollectionInfo.CollectorName is not None:
isorce = 'SICD: {}'.format(self.sicd_meta.CollectionInfo.CollectorName)
if isorce is None:
isorce = 'SICD: Unknown Collector'
return isorce
def _get_iid2(self) -> str:
iid2 = self.sicd_meta.NITF.get('IID2', self._get_ftitle())
if self._check_older_version and self._required_version < (1, 2, 0) and \
not iid2.startswith('SICD:'):
iid2 = 'SICD:' + iid2
return iid2
def _create_header(self) -> NITFHeader:
"""
Create the main NITF header.
Returns
-------
NITFHeader
"""
# NB: CLEVEL and FL will be corrected...
return NITFHeader(
Security=self.security_tags, CLEVEL=3, OSTAID=self._get_ostaid(),
FDT=self._get_fdt(), FTITLE=self._get_ftitle(), FL=0)
def _create_image_segments(self) -> Tuple[
Tuple[ImageSubheaderManager, ...],
Tuple[Tuple[int, ...], ...],
Tuple[Tuple[Tuple[int, ...], ...]]]:
image_managers = []
basic_args = {
'IREP': 'NODISPLY',
'IC': 'NC',
'ICAT': 'SAR',
'IID2': self._get_iid2(),
'IDATIM': self._get_idatim(),
'ISORCE': self._get_isorce()
}
pixel_type = self.sicd_meta.ImageData.PixelType # required to be defined
# NB: SICDs are required to be stored as big-endian, so the endian-ness
# of the memmap must be explicit
if pixel_type == 'RE32F_IM32F':
basic_args['PVTYPE'] = 'R'
basic_args['NBPP'] = 32
basic_args['ABPP'] = 32
isubcat = ('I', 'Q')
elif pixel_type == 'RE16I_IM16I':
basic_args['PVTYPE'] = 'SI'
basic_args['NBPP'] = 16
basic_args['ABPP'] = 16
isubcat = ('I', 'Q')
elif pixel_type == 'AMP8I_PHS8I':
basic_args['PVTYPE'] = 'INT'
basic_args['NBPP'] = 8
basic_args['ABPP'] = 8
isubcat = ('M', 'P')
else:
raise ValueError('Got unhandled pixel_type {}'.format(pixel_type))
rows = self.sicd_meta.ImageData.NumRows
cols = self.sicd_meta.ImageData.NumCols
icp = None
if self.sicd_meta.GeoData is not None and self.sicd_meta.GeoData.ImageCorners is not None:
# noinspection PyTypeChecker
icp = self.sicd_meta.GeoData.ImageCorners.get_array(dtype=numpy.float64)
image_segment_limits = default_image_segmentation(rows, cols, self.row_limit)
image_segment_collection = (tuple(range(len(image_segment_limits))), )
image_segment_coordinates = (image_segment_limits, )
for i, entry in enumerate(image_segment_limits):
if i == 0:
iloc = '0000000000'
else:
prev_lims = image_segment_limits[i-1]
prev_rows = prev_lims[1] - prev_lims[0]
iloc = '{0:05d}00000'.format(prev_rows)
this_rows = entry[1]-entry[0]
this_cols = entry[3]-entry[2]
subhead = ImageSegmentHeader(
IID1='SICD{0:03d}'.format(0 if len(image_segment_limits) == 1 else i+1),
NROWS=this_rows,
NCOLS=this_cols,
IGEOLO=interpolate_corner_points_string(numpy.array(entry, dtype=numpy.int64), rows, cols, icp),
NPPBH=0 if this_cols > 8192 else this_cols,
NPPBV=0 if this_rows > 8192 else this_rows,
NBPC=1,
NBPR=1,
IDLVL=i+1,
IALVL=i,
ILOC=iloc,
Bands=ImageBands(values=[ImageBand(ISUBCAT=entry) for entry in isubcat]),
Security=self._security_tags,
**basic_args)
image_managers.append(ImageSubheaderManager(subhead))
return tuple(image_managers), image_segment_collection, image_segment_coordinates
def _create_sicd_des(self) -> DESSubheaderManager:
uh_args = self.sicd_meta.get_des_details(self._check_older_version)
desshdt = str(self.sicd_meta.ImageCreation.DateTime.astype('datetime64[s]'))
if desshdt[-1] != 'Z':
desshdt += 'Z'
uh_args['DESSHDT'] = desshdt
desshlpg = ''
if self.sicd_meta.GeoData is not None and self.sicd_meta.GeoData.ImageCorners is not None:
# noinspection PyTypeChecker
icp = self.sicd_meta.GeoData.ImageCorners.get_array(dtype=numpy.float64)
temp = []
for entry in icp:
temp.append('{0:0=+12.8f}{1:0=+13.8f}'.format(entry[0], entry[1]))
temp.append(temp[0])
desshlpg = ''.join(temp)
uh_args['DESSHLPG'] = desshlpg
subhead = DataExtensionHeader(
Security=self.security_tags,
UserHeader=XMLDESSubheader(**uh_args))
return DESSubheaderManager(
subhead, self.sicd_meta.to_xml_bytes(tag='SICD', urn=uh_args['DESSHTN']))
def _create_des_segments(
self,
additional_des: Optional[Sequence[DESSubheaderManager]]) -> Tuple[DESSubheaderManager, ...]:
if additional_des is not None:
des_managers = list(additional_des)
else:
des_managers = []
des_managers.append(self._create_sicd_des())
return tuple(des_managers)
class SICDWriter(NITFWriter):
    """
    Writer class for a SICD file - a NITF file containing complex radar data and
    SICD data extension.
    **Changed in version 1.3.0** to reflect NITFWriter changes.
    """

    def __init__(
            self,
            file_object: Union[str, BinaryIO],
            sicd_meta: Optional[SICDType] = None,
            sicd_writing_details: Optional[SICDWritingDetails] = None,
            check_older_version: bool = False,
            check_existence: bool = True):
        """
        Parameters
        ----------
        file_object : str|BinaryIO
        sicd_meta : None|SICDType
        sicd_writing_details : None|SICDWritingDetails
        check_older_version : bool
            Try to create an older version sicd, for compliance with standard
            NGA applications like SOCET or RemoteView
        check_existence : bool
            Should we check if the given file already exists?
        """
        if sicd_meta is None and sicd_writing_details is None:
            raise ValueError('One of sicd_meta or sicd_writing_details must be provided.')
        # construct the default subheader details from the metadata, when not supplied
        details = sicd_writing_details if sicd_writing_details is not None \
            else SICDWritingDetails(sicd_meta, check_older_version=check_older_version)
        super().__init__(file_object, details, check_existence=check_existence)

    @property
    def nitf_writing_details(self) -> SICDWritingDetails:
        """
        SICDWritingDetails: The SICD/NITF subheader details.
        """
        # noinspection PyTypeChecker
        return self._nitf_writing_details

    @nitf_writing_details.setter
    def nitf_writing_details(self, value):
        # write-once: assigned during base class initialization
        if self._nitf_writing_details is not None:
            raise ValueError('nitf_writing_details is read-only')
        if not isinstance(value, SICDWritingDetails):
            raise TypeError('nitf_writing_details must be of type {}'.format(SICDWritingDetails))
        self._nitf_writing_details = value

    @property
    def sicd_meta(self) -> SICDType:
        """
        SICDType: The sicd metadata.
        """
        return self.nitf_writing_details.sicd_meta

    def get_format_function(
            self,
            raw_dtype: numpy.dtype,
            complex_order: Optional[str],
            lut: Optional[numpy.ndarray],
            band_dimension: int,
            image_segment_index: Optional[int] = None,
            **kwargs) -> Optional[FormatFunction]:
        """
        Gets the format function for the raw data, using the amplitude lookup
        table for AMP8I_PHS8I data and otherwise deferring to the base class.
        """
        if complex_order is None or complex_order == 'IQ':
            # the standard handling applies
            return NITFWriter.get_format_function(
                self, raw_dtype, complex_order, lut, band_dimension, image_segment_index, **kwargs)
        # the only other supported case is 8-bit magnitude/phase with an amplitude table
        if complex_order != 'MP' or raw_dtype.name != 'uint8' or band_dimension != 2:
            raise ValueError('Got unsupported SICD band type definition')
        image_data = self.sicd_meta.ImageData
        if image_data.PixelType != 'AMP8I_PHS8I' or image_data.AmpTable is None:
            raise ValueError('Expected AMP8I_PHS8I')
        return AmpLookupFunction(raw_dtype, image_data.AmpTable)
| 34,414 | 36.205405 | 114 | py |
sarpy | sarpy-master/sarpy/io/complex/utils.py | """
Common functionality for converting metadata
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Iterator, Tuple, List, Optional, Union
import numpy
from numpy.polynomial import polynomial
from scipy.stats import scoreatpercentile
from scipy.linalg import lstsq
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import Poly2DType
logger = logging.getLogger(__name__)
def two_dim_poly_fit(
        x: numpy.ndarray,
        y: numpy.ndarray,
        z: numpy.ndarray,
        x_order: int = 2,
        y_order: int = 2,
        x_scale: float = 1.,
        y_scale: float = 1.,
        rcond: Optional[float] = None) -> Tuple[
            numpy.ndarray, numpy.ndarray, int, numpy.ndarray]:
    """
    Perform a least-squares fit of the data to a two-dimensional polynomial.

    The returned coefficient array `solution` satisfies
    `z ~ sum_{i, j} solution[i, j] * x**i * y**j`.

    Parameters
    ----------
    x : numpy.ndarray
        the x data
    y : numpy.ndarray
        the y data
    z : numpy.ndarray
        the z data
    x_order : int
        the polynomial order in x
    y_order : int
        the polynomial order in y
    x_scale : float
        In order to help the fitting problem to become better conditioned, the
        independent variables can be scaled, the fit performed, and then the
        solution rescaled.
    y_scale : float
    rcond : None|float
        passed through to :func:`scipy.linalg.lstsq` (as `cond`).

    Returns
    -------
    solution : numpy.ndarray
        the coefficient array, of shape `(x_order + 1, y_order + 1)`
    residuals : numpy.ndarray
        the mean square residual (this is often an empty array)
    rank : int
        the rank of the fitting matrix
    sing_values : numpy.ndarray
        the singular values of the fitting matrix
    """
    for arg in (x, y, z):
        if not isinstance(arg, numpy.ndarray):
            raise TypeError('x, y, z must be numpy arrays')
    if x.size != z.size or y.size != z.size:
        raise ValueError('x, y, z must have the same cardinality size.')
    # flatten and apply the conditioning scale factors
    xs = x.flatten()*x_scale
    ys = y.flatten()*y_scale
    zs = z.flatten()
    # formulate as design*t = zs, where design has one column per monomial
    # x**i * y**j, ordered per numpy.ndindex
    design = numpy.empty((xs.size, (x_order+1)*(y_order+1)), dtype=numpy.float64)
    # noinspection PyTypeChecker
    for col, powers in enumerate(numpy.ndindex((x_order+1, y_order+1))):
        design[:, col] = numpy.power(xs, powers[0])*numpy.power(ys, powers[1])
    # perform the least squares fit
    coeffs, residuals, rank, sing_values = lstsq(design, zs, cond=rcond)
    if isinstance(residuals, (numpy.ndarray, numpy.number)):
        residuals /= float(xs.size)
    # undo the scaling on the fitted coefficients
    solution = numpy.power(x_scale, numpy.arange(x_order+1))[:, numpy.newaxis] * \
        numpy.reshape(coeffs, (x_order+1, y_order+1)) * \
        numpy.power(y_scale, numpy.arange(y_order+1))
    return solution, residuals, rank, sing_values
def get_im_physical_coords(
        array,
        grid,
        image_data,
        direction):
    """
    Converts one dimension of "pixel" image (row or column) coordinates to
    "physical" image (range or azimuth in meters) coordinates, for use in the
    various two-variable sicd polynomials.

    Parameters
    ----------
    array : numpy.ndarray|float|int
        either row or col coordinate component
    grid : sarpy.io.complex.sicd_elements.Grid.GridType
    image_data : sarpy.io.complex.sicd_elements.ImageData.ImageDataType
    direction : str
        one of 'Row' or 'Col' (case insensitive) to determine which

    Returns
    -------
    numpy.array|float

    Raises
    ------
    ValueError
        for a direction other than 'Row' or 'Col' (case insensitive)
    """
    which = direction.upper()
    if which == 'ROW':
        # shift to be relative to the scene center pixel, then scale by sample spacing
        return (array - image_data.SCPPixel.Row + image_data.FirstRow)*grid.Row.SS
    if which == 'COL':
        return (array - image_data.SCPPixel.Col + image_data.FirstCol)*grid.Col.SS
    raise ValueError('Unrecognized direction {}'.format(direction))
def fit_time_coa_polynomial(
        inca,
        image_data,
        grid,
        dop_rate_scaled_coeffs,
        poly_order=2):
    """
    Fit the time center-of-aperture polynomial by sampling the INCA time of
    closest approach, doppler centroid, and doppler rate over an image grid.

    Parameters
    ----------
    inca : sarpy.io.complex.sicd_elements.RMA.INCAType
    image_data : sarpy.io.complex.sicd_elements.ImageData.ImageDataType
    grid : sarpy.io.complex.sicd_elements.Grid.GridType
    dop_rate_scaled_coeffs : numpy.ndarray
        the dop rate polynomial relative to physical coordinates - this a
        common construct in converting metadata for csk/sentinel/radarsat
    poly_order : int
        the degree of the polynomial to fit.

    Returns
    -------
    Poly2DType
    """
    sample_count = poly_order + 8
    # physical (meters) coordinates sampled uniformly across the image extent
    az_1d = get_im_physical_coords(
        numpy.linspace(0, image_data.NumCols - 1, sample_count, dtype='float64'), grid, image_data, 'col')
    rg_1d = get_im_physical_coords(
        numpy.linspace(0, image_data.NumRows - 1, sample_count, dtype='float64'), grid, image_data, 'row')
    az_2d, rg_2d = numpy.meshgrid(az_1d, rg_1d)
    # time coa = time of closest approach + doppler centroid/doppler rate
    time_ca = inca.TimeCAPoly(az_2d)
    dop_centroid = inca.DopCentroidPoly(rg_2d, az_2d)
    dop_rate = polynomial.polyval(rg_2d, dop_rate_scaled_coeffs)
    time_coa = time_ca + dop_centroid/dop_rate
    coefs, residuals, rank, sing_values = two_dim_poly_fit(
        rg_2d, az_2d, time_coa,
        x_order=poly_order, y_order=poly_order, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
    logger.info(
        'The time_coa_fit details:\n\t'
        'root mean square residuals = {}\n\t'
        'rank = {}\n\t'
        'singular values = {}'.format(residuals, rank, sing_values))
    return Poly2DType(Coefs=coefs)
def fit_position_xvalidation(
        time_array: numpy.ndarray,
        position_array: numpy.ndarray,
        velocity_array: numpy.ndarray,
        max_degree: int = 5) -> Tuple[
            numpy.ndarray, numpy.ndarray, numpy.ndarray]:
    """
    Empirically fit the polynomials for the X, Y, Z ECF position array, using cross
    validation with the velocity array to determine the best fit degree up to a
    given maximum degree.

    Parameters
    ----------
    time_array : numpy.ndarray
        One-dimensional array of times, of size at least 2.
    position_array : numpy.ndarray
        Array of shape `(time_array.size, 3)` of X, Y, Z ECF positions.
    velocity_array : numpy.ndarray
        Array of shape `(time_array.size, 3)` of velocities, used only to score
        the candidate fits.
    max_degree : int
        The maximum candidate polynomial degree.

    Returns
    -------
    X_poly : numpy.ndarray
    Y_poly : numpy.ndarray
    Z_poly : numpy.ndarray

    Raises
    ------
    TypeError
        If any of the arrays is not a numpy.ndarray.
    ValueError
        If the array shapes are inconsistent, or `max_degree < 1`.
    """
    if not isinstance(time_array, numpy.ndarray) or \
            not isinstance(position_array, numpy.ndarray) or \
            not isinstance(velocity_array, numpy.ndarray):
        raise TypeError('time_array, position_array, and velocity_array must be numpy.ndarray instances.')
    # BUG FIX: the condition here was `ndim != 1 and size > 1`, which (contrary
    # to the error message) silently accepted scalar or single-element time arrays
    if time_array.ndim != 1 or time_array.size < 2:
        raise ValueError('time_array must be one-dimensional with at least 2 elements.')
    if position_array.shape != velocity_array.shape:
        raise ValueError('position_array and velocity_array must have the same shape.')
    if position_array.shape[0] != time_array.size:
        raise ValueError('The first dimension of position_array must be the same size as time_array.')
    if position_array.shape[1] != 3:
        raise ValueError('The second dimension of position array must have size 3, '
                         'representing X, Y, Z ECF coordinates.')
    max_degree = int(max_degree)
    if max_degree < 1:
        raise ValueError('max_degree must be at least 1.')
    if max_degree > 10:
        logger.warning(
            'max_degree greater than 10 for polynomial fitting may lead\n\t'
            'to poorly conditioned (i.e. badly behaved) fit.')

    prev_vel_error = numpy.inf
    P_x, P_y, P_z = None, None, None
    # increase the degree until the velocity cross-validation error stops improving
    for deg in range(1, min(max_degree, position_array.shape[0]-1) + 1):
        # candidate position fits at this degree
        cand_x = polynomial.polyfit(time_array, position_array[:, 0], deg=deg)
        cand_y = polynomial.polyfit(time_array, position_array[:, 1], deg=deg)
        cand_z = polynomial.polyfit(time_array, position_array[:, 2], deg=deg)
        # score the candidate by comparing its implied velocities to the given ones
        vel_est = numpy.hstack(
            (polynomial.polyval(time_array, polynomial.polyder(cand_x))[:, numpy.newaxis],
             polynomial.polyval(time_array, polynomial.polyder(cand_y))[:, numpy.newaxis],
             polynomial.polyval(time_array, polynomial.polyder(cand_z))[:, numpy.newaxis]))
        vel_err = vel_est - velocity_array
        cur_vel_error = numpy.sum((vel_err * vel_err))
        if P_x is not None and cur_vel_error >= prev_vel_error:
            # BUG FIX: `prev_vel_error` was previously never updated, so the
            # cross validation never stopped early and the (potentially worse)
            # maximum-degree fit was always returned; now the last improving
            # degree is kept
            break
        P_x, P_y, P_z = cand_x, cand_y, cand_z
        prev_vel_error = cur_vel_error
    # noinspection PyTypeChecker
    return P_x, P_y, P_z
def sicd_reader_iterator(
        reader,
        partitions=None,
        polarization=None,
        band=None):
    """
    Provides an iterator over a collection of partitions (tuple of tuple of integer
    indices for the reader) for a sicd type reader object.

    Parameters
    ----------
    reader : SICDTypeReader
    partitions : None|tuple
        The partitions collection. If None, then partitioning from
        `reader.get_sicd_partitions()` will be used.
    polarization : None|str
        The polarization string to match.
    band : None|str
        The band to match.

    Returns
    -------
    Iterator[tuple]
        Yields the partition index, the sicd reader index, and the sicd structure.
    """
    if not isinstance(reader, SICDTypeReader):
        raise TypeError('reader must be an instance of SICDTypeReader. Got type {}'.format(type(reader)))
    if reader.reader_type != "SICD":
        raise ValueError('The provided reader must be of SICD type.')
    if partitions is None:
        # noinspection PyUnresolvedReferences
        partitions = reader.get_sicd_partitions()
    # noinspection PyUnresolvedReferences
    the_sicds = reader.get_sicds_as_tuple()
    for partition_id, index_collection in enumerate(partitions):
        for sicd_index in index_collection:
            candidate = the_sicds[sicd_index]
            # skip entries which fail either optional filter
            if band is not None and candidate.get_transmit_band_name() != band:
                continue
            if polarization is not None and candidate.get_processed_polarization() != polarization:
                continue
            yield partition_id, sicd_index, candidate
def get_physical_coordinates(
        the_sicd,
        row_value: Union[int, float, numpy.ndarray],
        col_value: Union[int, float, numpy.ndarray]) -> Tuple[Union[float, numpy.ndarray], Union[float, numpy.ndarray]]:
    """
    Transform from image coordinates to physical coordinates, for polynomial evaluation.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
    row_value : int|float|numpy.ndarray
    col_value : int|float|numpy.ndarray

    Returns
    -------
    row_coords : float|numpy.ndarray
    col_coords : float|numpy.ndarray
    """
    # simply delegate each dimension to the one-dimensional conversion
    row_coords = get_im_physical_coords(row_value, the_sicd.Grid, the_sicd.ImageData, 'row')
    col_coords = get_im_physical_coords(col_value, the_sicd.Grid, the_sicd.ImageData, 'col')
    return row_coords, col_coords
###################
# helper functions
def get_fetch_block_size(
        start_element: int,
        stop_element: int,
        block_size_in_bytes: int,
        bands: int = 1):
    """
    Gets the appropriate block size, given fetch parameters and constraints.

    Note: this is a helper function, and no checking of argument validity will
    be performed.

    Parameters
    ----------
    start_element : int
    stop_element : int
    block_size_in_bytes : int
    bands : int

    Returns
    -------
    None|int
        The block size (at least 1), or None for a degenerate range or
        unspecified byte limit.
    """
    if stop_element == start_element or block_size_in_bytes is None:
        return None
    element_count = float(abs(stop_element - start_element))
    # 8 bytes per band per element (complex64)
    bytes_per_index = float(bands*8*element_count)
    return max(1, int(numpy.ceil(block_size_in_bytes/bytes_per_index)))
def extract_blocks(
        the_range: Tuple[int, int, int],
        index_block_size: Union[None, int, float]) -> Tuple[List[Tuple[int, int, int]], List[Tuple[int, int]]]:
    """
    Convert the single range definition into a series of range definitions in
    keeping with fetching of the appropriate block sizes.

    Note: this is a helper function, and no checking of argument validity will
    be performed.

    Parameters
    ----------
    the_range : Tuple[int, int, int]
        The input (off processing axis) range.
    index_block_size : None|int|float
        The size of blocks (number of indices).

    Returns
    -------
    range_def: List[Tuple[int, int, int]]
        The sequence of range definitions `(start index, stop index, step)`
        relative to the overall image.
    index_limits: List[Tuple[int, int]]
        The sequence of start/stop indices for positioning of the given range
        relative to the original range.
    """
    indices = numpy.arange(the_range[0], the_range[1], the_range[2], dtype=numpy.int64)
    # NOTE(review): a block size of None or 1 both yield the whole range as a
    # single block - preserved as-is
    if index_block_size is None or index_block_size == 1:
        return [the_range, ], [(0, indices.size), ]
    # how many blocks?
    block_count = int(numpy.ceil(indices.size/float(index_block_size)))
    range_defs = []
    index_limits = []
    first = 0
    for _ in range(block_count):
        last = first + index_block_size
        if last < indices.size:
            range_defs.append((int(indices[first]), int(indices[last]), the_range[2]))
            index_limits.append((first, last))
        else:
            # the final (possibly partial) block runs to the end of the range
            range_defs.append((int(indices[first]), the_range[1], the_range[2]))
            index_limits.append((first, indices.size))
        first = last
    return range_defs, index_limits
def get_data_mean_magnitude(bounds, reader, index, block_size_in_bytes):
    """
    Gets the mean magnitude in the region defined by bounds.

    Note: this is a helper function, and no checking of argument validity will
    be performed.

    Parameters
    ----------
    bounds : numpy.ndarray|tuple|list
        Of the form `(row_start, row_end, col_start, col_end)`.
    reader : SICDTypeReader
        The data reader.
    index : int
        The reader index to use.
    block_size_in_bytes : int|float
        The block size in bytes.

    Returns
    -------
    float
        The mean over the strictly positive, finite magnitude values.
    """
    # Extract the mean of the data magnitude - for global remap usage
    logger.info(
        'Calculating mean over the block ({}:{}, {}:{}), this may be time consuming'.format(*bounds))
    block_size = get_fetch_block_size(bounds[0], bounds[1], block_size_in_bytes)
    column_blocks, _ = extract_blocks((bounds[2], bounds[3], 1), block_size)
    running_sum = 0.0
    running_count = 0
    # noinspection PyTypeChecker
    for col_block in column_blocks:
        magnitude = numpy.abs(reader[
            bounds[0]:bounds[1],
            col_block[0]:col_block[1],
            index])
        # ignore zero and non-finite values
        use = (magnitude > 0) & numpy.isfinite(magnitude)
        running_sum += numpy.sum(magnitude[use])
        running_count += numpy.sum(use)
    return float(running_sum/running_count)
def stats_calculation(data, percentile=None):
    """
    Calculate the statistics for input into the nrl remap.

    Parameters
    ----------
    data : numpy.ndarray
        The amplitude array, assumed real valued and finite.
    percentile : None|float|int
        Which percentile to calculate

    Returns
    -------
    tuple
        Of the form `(minimum, maximum)` or `(minimum, maximum, percentile value)`
    """
    extrema = (numpy.min(data), numpy.max(data))
    if percentile is None:
        return extrema
    return extrema + (scoreatpercentile(data, percentile), )
def get_data_extrema(bounds, reader, index, block_size_in_bytes, percentile=None):
    """
    Gets the minimum and maximum magnitude in the region defined by bounds,
    optionally, get as **estimate** for the `percentage` percentile point.

    Note: this is a helper function, and no checking of argument validity will
    be performed.

    Parameters
    ----------
    bounds : numpy.ndarray|tuple|list
        Of the form `(row_start, row_end, col_start, col_end)`.
    reader : SICDTypeReader
        The data reader.
    index : int
        The reader index to use.
    block_size_in_bytes : int|float
        The block size in bytes.
    percentile : None|int|float
        The optional percentile to estimate.

    Returns
    -------
    Tuple
        The minimum (finite) observed, maximum (finite) observed, and optionally,
        the desired (of finite) percentile.
        The minimum, maximum, and percentile values will be `None` exactly when the reader
        contains no finite data.
    """
    min_value = None
    max_value = None
    percent = None
    # Extract the extrema of the data magnitude - for global remap usage
    logger.info(
        'Calculating extrema over the block ({}:{}, {}:{}), this may be time consuming'.format(*bounds))
    block_size = get_fetch_block_size(bounds[0], bounds[1], block_size_in_bytes)
    column_blocks, _ = extract_blocks((bounds[2], bounds[3], 1), block_size)
    # noinspection PyTypeChecker
    for col_block in column_blocks:
        magnitude = numpy.abs(reader[
            bounds[0]:bounds[1],
            col_block[0]:col_block[1],
            index])
        finite = numpy.isfinite(magnitude)
        if not numpy.any(finite):
            continue
        stats = stats_calculation(magnitude[finite], percentile=percentile)
        min_value = stats[0] if min_value is None else min(min_value, stats[0])
        max_value = stats[1] if max_value is None else max(max_value, stats[1])
        if percentile is not None:
            # NOTE: this is only an estimate - the percentile of the final
            # block with finite data is retained
            percent = stats[2]
    if percentile is None:
        return min_value, max_value
    return min_value, max_value, percent
| 18,096 | 33.274621 | 120 | py |
sarpy | sarpy-master/sarpy/io/complex/tsx.py | """
Functionality for reading TerraSAR-X data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import os
import logging
from xml.etree import ElementTree
from typing import Union, List, Tuple, Optional, BinaryIO
from functools import reduce
import struct
import numpy
import numpy.linalg
from numpy.polynomial import polynomial
from scipy.constants import speed_of_light
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import Poly1DType, Poly2DType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
WaveformParametersType, ChanParametersType, TxStepType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Position import PositionType, XYZPolyType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, \
RcvChanProcType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType, NoiseLevelType_
from sarpy.io.complex.utils import two_dim_poly_fit, fit_position_xvalidation
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.data_segment import DataSegment, NumpyMemmapSegment, SubsetSegment
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.utils import get_seconds, parse_timestring, is_file_like
logger = logging.getLogger(__name__)
##########
# helper functions and basic interpreter
def _parse_xml(file_name: str, without_ns: bool = False) -> Union[
ElementTree.Element, Tuple[dict, ElementTree.Element]]:
root_node = ElementTree.parse(file_name).getroot()
if without_ns:
return root_node
else:
ns = dict([node for _, node in ElementTree.iterparse(file_name, events=('start-ns', ))])
return ns, root_node
def _is_level1_product(prospective_file: str) -> bool:
with open(prospective_file, 'rb') as fi:
check = fi.read(200)
if check.startswith(b'<?xml'):
end_xml_declaration = check.find(b'?>')
if end_xml_declaration == -1:
raise ValueError('Poorly formed xml declaration\n\t`{}`'.format(check))
check = check[end_xml_declaration+2:].strip()
else:
check = check.strip()
return check.startswith(b'<level1Product')
############
# metadata helper class
class TSXDetails(object):
"""
Parser and interpreter for the TerraSAR-X file package meta-data.
"""
__slots__ = (
'_parent_directory', '_main_file', '_georef_file', '_main_root', '_georef_root',
'_im_format')
    def __init__(self, file_name: str):
        """
        Parameters
        ----------
        file_name : str
            The top-level directory, or the basic package xml file.

        Raises
        ------
        SarpyIOError
            If the file/directory does not resolve to a TerraSAR-X level 1 package.
        ValueError
            If the package layout or image format is unexpected.
        """
        # initialize the slots before validation populates them
        self._parent_directory = None
        self._main_file = None
        self._georef_file = None
        self._main_root = None
        self._georef_root = None
        self._im_format = None
        # locates and parses the level1Product xml (and GEOREF.xml, if present)
        self._validate_file(file_name)
        self._im_format = self._find_main('./productInfo/imageDataInfo/imageDataFormat').text
        # only these two image payload formats are supported
        if self._im_format not in ['COSAR', 'GEOTIFF']:
            raise ValueError(
                'The file is determined to be of type TerraSAR-X, but we got '
                'unexpected image format value {}'.format(self.image_format))
    def _validate_file(self, file_name: str) -> None:
        """
        Validate the input file location, locate the level1Product xml file,
        and parse it (along with ANNOTATION/GEOREF.xml, when present) into the
        corresponding instance attributes.

        Parameters
        ----------
        file_name : str
            Either a package directory containing a level1Product xml file at
            the top level, or the level1Product xml file itself.

        Returns
        -------
        None
        """
        if not isinstance(file_name, str):
            raise SarpyIOError('file_name must be of string type.')
        if not os.path.exists(file_name):
            raise SarpyIOError('file {} does not exist'.format(file_name))
        found_file = None
        if os.path.isdir(file_name):
            # search the top level of the directory for a level1Product xml file
            for entry in os.listdir(file_name):
                prop_file = os.path.join(file_name, entry)
                if os.path.isfile(prop_file) and os.path.splitext(prop_file)[1] == '.xml' \
                        and _is_level1_product(prop_file):
                    found_file = prop_file
            if found_file is None:
                raise SarpyIOError(
                    'The provided argument is a directory, but we found no level1Product xml file at the top level.')
        elif os.path.splitext(file_name)[1] == '.xml':
            if _is_level1_product(file_name):
                found_file = file_name
            else:
                raise SarpyIOError(
                    'The provided argument is an xml file, which is not a level1Product xml file.')
        else:
            raise SarpyIOError(
                'The provided argument is an file, but does not have .xml extension.')
        # NOTE(review): this check is unreachable - `file_name` cannot be None
        # here; presumably `found_file` was intended (which also cannot be None
        # at this point, given the branches above)
        if file_name is None:
            raise ValueError('Unspecified error where main_file is not defined.')
        parent_directory = os.path.split(found_file)[0]
        # the package must contain the IMAGEDATA directory alongside the xml
        if not os.path.isdir(os.path.join(parent_directory, 'IMAGEDATA')):
            raise ValueError(
                'The input file was determined to be or contain a TerraSAR-X level 1 product file, '
                'but the IMAGEDATA directory is not in the expected relative location.')
        self._parent_directory = parent_directory
        self._main_file = found_file
        self._main_root = _parse_xml(self._main_file, without_ns=True)
        # the georeference file is optional - warn rather than fail when absent
        georef_file = os.path.join(parent_directory, 'ANNOTATION', 'GEOREF.xml')
        if not os.path.isfile(georef_file):
            logger.warning(
                'The input file was determined to be or contain a TerraSAR-X level 1 product file,\n\t'
                'but the ANNOTATION/GEOREF.xml is not in the expected relative location.')
        else:
            self._georef_file = georef_file
            self._georef_root = _parse_xml(self._georef_file, without_ns=True)
    @property
    def file_name(self) -> str:
        """
        str: the package directory location (the parent of the level1Product xml file)
        """
        return self._parent_directory
    @property
    def image_format(self) -> str:
        """
        str: The image file format enum value, one of 'COSAR' or 'GEOTIFF'.
        """
        return self._im_format
    def _find_main(self, tag: str) -> ElementTree.Element:
        """
        Pass through to ElementTree.Element.find(tag) on the level1Product root node.

        Parameters
        ----------
        tag : str

        Returns
        -------
        ElementTree.Element
        """
        return self._main_root.find(tag)
    def _findall_main(self, tag: str) -> List[ElementTree.Element]:
        """
        Pass through to ElementTree.Element.findall(tag) on the level1Product root node.

        Parameters
        ----------
        tag : str

        Returns
        -------
        List[ElementTree.Element]
        """
        return self._main_root.findall(tag)
    def _find_georef(self, tag: str) -> ElementTree.Element:
        """
        Pass through to ElementTree.Element.find(tag) on the GEOREF.xml root node,
        returning None when no georeference file was found.

        Parameters
        ----------
        tag : str

        Returns
        -------
        None|ElementTree.Element
        """
        return None if self._georef_root is None else self._georef_root.find(tag)
    def _findall_georef(self, tag: str) -> List[ElementTree.Element]:
        """
        Pass through to ElementTree.Element.findall(tag) on the GEOREF.xml root node,
        returning None when no georeference file was found.

        Parameters
        ----------
        tag : str

        Returns
        -------
        None|List[ElementTree.Element]
        """
        return None if self._georef_root is None else self._georef_root.findall(tag)
def _get_state_vector_data(self) -> Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray]:
"""
Gets the state vector data.
Returns
-------
times: numpy.ndarray
positions: numpy.ndarray
velocities: numpy.ndarray
"""
state_vecs = self._findall_main('./platform/orbit/stateVec')
tims = numpy.zeros((len(state_vecs),), dtype='datetime64[us]')
pos = numpy.zeros((len(state_vecs), 3), dtype='float64')
vel = numpy.zeros((len(state_vecs), 3), dtype='float64')
for i, entry in enumerate(state_vecs):
tims[i] = parse_timestring(entry.find('./timeUTC').text, precision='us')
pos[i, :] = [
float(entry.find('./posX').text), float(entry.find('./posY').text),
float(entry.find('./posZ').text)]
vel[i, :] = [
float(entry.find('./velX').text), float(entry.find('./velY').text),
float(entry.find('./velZ').text)]
return tims, pos, vel
@staticmethod
def _parse_pol_string(str_in: str) -> Tuple[str, str]:
    """
    Split a two-character TSX polarization string into its transmit and
    receive components.

    Parameters
    ----------
    str_in : str

    Returns
    -------
    (str, str)
        The (transmit, receive) polarization characters.
    """

    tx_part, rcv_part = str_in[0], str_in[1]
    return tx_part, rcv_part
def _get_sicd_tx_rcv_pol(self, str_in: str) -> str:
    """
    Convert a TSX polarization string into the SICD 'TX:RCV' form.

    Parameters
    ----------
    str_in : str

    Returns
    -------
    str
    """

    tx_part, rcv_part = self._parse_pol_string(str_in)
    return ':'.join((tx_part, rcv_part))
def _get_full_pol_list(self) -> Tuple[List[str], List[str], List[str]]:
    """
    Gets the full list of polarization states from the image data nodes.

    Returns
    -------
    original_pols : List[str]
        The raw TSX polarization string per image layer.
    tx_pols : List[str]
        The transmit polarization per image layer.
    tx_rcv_pols : List[str]
        The SICD style 'TX:RCV' polarization per image layer.
    """

    original_pols = []
    tx_pols = []
    tx_rcv_pols = []
    # TODO: this in particular is probably troubled for ScanSAR mode
    for image_node in self._findall_main('./productComponents/imageData'):
        pol_string = image_node.find('./polLayer').text
        tx_part, rcv_part = self._parse_pol_string(pol_string)
        original_pols.append(pol_string)
        tx_pols.append(tx_part)
        tx_rcv_pols.append('{}:{}'.format(tx_part, rcv_part))
    return original_pols, tx_pols, tx_rcv_pols
def _find_middle_grid_node(self) -> Optional[ElementTree.Element]:
    """
    Find and return the middle geolocationGrid point, if the georeference
    document exists. Otherwise, return None.

    Returns
    -------
    None|ElementTree.Element
    """

    if self._georef_root is None:
        return None
    # determine the (1-based) indices of the middle grid location
    az_count = int(self._find_georef('./geolocationGrid/numberOfGridPoints/azimuth').text)
    rg_count = int(self._find_georef('./geolocationGrid/numberOfGridPoints/range').text)
    middle_az = int(round(az_count/2.0)) + 1
    middle_rg = int(round(rg_count/2.0)) + 1
    candidates = self._findall_georef('./geolocationGrid/gridPoint[@iaz="{}"]'.format(middle_az))
    for candidate in candidates:
        if candidate.attrib['irg'] == '{}'.format(middle_rg):
            return candidate
    # no exact range match - fall back to the middle of the matching azimuth entries
    return candidates[int(len(candidates)/2)]
def _calculate_dop_polys(self,
                         layer_index: str,
                         azimuth_time_scp: float,
                         range_time_scp: float,
                         collect_start: numpy.datetime64,
                         doppler_rate_reference_node: ElementTree.Element) -> Tuple[numpy.ndarray, numpy.ndarray]:
    """
    Calculate the doppler centroid polynomials. This is apparently extracted
    from the paper "TerraSAR-X Deskew Description" by Michael Stewart dated
    December 11, 2008.

    Parameters
    ----------
    layer_index : str
        The layer index string, required for extracting correct metadata.
    azimuth_time_scp : float
        This is in seconds relative to the collection start.
    range_time_scp : float
        This is in seconds.
    collect_start : numpy.datetime64
        The collection start time.
    doppler_rate_reference_node : ElementTree.Element

    Returns
    -------
    (numpy.ndarray, numpy.ndarray)
        The doppler centroid and time COA two-dimensional polynomial
        coefficient arrays, fit with respect to SCP-centered image coordinates.
    """

    # parse the doppler centroid estimates nodes
    doppler_estimate_nodes = self._findall_main(
        './processing/doppler/dopplerCentroid[@layerIndex="{}"]/dopplerEstimate'.format(layer_index))
    # find the center node and extract some reference parameters
    doppler_count = len(doppler_estimate_nodes)
    doppler_estimate_center_node = doppler_estimate_nodes[int(doppler_count/2)]
    rg_ref_time = float(doppler_estimate_center_node.find('./combinedDoppler/referencePoint').text)

    # extract the doppler centroid information from all the nodes
    diff_times_raw = numpy.zeros((doppler_count, ), dtype='float64')  # offsets from reference time, in seconds
    doppler_range_min = numpy.zeros((doppler_count, ), dtype='float64')  # offsets in seconds
    doppler_range_max = numpy.zeros((doppler_count, ), dtype='float64')  # offsets in seconds
    doppler_poly_est = []
    for i, node in enumerate(doppler_estimate_nodes):
        diff_times_raw[i] = get_seconds(
            parse_timestring(node.find('./timeUTC').text, precision='us'),
            collect_start, precision='us')
        combined_node = node.find('./combinedDoppler')
        doppler_range_min[i] = float(combined_node.find('./validityRangeMin').text)
        doppler_range_max[i] = float(combined_node.find('./validityRangeMax').text)
        doppler_poly_est.append(
            numpy.array([float(entry.text) for entry in combined_node.findall('./coefficient')], dtype='float64'))
    # parse the doppler rate estimate from our provided reference node
    fm_dop = float(doppler_rate_reference_node.find('./dopplerRatePolynomial/coefficient[@exponent="0"]').text)

    ss_zd_s = float(self._find_main('./productInfo/imageDataInfo/imageRaster/columnSpacing').text)
    side_of_track = self._find_main('./productInfo/acquisitionInfo/lookDirection').text[0].upper()
    ss_zd_m = float(self._find_main('./productSpecific/complexImageInfo/projectedSpacingAzimuth').text)
    # left looking data is flipped in column order, so negate the spacing
    use_ss_zd_s = -ss_zd_s if side_of_track == 'L' else ss_zd_s

    # create a sampled doppler centroid grid
    range_samples = 49  # this is suggested in the paper
    time_coa = numpy.zeros((doppler_count, range_samples), dtype='float64')
    diff_t_range = numpy.zeros((doppler_count, range_samples), dtype='float64')
    dopp_centroid = numpy.zeros((doppler_count, range_samples), dtype='float64')
    for i, entry in enumerate(diff_times_raw):
        time_coa[i, :] = entry
        diff_t_range[i, :] = numpy.linspace(
            doppler_range_min[i], doppler_range_max[i], num=range_samples) - rg_ref_time
        dopp_centroid[i, :] = polynomial.polyval(diff_t_range[i, :], doppler_poly_est[i])
    diff_t_zd = time_coa - dopp_centroid/fm_dop
    # convert sample times to SCP-centered image coordinates (meters)
    coords_rg = 0.5*(diff_t_range + rg_ref_time - range_time_scp)*speed_of_light
    coords_az = ss_zd_m*(diff_t_zd - azimuth_time_scp)/use_ss_zd_s
    # perform our fitting
    poly_order = 3
    dop_centroid_poly, residuals, rank, sing_values = two_dim_poly_fit(
        coords_rg, coords_az, dopp_centroid,
        x_order=poly_order, y_order=poly_order, x_scale=1e-3, y_scale=1e-3, rcond=1e-35)
    logger.info(
        'The dop centroid polynomial fit details:\n\t'
        'root mean square residuals = {}\n\t'
        'rank = {}\n\t'
        'singular values = {}'.format(residuals, rank, sing_values))

    time_coa_poly, residuals, rank, sing_values = two_dim_poly_fit(
        coords_rg, coords_az, time_coa,
        x_order=poly_order, y_order=poly_order, x_scale=1e-3, y_scale=1e-3, rcond=1e-35)
    # NB: the original logged this second fit as "dop centroid" as well -
    # copy/paste error corrected so the two fits are distinguishable in logs
    logger.info(
        'The time coa polynomial fit details:\n\t'
        'root mean square residuals = {}\n\t'
        'rank = {}\n\t'
        'singular values = {}'.format(residuals, rank, sing_values))
    return dop_centroid_poly, time_coa_poly
def _get_basic_sicd_shell(self,
                          center_freq: float,
                          dop_bw: float,
                          ss_zd_s: float) -> SICDType:
    """
    Define the common sicd elements shared by every layer: CollectionInfo,
    ImageCreation, Grid, ImageFormation and RMA skeletons. Per-layer details
    are filled in later by `_complete_sicd`.

    Parameters
    ----------
    center_freq : float
        The center frequency.
    dop_bw : float
        The doppler bandwidth.
    ss_zd_s : float
        The (positive) zero doppler spacing in the time domain.

    Returns
    -------
    SICDType
    """

    def get_collection_info():
        # type: () -> CollectionInfoType
        # map the TSX imaging mode onto the SICD radar mode enumeration
        collector_name = self._find_main('./productInfo/missionInfo/mission').text
        core_name = self._find_main('./productInfo/sceneInfo/sceneID').text
        mode_id = self._find_main('./productInfo/acquisitionInfo/imagingMode').text
        if mode_id == 'ST':
            # TSX "staring" mode, corresponds to SICD spotlight
            mode_type = 'SPOTLIGHT'
        elif mode_id in ['SL', 'HS']:
            # confusing, but TSX mode "spolight" and "high-resolution spotlight",
            # which actually has a moving beam
            mode_type = 'DYNAMIC STRIPMAP'
        elif mode_id == 'SM':
            # TSX stripmap mode
            mode_type = 'STRIPMAP'
        elif mode_id == 'SC':
            # TSX scansar mode
            mode_type = 'STRIPMAP'
        else:
            raise ValueError('Got unexpected mode id {}'.format(mode_id))
        return CollectionInfoType(
            CollectorName=collector_name,
            CoreName=core_name,
            CollectType='MONOSTATIC',
            RadarMode=RadarModeType(ModeID=mode_id, ModeType=mode_type),
            Classification='UNCLASSIFIED')

    def get_image_creation():
        # type: () -> ImageCreationType
        from sarpy.__about__ import __version__
        create_time = self._find_main('./generalHeader/generationTime').text
        site = self._find_main('./productInfo/generationInfo/level1ProcessingFacility').text
        app_node = self._find_main('./generalHeader/generationSystem')
        application = 'Unknown' if app_node is None else \
            '{} {}'.format(app_node.text, app_node.attrib.get('version', 'version_unknown'))
        return ImageCreationType(Application=application,
                                 DateTime=parse_timestring(create_time, precision='us'),
                                 Site=site,
                                 Profile='sarpy {}'.format(__version__))

    def get_initial_grid():
        # type: () -> GridType
        proj_string = self._find_main('./setup/orderInfo/projection').text
        if proj_string == 'GROUNDRANGE':
            image_plane = 'GROUND'
        elif proj_string == 'SLANTRANGE':
            image_plane = 'SLANT'
        else:
            logger.warning('Got image projection {}'.format(proj_string))
            image_plane = 'OTHER'
        the_type = None
        if self._find_main('./productSpecific/complexImageInfo/imageCoordinateType').text == 'ZERODOPPLER':
            the_type = 'RGZERO'

        # NB: row here is the SICD range direction; the TSX raster rowSpacing
        # is a (two-way) time, hence the 0.5*c conversion to meters
        row_ss = 0.5*float(
            self._find_main('./productInfo/imageDataInfo/imageRaster/rowSpacing').text)*speed_of_light
        row_bw = 2*float(
            self._find_main('./processing/processingParameter/rangeLookBandwidth').text)/speed_of_light
        row_win_name = self._find_main(
            './processing/processingParameter/rangeWindowID').text.upper()
        row_wgt_type = WgtTypeType(WindowName=row_win_name)
        if row_win_name == 'HAMMING':
            row_wgt_type.Parameters = {
                'COEFFICIENT': self._find_main('./processing/processingParameter/rangeWindowCoefficient').text}
        row = DirParamType(
            SS=row_ss,
            Sgn=-1,
            ImpRespBW=row_bw,
            KCtr=2*center_freq/speed_of_light,
            DeltaK1=-0.5*row_bw,
            DeltaK2=0.5*row_bw,
            DeltaKCOAPoly=[[0, ], ],
            WgtType=row_wgt_type)

        col_ss = float(self._find_main('./productSpecific/complexImageInfo/projectedSpacingAzimuth').text)
        col_win_name = self._find_main('./processing/processingParameter/azimuthWindowID').text.upper()
        col_wgt_type = WgtTypeType(WindowName=col_win_name)
        if col_win_name == 'HAMMING':
            col_wgt_type.Parameters = {
                'COEFFICIENT': self._find_main('./processing/processingParameter/azimuthWindowCoefficient').text}
        col = DirParamType(
            SS=col_ss,
            Sgn=-1,
            ImpRespBW=dop_bw*ss_zd_s/col_ss,
            KCtr=0,
            WgtType=col_wgt_type)

        return GridType(
            ImagePlane=image_plane,
            Type=the_type,
            Row=row,
            Col=col)

    def get_initial_image_formation():
        # type: () -> ImageFormationType
        return ImageFormationType(
            RcvChanProc=RcvChanProcType(NumChanProc=1, PRFScaleFactor=1),  # ChanIndex set later
            ImageFormAlgo='RMA',
            TStartProc=0,
            TEndProc=0,  # corrected later
            ImageBeamComp='SV',
            AzAutofocus='NO',
            RgAutofocus='NO',
            STBeamComp='SV' if collection_info.RadarMode.ModeID in ['SL', 'HS'] else 'GLOBAL')
        # NB: SL and HS are the proper spotlight modes

    def get_initial_rma():
        # type: () -> RMAType
        return RMAType(RMAlgoType='OMEGA_K',
                       INCA=INCAType(FreqZero=center_freq))

    collection_info = get_collection_info()
    image_creation = get_image_creation()
    init_grid = get_initial_grid()
    init_image_formation = get_initial_image_formation()
    init_rma = get_initial_rma()
    return SICDType(
        CollectionInfo=collection_info,
        ImageCreation=image_creation,
        Grid=init_grid,
        ImageFormation=init_image_formation,
        RMA=init_rma)
def _populate_basic_image_data(self,
                               sicd: SICDType,
                               grid_node: Optional[ElementTree.Element]) -> None:
    """
    Populate the basic ImageData and GeoData. This assumes not ScanSAR mode.

    This modifies the provided sicd in place.

    Parameters
    ----------
    sicd : SICDType
    grid_node : None|ElementTree.Element
        The central geolocationGrid point, if it exists.

    Returns
    -------
    None
    """

    # NB: the role of rows and columns is switched in TSX/SICD convention
    rows = int(self._find_main('./productInfo/imageDataInfo/imageRaster/numberOfColumns').text)
    cols = int(self._find_main('./productInfo/imageDataInfo/imageRaster/numberOfRows').text)
    if grid_node is not None:
        # use the central geolocation grid point as the scene center pixel
        # (again note the TSX row/col swap relative to SICD)
        scp_row = int(grid_node.find('./col').text)
        scp_col = int(grid_node.find('./row').text)
        scp_llh = [
            float(grid_node.find('./lat').text),
            float(grid_node.find('./lon').text),
            float(grid_node.find('./height').text)]
    else:
        # fall back to the scene center coordinate node - the TSX reference
        # indices are 1-based, hence the -1 offsets
        scp_row = int(self._find_main('./productInfo/sceneInfo/sceneCenterCoord/refColumn').text) - 1
        scp_col = int(self._find_main('./productInfo/sceneInfo/sceneCenterCoord/refRow').text) - 1
        scp_llh = [
            float(self._find_main('./productInfo/sceneInfo/sceneCenterCoord/lat').text),
            float(self._find_main('./productInfo/sceneInfo/sceneCenterCoord/lon').text),
            float(self._find_main('./productInfo/sceneInfo/sceneAverageHeight').text)]

    if self._find_main('./productInfo/acquisitionInfo/lookDirection').text[0].upper() == 'L':
        # left looking data gets flipped in the column direction,
        # so mirror the scene center column index
        scp_col = cols - scp_col - 1

    sicd.ImageData = ImageDataType(
        NumRows=rows, NumCols=cols, FirstRow=0, FirstCol=0, FullImage=(rows, cols),
        PixelType='RE16I_IM16I', SCPPixel=(scp_row, scp_col))
    sicd.GeoData = GeoDataType(SCP=SCPType(LLH=scp_llh))
@staticmethod
def _populate_initial_radar_collection(
        sicd: SICDType,
        tx_pols: List[str],
        tx_rcv_pols: List[str]) -> None:
    """
    Populate the initial RadarCollection information, modifying the provided
    sicd in place.

    Parameters
    ----------
    sicd : SICDType
    tx_pols : List[str]
        The transmit polarization per layer.
    tx_rcv_pols : List[str]
        The 'TX:RCV' polarization per layer.

    Returns
    -------
    None
    """

    distinct_tx_count = len(set(tx_pols))
    if distinct_tx_count == 1:
        # a single transmit polarization, no sequence required
        the_tx_pol = tx_pols[0]
        tx_sequence = None
    else:
        # multiple transmit polarizations, describe them as a sequence
        the_tx_pol = 'SEQUENCE'
        tx_sequence = [TxStepType(TxPolarization=entry) for entry in tx_pols]
    sicd.RadarCollection = RadarCollectionType(
        TxPolarization=the_tx_pol,
        TxSequence=tx_sequence,
        RcvChannels=[ChanParametersType(TxRcvPolarization=entry) for entry in tx_rcv_pols])
def _complete_sicd(self,
                   sicd: SICDType,
                   orig_pol: str,
                   layer_index: str,
                   pol_index: int,
                   ss_zd_s: float,
                   side_of_track: str,
                   center_freq: float,
                   arp_times: numpy.ndarray,
                   arp_pos: numpy.ndarray,
                   arp_vel: numpy.ndarray,
                   middle_grid: Optional[ElementTree.Element],
                   doppler_rate_reference_node: ElementTree.Element) -> SICDType:
    """
    Complete the remainder of the sicd information and populate as collection,
    if appropriate. **This assumes that this is not ScanSAR mode.**

    Parameters
    ----------
    sicd : SICDType
    orig_pol : str
        The TSX polarization string.
    layer_index : str
        The layer index entry.
    pol_index : int
        The polarization index (1 based) here.
    ss_zd_s : float
        The zero doppler spacing in the time domain.
    side_of_track : str
        One of ['L', 'R'].
    center_freq : float
        The center frequency.
    arp_times : numpy.ndarray
        The array of reference times for the state information.
    arp_pos : numpy.ndarray
    arp_vel : numpy.ndarray
    middle_grid : None|ElementTree.Element
        The central geolocationGrid point, if it exists.
    doppler_rate_reference_node : ElementTree.Element

    Returns
    -------
    SICDType
    """

    def get_settings_node():
        # type: () -> Union[None, ElementTree.Element]
        # find the instrument settings node matching this polarization layer
        for entry in self._findall_main('./instrument/settings'):
            if entry.find('./polLayer').text == orig_pol:
                return entry
        return None

    def set_timeline():
        # populate Timeline from the PRF, assuming a constant rate
        prf = float(settings_node.find('./settingRecord/PRF').text)
        ipp_poly = Poly1DType(Coefs=[0, prf])
        out_sicd.Timeline = TimelineType(
            CollectStart=collect_start,
            CollectDuration=collect_duration,
            IPP=[IPPSetType(TStart=0,
                            TEnd=collect_duration,
                            IPPPoly=ipp_poly,
                            IPPStart=0,
                            IPPEnd=int(round(ipp_poly(collect_duration))) - 1)])

    def set_position():
        # fit the ARP position polynomial from the state vectors
        times_s = numpy.array(
            [get_seconds(entry, collect_start, precision='us') for entry in arp_times], dtype='float64')
        P_x, P_y, P_z = fit_position_xvalidation(times_s, arp_pos, arp_vel, max_degree=8)
        out_sicd.Position = PositionType(ARPPoly=XYZPolyType(X=P_x, Y=P_y, Z=P_z))

    def complete_radar_collection():
        tx_pulse_length = float(
            self._find_main('./processing'
                            '/processingParameter'
                            '/rangeCompression'
                            '/chirps'
                            '/referenceChirp'
                            '/pulseLength').text)*32/3.29658384e8
        # NB: the matlab version indicates that this conversion comes via personal
        # communication with Juergen Janoth, Head of Application Development, Infoterra
        # The times of this communication is not indicated
        sample_rate = float(settings_node.find('./RSF').text)
        rcv_window_length = float(settings_node.find('./settingRecord/echowindowLength').text)/sample_rate
        out_sicd.RadarCollection.TxFrequency = (tx_freq_start, tx_freq_end)
        out_sicd.RadarCollection.Waveform = [
            WaveformParametersType(TxPulseLength=tx_pulse_length,
                                   TxRFBandwidth=band_width,
                                   TxFreqStart=tx_freq_start,
                                   TxFMRate=band_width/tx_pulse_length,
                                   ADCSampleRate=sample_rate,
                                   RcvWindowLength=rcv_window_length,
                                   RcvFMRate=0)]

    def complete_image_formation():
        out_sicd.ImageFormation.RcvChanProc.ChanIndices = [pol_index, ]
        out_sicd.ImageFormation.TEndProc = collect_duration
        out_sicd.ImageFormation.TxFrequencyProc = (tx_freq_start, tx_freq_end)
        out_sicd.ImageFormation.TxRcvPolarizationProc = self._get_sicd_tx_rcv_pol(orig_pol)

    def complete_rma():
        # left looking data is flipped in column order, so negate the spacing
        use_ss_zd_s = -ss_zd_s if side_of_track == 'L' else ss_zd_s
        time_ca_linear = use_ss_zd_s/out_sicd.Grid.Col.SS
        if self._georef_root is not None:
            # use the georeference grid (with propagation corrections) when present
            if middle_grid is None:
                raise ValueError('middle_grid should have been provided here')

            ref_time = parse_timestring(
                self._find_georef('./geolocationGrid'
                                  '/gridReferenceTime'
                                  '/tReferenceTimeUTC').text, precision='us')
            az_offset = get_seconds(ref_time, collect_start, precision='us')
            time_ca_scp = float(middle_grid.find('./t').text)
            # get the sum of all provided azimuth shifts?
            # NB: this is obviously assuming that all entries are constant shifts...should we check?
            azimuths_shifts = [
                float(entry.find('./coefficient').text) for entry in
                self._findall_georef('./signalPropagationEffects/azimuthShift')]
            azimuth_shift = reduce(lambda x, y: x+y, azimuths_shifts)
            out_sicd.RMA.INCA.TimeCAPoly = Poly1DType(
                Coefs=[time_ca_scp + az_offset - azimuth_shift, time_ca_linear])
            azimuth_time_scp = get_seconds(ref_time, collect_start, precision='us') + time_ca_scp

            range_time_scp = float(
                self._find_georef('./geolocationGrid/gridReferenceTime/tauReferenceTime').text) + \
                float(middle_grid.find('./tau').text)
            # get the sum of all provided range delays?
            # NB: this is obviously assuming that all entries are constant shifts...should we check?
            range_delays = [
                float(entry.find('./coefficient').text) for entry in
                self._findall_georef('./signalPropagationEffects/rangeDelay')]
            range_delay = reduce(lambda x, y: x+y, range_delays)
            out_sicd.RMA.INCA.R_CA_SCP = 0.5*(range_time_scp - range_delay)*speed_of_light
        else:
            # fall back to the scene center coordinate information
            azimuth_time_scp = get_seconds(
                parse_timestring(
                    self._find_main('./productInfo/sceneInfo/sceneCenterCoord/azimuthTimeUTC').text,
                    precision='us'),
                collect_start, precision='us')
            range_time_scp = float(self._find_main('./productInfo/sceneInfo/sceneCenterCoord/rangeTime').text)
            out_sicd.RMA.INCA.TimeCAPoly = Poly1DType(Coefs=[azimuth_time_scp, time_ca_linear])
            out_sicd.RMA.INCA.R_CA_SCP = 0.5*range_time_scp*speed_of_light

        # populate DopCentroidPoly and TimeCOAPoly
        if out_sicd.CollectionInfo.RadarMode.ModeID == 'ST':
            # proper spotlight mode
            coa_time = collect_duration / 2
            look = 1 if side_of_track == 'L' else -1
            alpha = 2.0/speed_of_light
            pos = out_sicd.Position.ARPPoly(coa_time)
            vel = out_sicd.Position.ARPPoly.derivative_eval(coa_time)
            speed = numpy.linalg.norm(vel)
            vel_hat = vel / speed
            los = out_sicd.GeoData.SCP.ECF.get_array() - pos
            out_sicd.Grid.TimeCOAPoly = Poly2DType(Coefs=[[coa_time, ], ])
            dop_poly = numpy.zeros((2, 2), dtype=numpy.float64)
            dop_poly[0, 1] = -look*center_freq*alpha*speed/out_sicd.RMA.INCA.R_CA_SCP
            dop_poly[1, 1] = look*center_freq*alpha*speed/(out_sicd.RMA.INCA.R_CA_SCP**2)
            dop_poly[:, 0] = dop_poly[:, 0] - look*(dop_poly[:, 1]*numpy.dot(los, vel_hat))
            out_sicd.Grid.Col.DeltaKCOAPoly = Poly2DType(Coefs=dop_poly*use_ss_zd_s/out_sicd.Grid.Col.SS)
        else:
            # fit the doppler centroid and time coa polynomials from metadata
            dop_centroid_poly, time_coa_poly = self._calculate_dop_polys(
                layer_index, azimuth_time_scp, range_time_scp, collect_start, doppler_rate_reference_node)
            out_sicd.RMA.INCA.DopCentroidPoly = Poly2DType(Coefs=dop_centroid_poly)
            out_sicd.RMA.INCA.DopCentroidCOA = True
            out_sicd.Grid.TimeCOAPoly = Poly2DType(Coefs=time_coa_poly)
            out_sicd.Grid.Col.DeltaKCOAPoly = Poly2DType(Coefs=dop_centroid_poly*use_ss_zd_s/out_sicd.Grid.Col.SS)

        # calculate DRateSFPoly
        vm_vel_sq = numpy.sum(out_sicd.Position.ARPPoly.derivative_eval(azimuth_time_scp)**2)
        r_ca = numpy.array([out_sicd.RMA.INCA.R_CA_SCP, 1], dtype='float64')
        dop_rate_poly_coefs = [
            float(entry.text) for entry in doppler_rate_reference_node.findall(
                './dopplerRatePolynomial/coefficient')]
        # Shift 1D polynomial to account for SCP
        dop_rate_ref_time = float(doppler_rate_reference_node.find('./dopplerRatePolynomial/referencePoint').text)
        dop_rate_poly_rg = Poly1DType(Coefs=dop_rate_poly_coefs).shift(dop_rate_ref_time - range_time_scp,
                                                                      alpha=2/speed_of_light,
                                                                      return_poly=False)
        # NB: assumes a sign of -1
        drate_poly = -polynomial.polymul(dop_rate_poly_rg, r_ca)*speed_of_light/(2*center_freq*vm_vel_sq)
        out_sicd.RMA.INCA.DRateSFPoly = Poly2DType(Coefs=numpy.reshape(drate_poly, (-1, 1)))

    def define_radiometric():
        beta_factor = float(self._find_main('./calibration'
                                            '/calibrationConstant[@layerIndex="{}"]'.format(layer_index) +
                                            '/calFactor').text)
        range_time_scp = float(self._find_main('./productInfo/sceneInfo/sceneCenterCoord/rangeTime').text)
        # now, calculate the radiometric noise polynomial
        # find the noise node
        noise_node = self._find_main('./noise[@layerIndex="{}"]'.format(layer_index))
        # extract the middle image noise node
        noise_data_nodes = noise_node.findall('./imageNoise')
        noise_data_node = noise_data_nodes[int(len(noise_data_nodes)/2)]
        range_min = float(noise_data_node.find('./noiseEstimate/validityRangeMin').text)
        range_max = float(noise_data_node.find('./noiseEstimate/validityRangeMax').text)
        ref_point = float(noise_data_node.find('./noiseEstimate/referencePoint').text)
        poly_coeffs = numpy.array(
            [float(coeff.text) for coeff in noise_data_node.findall(
                './noiseEstimate/coefficient')], dtype='float64')
        # create a sample grid in range time and evaluate the noise
        range_time = numpy.linspace(range_min, range_max, 100) - ref_point
        # this should be an absolute squared magnitude value
        raw_noise_values = polynomial.polyval(range_time, poly_coeffs)
        # we convert to db
        noise_values = 10*numpy.log10(raw_noise_values)
        coords_range_m = 0.5*(range_time + ref_point - range_time_scp)*speed_of_light
        # fit the polynomial
        scale = 1e-3
        deg = poly_coeffs.size-1
        coeffs = polynomial.polyfit(coords_range_m*scale, noise_values, deg=deg, rcond=1e-30, full=False)
        coeffs *= numpy.power(scale, numpy.arange(deg+1))
        coeffs = numpy.reshape(coeffs, (-1, 1))
        out_sicd.Radiometric = RadiometricType(
            BetaZeroSFPoly=Poly2DType(Coefs=[[beta_factor, ], ]),
            NoiseLevel=NoiseLevelType_(
                NoiseLevelType='ABSOLUTE', NoisePoly=Poly2DType(Coefs=coeffs)))

    def revise_scp():
        # re-project the SCP pixel to the ground for a self-consistent SCP
        scp_ecf = out_sicd.project_image_to_ground(out_sicd.ImageData.SCPPixel.get_array())
        out_sicd.update_scp(scp_ecf, coord_system='ECF')

    out_sicd = sicd.copy()
    # get some common use parameters
    settings_node = get_settings_node()
    if settings_node is None:
        raise ValueError('Cannot find the settings node for polarization {}'.format(orig_pol))
    collect_start = parse_timestring(
        settings_node.find('./settingRecord'
                           '/dataSegment'
                           '/startTimeUTC').text, precision='us')
    collect_end = parse_timestring(
        settings_node.find('./settingRecord'
                           '/dataSegment'
                           '/stopTimeUTC').text, precision='us')
    collect_duration = get_seconds(collect_end, collect_start, precision='us')
    band_width = float(settings_node.find('./rxBandwidth').text)
    tx_freq_start = center_freq - 0.5 * band_width
    tx_freq_end = center_freq + 0.5 * band_width

    # populate the missing sicd elements
    set_timeline()
    set_position()
    complete_radar_collection()
    complete_image_formation()
    complete_rma()
    define_radiometric()
    out_sicd.derive()
    revise_scp()
    out_sicd.populate_rniirs(override=False)
    return out_sicd
def get_sicd_collection(self) -> Tuple[List[str], List[SICDType]]:
    """
    Gets the sicd metadata collection, one entry per image layer.

    Returns
    -------
    files : List[str]
        The image file path per layer.
    sicds : List[SICDType]
        The sicd structure per layer.

    Raises
    ------
    ValueError
        If the collect is ScanSAR mode (currently unsupported), or an
        expected image file is missing.
    """

    def get_file_name(layer_index):
        # resolve the image file location for the given layer
        file_node = self._find_main(
            './productComponents/imageData[@layerIndex="{}"]/file/location'.format(layer_index))
        path_stem = file_node.find('./path').text
        file_name = file_node.find('./filename').text
        full_file = os.path.join(self._parent_directory, path_stem, file_name)
        if not os.path.isfile(full_file):
            raise ValueError('Expected image file at\n\t{}\n\tbut this path does not exist'.format(full_file))
        return full_file

    the_files = []
    the_sicds = []
    # get some basic common use parameters
    center_freq = float(self._find_main('./instrument/radarParameters/centerFrequency').text)
    dop_bw = float(self._find_main('./processing/processingParameter/azimuthLookBandwidth').text)
    ss_zd_s = float(self._find_main('./productInfo/imageDataInfo/imageRaster/columnSpacing').text)
    side_of_track = self._find_main('./productInfo/acquisitionInfo/lookDirection').text[0].upper()
    # define the basic SICD shell
    basic_sicd = self._get_basic_sicd_shell(center_freq, dop_bw, ss_zd_s)
    # NB: the original repeated this check after fetching the state vectors,
    # making the second occurrence unreachable - consolidated to one check
    if basic_sicd.CollectionInfo.RadarMode.ModeID == 'SC':
        raise ValueError('ScanSAR mode is currently unsupported')
    # fetch the state vector data
    times, positions, velocities = self._get_state_vector_data()
    # fetch the polarization list(s) (maybe ScanSAR modification required here)
    original_pols, tx_pols, tx_rcv_pols = self._get_full_pol_list()
    middle_grid = self._find_middle_grid_node()
    self._populate_basic_image_data(basic_sicd, middle_grid)
    self._populate_initial_radar_collection(basic_sicd, tx_pols, tx_rcv_pols)
    # get the doppler rate reference node
    doppler_rate_nodes = self._findall_main('./processing/geometry/dopplerRate')
    doppler_rate_center_node = doppler_rate_nodes[int(len(doppler_rate_nodes) / 2)]
    for i, orig_pol in enumerate(original_pols):
        the_layer = '{}'.format(i+1)
        pol_index = i+1
        the_sicds.append(self._complete_sicd(
            basic_sicd, orig_pol, the_layer, pol_index, ss_zd_s, side_of_track,
            center_freq, times, positions, velocities, middle_grid, doppler_rate_center_node))
        the_files.append(get_file_name(the_layer))
    return the_files, the_sicds
class COSARDetails(object):
    """
    Parser for the burst/header structure of a COSAR format file, which holds
    the TerraSAR-X complex sample data.
    """

    __slots__ = (
        '_file_name', '_file_size', '_header_offsets', '_data_offsets',
        '_burst_index', '_burst_size', '_data_sizes', '_version')

    def __init__(self, file_name: str):
        """
        Parameters
        ----------
        file_name : str
            Path to the COSAR file.

        Raises
        ------
        SarpyIOError
            If `file_name` is not an existing file.
        """

        self._header_offsets = []  # byte offset of each burst header
        self._data_offsets = []  # byte offset of each burst's sample data
        self._burst_index = []  # the burst index declared in each header
        self._burst_size = []  # total byte size of each burst
        self._data_sizes = []  # (range_samples, azimuth_samples) per burst
        self._version = None  # the COSAR version, read from the header(s)

        if not os.path.isfile(file_name):
            # NB: corrected the "is not not a file" typo of the original message
            raise SarpyIOError('path {} is not a file'.format(file_name))
        self._file_name = file_name
        self._file_size = os.path.getsize(file_name)
        self._parse_details()

    @property
    def burst_count(self) -> int:
        """
        int: The discovered burst count
        """

        return len(self._data_offsets)

    @property
    def version(self) -> Optional[int]:
        """
        int: The COSAR version
        """

        return self._version

    @property
    def pixel_type(self) -> Optional[str]:
        """
        str: The SICD pixel type implied by the COSAR version, or None if the
        version has not been determined.
        """

        if self._version is None:
            return None
        elif self._version == 1:
            # version 1 stores 16-bit integer I/Q samples
            return 'RE16I_IM16I'
        elif self._version == 2:
            # NOTE(review): version 2 raw data is read as big-endian half
            # precision floats ('>f2') in construct_data_segment, and presented
            # as 32-bit float complex after conversion
            return 'RE32F_IM32F'
        else:
            raise ValueError('Got unexpected version {}'.format(self._version))

    def _process_burst_header(
            self,
            fi: BinaryIO,
            the_offset: int):
        """
        Read and record the 48-byte burst header at the given file offset.

        Parameters
        ----------
        fi : BinaryIO
            The open file object.
        the_offset : int
            The byte offset of the burst header.

        Returns
        -------
        None
        """

        # NB: the original used `>=` here, which wrongly rejected the boundary
        # case the_offset == file_size - 48 (a full 48-byte read still fits)
        if the_offset > self._file_size - 48:
            raise ValueError(
                'The seek location + basic header size is greater than the file size.')

        # seek to our desired location
        fi.seek(the_offset, os.SEEK_SET)
        # read the desired bytes
        header_bytes = fi.read(48)

        # interpret the data - all fields are big-endian
        burst_in_bytes = struct.unpack('>I', header_bytes[:4])[0]
        rsri = struct.unpack('>I', header_bytes[4:8])[0]
        range_samples = struct.unpack('>I', header_bytes[8:12])[0]
        azimuth_samples = struct.unpack('>I', header_bytes[12:16])[0]
        burst_index = struct.unpack('>I', header_bytes[16:20])[0]
        # these two are only useful in the first record
        rtnb = struct.unpack('>I', header_bytes[20:24])[0]
        tnl = struct.unpack('>I', header_bytes[24:28])[0]
        # basic check bytes
        csar = struct.unpack('>4s', header_bytes[28:32])[0]
        version = struct.unpack('>I', header_bytes[32:36])[0]
        oversample = struct.unpack('>I', header_bytes[36:40])[0]
        scaling_rate = struct.unpack('>d', header_bytes[40:])[0]
        if csar.upper() != b'CSAR':
            raise ValueError('unexpected csar value {}'.format(csar))

        logger.debug(
            'Parsed COSAR burst:'
            '\n\tburst_in_bytes = {}'
            '\n\trsri = {}'
            '\n\trange samples = {}'
            '\n\tazimuth samples = {}'
            '\n\trtnb = {}'
            '\n\ttnl = {}'
            '\n\tcsar = {}'
            '\n\tversion = {}'
            '\n\toversample = {}'
            '\n\tscaling rate = {}'.format(
                burst_in_bytes, rsri, range_samples, azimuth_samples, rtnb, tnl,
                csar, version, oversample, scaling_rate))

        # now, populate our appropriate details
        # presumably each stored line carries 2 extra leading samples in range
        # and each burst carries 4 extra annotation lines in azimuth, at 4
        # bytes per complex sample - TODO confirm against the COSAR format spec
        data_offset = the_offset + (int(range_samples)+2)*4*4
        burst_size = 4*(int(range_samples)+2)*(int(azimuth_samples) + 4)
        self._header_offsets.append(the_offset)
        self._data_offsets.append(data_offset)
        self._burst_index.append(int(burst_index))
        self._burst_size.append(burst_size)
        self._data_sizes.append((range_samples, azimuth_samples))
        self._version = version

        if the_offset + burst_size > self._file_size:
            raise ValueError(
                'The file size for {} is given as {} bytes, but '
                'the burst at index {} has size {} and offset {}'.format(
                    self._file_name, self._file_size, self._burst_index[-1],
                    self._burst_size[-1], the_offset))
        if self._version not in [1, 2]:
            raise ValueError(
                'Got unexpected version value {}'.format(self._version))

    def _parse_details(self) -> None:
        # walk the file, processing burst headers until the end of the file
        with open(self._file_name, 'rb') as fi:
            # process the first burst header
            self._process_burst_header(fi, 0)
            cont = True
            while cont:
                next_burst_location = self._header_offsets[-1] + self._burst_size[-1]
                if next_burst_location < self._file_size:
                    self._process_burst_header(fi, next_burst_location)
                else:
                    cont = False

    def construct_data_segment(
            self,
            index: int,
            reverse_axes: Optional[Tuple[int, ...]],
            transpose_axes: Optional[Tuple[int, ...]],
            expected_size: Tuple[int, ...]) -> DataSegment:
        """
        Construct a data segment for the given burst index.

        Parameters
        ----------
        index : int
            The burst index, in the range [0, burst_count).
        reverse_axes : None|int|Sequence[int, ...]
            Axes of the raw data to reverse.
        transpose_axes : None|Tuple[int, ...]
            Transpose specification for the raw data.
        expected_size : Tuple[int, ...]
            The expected (columns, rows) size of the burst.

        Returns
        -------
        DataSegment

        Raises
        ------
        KeyError
            If `index` is out of bounds.
        ValueError
            If `expected_size` disagrees with the burst header, or the
            version is unhandled.
        """

        index = int(index)
        if not (0 <= index < self.burst_count):
            raise KeyError('Provided index {} must be in the range [0, {})'.format(index, self.burst_count))
        # get data_size
        offset = self._data_offsets[index]
        range_samples, azimuth_samples = self._data_sizes[index]
        exp_cols, exp_rows = expected_size
        if not (exp_rows == range_samples and exp_cols == azimuth_samples):
            raise ValueError(
                'Expected raw burst size is {}, while actual raw burst size '
                'is {}'.format(expected_size, (range_samples, azimuth_samples)))

        if self._version == 1:
            raw_dtype = numpy.dtype('>i2')
        elif self._version == 2:
            # big-endian half precision floats
            raw_dtype = numpy.dtype('>f2')
        else:
            raise ValueError('Got unhandled version {}'.format(self._version))

        format_function = ComplexFormatFunction(raw_dtype, 'IQ')
        parent_segment = NumpyMemmapSegment(
            self._file_name, offset, raw_dtype, (azimuth_samples, range_samples + 2, 2),
            'complex64', (range_samples + 2, azimuth_samples),
            reverse_axes=reverse_axes, transpose_axes=transpose_axes,
            format_function=format_function, mode='r', close_file=True)
        # trim the 2 leading annotation samples in the range direction
        subset_def = (slice(2, exp_rows+2), slice(0, exp_cols))
        relevant_segment = SubsetSegment(parent_segment, subset_def, 'formatted', close_parent=True)
        return relevant_segment
#########
# the reader implementation
class TSXReader(SICDTypeReader):
    """
    The TerraSAR-X SLC reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_tsx_details', )

    def __init__(self, tsx_details):
        """
        Parameters
        ----------
        tsx_details : str|TSXDetails
            The path to the TerraSAR-X package directory or main xml file,
            or a TSXDetails instance.

        Raises
        ------
        TypeError
            If `tsx_details` is neither a str nor a TSXDetails instance.
        ValueError
            If the image format is not COSAR, or a COSAR file does not
            contain exactly one burst.
        """

        if isinstance(tsx_details, str):
            tsx_details = TSXDetails(tsx_details)
        if not isinstance(tsx_details, TSXDetails):
            raise TypeError(
                'tsx_details is expected to be the path to the TerraSAR-X package '
                'directory or main xml file, of TSXDetails instance. Got type {}'.format(type(tsx_details)))
        self._tsx_details = tsx_details

        data_segments = []
        image_format = tsx_details.image_format
        the_files, the_sicds = tsx_details.get_sicd_collection()
        # construct one data segment per image layer
        for the_file, the_sicd in zip(the_files, the_sicds):
            rows = the_sicd.ImageData.NumRows
            cols = the_sicd.ImageData.NumCols
            # left looking data requires a flip along the first raw axis
            reverse_axes = (0, ) if the_sicd.SCPCOA.SideOfTrack == 'L' else None
            # the raw data is transposed relative to the SICD row/col order
            transpose_axes = (1, 0, 2)
            if image_format != 'COSAR':
                raise ValueError(
                    'Expected complex data for TerraSAR-X to be in COSAR format. '
                    'Got unhandled format {}'.format(image_format))
            cosar_details = COSARDetails(the_file)
            if cosar_details.burst_count != 1:
                raise ValueError(
                    'Expected one burst in the COSAR file {},\n\t'
                    'but got {} bursts'.format(the_file, cosar_details.burst_count))
            # the pixel type depends on the COSAR version
            the_sicd.ImageData.PixelType = cosar_details.pixel_type
            data_seg = cosar_details.construct_data_segment(0, reverse_axes, transpose_axes, (cols, rows))
            data_segments.append(data_seg)

        SICDTypeReader.__init__(self, data_segments, the_sicds, close_segments=True)
        self._check_sizes()

    @property
    def file_name(self) -> str:
        # the package directory location from the underlying TSXDetails
        return self._tsx_details.file_name
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: str) -> Optional[TSXReader]:
    """
    Tests whether a given file_name corresponds to a TerraSAR-X file SSC package.
    Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    TSXReader|None
        `TSXReader` instance if TerraSAR-X file file, `None` otherwise
    """

    # file-like objects are not supported by this reader
    if is_file_like(file_name):
        return None

    try:
        details = TSXDetails(file_name)
        logger.info('Path {} is determined to be a TerraSAR-X file package.'.format(details.file_name))
        return TSXReader(details)
    except SarpyIOError:
        # not a TerraSAR-X package
        return None
| 52,244 | 41.406656 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/iceye.py | """
Functionality for reading ICEYE complex data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import os
from typing import Union, Tuple, Sequence, Optional
import numpy
from numpy.polynomial import polynomial
from scipy.constants import speed_of_light
from sarpy.io.complex.nisar import _stringify
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import Poly2DType, Poly1DType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, \
RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
ChanParametersType, WaveformParametersType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Position import PositionType, XYZPolyType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, \
RcvChanProcType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType
from sarpy.io.complex.utils import fit_position_xvalidation, two_dim_poly_fit
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.data_segment import HDF5DatasetSegment, BandAggregateSegment
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.utils import get_seconds, parse_timestring, is_file_like, is_hdf5, h5py
logger = logging.getLogger(__name__)
def _parse_time(input_str: Union[bytes, str]) -> numpy.datetime64:
    """
    Parse the timestring, to microsecond precision.

    Parameters
    ----------
    input_str : bytes|str

    Returns
    -------
    numpy.datetime64
    """

    # normalize bytes input to str before parsing
    cleaned = _stringify(input_str)
    return parse_timestring(cleaned, precision='us')
class ICEYEDetails(object):
    """
    Parses and converts the ICEYE metadata.
    """

    # path to the underlying ICEYE SLC hdf5 file
    __slots__ = ('_file_name', )

    def __init__(self, file_name: str):
        """
        Parameters
        ----------
        file_name : str
            Path to the ICEYE SLC hdf5 file.

        Raises
        ------
        ImportError
            If the optional h5py dependency is missing.
        SarpyIOError
            If the path is not a file, or the minimal expected datasets are absent.
        """
        if h5py is None:
            raise ImportError("Can't read ICEYE files, because the h5py dependency is missing.")
        if not os.path.isfile(file_name):
            raise SarpyIOError('Path {} is not a file'.format(file_name))
        # sanity check that the minimal expected ICEYE datasets are present
        with h5py.File(file_name, 'r') as hf:
            if 's_q' not in hf or 's_i' not in hf:
                raise SarpyIOError(
                    'The hdf file does not have the real (s_q) or imaginary dataset (s_i).')
            if 'satellite_name' not in hf:
                raise SarpyIOError('The hdf file does not have the satellite_name dataset.')
            if 'product_name' not in hf:
                raise SarpyIOError('The hdf file does not have the product_name dataset.')
        self._file_name = file_name

    @property
    def file_name(self) -> str:
        """
        str: the file name
        """
        return self._file_name

    def get_sicd(self) -> (SICDType, Optional[Tuple[int, ...]], Tuple[int, ...]):
        """
        Gets the SICD structure and associated details for constructing the data segment.

        Returns
        -------
        sicd : SICDType
        reverse_axes : None|Tuple[int, ...]
        transpose_axes : Tuple[int, ...]
        """

        # NB: the nested helpers below close over `hf` and the shared variables
        # assigned inside the `with` block at the end of this method; they are
        # only called after those names have been bound.

        def get_collection_info() -> CollectionInfoType:
            # map special product types to radar mode; otherwise use acquisition_mode
            mode_id_type_map = {
                "SpotlightExtendedArea": "DYNAMIC STRIPMAP", }
            mode_id = _stringify(hf['product_type'][()])
            mode_type = mode_id_type_map.get(mode_id, _stringify(hf['acquisition_mode'][()]).upper())
            return CollectionInfoType(
                CollectorName=_stringify(hf['satellite_name'][()]),
                CoreName=_stringify(hf['product_name'][()]),
                CollectType='MONOSTATIC',
                Classification='UNCLASSIFIED',
                RadarMode=RadarModeType(
                    ModeType=mode_type,
                    ModeID=mode_id))

        def get_image_creation() -> ImageCreationType:
            from sarpy.__about__ import __version__
            return ImageCreationType(
                Application='ICEYE_P_{}'.format(hf['processor_version'][()]),
                DateTime=_parse_time(hf['processing_time'][()]),
                Site='Unknown',
                Profile='sarpy {}'.format(__version__))

        def get_image_data() -> ImageDataType:
            # translate the stated sample precision to the SICD pixel type
            samp_prec = _stringify(hf['sample_precision'][()])
            if samp_prec.upper() == 'INT16':
                pixel_type = 'RE16I_IM16I'
            elif samp_prec.upper() == 'FLOAT32':
                pixel_type = 'RE32F_IM32F'
            else:
                raise ValueError('Got unhandled sample precision {}'.format(samp_prec))
            num_rows = int(number_of_range_samples)
            num_cols = int(number_of_azimuth_samples)
            # convert the 1-based center coordinates to 0-based pixel indices
            scp_row = int(coord_center[0]) - 1
            scp_col = int(coord_center[1]) - 1
            # NOTE(review): comparing scp_col (azimuth index) against num_rows
            # (range sample count) looks like it may be intended as num_cols - confirm
            if 0 < scp_col < num_rows-1:
                if look_side == 'left':
                    scp_col = num_cols - scp_col - 1
            else:
                # early ICEYE processing bug led to nonsensical SCP
                scp_col = int(num_cols/2.0)
            return ImageDataType(
                PixelType=pixel_type,
                NumRows=num_rows,
                NumCols=num_cols,
                FirstRow=0,
                FirstCol=0,
                FullImage=(num_rows, num_cols),
                SCPPixel=(scp_row, scp_col))

        def get_geo_data() -> GeoDataType:
            # NB: the remainder will be derived.
            return GeoDataType(
                SCP=SCPType(
                    LLH=[coord_center[2], coord_center[3], avg_scene_height]))

        def get_timeline() -> TimelineType:
            acq_prf = hf['acquisition_prf'][()]
            return TimelineType(
                CollectStart=start_time,
                CollectDuration=duration,
                IPP=[IPPSetType(index=0, TStart=0, TEnd=duration,
                                IPPStart=0, IPPEnd=int(round(acq_prf*duration) - 1),
                                IPPPoly=[0, acq_prf]), ])

        def get_position() -> PositionType:
            # parse the state vector times relative to collection start
            times_str = hf['state_vector_time_utc'][:, 0]
            times = numpy.zeros((times_str.shape[0], ), dtype='float64')
            positions = numpy.zeros((times.size, 3), dtype='float64')
            velocities = numpy.zeros((times.size, 3), dtype='float64')
            for i, entry in enumerate(times_str):
                times[i] = get_seconds(_parse_time(entry), start_time, precision='us')
            positions[:, 0], positions[:, 1], positions[:, 2] = hf['posX'][:], hf['posY'][:], hf['posZ'][:]
            velocities[:, 0], velocities[:, 1], velocities[:, 2] = hf['velX'][:], hf['velY'][:], hf['velZ'][:]
            # fit the position polynomial using cross validation
            P_x, P_y, P_z = fit_position_xvalidation(times, positions, velocities, max_degree=8)
            return PositionType(ARPPoly=XYZPolyType(X=P_x, Y=P_y, Z=P_z))

        def get_radar_collection() -> RadarCollectionType:
            return RadarCollectionType(
                TxPolarization=tx_pol,
                TxFrequency=(min_freq, max_freq),
                Waveform=[WaveformParametersType(TxFreqStart=min_freq,
                                                 TxRFBandwidth=tx_bandwidth,
                                                 TxPulseLength=hf['chirp_duration'][()],
                                                 ADCSampleRate=hf['range_sampling_rate'][()],
                                                 RcvDemodType='CHIRP',
                                                 RcvFMRate=0,
                                                 index=1)],
                RcvChannels=[ChanParametersType(TxRcvPolarization=polarization,
                                                index=1)])

        def get_image_formation() -> ImageFormationType:
            return ImageFormationType(
                TxRcvPolarizationProc=polarization,
                ImageFormAlgo='RMA',
                TStartProc=0,
                TEndProc=duration,
                TxFrequencyProc=(min_freq, max_freq),
                STBeamComp='NO',
                ImageBeamComp='SV',
                AzAutofocus='NO',
                RgAutofocus='NO',
                RcvChanProc=RcvChanProcType(NumChanProc=1, PRFScaleFactor=1, ChanIndices=[1, ]),)

        def get_radiometric() -> RadiometricType:
            return RadiometricType(BetaZeroSFPoly=[[float(hf['calibration_factor'][()]), ], ])

        def calculate_drate_sf_poly() -> (numpy.ndarray, numpy.ndarray):
            # returns (doppler rate poly in zero-doppler time, drate scale factor coefficients)
            r_ca_coeffs = numpy.array([r_ca_scp, 1], dtype='float64')
            dop_rate_coeffs = hf['doppler_rate_coeffs'][:]
            # Prior to ICEYE 1.14 processor, absolute value of Doppler rate was
            # provided, not true Doppler rate. Doppler rate should always be negative
            if dop_rate_coeffs[0] > 0:
                dop_rate_coeffs *= -1
            dop_rate_poly = Poly1DType(Coefs=dop_rate_coeffs)
            # shift/scale the polynomial to be centered at the scp range time
            t_drate_ca_poly = dop_rate_poly.shift(
                t_0=zd_ref_time - rg_time_scp,
                alpha=2/speed_of_light, return_poly=False)
            return t_drate_ca_poly, \
                -polynomial.polymul(t_drate_ca_poly, r_ca_coeffs)*speed_of_light/(2*center_freq*vm_ca_sq)

        def calculate_doppler_polys() -> (numpy.ndarray, numpy.ndarray):
            # define and fit the time coa array
            if collect_info.RadarMode.ModeType == 'SPOTLIGHT':
                # analytic doppler centroid definition for spotlight collects
                coa_time = duration / 2
                look = 1 if look_side == 'left' else -1
                alpha = 2.0/speed_of_light
                pos = position.ARPPoly(coa_time)
                vel = position.ARPPoly.derivative_eval(coa_time)
                speed = numpy.linalg.norm(vel)
                vel_hat = vel / speed
                los = geo_data.SCP.ECF.get_array() - pos
                t_time_coa_coeffs = [[coa_time, ], ]
                t_dop_centroid_coeffs = numpy.zeros((2, 2), dtype=numpy.float64)
                t_dop_centroid_coeffs[0, 1] = -look*center_freq*alpha*speed/r_ca_scp
                t_dop_centroid_coeffs[1, 1] = look*center_freq*alpha*speed/(r_ca_scp**2)
                t_dop_centroid_coeffs[:, 0] = -look*(t_dop_centroid_coeffs[:, 1]*numpy.dot(los, vel_hat))
            else:
                # extract doppler centroid coefficients
                dc_estimate_coeffs = hf['dc_estimate_coeffs'][:]
                dc_time_str = hf['dc_estimate_time_utc'][:, 0]
                dc_zd_times = numpy.zeros((dc_time_str.shape[0], ), dtype='float64')
                for i, entry in enumerate(dc_time_str):
                    dc_zd_times[i] = get_seconds(_parse_time(entry), start_time, precision='us')
                # create a sampled doppler centroid
                samples = 49  # copied from corresponding matlab, we just need enough for appropriate refitting
                # create doppler time samples
                diff_time_rg = first_pixel_time - zd_ref_time + \
                    numpy.linspace(0, number_of_range_samples/range_sampling_rate, samples)
                # doppler centroid samples definition
                dc_sample_array = numpy.zeros((samples, dc_zd_times.size), dtype='float64')
                for i, coeffs in enumerate(dc_estimate_coeffs):
                    dc_sample_array[:, i] = polynomial.polyval(diff_time_rg, coeffs)
                # create arrays for range/azimuth from scp in meters
                azimuth_scp_m, range_scp_m = numpy.meshgrid(
                    col_ss*(dc_zd_times - zd_time_scp)/ss_zd_s,
                    (diff_time_rg + zd_ref_time - rg_time_scp)*speed_of_light/2)
                # fit the doppler centroid sample array
                x_order = min(3, range_scp_m.shape[0]-1)
                y_order = min(3, range_scp_m.shape[1]-1)
                t_dop_centroid_coeffs, residuals, rank, sing_values = two_dim_poly_fit(
                    range_scp_m, azimuth_scp_m, dc_sample_array, x_order=x_order, y_order=y_order,
                    x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
                logger.info(
                    'The dop_centroid_poly fit details:\n\troot mean square '
                    'residuals = {}\n\trank = {}\n\tsingular values = {}'.format(
                        residuals, rank, sing_values))
                # derive and fit the center-of-aperture time samples
                doppler_rate_sampled = polynomial.polyval(azimuth_scp_m, drate_ca_poly)
                time_coa = dc_zd_times + dc_sample_array/doppler_rate_sampled
                t_time_coa_coeffs, residuals, rank, sing_values = two_dim_poly_fit(
                    range_scp_m, azimuth_scp_m, time_coa, x_order=x_order, y_order=y_order,
                    x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
                logger.info(
                    'The time_coa_poly fit details:\n\troot mean square '
                    'residuals = {}\n\trank = {}\n\tsingular values = {}'.format(
                        residuals, rank, sing_values))
            return t_dop_centroid_coeffs, t_time_coa_coeffs

        def get_rma() -> RMAType:
            # the doppler centroid poly only applies for non-spotlight collects
            if collect_info.RadarMode.ModeType == 'SPOTLIGHT':
                dop_centroid_poly = None
                dop_centroid_coa = None
            else:
                dop_centroid_poly = Poly2DType(Coefs=dop_centroid_poly_coeffs)
                dop_centroid_coa = True
            # NB: DRateSFPoly is defined as a function of only range - reshape appropriately
            inca = INCAType(
                R_CA_SCP=r_ca_scp,
                FreqZero=center_freq,
                DRateSFPoly=Poly2DType(Coefs=numpy.reshape(drate_sf_poly_coefs, (-1, 1))),
                DopCentroidPoly=dop_centroid_poly,
                DopCentroidCOA=dop_centroid_coa,
                TimeCAPoly=Poly1DType(Coefs=time_ca_poly_coeffs))
            return RMAType(
                RMAlgoType='OMEGA_K',
                INCA=inca)

        def get_grid() -> GridType:
            time_coa_poly = Poly2DType(Coefs=time_coa_poly_coeffs)
            row_win = _stringify(hf['window_function_range'][()])
            if row_win == 'NONE':
                row_win = 'UNIFORM'
            row = DirParamType(
                SS=row_ss,
                Sgn=-1,
                KCtr=2*center_freq/speed_of_light,
                ImpRespBW=2*tx_bandwidth/speed_of_light,
                DeltaKCOAPoly=Poly2DType(Coefs=[[0, ], ]),
                WgtType=WgtTypeType(WindowName=row_win))
            col_win = _stringify(hf['window_function_azimuth'][()])
            if col_win == 'NONE':
                col_win = 'UNIFORM'
            col = DirParamType(
                SS=col_ss,
                Sgn=-1,
                KCtr=0,
                ImpRespBW=col_imp_res_bw,
                WgtType=WgtTypeType(WindowName=col_win),
                DeltaKCOAPoly=Poly2DType(Coefs=dop_centroid_poly_coeffs*ss_zd_s/col_ss))
            return GridType(
                Type='RGZERO',
                ImagePlane='SLANT',
                TimeCOAPoly=time_coa_poly,
                Row=row,
                Col=col)

        def correct_scp() -> None:
            # re-project the scp pixel to the surface to refine the scp location
            scp_pixel = sicd.ImageData.SCPPixel.get_array()
            scp_ecf = sicd.project_image_to_ground(scp_pixel, projection_type='HAE')
            sicd.update_scp(scp_ecf, coord_system='ECF')

        with h5py.File(self._file_name, 'r') as hf:
            # some common use variables
            look_side = _stringify(hf['look_side'][()])
            coord_center = hf['coord_center'][:]
            avg_scene_height = float(hf['avg_scene_height'][()])
            start_time = _parse_time(hf['acquisition_start_utc'][()])
            end_time = _parse_time(hf['acquisition_end_utc'][()])
            duration = get_seconds(end_time, start_time, precision='us')
            center_freq = float(hf['carrier_frequency'][()])
            tx_bandwidth = float(hf['chirp_bandwidth'][()])
            min_freq = center_freq-0.5*tx_bandwidth
            max_freq = center_freq+0.5*tx_bandwidth
            # polarization string, e.g. 'VV' -> tx 'V', rcv 'V'
            pol_temp = _stringify(hf['polarization'][()])
            tx_pol = pol_temp[0]
            rcv_pol = pol_temp[1]
            polarization = tx_pol + ':' + rcv_pol
            first_pixel_time = float(hf['first_pixel_time'][()])
            near_range = first_pixel_time*speed_of_light/2
            number_of_range_samples = float(hf['number_of_range_samples'][()])
            number_of_azimuth_samples = float(hf['number_of_azimuth_samples'][()])
            range_sampling_rate = float(hf['range_sampling_rate'][()])
            row_ss = speed_of_light/(2*range_sampling_rate)
            # define the sicd elements
            collect_info = get_collection_info()
            image_creation = get_image_creation()
            image_data = get_image_data()
            geo_data = get_geo_data()
            timeline = get_timeline()
            position = get_position()
            radar_collection = get_radar_collection()
            image_formation = get_image_formation()
            radiometric = get_radiometric()
            # calculate some zero doppler parameters
            ss_zd_s = float(hf['azimuth_time_interval'][()])
            if look_side == 'left':
                ss_zd_s *= -1
                zero_doppler_left = _parse_time(hf['zerodoppler_end_utc'][()])
            else:
                zero_doppler_left = _parse_time(hf['zerodoppler_start_utc'][()])
            dop_bw = hf['total_processed_bandwidth_azimuth'][()]
            zd_time_scp = get_seconds(zero_doppler_left, start_time, precision='us') + \
                image_data.SCPPixel.Col*ss_zd_s
            zd_ref_time = first_pixel_time + number_of_range_samples/(2*range_sampling_rate)
            vel_scp = position.ARPPoly.derivative_eval(zd_time_scp, der_order=1)
            vm_ca_sq = numpy.sum(vel_scp*vel_scp)
            rg_time_scp = first_pixel_time + image_data.SCPPixel.Row/range_sampling_rate
            r_ca_scp = rg_time_scp*speed_of_light/2
            # calculate the doppler rate sf polynomial
            drate_ca_poly, drate_sf_poly_coefs = calculate_drate_sf_poly()
            # calculate some doppler dependent grid parameters
            col_ss = float(numpy.sqrt(vm_ca_sq)*abs(ss_zd_s)*drate_sf_poly_coefs[0])
            col_imp_res_bw = dop_bw*abs(ss_zd_s)/col_ss
            time_ca_poly_coeffs = [zd_time_scp, ss_zd_s/col_ss]
            # calculate the doppler polynomials
            dop_centroid_poly_coeffs, time_coa_poly_coeffs = calculate_doppler_polys()
            # finish definition of sicd elements
            rma = get_rma()
            grid = get_grid()
            sicd = SICDType(
                CollectionInfo=collect_info,
                ImageCreation=image_creation,
                ImageData=image_data,
                GeoData=geo_data,
                Timeline=timeline,
                Position=position,
                RadarCollection=radar_collection,
                ImageFormation=image_formation,
                Radiometric=radiometric,
                RMA=rma,
                Grid=grid)
            # adjust the scp location
            correct_scp()
            # derive sicd fields
            sicd.derive()
            # TODO: RNIIRS?
            # left-looking data requires reversal along the first raw axis;
            # the raw data is (azimuth, range), so transpose to (range, azimuth)
            reverse_axes = (0, ) if look_side == 'left' else None
            transpose_axes = (1, 0)
            # noinspection PyTypeChecker
            return sicd, reverse_axes, transpose_axes
def get_iceye_data_segment(
        file_name: str,
        reverse_axes: Union[None, int, Sequence[int]],
        transpose_axes: Union[None, Tuple[int, ...]],
        real_group: str = 's_i',
        imaginary_grop: str = 's_q') -> BandAggregateSegment:
    # NOTE(review): `imaginary_grop` looks like a typo for `imaginary_group`,
    # but it is part of the public signature and is preserved here.

    # create matching views of the real and imaginary hdf5 datasets
    real_part = HDF5DatasetSegment(
        file_name, real_group,
        reverse_axes=reverse_axes, transpose_axes=transpose_axes, close_file=True)
    imag_part = HDF5DatasetSegment(
        file_name, imaginary_grop,
        reverse_axes=reverse_axes, transpose_axes=transpose_axes, close_file=True)
    # combine the (real, imaginary) bands into a single complex64 segment
    return BandAggregateSegment(
        (real_part, imag_part), band_dimension=2,
        formatted_dtype='complex64', formatted_shape=real_part.formatted_shape,
        format_function=ComplexFormatFunction(real_part.formatted_dtype, order='IQ', band_dimension=-1))
class ICEYEReader(SICDTypeReader):
    """
    An ICEYE SLC reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_iceye_details', )

    def __init__(self, iceye_details):
        """
        Parameters
        ----------
        iceye_details : str|ICEYEDetails
            file name or ICEYEDetails object
        """

        if isinstance(iceye_details, str):
            # interpret the string as the hdf5 file path
            iceye_details = ICEYEDetails(iceye_details)
        if not isinstance(iceye_details, ICEYEDetails):
            raise TypeError('The input argument for a ICEYEReader must be a '
                            'filename or ICEYEDetails object')
        self._iceye_details = iceye_details

        the_sicd, rev_axes, trans_axes = iceye_details.get_sicd()
        segment = get_iceye_data_segment(iceye_details.file_name, rev_axes, trans_axes)
        SICDTypeReader.__init__(self, segment, the_sicd, close_segments=True)
        self._check_sizes()

    @property
    def iceye_details(self) -> ICEYEDetails:
        """
        ICEYEDetails: The ICEYE details object.
        """

        return self._iceye_details

    @property
    def file_name(self):
        # delegate to the details object
        return self.iceye_details.file_name
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: str) -> Union[None, ICEYEReader]:
    """
    Tests whether a given file_name corresponds to a ICEYE file. Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str|BinaryIO
        the file_name to check

    Returns
    -------
    None|ICEYEReader
        `ICEYEReader` instance if ICEYE file, `None` otherwise
    """

    # only file-system paths to hdf5 files can possibly be ICEYE,
    # and reading requires the optional h5py dependency
    if is_file_like(file_name) or not is_hdf5(file_name) or h5py is None:
        return None

    try:
        details = ICEYEDetails(file_name)
        logger.info('File {} is determined to be a ICEYE file.'.format(file_name))
        return ICEYEReader(details)
    except SarpyIOError:
        # not an ICEYE file after all
        return None
| 22,796 | 41.531716 | 111 | py |
sarpy | sarpy-master/sarpy/io/complex/aggregate.py | """
Functionality for an aggregate sicd type reader, for opening multiple sicd type
files as a single reader object.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Tuple, Sequence, Union
from sarpy.io.complex.converter import open_complex
from sarpy.io.general.base import AggregateReader, SarpyIOError
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.SICD import SICDType
class AggregateComplexReader(AggregateReader, SICDTypeReader):
    """
    Aggregate multiple sicd type readers into a single reader instance.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_readers', '_index_mapping')

    def __init__(self, readers: Union[Sequence[str], Sequence[SICDTypeReader]]):
        """
        Parameters
        ----------
        readers : Sequence[str]|Sequence[SICDTypeReader]
            A collection of file names and/or SICD type reader instances.
        """
        readers = self._validate_readers(readers)
        AggregateReader.__init__(self, readers)
        sicds = self._define_sicds()
        SICDTypeReader.__init__(self, None, sicds)
        self._check_sizes()

    @staticmethod
    def _validate_readers(readers: Sequence[SICDTypeReader]) -> Tuple[SICDTypeReader, ...]:
        """
        Validate the input reader/file collection.

        Parameters
        ----------
        readers : Sequence[str]|Sequence[SICDTypeReader]

        Returns
        -------
        Tuple[SICDTypeReader, ...]

        Raises
        ------
        TypeError
            If `readers` is not a non-string sequence, or contains an entry that is
            neither a file name nor a SICDTypeReader instance.
        SarpyIOError
            If a file name entry cannot be opened by the complex opener.
        """
        # NB: a bare str is itself a Sequence, and would otherwise slip past the
        # type check and be iterated character-by-character, yielding a baffling
        # failure per character - reject it explicitly
        if isinstance(readers, str) or not isinstance(readers, Sequence):
            raise TypeError('input argument must be a list or tuple of readers/files. Got type {}'.format(type(readers)))

        # get a reader for each entry, and make sure that they are sicd type
        # validate each entry
        the_readers = []
        for i, entry in enumerate(readers):
            if isinstance(entry, str):
                try:
                    reader = open_complex(entry)
                except SarpyIOError:
                    raise SarpyIOError(
                        'Attempted and failed to open {} (entry {} of the input argument) '
                        'using the complex opener.'.format(entry, i))
            else:
                reader = entry
            if not isinstance(reader, SICDTypeReader):
                raise ValueError(
                    'Entry {} of the input argument does not correspond to a SICDTypeReader instance. '
                    'Got type {}.'.format(i, type(reader)))
            the_readers.append(reader)
        return tuple(the_readers)

    def _define_sicds(self) -> Tuple[SICDType, ...]:
        # collect the sicd structure for each (reader, image) pair in the aggregate mapping
        sicds = []
        for reader_index, sicd_index in self.index_mapping:
            reader = self._readers[reader_index]
            sicd = reader.get_sicds_as_tuple()[sicd_index]
            sicds.append(sicd)
        return tuple(sicds)
| 2,833 | 31.574713 | 121 | py |
sarpy | sarpy-master/sarpy/io/complex/nisar.py | """
Functionality for reading NISAR data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import os
from collections import OrderedDict
from typing import Tuple, Dict, Union, List, Sequence, Optional
import numpy
from numpy.polynomial import polynomial
from scipy.constants import speed_of_light
from sarpy.compliance import bytes_to_string
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import Poly2DType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
ChanParametersType, TxStepType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.SCPCOA import SCPCOAType
from sarpy.io.complex.sicd_elements.Position import PositionType, XYZPolyType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, \
RcvChanProcType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType, NoiseLevelType_
from sarpy.geometry import point_projection
from sarpy.io.complex.utils import fit_position_xvalidation, two_dim_poly_fit
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.data_segment import HDF5DatasetSegment
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.utils import get_seconds, parse_timestring, is_file_like, is_hdf5, h5py
if h5py is None:
h5pyFile = None
h5pyGroup = None
h5pyDataset = None
else:
from h5py import File as h5pyFile, Group as h5pyGroup, Dataset as h5pyDataset
logger = logging.getLogger(__name__)
###########
# parser and interpreter for hdf5 attributes
def _stringify(val: Union[str, bytes]) -> str:
    """
    Decode the value as necessary, for hdf5 string support issues.

    Parameters
    ----------
    val : str|bytes

    Returns
    -------
    str
    """

    # decode bytes to str if necessary, then drop surrounding whitespace
    decoded = bytes_to_string(val)
    return decoded.strip()
def _get_ref_time(str_in: Union[str, bytes]) -> numpy.datetime64:
    """
    Extract the given reference time.

    Parameters
    ----------
    str_in : str|bytes

    Returns
    -------
    numpy.datetime64
    """

    expected_prefix = 'seconds since '
    decoded = bytes_to_string(str_in)
    if not decoded.startswith(expected_prefix):
        raise ValueError('Got unexpected reference time string - {}'.format(decoded))
    # the remainder of the string is the timestamp itself
    return parse_timestring(decoded[len(expected_prefix):], precision='ns')
def _get_string_list(array: Sequence[bytes]) -> List[str]:
    # decode each hdf5 byte-string entry into a regular str
    return list(map(bytes_to_string, array))
class NISARDetails(object):
"""
Parses and converts the Cosmo Skymed metadata
"""
__slots__ = ('_file_name', )
def __init__(self, file_name: str):
"""
Parameters
----------
file_name : str
"""
if h5py is None:
raise ImportError("Can't read NISAR files, because the h5py dependency is missing.")
if not os.path.isfile(file_name):
raise SarpyIOError('Path {} is not a file'.format(file_name))
with h5py.File(file_name, 'r') as hf:
# noinspection PyBroadException
try:
# noinspection PyUnusedLocal
gp = hf['/science/LSAR/SLC']
except Exception as e:
raise SarpyIOError('Got an error when reading required path /science/LSAR/SLC\n\t{}'.format(e))
self._file_name = file_name
@property
def file_name(self) -> str:
"""
str: the file name
"""
return self._file_name
@staticmethod
def _get_frequency_list(hf: h5pyFile) -> List[str]:
"""
Gets the list of frequencies.
Parameters
----------
hf : h5py.File
Returns
-------
numpy.ndarray
"""
return _get_string_list(hf['/science/LSAR/identification/listOfFrequencies'][:])
@staticmethod
def _get_collection_times(hf: h5pyFile) -> Tuple[numpy.datetime64, numpy.datetime64, float]:
"""
Gets the collection start and end times, and inferred duration.
Parameters
----------
hf : h5py.File
The h5py File object.
Returns
-------
start_time : numpy.datetime64
end_time : numpy.datetime64
duration : float
"""
start_time = parse_timestring(
_stringify(hf['/science/LSAR/identification/zeroDopplerStartTime'][()]),
precision='ns')
end_time = parse_timestring(
_stringify(hf['/science/LSAR/identification/zeroDopplerEndTime'][()]),
precision='ns')
duration = get_seconds(end_time, start_time, precision='ns')
return start_time, end_time, duration
@staticmethod
def _get_zero_doppler_data(
hf: h5pyFile,
base_sicd: SICDType) -> Tuple[numpy.ndarray, float, numpy.ndarray, numpy.ndarray]:
"""
Gets zero-doppler parameters.
Parameters
----------
hf : h5py.File
base_sicd : SICDType
Returns
-------
azimuth_zero_doppler_times : numpy.ndarray
azimuth_zero_doppler_spacing : float
grid_range_array : numpy.ndarray
range_zero_doppler_times : numpy.ndarray
"""
gp = hf['/science/LSAR/SLC/swaths']
ds = gp['zeroDopplerTime']
ref_time = _get_ref_time(ds.attrs['units'])
zd_time = ds[:] + get_seconds(ref_time, base_sicd.Timeline.CollectStart, precision='ns')
ss_az_s = gp['zeroDopplerTimeSpacing'][()]
if base_sicd.SCPCOA.SideOfTrack == 'L':
zd_time = zd_time[::-1]
ss_az_s *= -1
gp = hf['/science/LSAR/SLC/metadata/processingInformation/parameters']
grid_r = gp['slantRange'][:]
ds = gp['zeroDopplerTime']
ref_time = _get_ref_time(ds.attrs['units'])
grid_zd_time = ds[:] + get_seconds(ref_time, base_sicd.Timeline.CollectStart, precision='ns')
return zd_time, ss_az_s, grid_r, grid_zd_time
def _get_base_sicd(self, hf: h5pyFile) -> SICDType:
"""
Defines the base SICD object, to be refined with further details.
Returns
-------
SICDType
"""
def get_collection_info() -> CollectionInfoType:
gp = hf['/science/LSAR/identification']
return CollectionInfoType(
CollectorName=_stringify(hf.attrs['mission_name']),
CoreName='{0:07d}_{1:s}'.format(gp['absoluteOrbitNumber'][()],
_stringify(gp['trackNumber'][()])),
CollectType='MONOSTATIC',
Classification='UNCLASSIFIED',
RadarMode=RadarModeType(ModeType='STRIPMAP'))
def get_image_creation() -> ImageCreationType:
application = 'ISCE'
# noinspection PyBroadException
try:
application = '{} {}'.format(
application,
_stringify(hf['/science/LSAR/SLC/metadata/processingInformation/algorithms/ISCEVersion'][()]))
except Exception as e:
logger.info('Failed extracting the application details with error\n\t{}'.format(e))
pass
from sarpy.__about__ import __version__
# TODO: DateTime?
return ImageCreationType(
Application=application,
Site='Unknown',
Profile='sarpy {}'.format(__version__))
def get_geo_data() -> GeoDataType:
# seeds a rough SCP for projection usage
poly_str = _stringify(hf['/science/LSAR/identification/boundingPolygon'][()])
beg_str = 'POLYGON (('
if not poly_str.startswith(beg_str):
raise ValueError('Unexpected polygon string {}'.format(poly_str))
parts = poly_str[len(beg_str):-2].strip().split(',')
if len(parts) != 5:
raise ValueError('Unexpected polygon string parts {}'.format(parts))
lats_lons = numpy.zeros((4, 2), dtype=numpy.float64)
for i, part in enumerate(parts[:-1]):
spart = part.strip().split()
if len(spart) != 2:
raise ValueError('Unexpected polygon string parts {}'.format(parts))
lats_lons[i, :] = float(spart[1]), float(spart[0])
llh = numpy.zeros((3, ), dtype=numpy.float64)
llh[0:2] = numpy.mean(lats_lons, axis=0)
llh[2] = numpy.mean(
hf['/science/LSAR/SLC/metadata/processingInformation/parameters/referenceTerrainHeight'][:])
return GeoDataType(SCP=SCPType(LLH=llh))
def get_grid() -> GridType:
# TODO: Future Change Required - JPL states that uniform weighting in data simulated
# from UAVSAR is a placeholder, not an accurate description of the data.
# At this point, it is not clear what the final weighting description for NISAR
# will be.
gp = hf['/science/LSAR/SLC/metadata/processingInformation/parameters']
row_wgt = gp['rangeChirpWeighting'][:]
win_name = 'UNIFORM' if numpy.all(row_wgt == row_wgt[0]) else 'UNKNOWN'
row = DirParamType(
Sgn=-1,
DeltaKCOAPoly=[[0, ], ],
WgtFunct=numpy.cast[numpy.float64](row_wgt),
WgtType=WgtTypeType(WindowName=win_name))
col_wgt = gp['azimuthChirpWeighting'][:]
win_name = 'UNIFORM' if numpy.all(col_wgt == col_wgt[0]) else 'UNKNOWN'
col = DirParamType(
Sgn=-1,
KCtr=0,
WgtFunct=numpy.cast[numpy.float64](col_wgt),
WgtType=WgtTypeType(WindowName=win_name))
return GridType(ImagePlane='SLANT', Type='RGZERO', Row=row, Col=col)
def get_timeline() -> TimelineType:
# NB: IPPEnd must be set, but will be replaced
return TimelineType(
CollectStart=collect_start,
CollectDuration=duration,
IPP=[IPPSetType(index=0, TStart=0, TEnd=duration, IPPStart=0, IPPEnd=0), ])
def get_position() -> PositionType:
gp = hf['/science/LSAR/SLC/metadata/orbit']
ref_time = _get_ref_time(gp['time'].attrs['units'])
T = gp['time'][:] + get_seconds(ref_time, collect_start, precision='ns')
Pos = gp['position'][:]
Vel = gp['velocity'][:]
P_x, P_y, P_z = fit_position_xvalidation(T, Pos, Vel, max_degree=8)
return PositionType(ARPPoly=XYZPolyType(X=P_x, Y=P_y, Z=P_z))
def get_scpcoa() -> SCPCOAType:
# remaining fields set later
sot = _stringify(hf['/science/LSAR/identification/lookDirection'][()])[0].upper()
return SCPCOAType(SideOfTrack=sot)
def get_image_formation() -> ImageFormationType:
return ImageFormationType(
ImageFormAlgo='RMA',
TStartProc=0,
TEndProc=duration,
STBeamComp='NO',
ImageBeamComp='SV',
AzAutofocus='NO',
RgAutofocus='NO',
RcvChanProc=RcvChanProcType(NumChanProc=1, PRFScaleFactor=1))
def get_rma() -> RMAType:
return RMAType(RMAlgoType='OMEGA_K', INCA=INCAType(DopCentroidCOA=True))
collect_start, collect_end, duration = self._get_collection_times(hf)
collection_info = get_collection_info()
image_creation = get_image_creation()
geo_data = get_geo_data()
grid = get_grid()
timeline = get_timeline()
position = get_position()
scpcoa = get_scpcoa()
image_formation = get_image_formation()
rma = get_rma()
return SICDType(
CollectionInfo=collection_info,
ImageCreation=image_creation,
GeoData=geo_data,
Grid=grid,
Timeline=timeline,
Position=position,
SCPCOA=scpcoa,
ImageFormation=image_formation,
RMA=rma)
    @staticmethod
    def _get_freq_specific_sicd(
            gp: h5pyGroup,
            base_sicd: SICDType) -> Tuple[SICDType, List[str], List[str], float]:
        """
        Gets the frequency specific sicd.

        Parameters
        ----------
        gp : h5py.Group
            The `/science/LSAR/SLC/swaths/frequency<X>` group.
        base_sicd : SICDType
            The common base sicd - copied here, not modified in place.

        Returns
        -------
        sicd : SICDType
            The frequency specific sicd.
        pols : List[str]
            The polarization names (e.g. 'HH', 'HV') for this frequency.
        tx_rcv_pol : List[str]
            The corresponding 'Tx:Rcv' polarization strings.
        center_frequency : float
            The processed center frequency.
        """

        def update_grid() -> None:
            # two-way spatial frequency bandwidth implied by the processed range bandwidth
            row_imp_resp_bw = 2*gp['processedRangeBandwidth'][()]/speed_of_light
            t_sicd.Grid.Row.SS = gp['slantRangeSpacing'][()]
            t_sicd.Grid.Row.ImpRespBW = row_imp_resp_bw
            # spatial frequency support symmetric about zero
            t_sicd.Grid.Row.DeltaK1 = -0.5*row_imp_resp_bw
            t_sicd.Grid.Row.DeltaK2 = -t_sicd.Grid.Row.DeltaK1

        def update_timeline() -> None:
            prf = gp['nominalAcquisitionPRF'][()]
            # total pulse count over the collect, at the nominal prf
            t_sicd.Timeline.IPP[0].IPPEnd = round(prf*t_sicd.Timeline.CollectDuration) - 1
            t_sicd.Timeline.IPP[0].IPPPoly = [0, prf]

        def define_radar_collection() -> List[str]:
            tx_rcv_pol_t = []
            tx_pol = []
            for entry in pols:
                # entry is e.g. 'HV' - first character transmit, second receive
                tx_rcv_pol_t.append('{}:{}'.format(entry[0], entry[1]))
                if entry[0] not in tx_pol:
                    tx_pol.append(entry[0])
            center_freq_t = gp['acquiredCenterFrequency'][()]
            bw = gp['acquiredRangeBandwidth'][()]
            tx_freq = (center_freq_t - 0.5*bw, center_freq_t + 0.5*bw)
            rcv_chans = [ChanParametersType(TxRcvPolarization=pol) for pol in tx_rcv_pol_t]
            if len(tx_pol) == 1:
                # single transmit polarization - no transmit sequence required
                tx_sequence = None
                tx_pol = tx_pol[0]
            else:
                # multiple transmit polarizations - describe as a SEQUENCE
                tx_sequence = [TxStepType(WFIndex=j+1, TxPolarization=pol) for j, pol in enumerate(tx_pol)]
                tx_pol = 'SEQUENCE'
            t_sicd.RadarCollection = RadarCollectionType(
                TxFrequency=tx_freq,
                RcvChannels=rcv_chans,
                TxPolarization=tx_pol,
                TxSequence=tx_sequence)
            return tx_rcv_pol_t

        def update_image_formation() -> float:
            center_freq_t = gp['processedCenterFrequency'][()]
            bw = gp['processedRangeBandwidth'][()]
            t_sicd.ImageFormation.TxFrequencyProc = (center_freq_t - 0.5*bw, center_freq_t + 0.5*bw)
            return center_freq_t

        pols = _get_string_list(gp['listOfPolarizations'][:])
        t_sicd = base_sicd.copy()
        # NB: the nested helpers above all mutate t_sicd in place
        update_grid()
        update_timeline()
        tx_rcv_pol = define_radar_collection()
        center_freq = update_image_formation()
        return t_sicd, pols, tx_rcv_pol, center_freq
    @staticmethod
    def _get_pol_specific_sicd(
            hf: h5pyFile,
            ds: h5pyDataset,
            base_sicd: SICDType,
            pol_name: str,
            freq_name: str,
            j: int,
            pol: str,
            r_ca_sampled: numpy.ndarray,
            zd_time: numpy.ndarray,
            grid_zd_time: numpy.ndarray,
            grid_r: numpy.ndarray,
            doprate_sampled: numpy.ndarray,
            dopcentroid_sampled: numpy.ndarray,
            center_freq: float,
            ss_az_s: float,
            dop_bw: float,
            beta0,
            gamma0,
            sigma0) -> Tuple[SICDType, Tuple[int, ...], numpy.dtype]:
        """
        Gets the frequency/polarization specific sicd.

        Parameters
        ----------
        hf : h5py.File
        ds : h5py.Dataset
            The complex image dataset for this frequency/polarization.
        base_sicd : SICDType
            The frequency specific sicd - copied here, not modified in place.
        pol_name : str
            Polarization name, used to look up the nes0 calibration data.
        freq_name : str
            Frequency band name, used to look up the nes0 calibration data.
        j : int
            Index of this polarization in the polarization list.
        pol : str
            The 'Tx:Rcv' polarization string.
        r_ca_sampled : numpy.ndarray
            Slant range per image row.
        zd_time : numpy.ndarray
            Zero doppler time per image column.
        grid_zd_time : numpy.ndarray
            Zero doppler times of the metadata sample grid.
        grid_r : numpy.ndarray
            Slant ranges of the metadata sample grid.
        doprate_sampled : numpy.ndarray
            Azimuth FM rate sampled on the metadata grid.
        dopcentroid_sampled : numpy.ndarray
            Doppler centroid sampled on the metadata grid.
        center_freq : float
            The processed center frequency.
        ss_az_s : float
            Azimuth sample spacing, in zero doppler time seconds.
        dop_bw : float
            Processed azimuth (doppler) bandwidth.
        beta0 : h5py.Dataset
            Calibration samples, with a `_FillValue` attribute.
        gamma0 : h5py.Dataset
            Calibration samples, with a `_FillValue` attribute.
        sigma0 : h5py.Dataset
            Calibration samples, with a `_FillValue` attribute.

        Returns
        -------
        sicd: SICDType
        shape : Tuple[int, ...]
        numpy.dtype
        """

        def define_image_data() -> None:
            if dtype.name in ('float32', 'complex64'):
                pixel_type = 'RE32F_IM32F'
            elif dtype.name == 'int16':
                pixel_type = 'RE16I_IM16I'
            else:
                raise ValueError('Got unhandled dtype {}'.format(dtype))
            # NB: the raw dataset shape is in the opposite order from the sicd
            # row/column convention (a transpose is applied at read time),
            # so shape[0]/shape[1] swap roles here
            t_sicd.ImageData = ImageDataType(
                PixelType=pixel_type,
                NumRows=shape[1],
                NumCols=shape[0],
                FirstRow=0,
                FirstCol=0,
                SCPPixel=[0.5*shape[0], 0.5*shape[1]],
                FullImage=[shape[1], shape[0]])

        def update_image_formation() -> None:
            t_sicd.ImageFormation.RcvChanProc.ChanIndices = [j, ]
            t_sicd.ImageFormation.TxRcvPolarizationProc = pol

        def update_inca_and_grid() -> Tuple[numpy.ndarray, numpy.ndarray]:
            t_sicd.RMA.INCA.R_CA_SCP = r_ca_sampled[t_sicd.ImageData.SCPPixel.Row]
            scp_ca_time = zd_time[t_sicd.ImageData.SCPPixel.Col]

            # compute DRateSFPoly
            # velocity at scp ca time
            vel_ca = t_sicd.Position.ARPPoly.derivative_eval(scp_ca_time, der_order=1)
            # squared magnitude
            vm_ca_sq = numpy.sum(vel_ca*vel_ca)
            # polynomial coefficient for function representing range as a function of range distance from SCP
            r_ca_poly = numpy.array([t_sicd.RMA.INCA.R_CA_SCP, 1], dtype=numpy.float64)
            # closest Doppler rate polynomial to SCP
            min_ind = numpy.argmin(numpy.absolute(grid_zd_time - scp_ca_time))
            # define range coordinate grid
            coords_rg_m = grid_r - t_sicd.RMA.INCA.R_CA_SCP
            # determine dop_rate_poly coordinates
            dop_rate_poly = polynomial.polyfit(coords_rg_m, -doprate_sampled[min_ind, :], 4)  # why fourth order?
            t_sicd.RMA.INCA.FreqZero = center_freq
            t_sicd.RMA.INCA.DRateSFPoly = Poly2DType(Coefs=numpy.reshape(
                -numpy.convolve(dop_rate_poly, r_ca_poly)*speed_of_light/(2*center_freq*vm_ca_sq), (-1, 1)))

            # update Grid.Col parameters
            t_sicd.Grid.Col.SS = numpy.sqrt(vm_ca_sq)*abs(ss_az_s)*t_sicd.RMA.INCA.DRateSFPoly.Coefs[0, 0]
            t_sicd.Grid.Col.ImpRespBW = min(abs(dop_bw*ss_az_s), 1)/t_sicd.Grid.Col.SS
            t_sicd.RMA.INCA.TimeCAPoly = [scp_ca_time, ss_az_s/t_sicd.Grid.Col.SS]

            # TimeCOAPoly/DopCentroidPoly/DeltaKCOAPoly
            # azimuth image-plane coordinates relative to the scp
            coords_az_m = (grid_zd_time - scp_ca_time)*t_sicd.Grid.Col.SS/ss_az_s

            # cerate the 2d grids
            coords_rg_2d_t, coords_az_2d_t = numpy.meshgrid(coords_rg_m, coords_az_m, indexing='xy')

            coefs, residuals, rank, sing_values = two_dim_poly_fit(
                coords_rg_2d_t, coords_az_2d_t, dopcentroid_sampled,
                x_order=3, y_order=3, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
            logger.info(
                'The dop_centroid_poly fit details:\n\t'
                'root mean square residuals = {}\n\t'
                'rank = {}\n\t'
                'singular values = {}'.format(residuals, rank, sing_values))

            t_sicd.RMA.INCA.DopCentroidPoly = Poly2DType(Coefs=coefs)
            # DeltaKCOAPoly is the doppler centroid rescaled to column spatial frequency
            t_sicd.Grid.Col.DeltaKCOAPoly = Poly2DType(Coefs=coefs*ss_az_s/t_sicd.Grid.Col.SS)

            # time coa = zero doppler time shifted by doppler centroid over doppler rate
            timeca_sampled = numpy.outer(grid_zd_time, numpy.ones((grid_r.size, )))
            time_coa_sampled = timeca_sampled + (dopcentroid_sampled/doprate_sampled)
            coefs, residuals, rank, sing_values = two_dim_poly_fit(
                coords_rg_2d_t, coords_az_2d_t, time_coa_sampled,
                x_order=3, y_order=3, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
            logger.info(
                'The time_coa_poly fit details:\n\t'
                'root mean square residuals = {}\n\t'
                'rank = {}\n\t'
                'singular values = {}'.format(residuals, rank, sing_values))
            t_sicd.Grid.TimeCOAPoly = Poly2DType(Coefs=coefs)
            return coords_rg_2d_t, coords_az_2d_t

        def define_radiometric() -> None:
            def get_poly(ds: h5pyDataset, name: str) -> Optional[Poly2DType]:
                # NB: `ds` here shadows the outer image dataset argument
                array = ds[:]
                fill = ds.attrs['_FillValue']
                # only fit against the valid (non-fill) samples
                boolc = (array != fill)

                if numpy.any(boolc):
                    array = array[boolc]
                    if numpy.any(array != array[0]):
                        coefs, residuals, rank, sing_values = two_dim_poly_fit(
                            coords_rg_2d[boolc], coords_az_2d[boolc], array,
                            x_order=3, y_order=3, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
                        logger.info(
                            'The {} fit details:\n\t'
                            'root mean square residuals = {}\n\t'
                            'rank = {}\n\t'
                            'singular values = {}'.format(name, residuals, rank, sing_values))
                    else:
                        # it's constant, so just use a constant polynomial
                        coefs = [[array[0], ], ]
                        logger.info('The {} values are constant'.format(name))
                    return Poly2DType(Coefs=coefs)
                else:
                    logger.warning('No non-trivial values for {} provided.'.format(name))
                    return None

            beta0_poly = get_poly(beta0, 'beta0')
            gamma0_poly = get_poly(gamma0, 'gamma0')
            sigma0_poly = get_poly(sigma0, 'sigma0')

            # NOTE(review): this assumes sigma0_poly is not None - if the
            # sigma0 calibration data is entirely fill values this will fail
            nesz = hf['/science/LSAR/SLC/metadata/calibrationInformation/frequency{}/{}/nes0'.format(freq_name,
                                                                                                     pol_name)][:]
            noise_samples = nesz - (10 * numpy.log10(sigma0_poly.Coefs[0, 0]))

            coefs, residuals, rank, sing_values = two_dim_poly_fit(
                coords_rg_2d, coords_az_2d, noise_samples,
                x_order=3, y_order=3, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
            logger.info(
                'The noise_poly fit details:\n\t'
                'root mean square residuals = {}\n\t'
                'rank = {}\n\t'
                'singular values = {}'.format(
                    residuals, rank, sing_values))
            t_sicd.Radiometric = RadiometricType(
                BetaZeroSFPoly=beta0_poly,
                GammaZeroSFPoly=gamma0_poly,
                SigmaZeroSFPoly=sigma0_poly,
                NoiseLevel=NoiseLevelType_(
                    NoiseLevelType='ABSOLUTE', NoisePoly=Poly2DType(Coefs=coefs)))

        def update_geodata() -> None:
            # project the scp pixel to the ground to define the scene center point
            ecf = point_projection.image_to_ground(
                [t_sicd.ImageData.SCPPixel.Row, t_sicd.ImageData.SCPPixel.Col], t_sicd)
            t_sicd.GeoData.SCP = SCPType(ECF=ecf)  # LLH will be populated

        t_sicd = base_sicd.copy()
        shape = ds.shape
        dtype = ds.dtype
        # NB: the nested helpers mutate t_sicd in place, and the order matters -
        # e.g. update_inca_and_grid relies on ImageData being defined first
        define_image_data()
        update_image_formation()
        coords_rg_2d, coords_az_2d = update_inca_and_grid()
        define_radiometric()
        update_geodata()
        t_sicd.derive()
        t_sicd.populate_rniirs(override=False)
        return t_sicd, shape, dtype
    def get_sicd_collection(self) -> Tuple[
            Dict[str, SICDType],
            Dict[str, Tuple[Tuple[int, ...], numpy.dtype]],
            Optional[Tuple[int, ...]],
            Optional[Tuple[int, ...]]]:
        """
        Get the sicd collection for the bands.

        Returns
        -------
        sicd_dict : Dict[str, SICDType]
            Mapping of hdf5 dataset path to the corresponding sicd.
        shape_dict : Dict[str, Tuple[Tuple[int, ...], numpy.dtype]]
            Mapping of hdf5 dataset path to the (raw shape, raw dtype) pair.
        reverse_axes : None|Tuple[int, ...]
            Raw axes to reverse at read time, if any.
        transpose_axes : None|Tuple[int, ...]
            Transpose to apply to the raw data at read time, if any.
        """

        # TODO: check if the hdf already has the sicds defined, and fish them out if so.
        with h5py.File(self.file_name, 'r') as hf:
            # fetch the base shared sicd
            base_sicd = self._get_base_sicd(hf)

            # prepare our output workspace
            out_sicds = OrderedDict()
            shapes = OrderedDict()
            # reverse the first raw axis for left looking collects
            reverse_axes = (0, ) if base_sicd.SCPCOA.SideOfTrack == 'L' else None
            # the raw data is always transposed relative to the sicd convention
            transpose_axes = (1, 0)

            # fetch the common use data for frequency issues
            collect_start, collect_end, duration = self._get_collection_times(hf)
            zd_time, ss_az_s, grid_r, grid_zd_time = self._get_zero_doppler_data(hf, base_sicd)

            # calibration data shared across all frequencies/polarizations
            gp = hf['/science/LSAR/SLC/metadata/calibrationInformation/geometry']
            beta0 = gp['beta0']
            gamma0 = gp['gamma0']
            sigma0 = gp['sigma0']

            # formulate the frequency specific sicd information
            freqs = self._get_frequency_list(hf)
            for i, freq in enumerate(freqs):
                gp_name = '/science/LSAR/SLC/swaths/frequency{}'.format(freq)
                gp = hf[gp_name]
                freq_sicd, pols, tx_rcv_pol, center_freq = self._get_freq_specific_sicd(gp, base_sicd)

                # formulate the frequency dependent doppler grid
                # TODO: Future Change Required - processedAzimuthBandwidth acknowledged
                #   by JPL to be wrong in simulated datasets.
                dop_bw = gp['processedAzimuthBandwidth'][()]
                gp2 = hf['/science/LSAR/SLC/metadata/processingInformation/parameters/frequency{}'.format(freq)]
                dopcentroid_sampled = gp2['dopplerCentroid'][:]
                doprate_sampled = gp2['azimuthFMRate'][:]
                r_ca_sampled = gp['slantRange'][:]

                # formulate the frequency/polarization specific sicd information
                for j, pol in enumerate(pols):
                    ds_name = '{}/{}'.format(gp_name, pol)
                    ds = gp[pol]
                    pol_sicd, shape, dtype = self._get_pol_specific_sicd(
                        hf, ds, freq_sicd, pol, freq, j, tx_rcv_pol[j],
                        r_ca_sampled, zd_time, grid_zd_time, grid_r,
                        doprate_sampled, dopcentroid_sampled, center_freq,
                        ss_az_s, dop_bw, beta0, gamma0, sigma0)
                    out_sicds[ds_name] = pol_sicd
                    shapes[ds_name] = (shape, dtype)
        return out_sicds, shapes, reverse_axes, transpose_axes
################
# The NISAR reader
class NISARReader(SICDTypeReader):
    """
    An NISAR SLC reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_nisar_details', )

    def __init__(self, nisar_details: Union[str, NISARDetails]):
        """
        Parameters
        ----------
        nisar_details : str|NISARDetails
            file name or NISARDetails object
        """

        if isinstance(nisar_details, str):
            nisar_details = NISARDetails(nisar_details)
        if not isinstance(nisar_details, NISARDetails):
            raise TypeError('The input argument for NISARReader must be a '
                            'filename or NISARDetails object')
        self._nisar_details = nisar_details

        sicd_data, shape_dict, reverse_axes, transpose_axes = nisar_details.get_sicd_collection()
        sicds = []
        data_segments = []
        for band_name, band_sicd in sicd_data.items():
            sicds.append(band_sicd)
            raw_shape, raw_dtype = shape_dict[band_name]
            # the formatted shape accounts for the transpose applied at read time
            if transpose_axes is None:
                formatted_shape = raw_shape[:2]
            else:
                formatted_shape = (raw_shape[1], raw_shape[0])
            if raw_dtype.name == 'complex64':
                # already complex - no conversion required
                formatted_dtype = raw_dtype
                format_function = None
            else:
                # interleaved I/Q data, converted to complex at read time
                formatted_dtype = 'complex64'
                format_function = ComplexFormatFunction(raw_dtype=raw_dtype, order='IQ', band_dimension=-1)
            data_segments.append(
                HDF5DatasetSegment(
                    nisar_details.file_name, band_name,
                    formatted_dtype=formatted_dtype, formatted_shape=formatted_shape,
                    reverse_axes=reverse_axes, transpose_axes=transpose_axes,
                    format_function=format_function, close_file=True))

        SICDTypeReader.__init__(self, data_segments, sicds, close_segments=True)
        self._check_sizes()

    @property
    def nisar_details(self) -> NISARDetails:
        """
        NISARDetails: The nisar details object.
        """

        return self._nisar_details

    @property
    def file_name(self) -> str:
        return self.nisar_details.file_name
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: str) -> Optional[NISARReader]:
    """
    Tests whether a given file_name corresponds to a NISAR file. Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str|BinaryIO
        the file_name to check

    Returns
    -------
    NISARReader|None
        `NISARReader` instance if NISAR file, `None` otherwise
    """

    # file-like objects are not supported, and the file must be hdf5
    # with the h5py dependency available
    if is_file_like(file_name) or not is_hdf5(file_name) or h5py is None:
        return None

    try:
        details = NISARDetails(file_name)
        logger.info('File {} is determined to be a NISAR file.'.format(file_name))
        return NISARReader(details)
    except (ImportError, SarpyIOError):
        # not a NISAR package - defer to other readers
        return None
| 30,180 | 37.252218 | 114 | py |
sarpy | sarpy-master/sarpy/io/complex/sentinel.py | """
Functionality for reading Sentinel-1 data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = ("Thomas McCullough", "Daniel Haverporth")
import os
import logging
from datetime import datetime
from xml.etree import ElementTree
from typing import List, Tuple, Union, Optional
import numpy
from numpy.polynomial import polynomial
from scipy.constants import speed_of_light
from scipy.interpolate import griddata
from sarpy.geometry.geocoords import geodetic_to_ecf
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import Poly1DType, Poly2DType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
WaveformParametersType, ChanParametersType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Position import PositionType, XYZPolyType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, RcvChanProcType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType, NoiseLevelType_
from sarpy.io.complex.utils import two_dim_poly_fit, get_im_physical_coords
from sarpy.io.general.base import BaseReader, SarpyIOError
from sarpy.io.general.data_segment import SubsetSegment
from sarpy.io.general.tiff import TiffDetails, NativeTiffDataSegment
from sarpy.io.general.utils import get_seconds, parse_timestring, is_file_like
logger = logging.getLogger(__name__)
##########
# helper functions
def _parse_xml(file_name: str,
without_ns: bool = False) -> Union[ElementTree.Element, Tuple[dict, ElementTree.Element]]:
root_node = ElementTree.parse(file_name).getroot()
if without_ns:
return root_node
else:
ns = dict([node for _, node in ElementTree.iterparse(file_name, events=('start-ns', ))])
return ns, root_node
###########
# parser and interpreter for sentinel-1 manifest.safe file
class SentinelDetails(object):
__slots__ = ('_file_name', '_directory_name', '_root_node', '_ns', '_satellite', '_product_type', '_base_sicd')
    def __init__(self, file_name: str):
        """
        Parameters
        ----------
        file_name : str
            The path to the manifest.safe file, or to the package directory
            which contains it.

        Raises
        ------
        SarpyIOError
            If the path does not exist, is not a file, or is not named manifest.safe.
        ValueError
            If the package is not a SENTINEL-1 SLC product.
        """

        if os.path.isdir(file_name):  # its directory - point it at the manifest.safe file
            t_file_name = os.path.join(file_name, 'manifest.safe')
            if os.path.exists(t_file_name):
                file_name = t_file_name
        if not os.path.exists(file_name) or not os.path.isfile(file_name):
            raise SarpyIOError('path {} does not exist or is not a file'.format(file_name))
        if os.path.split(file_name)[1] != 'manifest.safe':
            raise SarpyIOError('The sentinel file is expected to be named manifest.safe, got path {}'.format(file_name))
        self._file_name = file_name
        absolute_path = os.path.abspath(file_name)
        self._directory_name, _ = os.path.split(absolute_path)

        self._ns, self._root_node = _parse_xml(file_name)
        # note that the manifest.safe apparently does not have a default namespace,
        # so we have to explicitly enter no prefix in the namespace dictionary
        self._ns[''] = ''
        # verify that this package is a SENTINEL-1 collect
        self._satellite = self._find('./metadataSection'
                                     '/metadataObject[@ID="platform"]'
                                     '/metadataWrap'
                                     '/xmlData'
                                     '/safe:platform'
                                     '/safe:familyName').text
        if self._satellite != 'SENTINEL-1':
            raise ValueError('The platform in the manifest.safe file is required '
                             'to be SENTINEL-1, got {}'.format(self._satellite))
        # verify that this package is an SLC product
        self._product_type = self._find('./metadataSection'
                                        '/metadataObject[@ID="generalProductInformation"]'
                                        '/metadataWrap'
                                        '/xmlData'
                                        '/s1sarl1:standAloneProductInformation'
                                        '/s1sarl1:productType').text
        if self._product_type != 'SLC':
            raise ValueError('The product type in the manifest.safe file is required '
                             'to be "SLC", got {}'.format(self._product_type))
        # the common sicd skeleton, shared by every measurement data unit
        self._base_sicd = self._get_base_sicd()
    @property
    def file_name(self) -> str:
        """
        str: the path of the manifest.safe file
        """

        return self._file_name
    @property
    def directory_name(self) -> str:
        """
        str: the absolute path of the package directory containing manifest.safe
        """

        return self._directory_name
    @property
    def satellite(self) -> str:
        """
        str: the satellite family name (verified to be 'SENTINEL-1' at construction)
        """

        return self._satellite
    @property
    def product_type(self) -> str:
        """
        str: the product type (verified to be 'SLC' at construction)
        """

        return self._product_type
    def _find(self, tag: str) -> ElementTree.Element:
        """
        Pass through to ElementTree.Element.find(tag, ns), using the namespace
        collection parsed from the manifest file.

        Parameters
        ----------
        tag : str

        Returns
        -------
        ElementTree.Element
        """

        return self._root_node.find(tag, self._ns)
    def _findall(self, tag: str) -> List[ElementTree.Element]:
        """
        Pass through to ElementTree.Element.findall(tag, ns), using the namespace
        collection parsed from the manifest file.

        Parameters
        ----------
        tag : str

        Returns
        -------
        List[ElementTree.Element]
        """

        return self._root_node.findall(tag, self._ns)
@staticmethod
def _parse_pol(str_in: str) -> str:
return '{}:{}'.format(str_in[0], str_in[1])
    def _get_file_sets(self) -> List[dict]:
        """
        Extracts paths for measurement and metadata files from a Sentinel manifest.safe file.
        These files will be grouped according to "measurement data unit" implicit in the
        Sentinel structure.

        Returns
        -------
        List[dict]
            One dictionary per measurement data unit, with keys 'data',
            'product', 'noise' and 'calibration' mapping to file paths
            (or None, when not resolvable).
        """

        def get_file_location(schema_type, tids):
            # resolve the first data object id matching the schema type to a
            # file path relative to the package directory
            if isinstance(tids, str):
                tids = [tids, ]
            for tid in tids:
                do = self._find('dataObjectSection/dataObject[@repID="{}"]/[@ID="{}"]'.format(schema_type, tid))
                if do is None:
                    continue
                return os.path.join(base_path, do.find('./byteStream/fileLocation').attrib['href'])
            return None

        base_path = os.path.dirname(self._file_name)

        files = []
        for mdu in self._findall('./informationPackageMap'
                                 '/xfdu:contentUnit'
                                 '/xfdu:contentUnit/[@repID="s1Level1MeasurementSchema"]'):
            # get the data file for this measurement
            fnames = {'data': get_file_location('s1Level1MeasurementSchema',
                                                mdu.find('dataObjectPointer').attrib['dataObjectID'])}
            # get the ids for product, noise, and calibration associated with this measurement data unit
            ids = mdu.attrib['dmdID'].split()
            # translate these ids to data object ids=file ids for the data files
            fids = [self._find('./metadataSection'
                               '/metadataObject[@ID="{}"]'
                               '/dataObjectPointer'.format(did)).attrib['dataObjectID'] for did in ids]
            # NB: there is (at most) one of these per measurement data unit
            fnames['product'] = get_file_location('s1Level1ProductSchema', fids)
            fnames['noise'] = get_file_location('s1Level1NoiseSchema', fids)
            fnames['calibration'] = get_file_location('s1Level1CalibrationSchema', fids)
            files.append(fnames)
        return files
    def _get_base_sicd(self) -> SICDType:
        """
        Gets the base SICD element, common to every measurement data unit
        in the package.

        Returns
        -------
        SICDType
            Partially populated - only CollectionInfo, ImageCreation and
            RadarCollection are defined here.
        """

        from sarpy.__about__ import __version__
        # CollectionInfo
        platform = self._find('./metadataSection'
                              '/metadataObject[@ID="platform"]'
                              '/metadataWrap'
                              '/xmlData/safe:platform')
        # e.g. 'SENTINEL-1' + 'A'
        collector_name = platform.find('safe:familyName', self._ns).text + platform.find('safe:number', self._ns).text
        mode_id = platform.find('./safe:instrument'
                                '/safe:extension'
                                '/s1sarl1:instrumentMode'
                                '/s1sarl1:mode', self._ns).text
        if mode_id == 'SM':
            mode_type = 'STRIPMAP'
        else:
            # TOPSAR - closest SICD analog is Dynamic Stripmap
            mode_type = 'DYNAMIC STRIPMAP'
        collection_info = CollectionInfoType(Classification='UNCLASSIFIED',
                                             CollectorName=collector_name,
                                             CollectType='MONOSTATIC',
                                             RadarMode=RadarModeType(ModeID=mode_id, ModeType=mode_type))
        # ImageCreation
        processing = self._find('./metadataSection'
                                '/metadataObject[@ID="processing"]'
                                '/metadataWrap'
                                '/xmlData'
                                '/safe:processing')
        facility = processing.find('safe:facility', self._ns)
        software = facility.find('safe:software', self._ns)
        image_creation = ImageCreationType(
            Application='{name} {version}'.format(**software.attrib),
            DateTime=processing.attrib['stop'],
            Site='{name}, {site}, {country}'.format(**facility.attrib),
            Profile='sarpy {}'.format(__version__))
        # RadarCollection
        polarizations = self._findall('./metadataSection'
                                      '/metadataObject[@ID="generalProductInformation"]'
                                      '/metadataWrap'
                                      '/xmlData'
                                      '/s1sarl1:standAloneProductInformation'
                                      '/s1sarl1:transmitterReceiverPolarisation')
        # NOTE(review): index here is 0-based from enumerate - confirm against
        # the ChanParametersType index convention
        radar_collection = RadarCollectionType(RcvChannels=[
            ChanParametersType(TxRcvPolarization=self._parse_pol(pol.text), index=i)
            for i, pol in enumerate(polarizations)])
        return SICDType(CollectionInfo=collection_info, ImageCreation=image_creation, RadarCollection=radar_collection)
def _parse_product_sicd(self, product_file_name: str) -> Union[SICDType, List[SICDType]]:
"""
Parameters
----------
product_file_name : str
Returns
-------
SICDType|List[SICDType]
"""
DT_FMT = '%Y-%m-%dT%H:%M:%S.%f'
root_node = _parse_xml(product_file_name, without_ns=True)
burst_list = root_node.findall('./swathTiming/burstList/burst')
# parse the geolocation information - for SCP calculation
geo_grid_point_list = root_node.findall('./geolocationGrid/geolocationGridPointList/geolocationGridPoint')
geo_pixels = numpy.zeros((len(geo_grid_point_list), 2), dtype=numpy.float64)
geo_coords = numpy.zeros((len(geo_grid_point_list), 3), dtype=numpy.float64)
for i, grid_point in enumerate(geo_grid_point_list):
geo_pixels[i, :] = (float(grid_point.find('./pixel').text),
float(grid_point.find('./line').text)) # (row, col) order
geo_coords[i, :] = (float(grid_point.find('./latitude').text),
float(grid_point.find('./longitude').text),
float(grid_point.find('./height').text))
geo_coords = geodetic_to_ecf(geo_coords)
def get_center_frequency(): # type: () -> float
return float(root_node.find('./generalAnnotation/productInformation/radarFrequency').text)
def get_image_col_spacing_zdt(): # type: () -> float
# Image column spacing in zero doppler time (seconds)
# Sentinel-1 is always right-looking, so this should always be positive
return float(root_node.find('./imageAnnotation/imageInformation/azimuthTimeInterval').text)
def get_image_data(): # type: () -> ImageDataType
_pv = root_node.find('./imageAnnotation/imageInformation/pixelValue').text
if _pv == 'Complex':
pixel_type = 'RE16I_IM16I'
else:
# NB: we only handle SLC
raise ValueError('SLC data should be 16-bit complex, got pixelValue = {}.'.format(_pv))
if len(burst_list) > 0:
# should be TOPSAR
num_rows = int(root_node.find('./swathTiming/samplesPerBurst').text)
num_cols = int(root_node.find('./swathTiming/linesPerBurst').text)
else:
# STRIPMAP
# NB - these fields always contain the number of rows/cols in the entire tiff,
# even if there are multiple bursts
num_rows = int(root_node.find('./imageAnnotation/imageInformation/numberOfSamples').text)
num_cols = int(root_node.find('./imageAnnotation/imageInformation/numberOfLines').text)
# SCP pixel within single burst image is the same for all burst
return ImageDataType(PixelType=pixel_type,
NumRows=num_rows,
NumCols=num_cols,
FirstRow=0,
FirstCol=0,
FullImage=(num_rows, num_cols),
SCPPixel=(int((num_rows - 1)/2), int((num_cols - 1)/2)))
def get_common_grid(): # type: () -> GridType
center_frequency = get_center_frequency()
image_plane = 'SLANT' if root_node.find('./generalAnnotation/productInformation/projection').text == \
'Slant Range' else None
# get range processing node
range_proc = root_node.find('./imageAnnotation'
'/processingInformation'
'/swathProcParamsList'
'/swathProcParams'
'/rangeProcessing')
delta_tau_s = 1. / float(root_node.find('./generalAnnotation/productInformation/rangeSamplingRate').text)
row_window_name = range_proc.find('./windowType').text.upper()
row_params = None
if row_window_name == 'NONE':
row_window_name = 'UNIFORM'
elif row_window_name == 'HAMMING':
row_params = {'COEFFICIENT': range_proc.find('./windowCoefficient').text}
row = DirParamType(SS=(speed_of_light/2)*delta_tau_s,
Sgn=-1,
KCtr=2*center_frequency/speed_of_light,
ImpRespBW=2. * float(range_proc.find('./processingBandwidth').text) / speed_of_light,
DeltaKCOAPoly=Poly2DType(Coefs=[[0, ]]),
WgtType=WgtTypeType(WindowName=row_window_name, Parameters=row_params))
# get azimuth processing node
az_proc = root_node.find('./imageAnnotation'
'/processingInformation'
'/swathProcParamsList'
'/swathProcParams'
'/azimuthProcessing')
col_ss = float(root_node.find('./imageAnnotation/imageInformation/azimuthPixelSpacing').text)
dop_bw = float(az_proc.find('./processingBandwidth').text) # Doppler bandwidth
ss_zd_s = get_image_col_spacing_zdt()
col_window_name = az_proc.find('./windowType').text.upper()
col_params = None
if col_window_name == 'NONE':
col_window_name = 'UNIFORM'
elif col_window_name == 'HAMMING':
col_params = {'COEFFICIENT': az_proc.find('./windowCoefficient').text}
col = DirParamType(SS=col_ss,
Sgn=-1,
KCtr=0,
ImpRespBW=dop_bw*ss_zd_s/col_ss,
WgtType=WgtTypeType(WindowName=col_window_name, Parameters=col_params))
return GridType(ImagePlane=image_plane, Type='RGZERO', Row=row, Col=col)
def get_common_timeline(): # type: () -> TimelineType
prf = float(root_node.find('./generalAnnotation'
'/downlinkInformationList'
'/downlinkInformation'
'/prf').text)
# NB: TEnd and IPPEnd are nonsense values which will be corrected
return TimelineType(IPP=[IPPSetType(TStart=0, TEnd=0, IPPStart=0, IPPEnd=0, IPPPoly=(0, prf), index=0), ])
def get_common_radar_collection(): # type: () -> RadarCollectionType
radar_collection = out_sicd.RadarCollection.copy()
center_frequency = get_center_frequency()
min_frequency = center_frequency + \
float(root_node.find('./generalAnnotation/downlinkInformationList/downlinkInformation'
'/downlinkValues/txPulseStartFrequency').text)
tx_pulse_length = float(root_node.find('./generalAnnotation'
'/downlinkInformationList'
'/downlinkInformation'
'/downlinkValues'
'/txPulseLength').text)
tx_fm_rate = float(root_node.find('./generalAnnotation'
'/downlinkInformationList'
'/downlinkInformation'
'/downlinkValues'
'/txPulseRampRate').text)
band_width = tx_pulse_length*tx_fm_rate
pol = root_node.find('./adsHeader/polarisation').text
radar_collection.TxPolarization = pol[0]
radar_collection.TxFrequency = (min_frequency, min_frequency+band_width)
adc_sample_rate = float(root_node.find('./generalAnnotation'
'/productInformation'
'/rangeSamplingRate').text) # Raw not decimated
swl_list = root_node.findall('./generalAnnotation/downlinkInformationList/' +
'downlinkInformation/downlinkValues/swlList/swl')
radar_collection.Waveform = [
WaveformParametersType(index=j,
TxFreqStart=min_frequency,
TxPulseLength=tx_pulse_length,
TxFMRate=tx_fm_rate,
TxRFBandwidth=band_width,
RcvFMRate=0,
ADCSampleRate=adc_sample_rate,
RcvWindowLength=float(swl.find('./value').text))
for j, swl in enumerate(swl_list)]
return radar_collection
def get_image_formation(): # type: () -> ImageFormationType
st_beam_comp = 'GLOBAL' if out_sicd.CollectionInfo.RadarMode.ModeID[0] == 'S' else 'SV'
pol = self._parse_pol(root_node.find('./adsHeader/polarisation').text)
# which channel does this pol correspond to?
chan_indices = None
for element in out_sicd.RadarCollection.RcvChannels:
if element.TxRcvPolarization == pol:
chan_indices = [element.index, ]
return ImageFormationType(RcvChanProc=RcvChanProcType(NumChanProc=1,
PRFScaleFactor=1,
ChanIndices=chan_indices),
TxRcvPolarizationProc=pol,
TStartProc=0,
TxFrequencyProc=(
out_sicd.RadarCollection.TxFrequency.Min,
out_sicd.RadarCollection.TxFrequency.Max),
ImageFormAlgo='RMA',
ImageBeamComp='SV',
AzAutofocus='NO',
RgAutofocus='NO',
STBeamComp=st_beam_comp)
def get_rma(): # type: () -> RMAType
center_frequency = get_center_frequency()
tau_0 = float(root_node.find('./imageAnnotation/imageInformation/slantRangeTime').text)
delta_tau_s = 1. / float(root_node.find('./generalAnnotation/productInformation/rangeSamplingRate').text)
return RMAType(
RMAlgoType='RG_DOP',
INCA=INCAType(
FreqZero=center_frequency,
DopCentroidCOA=True,
R_CA_SCP=(0.5*speed_of_light)*(tau_0 + out_sicd.ImageData.SCPPixel.Row*delta_tau_s))
)
def get_slice(): # type: () -> str
slice_number = root_node.find('./imageAnnotation/imageInformation/sliceNumber')
if slice_number is None:
return '0'
else:
return slice_number.text
def get_swath(): # type: () -> str
return root_node.find('./adsHeader/swath').text
def get_collection_info(): # type: () -> CollectionInfoType
collection_info = out_sicd.CollectionInfo.copy()
collection_info.CollectorName = root_node.find('./adsHeader/missionId').text
collection_info.RadarMode.ModeID = root_node.find('./adsHeader/mode').text
t_slice = get_slice()
swath = get_swath()
collection_info.Parameters = {
'SLICE': t_slice, 'BURST': '1', 'SWATH': swath, 'ORBIT_SOURCE': 'SLC_INTERNAL'}
return collection_info
def get_state_vectors(start):
# type: (numpy.datetime64) -> Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, numpy.ndarray]
orbit_list = root_node.findall('./generalAnnotation/orbitList/orbit')
shp = (len(orbit_list), )
Ts = numpy.empty(shp, dtype=numpy.float64)
Xs = numpy.empty(shp, dtype=numpy.float64)
Ys = numpy.empty(shp, dtype=numpy.float64)
Zs = numpy.empty(shp, dtype=numpy.float64)
for j, orbit in enumerate(orbit_list):
Ts[j] = get_seconds(parse_timestring(orbit.find('./time').text), start, precision='us')
Xs[j] = float(orbit.find('./position/x').text)
Ys[j] = float(orbit.find('./position/y').text)
Zs[j] = float(orbit.find('./position/z').text)
return Ts, Xs, Ys, Zs
def get_doppler_estimates(start):
# type: (numpy.datetime64) -> Tuple[numpy.ndarray, numpy.ndarray, List[numpy.ndarray]]
dc_estimate_list = root_node.findall('./dopplerCentroid/dcEstimateList/dcEstimate')
shp = (len(dc_estimate_list), )
dc_az_time = numpy.empty(shp, dtype=numpy.float64)
dc_t0 = numpy.empty(shp, dtype=numpy.float64)
data_dc_poly = []
for j, dc_estimate in enumerate(dc_estimate_list):
dc_az_time[j] = get_seconds(parse_timestring(dc_estimate.find('./azimuthTime').text),
start, precision='us')
dc_t0[j] = float(dc_estimate.find('./t0').text)
data_dc_poly.append(numpy.fromstring(dc_estimate.find('./dataDcPolynomial').text, sep=' '))
return dc_az_time, dc_t0, data_dc_poly
def get_azimuth_fm_estimates(start):
# type: (numpy.datetime64) -> Tuple[numpy.ndarray, numpy.ndarray, List[numpy.ndarray]]
azimuth_fm_rate_list = root_node.findall('./generalAnnotation/azimuthFmRateList/azimuthFmRate')
shp = (len(azimuth_fm_rate_list), )
az_t = numpy.empty(shp, dtype=numpy.float64)
az_t0 = numpy.empty(shp, dtype=numpy.float64)
k_a_poly = []
for j, az_fm_rate in enumerate(azimuth_fm_rate_list):
az_t[j] = get_seconds(parse_timestring(az_fm_rate.find('./azimuthTime').text),
start, precision='us')
az_t0[j] = float(az_fm_rate.find('./t0').text)
if az_fm_rate.find('c0') is not None:
# old style annotation xml file
k_a_poly.append(numpy.array([float(az_fm_rate.find('./c0').text),
float(az_fm_rate.find('./c1').text),
float(az_fm_rate.find('./c2').text)], dtype=numpy.float64))
else:
k_a_poly.append(numpy.fromstring(az_fm_rate.find('./azimuthFmRatePolynomial').text, sep=' '))
return az_t, az_t0, k_a_poly
def set_core_name(sicd, start_dt, burst_num):
# type: (SICDType, datetime, int) -> None
t_slice = int(get_slice())
swath = get_swath()
sicd.CollectionInfo.CoreName = '{0:s}{1:s}{2:s}_{3:02d}_{4:s}_{5:02d}'.format(
start_dt.strftime('%d%b%y'),
root_node.find('./adsHeader/missionId').text,
root_node.find('./adsHeader/missionDataTakeId').text,
t_slice,
swath,
burst_num+1)
sicd.CollectionInfo.Parameters['BURST'] = '{0:d}'.format(burst_num+1)
def set_timeline(sicd, start, duration):
# type: (SICDType, numpy.datetime64, float) -> None
prf = float(root_node.find('./generalAnnotation/downlinkInformationList/downlinkInformation/prf').text)
timeline = sicd.Timeline
timeline.CollectStart = start
timeline.CollectDuration = duration
timeline.IPP[0].TEnd = duration
timeline.IPP[0].IPPEnd = round(timeline.CollectDuration*prf) - 1
sicd.ImageFormation.TEndProc = duration
def set_position(sicd, start):
    # type: (SICDType, numpy.datetime64) -> None
    """
    Fit the ARP position polynomial from the state vectors, with times
    expressed in seconds relative to `start`.
    """

    Ts, Xs, Ys, Zs = get_state_vectors(start)
    # cannot fit a higher polynomial order than the state vector count supports
    poly_order = min(5, Ts.size-1)
    P_X = polynomial.polyfit(Ts, Xs, poly_order)
    P_Y = polynomial.polyfit(Ts, Ys, poly_order)
    P_Z = polynomial.polyfit(Ts, Zs, poly_order)
    sicd.Position = PositionType(ARPPoly=XYZPolyType(X=P_X, Y=P_Y, Z=P_Z))
def update_rma_and_grid(sicd, first_line_relative_start, start, return_time_dets=False):
    # type: (SICDType, Union[float, int], numpy.datetime64, bool) -> Union[None, Tuple[float, float]]
    """
    Populate the RMA/INCA and Grid polynomial fields (TimeCAPoly, DRateSFPoly,
    DeltaKCOAPoly, DopCentroidPoly, TimeCOAPoly) from the annotation data.

    If `return_time_dets` is True, return the (min, max) of the sampled
    time-of-closest-approach values, used to establish burst timeline extents.
    """

    center_frequency = get_center_frequency()
    # set TimeCAPoly
    ss_zd_s = get_image_col_spacing_zdt()
    eta_mid = ss_zd_s * float(out_sicd.ImageData.SCPPixel.Col)
    sicd.RMA.INCA.TimeCAPoly = Poly1DType(
        Coefs=[first_line_relative_start+eta_mid, ss_zd_s/out_sicd.Grid.Col.SS])
    range_time_scp = sicd.RMA.INCA.R_CA_SCP*2/speed_of_light
    # get velocity polynomial
    vel_poly = sicd.Position.ARPPoly.derivative(1, return_poly=True)
    # We pick a single velocity magnitude at closest approach to represent
    # the entire burst. This is valid, since the magnitude of the velocity
    # changes very little.
    vm_ca = numpy.linalg.norm(vel_poly(sicd.RMA.INCA.TimeCAPoly[0]))
    az_rate_times, az_rate_t0, k_a_poly = get_azimuth_fm_estimates(start)
    # find the closest fm rate polynomial
    az_rate_poly_ind = int(numpy.argmin(numpy.abs(az_rate_times - sicd.RMA.INCA.TimeCAPoly[0])))
    az_rate_poly = Poly1DType(Coefs=k_a_poly[az_rate_poly_ind])
    # recenter the polynomial at the SCP range time and rescale to a function of range
    dr_ca_poly = az_rate_poly.shift(t_0=az_rate_t0[az_rate_poly_ind] - range_time_scp,
                                    alpha=2/speed_of_light,
                                    return_poly=False)
    r_ca = numpy.array([sicd.RMA.INCA.R_CA_SCP, 1], dtype=numpy.float64)
    sicd.RMA.INCA.DRateSFPoly = numpy.reshape(
        -numpy.convolve(dr_ca_poly, r_ca)*(speed_of_light/(2*center_frequency*vm_ca*vm_ca)),
        (-1, 1))
    # Doppler Centroid
    dc_est_times, dc_t0, data_dc_poly = get_doppler_estimates(start)
    # find the closest doppler centroid polynomial
    dc_poly_ind = int(numpy.argmin(numpy.abs(dc_est_times - sicd.RMA.INCA.TimeCAPoly[0])))
    # we are going to move the respective polynomial from reference point as dc_t0 to
    # reference point at SCP time.
    dc_poly = Poly1DType(Coefs=data_dc_poly[dc_poly_ind])

    # Fit DeltaKCOAPoly, DopCentroidPoly, and TimeCOAPoly from data
    tau_0 = float(root_node.find('./imageAnnotation/imageInformation/slantRangeTime').text)
    delta_tau_s = 1./float(root_node.find('./generalAnnotation/productInformation/rangeSamplingRate').text)
    image_data = sicd.ImageData
    grid = sicd.Grid
    inca = sicd.RMA.INCA
    # common use for the fitting efforts
    poly_order = 2
    grid_samples = poly_order + 4
    cols = numpy.linspace(0, image_data.NumCols - 1, grid_samples, dtype=numpy.int64)
    rows = numpy.linspace(0, image_data.NumRows - 1, grid_samples, dtype=numpy.int64)
    coords_az = get_im_physical_coords(cols, grid, image_data, 'col')
    coords_rg = get_im_physical_coords(rows, grid, image_data, 'row')
    coords_az_2d, coords_rg_2d = numpy.meshgrid(coords_az, coords_rg)
    # fit DeltaKCOAPoly
    tau = tau_0 + delta_tau_s*rows
    # Azimuth steering rate (constant, not dependent on burst or range)
    k_psi = numpy.deg2rad(float(
        root_node.find('./generalAnnotation/productInformation/azimuthSteeringRate').text))
    k_s = vm_ca*center_frequency*k_psi*2/speed_of_light
    k_a = az_rate_poly(tau - az_rate_t0[az_rate_poly_ind])
    k_t = (k_a*k_s)/(k_a-k_s)
    f_eta_c = dc_poly(tau - dc_t0[dc_poly_ind])
    eta = (cols - image_data.SCPPixel.Col)*ss_zd_s
    eta_c = -f_eta_c/k_a  # Beam center crossing time (TimeCOA)
    eta_ref = eta_c - eta_c[0]
    eta_2d, eta_ref_2d = numpy.meshgrid(eta, eta_ref)
    eta_arg = eta_2d - eta_ref_2d
    # combined deramp + demodulation phase, sampled on the coordinate grid
    deramp_phase = 0.5*k_t[:, numpy.newaxis]*eta_arg*eta_arg
    demod_phase = eta_arg*f_eta_c[:, numpy.newaxis]
    total_phase = deramp_phase + demod_phase
    phase, residuals, rank, sing_values = two_dim_poly_fit(
        coords_rg_2d, coords_az_2d, total_phase,
        x_order=poly_order, y_order=poly_order, x_scale=1e-3, y_scale=1e-3, rcond=1e-35)
    logger.info(
        'The phase polynomial fit details:\n\t'
        'root mean square residuals = {}\n\t'
        'rank = {}\n\t'
        'singular values = {}'.format(residuals, rank, sing_values))
    # DeltaKCOAPoly is derivative of phase in azimuth/Col direction
    delta_kcoa_poly = polynomial.polyder(phase, axis=1)
    grid.Col.DeltaKCOAPoly = Poly2DType(Coefs=delta_kcoa_poly)
    # derive the DopCentroidPoly directly
    dop_centroid_poly = delta_kcoa_poly*grid.Col.SS/ss_zd_s
    inca.DopCentroidPoly = Poly2DType(Coefs=dop_centroid_poly)
    # complete deriving the TimeCOAPoly, which depends on the DOPCentroidPoly
    time_ca_sampled = inca.TimeCAPoly(coords_az_2d)
    doppler_rate_sampled = polynomial.polyval(coords_rg_2d, dr_ca_poly)
    dop_centroid_sampled = inca.DopCentroidPoly(coords_rg_2d, coords_az_2d)
    time_coa_sampled = time_ca_sampled + dop_centroid_sampled / doppler_rate_sampled
    time_coa_poly, residuals, rank, sing_values = two_dim_poly_fit(
        coords_rg_2d, coords_az_2d, time_coa_sampled,
        x_order=poly_order, y_order=poly_order, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
    logger.info(
        'The TimeCOAPoly fit details:\n\t'
        'root mean square residuals = {}\n\t'
        'rank = {}\n\t'
        'singular values = {}'.format(residuals, rank, sing_values))
    grid.TimeCOAPoly = Poly2DType(Coefs=time_coa_poly)
    if return_time_dets:
        return time_coa_sampled.min(), time_coa_sampled.max()
def adjust_time(sicd, time_offset):
    # type: (SICDType, float) -> None
    """
    Shift the time-dependent sicd fields so that `time_offset` becomes the
    new time origin (used to rebase each burst onto its own start time).
    """

    # adjust TimeCOAPoly
    sicd.Grid.TimeCOAPoly.Coefs[0, 0] -= time_offset
    # adjust TimeCAPoly
    sicd.RMA.INCA.TimeCAPoly.Coefs[0] -= time_offset
    # shift ARPPoly
    sicd.Position.ARPPoly = sicd.Position.ARPPoly.shift(-time_offset, return_poly=True)
def update_geodata(sicd):  # type: (SICDType) -> None
    """Refine the SCP location by projecting the SCP pixel to the HAE surface."""
    scp_pixel = [sicd.ImageData.SCPPixel.Row, sicd.ImageData.SCPPixel.Col]
    ecf = sicd.project_image_to_ground(scp_pixel, projection_type='HAE')
    sicd.update_scp(ecf, coord_system='ECF')
def get_scps(count):
    """
    Interpolate the geolocation grid annotation at the SCP pixel of each of
    `count` bursts. Returns a `(count, 3)` array of coordinates.
    """

    # SCPPixel - points at which to interpolate geo_pixels & geo_coords data
    scp_pixels = numpy.zeros((count, 2), dtype=numpy.float64)
    scp_pixels[:, 0] = int((out_sicd.ImageData.NumRows - 1)/2.)
    # bursts are stacked along the column dimension of the measurement file
    scp_pixels[:, 1] = int((out_sicd.ImageData.NumCols - 1)/2.) + \
        out_sicd.ImageData.NumCols*(numpy.arange(count, dtype=numpy.float64))
    scps = numpy.zeros((count, 3), dtype=numpy.float64)
    for j in range(3):
        scps[:, j] = griddata(geo_pixels, geo_coords[:, j], scp_pixels)
    return scps
def finalize_stripmap():  # type: () -> SICDType
    """Complete the common sicd for the single-image stripmap case."""
    # out_sicd is the one that we return, just complete it
    # set preliminary geodata (required for projection)
    scp = get_scps(1)
    out_sicd.GeoData = GeoDataType(SCP=SCPType(ECF=scp[0, :]))  # EarthModel & LLH are implicitly set
    # NB: SCPPixel is already set to the correct thing
    im_dat = out_sicd.ImageData
    im_dat.ValidData = (
        (0, 0), (0, im_dat.NumCols-1), (im_dat.NumRows-1, im_dat.NumCols-1), (im_dat.NumRows-1, 0))
    start_dt = datetime.strptime(root_node.find('./generalAnnotation'
                                                '/downlinkInformationList'
                                                '/downlinkInformation'
                                                '/firstLineSensingTime').text, DT_FMT)
    start = numpy.datetime64(start_dt, 'us')
    stop = parse_timestring(root_node.find('./generalAnnotation'
                                           '/downlinkInformationList'
                                           '/downlinkInformation'
                                           '/lastLineSensingTime').text)
    set_core_name(out_sicd, start_dt, 0)
    set_timeline(out_sicd, start, get_seconds(stop, start, precision='us'))
    set_position(out_sicd, start)

    azimuth_time_first_line = parse_timestring(
        root_node.find('./imageAnnotation/imageInformation/productFirstLineUtcTime').text)
    first_line_relative_start = get_seconds(azimuth_time_first_line, start, precision='us')
    update_rma_and_grid(out_sicd, first_line_relative_start, start)
    update_geodata(out_sicd)
    return out_sicd
def finalize_bursts():  # type: () -> List[SICDType]
    """Complete one sicd per burst for the TOPSAR case."""
    # we will have one sicd per burst.
    sicds = []
    scps = get_scps(len(burst_list))
    for j, burst in enumerate(burst_list):
        t_sicd = out_sicd.copy()
        # set preliminary geodata (required for projection)
        t_sicd.GeoData = GeoDataType(SCP=SCPType(ECF=scps[j, :]))  # EarthModel & LLH are implicitly set
        # determine the valid data extent from the first/last valid sample annotations
        xml_first_cols = numpy.fromstring(burst.find('./firstValidSample').text, sep=' ', dtype=numpy.int64)
        xml_last_cols = numpy.fromstring(burst.find('./lastValidSample').text, sep=' ', dtype=numpy.int64)
        valid = (xml_first_cols >= 0) & (xml_last_cols >= 0)
        valid_cols = numpy.arange(xml_first_cols.size, dtype=numpy.int64)[valid]
        first_row = int(numpy.min(xml_first_cols[valid]))
        last_row = int(numpy.max(xml_last_cols[valid]))
        first_col = valid_cols[0]
        last_col = valid_cols[-1]
        t_sicd.ImageData.ValidData = (
            (first_row, first_col), (first_row, last_col), (last_row, last_col), (last_row, first_col))

        # This is the first and last zero doppler times of the columns in the burst.
        # Not really CollectStart and CollectDuration in SICD (first last pulse time)
        start_dt = datetime.strptime(burst.find('./azimuthTime').text, DT_FMT)
        start = numpy.datetime64(start_dt, 'us')
        set_core_name(t_sicd, start_dt, j)
        set_position(t_sicd, start)

        early, late = update_rma_and_grid(t_sicd, 0, start, return_time_dets=True)
        new_start = start + numpy.timedelta64(numpy.int64(early * 1e6), 'us')
        duration = late - early
        set_timeline(t_sicd, new_start, duration)

        # adjust my time offset
        adjust_time(t_sicd, early)
        update_geodata(t_sicd)
        sicds.append(t_sicd)
    return sicds
######
# create a common sicd with shared basic information here
out_sicd = self._base_sicd.copy()
out_sicd.ImageData = get_image_data()
out_sicd.Grid = get_common_grid()
out_sicd.Timeline = get_common_timeline()
out_sicd.RadarCollection = get_common_radar_collection()
out_sicd.ImageFormation = get_image_formation()
out_sicd.RMA = get_rma()
out_sicd.CollectionInfo = get_collection_info()

######
# consider the burst situation, and split into the appropriate sicd collection
if len(burst_list) > 0:
    # TOPSAR collect - one sicd per burst
    return finalize_bursts()
else:
    # STRIPMAP collect - a single sicd
    return finalize_stripmap()
def _refine_using_calibration(self, cal_file_name: str, sicds: Union[SICDType, List[SICDType]]) -> None:
    """
    Refine the radiometric (Sigma/Beta/Gamma ZeroSFPoly) fields of the sicd(s)
    using the calibration annotation file.

    Parameters
    ----------
    cal_file_name : str
    sicds : SICDType|List[SICDType]

    Returns
    -------
    None
    """

    # do not use before Sentinel baseline processing calibration update on 25 Nov 2015.
    if self._base_sicd.ImageCreation.DateTime < numpy.datetime64('2015-11-25'):
        return
    root_node = _parse_xml(cal_file_name, without_ns=True)
    if isinstance(sicds, SICDType):
        sicds = [sicds, ]

    def update_sicd(sicd, index):  # type: (SICDType, int) -> None
        # NB: closes over the line/pixel/sigma/beta/gamma arrays parsed below.
        # NB: in the deprecated version, there is a check if beta is constant,
        # in which case constant values are used for beta/sigma/gamma.
        # This has been removed.
        valid_lines = (line >= index*lines_per_burst) & (line < (index+1)*lines_per_burst)
        valid_count = numpy.sum(valid_lines)
        if valid_count == 0:
            # this burst contained no useful calibration data
            return

        # image coordinates (meters from SCP) of the calibration samples
        coords_rg = (pixel[valid_lines] + sicd.ImageData.FirstRow - sicd.ImageData.SCPPixel.Row)*sicd.Grid.Row.SS
        coords_az = (line[valid_lines] + sicd.ImageData.FirstCol - sicd.ImageData.SCPPixel.Col)*sicd.Grid.Col.SS

        # NB: coords_rg = (valid_count, M) and coords_az = (valid_count, )
        coords_az = numpy.repeat(coords_az, pixel.shape[1])
        if valid_count > 1:
            coords_az = coords_az.reshape((valid_count, -1))

        def create_poly(arr, poly_order=2):
            # fit one-dimensional polynomials in range and azimuth separately,
            # and combine via an outer product
            rg_poly = polynomial.polyfit(coords_rg.flatten(), arr.flatten(), poly_order)
            az_poly = polynomial.polyfit(coords_az.flatten(), arr.flatten(), poly_order)
            return Poly2DType(Coefs=numpy.outer(az_poly/numpy.max(az_poly), rg_poly))

        if sicd.Radiometric is None:
            sicd.Radiometric = RadiometricType()
        sicd.Radiometric.SigmaZeroSFPoly = create_poly(sigma[valid_lines, :], poly_order=2)
        sicd.Radiometric.BetaZeroSFPoly = create_poly(beta[valid_lines, :], poly_order=2)
        sicd.Radiometric.GammaZeroSFPoly = create_poly(gamma[valid_lines, :], poly_order=2)
        return

    # parse the calibration vectors
    cal_vector_list = root_node.findall('./calibrationVectorList/calibrationVector')
    line = numpy.empty((len(cal_vector_list), ), dtype=numpy.float64)
    pixel, sigma, beta, gamma = [], [], [], []
    for i, cal_vector in enumerate(cal_vector_list):
        line[i] = float(cal_vector.find('./line').text)
        pixel.append(numpy.fromstring(cal_vector.find('./pixel').text, sep=' ', dtype=numpy.float64))
        sigma.append(numpy.fromstring(cal_vector.find('./sigmaNought').text, sep=' ', dtype=numpy.float64))
        beta.append(numpy.fromstring(cal_vector.find('./betaNought').text, sep=' ', dtype=numpy.float64))
        gamma.append(numpy.fromstring(cal_vector.find('./gamma').text, sep=' ', dtype=numpy.float64))
    lines_per_burst = sicds[0].ImageData.NumCols
    pixel = numpy.array(pixel)
    sigma = numpy.array(sigma)
    beta = numpy.array(beta)
    gamma = numpy.array(gamma)
    # adjust sentinel values for sicd convention (square and invert)
    sigma = 1./(sigma*sigma)
    beta = 1./(beta*beta)
    gamma = 1./(gamma*gamma)
    for ind, sic in enumerate(sicds):
        update_sicd(sic, ind)
def _refine_using_noise(self, noise_file_name: str, sicds: Union[SICDType, List[SICDType]]) -> None:
    """
    Refine the sicd NoiseLevel/NoisePoly fields using the noise annotation file.

    Parameters
    ----------
    noise_file_name : str
    sicds : SICDType|List[SICDType]

    Returns
    -------
    None
    """

    # do not use before Sentinel baseline processing calibration update on 25 Nov 2015.
    if self._base_sicd.ImageCreation.DateTime < numpy.datetime64('2015-11-25'):
        return
    root_node = _parse_xml(noise_file_name, without_ns=True)
    if isinstance(sicds, SICDType):
        sicds = [sicds, ]
    mode_id = sicds[0].CollectionInfo.RadarMode.ModeID
    lines_per_burst = sicds[0].ImageData.NumCols
    range_size_pixels = sicds[0].ImageData.NumRows

    def extract_vector(stem):
        # type: (str) -> Tuple[List[numpy.ndarray], List[Union[None, numpy.ndarray]], List[numpy.ndarray]]
        """Parse the (line, pixel, noise lut) triplets for the given vector list stem."""
        lines = []
        pixels = []
        noises = []
        noise_vector_list = root_node.findall('./{0:s}VectorList/{0:s}Vector'.format(stem))
        for i, noise_vector in enumerate(noise_vector_list):
            line = numpy.fromstring(noise_vector.find('./line').text, dtype=numpy.int64, sep=' ')
            # some datasets have noise vectors for negative lines - ignore these
            if numpy.all(line < 0):
                continue
            pixel_node = noise_vector.find('./pixel')  # does not exist for azimuth noise
            if pixel_node is not None:
                pixel = numpy.fromstring(pixel_node.text, dtype=numpy.int64, sep=' ')
            else:
                pixel = None
            noise = numpy.fromstring(noise_vector.find('./{}Lut'.format(stem)).text, dtype=numpy.float64, sep=' ')
            # some datasets do not have any noise data (all 0's) - skipping these will throw things into disarray
            if not numpy.all(noise == 0):
                # convert noise to dB - what about -inf values?
                noise = 10*numpy.log10(noise)
            assert isinstance(noise, numpy.ndarray)
            # do some validity checks
            if (mode_id == 'IW') and numpy.any((line % lines_per_burst) != 0) and (i != len(noise_vector_list)-1):
                # NB: the final burst has different timing
                logger.error(
                    'Noise file should have one lut per burst, but more are present.\n\t'
                    'This may lead to confusion.')
            if (pixel is not None) and (pixel[-1] > range_size_pixels):
                logger.error(
                    'Noise file has more pixels in LUT than range size.\n\t'
                    'This may lead to confusion.')
            lines.append(line)
            pixels.append(pixel)
            noises.append(noise)
        return lines, pixels, noises

    def populate_noise(sicd, index):  # type: (SICDType, int) -> None
        """Fit and set the NoisePoly for the sicd of the given burst index."""
        # NB: the default order was 7 before refactoring...that seems excessive.
        rg_poly_order = min(5, range_pixel[0].size-1)
        if sicd.CollectionInfo.RadarMode.ModeID[0] == 'S':
            # STRIPMAP - all LUTs apply
            # Treat range and azimuth polynomial components as fully independent
            az_poly_order = min(4, len(range_line) - 1)
            # NB: the previous rammed together two one-dimensional polys, but
            # we should do a 2-d fit.
            coords_rg = (range_pixel[0] + sicd.ImageData.FirstRow -
                         sicd.ImageData.SCPPixel.Row)*sicd.Grid.Row.SS
            coords_az = (range_line + sicd.ImageData.FirstCol - sicd.ImageData.SCPPixel.Col)*sicd.Grid.Col.SS
            coords_az_2d, coords_rg_2d = numpy.meshgrid(coords_az, coords_rg)
            noise_poly, res, rank, sing_vals = two_dim_poly_fit(
                coords_rg_2d, coords_az_2d, numpy.array(range_noise),
                x_order=rg_poly_order, y_order=az_poly_order, x_scale=1e-3, y_scale=1e-3, rcond=1e-40)
            logger.info(
                'NoisePoly fit details:\n\t'
                'root mean square residuals = {}\n\t'
                'rank = {}\n\t'
                'singular values = {}'.format(res, rank, sing_vals))
        else:
            # TOPSAR has single LUT per burst
            # Treat range and azimuth polynomial components as weakly independent
            if index >= len(range_pixel):
                logger.error(
                    'We have run out of noise information.\n\t'
                    'Current index = {}, length of noise array = {}.\n\t'
                    'The previous noise information will be used to populate the NoisePoly.'.format(
                        index, len(range_pixel)))
            rp_array = range_pixel[min(index, len(range_pixel)-1)]
            rn_array = range_noise[min(index, len(range_pixel)-1)]
            coords_rg = (rp_array + sicd.ImageData.FirstRow -
                         sicd.ImageData.SCPPixel.Row)*sicd.Grid.Row.SS
            rg_poly = numpy.array(
                polynomial.polyfit(coords_rg, rn_array, rg_poly_order))
            az_poly = None
            if azimuth_noise is not None:
                line0 = lines_per_burst*index
                coords_az = (azimuth_line[0] - line0 -
                             sicd.ImageData.SCPPixel.Col)*sicd.Grid.Col.SS
                valid_lines = (azimuth_line[0] >= line0) & (azimuth_line[0] < line0 + lines_per_burst)
                valid_count = numpy.sum(valid_lines)
                if valid_count > 1:
                    az_poly_order = min(2, valid_lines.size-1)
                    az_poly = numpy.array(
                        polynomial.polyfit(coords_az[valid_lines], azimuth_noise[valid_lines], az_poly_order))
            if az_poly is not None:
                # cross terms are zero - the range/azimuth dependence is treated separably
                noise_poly = numpy.zeros((rg_poly.size, az_poly.size), dtype=numpy.float64)
                noise_poly[:, 0] += rg_poly
                noise_poly[0, :] += az_poly
            else:
                noise_poly = numpy.reshape(rg_poly, (-1, 1))

        if sicd.Radiometric is None:
            sicd.Radiometric = RadiometricType()
        sicd.Radiometric.NoiseLevel = NoiseLevelType_(NoiseLevelType='ABSOLUTE',
                                                      NoisePoly=Poly2DType(Coefs=noise_poly))

    # extract our noise vectors (used in populate_noise through implicit reference)
    if root_node.find('./noiseVectorList') is not None:
        # probably prior to March 2018
        range_line, range_pixel, range_noise = extract_vector('noise')
        azimuth_line, azimuth_pixel, azimuth_noise = None, None, None
    else:
        # noiseRange and noiseAzimuth fields began in March 2018
        range_line, range_pixel, range_noise = extract_vector('noiseRange')
        azimuth_line, azimuth_pixel, azimuth_noise = extract_vector('noiseAzimuth')
        azimuth_line = numpy.concatenate(azimuth_line, axis=0)
    # NB: range_line is actually a list of 1 element arrays - probably should have been parsed better
    range_line = numpy.concatenate(range_line, axis=0)
    for ind, sic in enumerate(sicds):
        populate_noise(sic, ind)
@staticmethod
def _derive(sicds: Union[SICDType, List[SICDType]]) -> None:
    """
    Populate the derived sicd fields and the RNIIRS estimate for each of the
    supplied sicd structure(s), leaving any explicitly set values alone.
    """

    entries = [sicds] if isinstance(sicds, SICDType) else sicds
    for entry in entries:
        entry.derive()
        entry.populate_rniirs(override=False)
def get_sicd_collection(self) -> List[Tuple[str, Union[SICDType, List[SICDType]]]]:
    """
    Get the data file location(s) and corresponding sicd collection for each file.

    Returns
    -------
    List[Tuple[str, SICDType|List[SICDType]]]
        list of the form `(file, sicds)`. Here `file` is the data filename (tiff).
        `sicds` is either a single `SICDType` (STRIPMAP collect),
        or a list of `SICDType` (TOPSAR with multiple bursts).
    """

    out = []
    for entry in self._get_file_sets():
        # get the sicd collection for each product
        sicds = self._parse_product_sicd(entry['product'])
        # refine our sicds(s) using the calibration data (if sensible)
        self._refine_using_calibration(entry['calibration'], sicds)
        # refine our sicd(s) using the noise data (if sensible)
        self._refine_using_noise(entry['noise'], sicds)
        # populate our derived fields for the sicds
        self._derive(sicds)
        out.append((entry['data'], sicds))
    return out
class SentinelReader(SICDTypeReader):
    """
    A Sentinel-1 SLC file package reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_sentinel_details', '_parent_segments')

    def __init__(self, sentinel_details: Union[str, SentinelDetails]):
        """
        Parameters
        ----------
        sentinel_details : str|SentinelDetails
            The package directory (or manifest.safe path), or a parsed details object.
        """

        if isinstance(sentinel_details, str):
            sentinel_details = SentinelDetails(sentinel_details)
        if not isinstance(sentinel_details, SentinelDetails):
            raise TypeError('Input argument for SentinelReader must be a file name or SentinelReader object.')
        self._sentinel_details = sentinel_details  # type: SentinelDetails

        reverse_axes = None
        transpose_axes = (1, 0, 2)  # True for all Sentinel-1 data

        parent_segments = []
        segments = []
        sicd_collection = self._sentinel_details.get_sicd_collection()
        sicd_collection_out = []
        for data_file, sicds in sicd_collection:
            tiff_details = TiffDetails(data_file)
            if isinstance(sicds, SICDType):
                # a single (stripmap) image in this measurement file
                segments.append(
                    NativeTiffDataSegment(
                        tiff_details, reverse_axes=reverse_axes, transpose_axes=transpose_axes))
                sicd_collection_out.append(sicds)
            elif len(sicds) == 1:
                segments.append(
                    NativeTiffDataSegment(
                        tiff_details, reverse_axes=reverse_axes, transpose_axes=transpose_axes))
                sicd_collection_out.append(sicds[0])
            else:
                # multiple bursts stacked along the column dimension of one
                # measurement file - present each burst as a column subset
                p_segment = NativeTiffDataSegment(
                    tiff_details, reverse_axes=reverse_axes, transpose_axes=transpose_axes)
                parent_segments.append(p_segment)
                begin_col = 0
                for sicd in sicds:
                    end_col = begin_col + sicd.ImageData.NumCols
                    subet_def = (slice(0, tiff_details.tags['ImageWidth'], 1), slice(begin_col, end_col, 1))
                    segments.append(SubsetSegment(p_segment, subet_def, 'formatted', close_parent=False))
                    begin_col = end_col
                    sicd_collection_out.append(sicd)

        self._parent_segments = parent_segments  # type: List[NativeTiffDataSegment]
        SICDTypeReader.__init__(self, segments, sicd_collection_out, close_segments=True)
        self._check_sizes()

    @property
    def sentinel_details(self) -> SentinelDetails:
        """
        SentinelDetails: The sentinel details object.
        """

        return self._sentinel_details

    @property
    def file_name(self) -> str:
        """str: The package directory name."""
        return self.sentinel_details.directory_name

    def close(self):
        BaseReader.close(self)
        # also close any shared parent segments not owned by the subset segments
        if hasattr(self, '_parent_segments') and self._parent_segments is not None:
            # noinspection PyBroadException
            try:
                while len(self._parent_segments) > 0:
                    segment = self._parent_segments.pop()
                    segment.close()
            except Exception:
                pass
            self._parent_segments = None
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: str) -> Optional[SentinelReader]:
    """
    Tests whether a given file_name corresponds to a Sentinel file. Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    SentinelReader|None
        `SentinelReader` instance if Sentinel-1 file, `None` otherwise
    """

    # file-like objects are not supported for the Sentinel-1 package format
    if not is_file_like(file_name):
        try:
            details = SentinelDetails(file_name)
            logger.info('Path {} is determined to be or contain a Sentinel-1 manifest.safe file.'.format(file_name))
            return SentinelReader(details)
        except (SarpyIOError, AttributeError, SyntaxError, ElementTree.ParseError):
            pass
    return None
| 56,354 | 48.915855 | 120 | py |
sarpy | sarpy-master/sarpy/io/complex/sio.py | """
Functionality for reading SIO data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = ("Thomas McCullough", "Wade Schwartzkopf")
import os
import sys
import struct
import logging
import re
from typing import Union, Dict, Tuple, Optional, BinaryIO
import numpy
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.blocks import RowColType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType, FullImageType
from sarpy.io.complex.sicd import AmpLookupFunction
from sarpy.io.general.base import BaseWriter, SarpyIOError
from sarpy.io.general.data_segment import NumpyArraySegment, NumpyMemmapSegment
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.utils import is_file_like, is_real_file
from sarpy.io.xml.base import parse_xml_from_string
logger = logging.getLogger(__name__)
_unsupported_pix_size = 'Got unsupported sio data type/pixel size = `{}`'
###########
# parser and interpreter for hdf5 attributes
class SIODetails(object):
__slots__ = (
'_file_name', '_magic_number', '_head', '_user_data', '_data_offset',
'_caspr_data', '_reverse_axes', '_transpose_axes', '_sicd')
# NB: there are really just two types of SIO file (with user_data and without),
# with endian-ness layered on top
ENDIAN = {
0xFF017FFE: '>', 0xFE7F01FF: '<', # no user data
0xFF027FFD: '>', 0xFD7F02FF: '<'} # with user data
def __init__(self, file_name: str):
self._file_name = file_name
self._user_data = None
self._data_offset = 20
self._caspr_data = None
self._reverse_axes = None
self._transpose_axes = None
self._sicd = None
if not os.path.isfile(file_name):
raise SarpyIOError('Path {} is not a file'.format(file_name))
with open(file_name, 'rb') as fi:
if sys.byteorder == 'little':
self._magic_number = struct.unpack('>I', fi.read(4))[0]
else:
self._magic_number = struct.unpack('<I', fi.read(4))[0]
endian = self.ENDIAN.get(self._magic_number, None)
if endian is None:
raise SarpyIOError(
'File {} is not an SIO file. Got magic number {}'.format(file_name, self._magic_number))
# reader basic header - (rows, columns, data_type, pixel_size)?
init_head = numpy.array(struct.unpack('{}4I'.format(endian), fi.read(16)), dtype=numpy.uint64)
if not (numpy.all(init_head[2:] == numpy.array([13, 8]))
or numpy.all(init_head[2:] == numpy.array([12, 4]))
or numpy.all(init_head[2:] == numpy.array([11, 2]))):
raise SarpyIOError(_unsupported_pix_size.format(init_head[2:]))
self._head = init_head
@property
def file_name(self) -> str:
return self._file_name
@property
def data_offset(self): # type: () -> int
return self._data_offset
@property
def raw_data_size(self) -> Optional[Tuple[int, ...]]:
if self._head is None:
return None
rows, cols = self._head[:2]
return int(rows), int(cols), 2
@property
def formatted_data_size(self) -> Union[None, Tuple[int, ...]]:
if self._head is None:
return None
rows, cols = self._head[:2]
if self._transpose_axes is not None:
return int(cols), int(rows)
else:
return int(rows), int(cols)
@property
def raw_data_type(self): # type: () -> Union[None, str]
# head[2] = (2X = vector, 1X = complex/scalar, 0X = real/scalar), where
# X = (1 = unsigned int, 2 = signed int, 3 = float, (4=double? I would guess)
# head[3] = pixel size in bytes (2*bit depth for complex, or band*bit depth for vector)
pixel_size = self._head[3]
endian = self.ENDIAN[self._magic_number]
# we require (for sicd) that either head[2:] is [13, 8], [12, 4], [11, 2]
if pixel_size == 8:
return '{}f4'.format(endian)
elif pixel_size == 4:
return '{}i2'.format(endian)
elif pixel_size == 2:
return '{}u1'.format(endian)
else:
raise ValueError(_unsupported_pix_size.format(self._head[2:]))
@property
def pixel_type(self) -> str:
if self._head[2] == 13 and self._head[3] == 8:
return 'RE32F_IM32F'
elif self._head[2] == 12 and self._head[3] == 4:
return 'RE16I_IM16I'
elif self._head[2] == 11 and self._head[3] == 2:
return 'AMP8I_PHS8I'
else:
raise ValueError(_unsupported_pix_size.format(self._head[2:]))
def get_symmetry(self) -> Tuple[Optional[Tuple[int, ...]], Optional[Tuple[int, ...]]]:
return self._reverse_axes, self._transpose_axes
def _read_user_data(self):
if self._user_data is not None:
return
if self._magic_number in (0xFF017FFE, 0xFE7F01FF): # no user data
self._user_data = {}
else:
def read_user_data():
out = {}
user_dat_len = 0
if self._magic_number in (0xFF017FFE, 0xFE7F01FF): # no user data
return out, user_dat_len
num_data_pairs = struct.unpack('{}I'.format(endian), fi.read(4))[0]
for i in range(num_data_pairs):
name_length = struct.unpack('{}I'.format(endian), fi.read(4))[0]
name = struct.unpack('{}{}s'.format(endian, name_length), fi.read(name_length))[0].decode('utf-8')
value_length = struct.unpack('{}I'.format(endian), fi.read(4))[0]
value = struct.unpack('{}{}s'.format(endian, value_length), fi.read(value_length))[0]
try:
value = value.decode('utf-8')
except UnicodeDecodeError:
# leave value as bytes - it may just be some other type
pass
out[name] = value
user_dat_len += 4 + name_length + 4 + value_length
return out, user_dat_len
endian = self.ENDIAN[self._magic_number]
with open(self._file_name, 'rb') as fi:
fi.seek(20, os.SEEK_SET) # skip the basic header
# read the user data (some type of header), if necessary
user_data, user_data_length = read_user_data()
self._user_data = user_data
self._data_offset = 20 + user_data_length
# validate file size
exp_file_size = self._data_offset + self._head[0]*self._head[1]*self._head[3]
act_file_size = os.path.getsize(self._file_name)
if exp_file_size != act_file_size:
logger.warning(
'File {} appears to be an SIO file.\n\t'
'The file size calculated from the header ({})\n\t'
'does not match the actual file size ({})'.format(
self._file_name, exp_file_size, act_file_size))
def _find_caspr_data(self) -> None:
def find_caspr():
dir_name, fil_name = os.path.split(self._file_name)
file_stem = os.path.splitext(fil_name)
for fil in [os.path.join(dir_name, '{}.hydra'.format(file_stem)),
os.path.join(dir_name, '{}.hdr'.format(file_stem)),
os.path.join(dir_name, '..', 'RPHDHeader.out'),
os.path.join(dir_name, '..', '_RPHDHeader.out')]:
if os.path.exists(fil) and os.path.isfile(fil):
# generally redundant, except for broken link?
return fil
return None
casp_fil = find_caspr()
if casp_fil is None:
return
out = {}
with open(casp_fil, 'r') as fi:
lines = fi.read().splitlines(keepends=False)
# this is generally just copied from the previous version - maybe refactor eventually
current_subfield = ''
reading_subfield = False
for line in lines:
if len(line.strip()) == 0:
continue # skip blank lines
if line.startswith(';;;'): # I guess this is the subfield delimiter?
reading_subfield = ~reading_subfield # change state
if reading_subfield:
current_subfield = ''
else:
out[current_subfield] = {} # prepare the workspace
else:
quoted_token = re.match('"(?P<quoted>[^"]+)"', line)
if quoted_token: # Some values with spaces are surrounded by quotes
tokens = [quoted_token.group('quoted'),
line[quoted_token.end('quoted') + 1:].strip()]
else: # No quoted values were found
# If not using quotes, split with whitespace
tokens = line.split(None, 1)
if (len(tokens) > 1) and tokens[1] != '':
if reading_subfield: # Subsection heading
current_subfield = current_subfield + tokens[1]
elif not current_subfield == '': # Actual field value
try:
out[current_subfield][tokens[1]] = float(tokens[0])
except ValueError: # Value is string not numeric
out[current_subfield][tokens[1]] = tokens[0]
self._caspr_data = out
# set symmetry
im_params = out.get('Image Parameters', None)
if im_params is None:
return
illum_dir = im_params.get('image illumination direction [top, left, bottom, right]', None)
if illum_dir is None:
return
elif illum_dir == 'left':
self._reverse_axes = (0, )
self._transpose_axes = (1, 0, 2)
elif illum_dir != 'top':
raise ValueError('unhandled illumination direction {}'.format(illum_dir))
def get_sicd(self) -> SICDType:
"""
Extract the SICD details.
Returns
-------
SICDType
"""
if self._sicd is not None:
return self._sicd
if self._user_data is None:
self._read_user_data()
if self._caspr_data is None:
self._find_caspr_data()
# Check if the user data contains a sicd structure.
sicd_string = None
for nam in ['SICDMETA', 'SICD_META', 'SICD']:
if sicd_string is None:
sicd_string = self._user_data.get(nam, None)
# If so, assume that this SICD is valid and simply present it
if sicd_string is not None:
root_node, xml_ns = parse_xml_from_string(sicd_string)
self._sicd = SICDType.from_node(root_node, xml_ns)
self._sicd.derive()
else:
# otherwise, we populate a really minimal sicd structure
num_rows, num_cols = self.formatted_data_size
self._sicd = SICDType(ImageData=ImageDataType(NumRows=num_rows,
NumCols=num_cols,
FirstRow=0,
FirstCol=0,
PixelType=self.pixel_type,
FullImage=FullImageType(NumRows=num_rows,
NumCols=num_cols),
SCPPixel=RowColType(Row=num_rows/2,
Col=num_cols/2)))
return self._sicd
#######
# The actual reading implementation
class SIOReader(SICDTypeReader):
    """
    A reader for SIO complex data, presenting the SICD structure extracted
    from the header/user data (or a minimal placeholder, if none is present).

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_sio_details', )

    def __init__(self, sio_details):
        """
        Parameters
        ----------
        sio_details : str|SIODetails
            filename or SIODetails object
        """

        if isinstance(sio_details, str):
            sio_details = SIODetails(sio_details)
        if not isinstance(sio_details, SIODetails):
            raise TypeError('The input argument for SIOReader must be a filename or '
                            'SIODetails object.')
        self._sio_details = sio_details
        sicd_meta = sio_details.get_sicd()
        # choose the format function appropriate to the pixel type
        if sicd_meta.ImageData.PixelType == 'AMP8I_PHS8I':
            format_function = AmpLookupFunction(sio_details.raw_data_type, sicd_meta.ImageData.AmpTable)
        else:
            format_function = ComplexFormatFunction(
                sio_details.raw_data_type, order='IQ', band_dimension=-1)
        reverse_axes, transpose_axes = sio_details.get_symmetry()
        # memory-map the raw data, formatting to complex64 on read
        data_segment = NumpyMemmapSegment(
            sio_details.file_name, sio_details.data_offset,
            sio_details.raw_data_type, sio_details.raw_data_size,
            'complex64', sio_details.formatted_data_size,
            reverse_axes=reverse_axes, transpose_axes=transpose_axes,
            format_function=format_function, mode='r', close_file=True)
        SICDTypeReader.__init__(self, data_segment, sicd_meta, close_segments=True)
        self._check_sizes()

    @property
    def sio_details(self) -> SIODetails:
        """
        SIODetails: The sio details object.
        """

        return self._sio_details

    @property
    def file_name(self) -> str:
        """str: The sio file name."""
        return self.sio_details.file_name
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: str) -> Optional[SIOReader]:
    """
    Tests whether a given file_name corresponds to a SIO file. Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    SIOReader|None
        `SIOReader` instance if SIO file, `None` otherwise
    """

    # this opener handles paths only, not file-like objects
    if is_file_like(file_name):
        return None

    try:
        details = SIODetails(file_name)
        logger.info('File {} is determined to be a SIO file.'.format(file_name))
        return SIOReader(details)
    except SarpyIOError:
        # not an SIO file
        return None
#######
# The actual writing implementation
class SIOWriter(BaseWriter):
    """
    Writer implementation for the SIO complex image format.

    **Changed in version 1.3.0** for writing changes.
    """

    # NB: this was previously misspelled `_slots__`, so the slots declaration
    # silently had no effect - corrected to the proper dunder name
    __slots__ = (
        '_file_name', '_file_object', '_in_memory',
        '_data_offset', '_data_written')

    def __init__(
            self,
            file_object: Union[str, BinaryIO],
            sicd_meta: SICDType,
            user_data: Optional[Dict[str, str]] = None,
            check_older_version: bool = False,
            check_existence: bool = True):
        """
        Parameters
        ----------
        file_object : str|BinaryIO
        sicd_meta : SICDType
        user_data : None|Dict[str, str]
        check_older_version : bool
            Try to use an older version (1.1) of the SICD standard, for possible
            application compliance issues?
        check_existence : bool
            Should we check if the given file already exists, and raises an exception if so?
        """

        self._data_written = True
        if isinstance(file_object, str):
            if check_existence and os.path.exists(file_object):
                raise SarpyIOError(
                    'Given file {} already exists,\n\t'
                    'and a new SIO file cannot be created here.'.format(file_object))
            file_object = open(file_object, 'wb')

        if not is_file_like(file_object):
            raise ValueError('file_object requires a file path or BinaryIO object')

        self._file_object = file_object
        if is_real_file(file_object):
            self._file_name = file_object.name
            self._in_memory = False
        else:
            self._file_name = None
            self._in_memory = True

        # choose magic number (with user data) and corresponding endian-ness
        magic_number = 0xFD7F02FF
        endian = SIODetails.ENDIAN[magic_number]

        # define basic image details - raw data is stored with a trailing
        # band dimension of 2 (the two components of each complex sample)
        raw_shape = (sicd_meta.ImageData.NumRows, sicd_meta.ImageData.NumCols, 2)
        pixel_type = sicd_meta.ImageData.PixelType
        if pixel_type == 'RE32F_IM32F':
            raw_dtype = numpy.dtype('{}f4'.format(endian))
            element_type = 13
            element_size = 8  # bytes per complex element
            format_function = ComplexFormatFunction(raw_dtype, order='MP', band_dimension=2)
        elif pixel_type == 'RE16I_IM16I':
            raw_dtype = numpy.dtype('{}i2'.format(endian))
            element_type = 12
            element_size = 4
            format_function = ComplexFormatFunction(raw_dtype, order='MP', band_dimension=2)
        else:
            # AMP8I_PHS8I - amplitude lookup table applies
            raw_dtype = numpy.dtype('{}u1'.format(endian))
            element_type = 11
            element_size = 2
            format_function = AmpLookupFunction(raw_dtype, sicd_meta.ImageData.AmpTable)

        # construct the sio header
        header = numpy.array(
            [magic_number, raw_shape[0], raw_shape[1], element_type, element_size],
            dtype='>u4')
        # construct the user data - must be {str : str}
        if user_data is None:
            user_data = {}
        uh_args = sicd_meta.get_des_details(check_older_version)
        user_data['SICDMETA'] = sicd_meta.to_xml_string(tag='SICD', urn=uh_args['DESSHTN'])

        # write the initial things to the buffer
        # (struct repacks the header values, so the numpy byte order is immaterial)
        self._file_object.seek(0, os.SEEK_SET)
        self._file_object.write(struct.pack('{}5I'.format(endian), *header))
        # write the user data - name size, name, value size, value
        for name in user_data:
            name_bytes = name.encode('utf-8')
            self._file_object.write(struct.pack('{}I'.format(endian), len(name_bytes)))
            self._file_object.write(struct.pack('{}{}s'.format(endian, len(name_bytes)), name_bytes))
            val_bytes = user_data[name].encode('utf-8')
            self._file_object.write(struct.pack('{}I'.format(endian), len(val_bytes)))
            self._file_object.write(struct.pack('{}{}s'.format(endian, len(val_bytes)), val_bytes))
        self._data_offset = self._file_object.tell()

        # initialize the single data segment - in-memory data is only flushed
        # to the file object on close/flush, memmap data is written directly
        if self._in_memory:
            underlying_array = numpy.full(raw_shape, fill_value=0, dtype=raw_dtype)
            data_segment = NumpyArraySegment(
                underlying_array, 'complex64', raw_shape[:2], format_function=format_function, mode='w')
            self._data_written = False
        else:
            data_segment = NumpyMemmapSegment(
                self._file_object, self._data_offset, raw_dtype, raw_shape,
                'complex64', raw_shape[:2], format_function=format_function, mode='w', close_file=False)
            self._data_written = True
        BaseWriter.__init__(self, data_segment)

    @property
    def file_name(self) -> Optional[str]:
        """
        None|str: The file name, if feasible.
        """

        return self._file_name

    def flush(self, force: bool = False) -> None:
        BaseWriter.flush(self, force=force)
        if self._data_written:
            # nothing buffered in memory - memmap data goes straight to disk
            return
        if force or self.data_segment[0].check_fully_written(warn=force):
            self._file_object.seek(self._data_offset, os.SEEK_SET)
            self._file_object.write(self.data_segment[0].get_raw_bytes(warn=False))

    def close(self) -> None:
        """
        Completes any necessary final steps.
        """

        # guard against a partially-constructed instance (missing _closed) or
        # a repeated close call
        if not hasattr(self, '_closed') or self._closed:
            return

        self.flush(force=True)
        BaseWriter.close(self)
        self._file_object = None
| 20,035 | 38.753968 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/converter.py | """
This module provide utilities for converting from any complex format that we can
read to SICD or SIO format. The same conversion utility can be used to subset data.
"""
__classification__ = "UNCLASSIFIED"
__author__ = ("Wade Schwartzkopf", "Thomas McCullough", "Valkyrie Systems Corporation")
import os
import logging
from typing import Union, List, Tuple, Callable, BinaryIO
import numpy
from sarpy.geometry.geocoords import ecf_to_geodetic
from sarpy.geometry.point_projection import image_to_ground_dem
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd import SICDWriter
from sarpy.io.complex.sio import SIOWriter
from sarpy.io.DEM.geotiff1deg import GeoTIFF1DegInterpolator
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.base import check_for_openers
from sarpy.io.general.nitf import NITFReader
from sarpy.io.general.utils import is_file_like
logger = logging.getLogger(__name__)
###########
# Module variables
_writer_types = {'SICD': SICDWriter, 'SIO': SIOWriter}
_openers = []
_parsed_openers = False
def register_opener(open_func: Callable) -> None:
    """
    Provide a new opener.

    Parameters
    ----------
    open_func : callable
        This is required to be a function which takes a single argument (file name).
        This function should return a sarpy.io.complex.base.SICDTypeReader instance
        if the referenced file is viable for the underlying type, and None otherwise.

    Returns
    -------
    None
    """

    if not callable(open_func):
        raise TypeError('open_func must be a callable')
    # avoid registering the same opener twice
    if open_func in _openers:
        return
    _openers.append(open_func)
def parse_openers() -> None:
    """
    Automatically find the viable openers (i.e. :func:`is_a`) in the various modules.
    """

    global _parsed_openers

    # scan the package for openers at most once
    if not _parsed_openers:
        _parsed_openers = True
        check_for_openers('sarpy.io.complex', register_opener)
def _define_final_attempt_openers() -> List[Callable]:
    """
    Gets the prioritized list of openers to attempt after regular openers.

    Returns
    -------
    List[Callable]
    """

    # imported lazily to avoid circular imports at module load time
    from sarpy.io.complex.other_nitf import final_attempt
    return [final_attempt]
def open_complex(file_name: Union[str, BinaryIO]) -> SICDTypeReader:
    """
    Given a file, try to find and return the appropriate reader object.

    Parameters
    ----------
    file_name : str|BinaryIO

    Returns
    -------
    SICDTypeReader

    Raises
    ------
    SarpyIOError
    """

    if not is_file_like(file_name) and not os.path.exists(file_name):
        raise SarpyIOError('File {} does not exist.'.format(file_name))

    # make sure the opener registry has been populated
    parse_openers()

    # attempt each registered opener in turn
    for candidate in _openers:
        the_reader = candidate(file_name)
        if the_reader is not None:
            return the_reader

    # fall back to the last-ditch openers
    for candidate in _define_final_attempt_openers():
        the_reader = candidate(file_name)
        if the_reader is not None:
            return the_reader

    # nothing claimed the file
    raise SarpyIOError('Unable to determine complex image format.')
class Converter(object):
    """
    This is a class for conversion (of a single frame) of one complex format to
    SICD or SIO format. Another use case is to create a (contiguous) subset of a
    given complex dataset. **This class is intended to be used as a context manager.**
    """

    __slots__ = ('_reader', '_file_name', '_writer', '_frame', '_row_limits', '_col_limits')

    def __init__(self, reader, output_directory, output_file=None, frame=None, row_limits=None, col_limits=None,
                 output_format='SICD', check_older_version=False, check_existence=True):
        """
        Parameters
        ----------
        reader : SICDTypeReader
            The base reader instance.
        output_directory : str
            The output directory. **This must exist.**
        output_file : None|str
            The output file name. If not provided, then `sicd.get_suggested_name(frame)`
            will be used.
        frame : None|int
            The frame (i.e. index into the reader's sicd collection) to convert.
            The default is 0.
        row_limits : None|Tuple[int, int]
            Row start/stop. Default is all.
        col_limits : None|Tuple[int, int]
            Column start/stop. Default is all.
        output_format : str
            The output file format to write, from {'SICD', 'SIO'}. Default is SICD.
        check_older_version : bool
            Try to use a less recent version of SICD (1.1), for possible application compliance issues?
        check_existence : bool
            Should we check if the given file already exists, and raises an exception if so?
        """

        if isinstance(reader, SICDTypeReader):
            self._reader = reader
        else:
            raise ValueError(
                'reader is expected to be a Reader instance. Got {}'.format(type(reader)))
        if not (os.path.exists(output_directory) and os.path.isdir(output_directory)):
            raise SarpyIOError('output directory {} must exist.'.format(output_directory))

        # fetch the sicd collection and resolve/validate the frame index first,
        # so that the default output file name can be constructed below.
        # NB: previously the output_file default was computed from the raw `frame`
        # argument, which raised a TypeError when frame was None.
        sicds = self._reader.get_sicds_as_tuple()
        self._frame = 0 if frame is None else int(frame)
        if not (0 <= self._frame < len(sicds)):
            raise ValueError(
                'Got a frame {}, but it must be between 0 and {}'.format(frame, len(sicds)))

        if output_file is None:
            output_file = sicds[self._frame].get_suggested_name(self._frame + 1) + '_SICD'
        output_path = os.path.join(output_directory, output_file)
        if check_existence and os.path.exists(output_path):
            raise SarpyIOError('The file {} already exists.'.format(output_path))

        # validate the output format and fetch the writer type
        if output_format is None:
            output_format = 'SICD'
        output_format = output_format.upper()
        if output_format not in ['SICD', 'SIO']:
            raise ValueError('Got unexpected output_format {}'.format(output_format))
        writer_type = _writer_types[output_format]

        this_sicd = sicds[self._frame]
        # validate row and column limits and update sicd structure, as necessary
        this_sicd, self._row_limits, self._col_limits = this_sicd.create_subset_structure(row_limits, col_limits)

        # set up our writer
        self._file_name = output_path
        self._writer = writer_type(
            output_path, this_sicd, check_older_version=check_older_version, check_existence=check_existence)

    def _get_rows_per_block(self, max_block_size):
        # determine how many image rows fit in (nominally) max_block_size bytes,
        # based on the raw storage size per pixel for the given pixel type
        pixel_type = self._writer.sicd_meta.ImageData.PixelType
        cols = int(self._writer.sicd_meta.ImageData.NumCols)
        if pixel_type == 'RE32F_IM32F':
            bytes_per_row = 8*cols
        elif pixel_type == 'RE16I_IM16I':
            bytes_per_row = 4*cols
        elif pixel_type == 'AMP8I_PHS8I':
            bytes_per_row = 2*cols
        else:
            # unrecognized pixel type - fall back to the largest assumption
            bytes_per_row = 8*cols
        return max(1, int(round(max_block_size/bytes_per_row)))

    @property
    def writer(self):  # type: () -> Union[SICDWriter, SIOWriter]
        """SICDWriter|SIOWriter: The writer instance."""
        return self._writer

    def write_data(self, max_block_size=None):
        r"""
        Assuming that the desired changes have been made to the writer instance
        nitf header tags, write the data.

        Parameters
        ----------
        max_block_size : None|int
            (nominal) maximum block size in bytes. Minimum value is :math:`2^{20} = 1~\text{MB}`.
            Default value is :math:`2^{26} = 64~\text{MB}`.

        Returns
        -------
        None
        """

        # validate max_block_size
        if max_block_size is None:
            max_block_size = 2**26
        else:
            max_block_size = int(max_block_size)
            if max_block_size < 2**20:
                max_block_size = 2**20

        # now, write the data block-by-block to bound memory usage
        rows_per_block = self._get_rows_per_block(max_block_size)
        block_start = self._row_limits[0]
        while block_start < self._row_limits[1]:
            block_end = min(block_start + rows_per_block, self._row_limits[1])
            data = self._reader[block_start:block_end, self._col_limits[0]:self._col_limits[1], self._frame, 'nosqueeze']
            # the writer indexes from the start of the subset, not the full image
            self._writer.write_chip(data, start_indices=(block_start - self._row_limits[0], 0))
            logger.info('Done writing block {}-{} to file {}'.format(block_start, block_end, self._file_name))
            block_start = block_end
        self._writer.close()

    def __del__(self):
        # guard against a partially-constructed instance
        if hasattr(self, '_writer'):
            self._writer.close()

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        if exception_type is None:
            self._writer.close()
        else:
            logger.error(
                'The {} file converter generated an exception during processing.\n\t'
                'The file {} may be only partially generated and corrupt.'.format(
                    self.__class__.__name__, self._file_name))
            # The exception will be reraised.
            # It's unclear how any exception could be caught.
def conversion_utility(
        input_file, output_directory, output_files=None, frames=None, output_format='SICD',
        row_limits=None, column_limits=None, max_block_size=None, check_older_version=False,
        preserve_nitf_information=False, check_existence=True,
        dem_filename_pattern=None, dem_type=None, geoid_file=None):
    """
    Copy SAR complex data to a file of the specified format.

    Parameters
    ----------
    input_file : str|SICDTypeReader
        Reader instance, or the name of file to convert.
    output_directory : str
        The output directory. **This must exist.**
    output_files : None|str|List[str]
        The name of the output file(s), or list of output files matching `frames`.
        If not provided, then `sicd.get_suggested_name(frame)` will be used.
    frames : None|int|list
        Set of frames to convert. Default is all.
    output_format : str
        The output file format to write, from {'SICD', 'SIO'}, optional. Default is SICD.
    row_limits : None|Tuple[int, int]|List[Tuple[int, int]]
        Rows start/stop. Default is all.
    column_limits : None|Tuple[int, int]|List[Tuple[int, int]]
        Columns start/stop. Default is all.
    max_block_size : None|int
        (nominal) maximum block size in bytes. Passed through to the Converter class.
    check_older_version : bool
        Try to use a less recent version of SICD (1.1), for possible application compliance issues?
    preserve_nitf_information : bool
        Try to preserve NITF information? This only applies in the case that the file being read
        is actually a NITF file.
    check_existence : bool
        Check for the existence of any possibly overwritten file?
    dem_filename_pattern : str | None
        Optional string specifying a Digital Elevation Model (DEM) filename pattern.
        This is a format string that specifies a glob pattern that will
        uniquely specify a DEM file from the Lat/Lon of the SW corner of
        the DEM tile.  See the utils/convert_to_sicd help text for more details.
    dem_type : str | None
        Optional DEM type ('GeoTIFF', 'GeoTIFF:WGS84', 'GeoTIFF:EGM2008', etc.).
        This parameter is required when dem_filename_pattern is specified.  For 'GeoTIFF'
        DEM files, the reference surface can be either WGS84 or any of the geoid models.
        The reference surface is appended to the DEM type with a ':' separator. If the
        reference surface is not specified, then EGM2008 is assumed.
    geoid_file : str | None
        Optional Geoid file which might be needed when dem_filename_pattern is specified.

    Returns
    -------
    None
    """

    def validate_lims(lims, typ):
        # type: (Union[None, tuple, list, numpy.ndarray], str) -> Tuple[Tuple[int, int], ...]
        # validate and normalize row/column limits to one (start, end) pair per frame

        def validate_entry(st, ed, shap, i_fr):
            if not ((0 <= st < shap[ind]) and (st < ed <= shap[ind])):
                raise ValueError('{}_limits is {}, and frame {} has shape {}'.format(typ, lims, i_fr, shap))

        ind = 0 if typ == 'row' else 1

        if lims is None:
            return tuple((0, shp[ind]) for shp in sizes)
        else:
            o_lims = numpy.array(lims, dtype=numpy.int64)
            t_lims = []
            if len(o_lims.shape) == 1:
                if o_lims.shape[0] != 2:
                    # NB: this message previously had a spurious 'row' prefix
                    # ('row{}_limits'), producing garbled text like 'rowcolumn_limits'
                    raise ValueError(
                        '{}_limits must be of the form (<start>, <end>), got {}'.format(typ, lims))
                t_start = int(o_lims[0])
                t_end = int(o_lims[1])
                for i_frame, shp in zip(frames, sizes):
                    validate_entry(t_start, t_end, shp, i_frame)
                    t_lims.append((t_start, t_end))
            else:
                if o_lims.shape[0] != len(frames):
                    raise ValueError(
                        '{0:s}_limits must either be of the form (<start>, <end>)\n\t'
                        'applied to all frames, or a collection of such of the \n\t'
                        'same length as frames.\n\t'
                        'Got len({0:s}_limits) = {1:d} and len(frames) = {2:d}'.format(
                            typ, o_lims.shape[0], len(frames)))
                for entry, i_frame, shp in zip(o_lims, frames, sizes):
                    t_start = int(entry[0])
                    t_end = int(entry[1])
                    validate_entry(t_start, t_end, shp, i_frame)
                    t_lims.append((t_start, t_end))
            return tuple(t_lims)

    if isinstance(input_file, str):
        reader = open_complex(input_file)
    elif isinstance(input_file, SICDTypeReader):
        reader = input_file
    else:
        raise ValueError(
            'input_file is expected to be a file name or Reader instance.\n\t'
            'Got {}'.format(type(input_file)))

    if preserve_nitf_information and isinstance(reader, NITFReader):
        try:
            # noinspection PyUnresolvedReferences
            reader.populate_nitf_information_into_sicd()
        except AttributeError:
            logger.warning(
                'Reader class `{}` is missing populate_nitf_information_into_sicd '
                'method'.format(type(reader)))

    if not (os.path.exists(output_directory) and os.path.isdir(output_directory)):
        raise SarpyIOError('output directory {} must exist.'.format(output_directory))

    sicds = reader.get_sicds_as_tuple()
    sizes = reader.get_data_size_as_tuple()

    if dem_filename_pattern is not None:
        # Update the SICD metadata based on a projection of the SCP to a DEM.
        # dem_type is required here - fail with a clear message rather than an
        # AttributeError from dem_type.upper()
        if dem_type is None:
            raise ValueError(
                'dem_type must be provided when dem_filename_pattern is specified.')
        if dem_type.upper().startswith('GEOTIFF'):
            ref_surface = dem_type.upper().split(':')[-1] if ':' in dem_type else 'EGM2008'
            dem_interpolator = GeoTIFF1DegInterpolator(dem_filename_pattern,
                                                       ref_surface=ref_surface,
                                                       geoid_path=geoid_file)
        else:
            raise NotImplementedError(f'DEM type ({dem_type}) is not implemented.')

        for sicd in sicds:
            # project SCP and corner points to DEM
            scp = (sicd.ImageData.SCPPixel.Row, sicd.ImageData.SCPPixel.Col)
            frfc = (sicd.ImageData.FirstRow, sicd.ImageData.FirstCol)
            frlc = (sicd.ImageData.FirstRow, sicd.ImageData.FirstCol + sicd.ImageData.NumCols - 1)
            lrfc = (sicd.ImageData.FirstRow + sicd.ImageData.NumRows - 1, sicd.ImageData.FirstCol)
            lrlc = (sicd.ImageData.FirstRow + sicd.ImageData.NumRows - 1,
                    sicd.ImageData.FirstCol + sicd.ImageData.NumCols - 1)
            img_points = [scp, frfc, frlc, lrfc, lrlc]

            ecf_points = image_to_ground_dem(img_points, sicd,
                                             block_size=None,
                                             dem_interpolator=dem_interpolator,
                                             pad_value=0.2,
                                             vertical_step_size=1.0,
                                             use_structure_coa=True,
                                             )
            llh_points = ecf_to_geodetic(ecf_points)

            sicd.GeoData.SCP.LLH.Lat = llh_points[0][0]
            sicd.GeoData.SCP.LLH.Lon = llh_points[0][1]
            sicd.GeoData.SCP.LLH.HAE = llh_points[0][2]

            sicd.GeoData.SCP.ECF.X = ecf_points[0][0]
            sicd.GeoData.SCP.ECF.Y = ecf_points[0][1]
            sicd.GeoData.SCP.ECF.Z = ecf_points[0][2]

            for i in range(4):
                index = int(sicd.GeoData.ImageCorners[i].index.split(':')[0])    # index = 1|2|3|4, SCP is index 0
                sicd.GeoData.ImageCorners[i].Lat = llh_points[index][0]          # FRFC is index 1, FRLC is index 2,
                sicd.GeoData.ImageCorners[i].Lon = llh_points[index][1]          # LRFC is index 3, LRLC is index 4

    # check that frames is valid
    if frames is None:
        frames = tuple(range(len(sicds)))
    if isinstance(frames, int):
        frames = (frames, )
    if not isinstance(frames, tuple):
        frames = tuple(int(entry) for entry in frames)
    if len(frames) == 0:
        raise ValueError('The list of frames is empty.')

    o_frames = []
    for frame in frames:
        index = int(frame)
        if not (0 <= index < len(sicds)):
            raise ValueError(
                'Got a frames entry {}, but it must be between 0 and {}'.format(index, len(sicds)))
        o_frames.append(index)
    frames = tuple(o_frames)

    # assign SUGGESTED_NAME to each sicd
    for frame in frames:
        sicd = sicds[frame]
        # NB: get_suggested_name may return None. Previously '_SICD' was
        # appended unconditionally, which raised a TypeError in that case and
        # made the fallback branches below unreachable.
        suggested_name = sicd.get_suggested_name(frame+1)
        if suggested_name is not None:
            suggested_name = suggested_name + '_SICD'
        elif sicd.CollectionInfo.CoreName is not None:
            suggested_name = sicd.CollectionInfo.CoreName+'{}_SICD'.format(frame)
        else:
            suggested_name = 'Unknown{}_SICD'.format(frame)
        sicd.NITF['SUGGESTED_NAME'] = suggested_name

    # construct output_files list
    if output_files is None:
        output_files = [sicds[frame].NITF['SUGGESTED_NAME']+'.nitf' for frame in frames]
    elif isinstance(output_files, str):
        if len(sicds) == 1:
            output_files = [output_files, ]
        else:
            # enumerate the single provided name across frames
            digits = int(numpy.ceil(numpy.log10(len(sicds))))
            frm_str = '{0:s}-{1:0' + str(digits) + 'd}{2:s}'
            fstem, fext = os.path.splitext(output_files)
            o_files = []
            for index in frames:
                o_files.append(frm_str.format(fstem, index, fext))
            output_files = tuple(o_files)
    if len(output_files) != len(frames):
        raise ValueError('The lengths of frames and output_files must match.')
    if len(set(output_files)) != len(output_files):
        raise ValueError(
            'Entries in output_files (possibly constructed) must be unique,\n\t'
            'got {} for frames {}'.format(output_files, frames))

    # construct validated row/column_limits
    row_limits = validate_lims(row_limits, 'row')
    column_limits = validate_lims(column_limits, 'column')

    for o_file, frame, row_lims, col_lims in zip(output_files, frames, row_limits, column_limits):
        logger.info('Converting frame {} from file {} to file {}'.format(frame, input_file, o_file))
        with Converter(
                reader, output_directory, output_file=o_file, frame=frame,
                row_limits=row_lims, col_limits=col_lims, output_format=output_format,
                check_older_version=check_older_version,
                check_existence=check_existence) as converter:
            converter.write_data(max_block_size=max_block_size)
| 20,197 | 40.559671 | 121 | py |
sarpy | sarpy-master/sarpy/io/complex/__init__.py | """
This package contains the elements for interpreting complex radar data in a variety of formats.
For non-SICD files, the radar metadata will be converted to something compatible with the SICD
standard, to the extent feasible.
It also permits converting complex data from any form which can be read to a file or files in
SICD or SIO format.
"""
__classification__ = 'UNCLASSIFIED'
def open(*args, **kwargs):
    """
    Open a complex-format file, returning the appropriate reader instance.

    This intentionally shadows the builtin `open` within this package namespace;
    the import is deferred to avoid circular imports at package load time.
    """

    from .converter import open_complex as _open_complex
    return _open_complex(*args, **kwargs)
def convert(*args, **kwargs):
    """
    Convert complex-format data which can be read to SICD or SIO format.

    The import is deferred to avoid circular imports at package load time.
    """

    from .converter import conversion_utility as _conversion_utility
    return _conversion_utility(*args, **kwargs)
| 620 | 28.571429 | 95 | py |
sarpy | sarpy-master/sarpy/io/complex/radarsat.py | """
Functionality for reading Radarsat (RS2 and RCM) data into a SICD model.
"""
__classification__ = "UNCLASSIFIED"
__author__ = ("Thomas McCullough", "Khanh Ho", "Wade Schwartzkopf", "Nathan Bombaci")
import logging
import re
import os
from datetime import datetime
from xml.etree import ElementTree
from typing import Tuple, List, Sequence, Union, Optional
import numpy
from scipy.interpolate import RectBivariateSpline
from numpy.polynomial import polynomial
from scipy.constants import speed_of_light
from sarpy.geometry.geocoords import geodetic_to_ecf
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.other_nitf import ComplexNITFReader
from sarpy.io.complex.sicd_elements.blocks import Poly1DType, Poly2DType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, \
RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Position import PositionType, XYZPolyType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
WaveformParametersType, ChanParametersType, TxStepType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, RcvChanProcType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.SCPCOA import SCPCOAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType, NoiseLevelType_
from sarpy.io.complex.utils import fit_time_coa_polynomial, fit_position_xvalidation
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.data_segment import DataSegment
from sarpy.io.general.tiff import NativeTiffDataSegment
from sarpy.io.general.utils import get_seconds, parse_timestring, is_file_like
logger = logging.getLogger(__name__)
_unhandled_generation_text = 'Unhandled generation `{}`'
############
# Helper functions
def _parse_xml(file_name: str, without_ns: bool = False) -> ElementTree.Element:
if without_ns:
with open(file_name, 'rb') as fi:
xml_string = fi.read()
# Remove the (first) default namespace definition (xmlns="http://some/namespace") and parse
return ElementTree.fromstring(re.sub(b'\\sxmlns="[^"]+"', b'', xml_string, count=1))
else:
return ElementTree.parse(file_name).getroot()
def _format_class_str(class_str: str) -> str:
if 'UNCLASS' in class_str:
return 'UNCLASSIFIED'
else:
return class_str
def _validate_segment_and_sicd(
the_sicd: SICDType,
data_segment: DataSegment,
name: str,
the_file: str):
"""
Check that chipper and sicd are compatible.
Parameters
----------
the_sicd : SICDType
data_segment : DataSegment
name : str
the_file : str
Returns
-------
None
"""
sicd_data_size = (the_sicd.ImageData.NumRows, the_sicd.ImageData.NumCols)
segment_data_size = data_segment.formatted_shape
if sicd_data_size != segment_data_size:
raise ValueError(
'The {} data segment construction for file {}\n'
'got incompatible sicd size `{}` and segment size `{}`'.format(
name, the_file, sicd_data_size, segment_data_size))
def _construct_tiff_segment(
        the_sicd: SICDType,
        the_file: str,
        reverse_axes: Union[None, int, Sequence[int]] = None,
        transpose_axes: Union[None, Tuple[int, ...]] = None):
    """
    Create a tiff-backed data segment and verify it is consistent with the sicd.

    Parameters
    ----------
    the_sicd : SICDType
    the_file : str
    reverse_axes : None|Tuple[int, ...]
    transpose_axes : None|Tuple[int, ...]

    Returns
    -------
    NativeTiffDataSegment
    """

    tiff_segment = NativeTiffDataSegment(
        the_file, reverse_axes=reverse_axes, transpose_axes=transpose_axes)
    # sanity check the segment shape against the sicd structure
    _validate_segment_and_sicd(the_sicd, tiff_segment, 'tiff', the_file)
    return tiff_segment
def _construct_single_nitf_segment(
        the_sicd: SICDType,
        the_file: str,
        reverse_axes: Optional[Sequence[int]],
        transpose_axes: Optional[Tuple[int, ...]]) -> Tuple[ComplexNITFReader, DataSegment]:
    """
    Create the NITF reader and single data segment for one polarimetric band,
    and verify the segment is consistent with the sicd.

    Parameters
    ----------
    the_sicd : SICDType
    the_file : str
    reverse_axes : None|Sequence[int]
    transpose_axes : None|Tuple[int, ...]

    Returns
    -------
    reader: ComplexNITFReader
    data_segment: DataSegment
    """

    if transpose_axes is not None:
        # the NITF reader expects the 2-d (row, column) transpose specification
        transpose_axes = transpose_axes[:2]
    reader = ComplexNITFReader(the_file, reverse_axes=reverse_axes, transpose_axes=transpose_axes)

    data_segment = reader.data_segment
    if not isinstance(data_segment, DataSegment):
        # NB: corrected misspelling 'polarmetric' in the previous error message
        raise ValueError(
            'The SLC data for a single polarimetric band was provided '
            'in a NITF file which has more than a single complex band.')

    _validate_segment_and_sicd(the_sicd, data_segment, 'Single NITF', the_file)
    return reader, data_segment
def _construct_multiple_nitf_segment(
        the_sicds: List[SICDType],
        the_file: str,
        reverse_axes: Optional[Sequence[int]],
        transpose_axes: Optional[Tuple[int, ...]]) -> Tuple[ComplexNITFReader, Tuple[DataSegment, ...]]:
    """
    Create the NITF reader and one data segment per polarimetric band, and
    verify each segment is consistent with the corresponding sicd.

    Parameters
    ----------
    the_sicds : List[SICDType]
    the_file : str
    reverse_axes : None|Sequence[int]
    transpose_axes : None|Tuple[int, ...]

    Returns
    -------
    reader: ComplexNITFReader
    data_segment: Tuple[DataSegment, ...]
    """

    if transpose_axes is not None:
        # the NITF reader expects the 2-d (row, column) transpose specification
        transpose_axes = transpose_axes[:2]
    reader = ComplexNITFReader(the_file, reverse_axes=reverse_axes, transpose_axes=transpose_axes)

    data_segment = reader.get_data_segment_as_tuple()
    if len(data_segment) != len(the_sicds):
        # NB: corrected misspelling 'polarmetric' and the ungrammatical
        # '{} single complex band' in the previous error message
        raise ValueError(
            'The SLC data for {} polarimetric bands was provided '
            'in a NITF file which has {} complex bands.'.format(len(the_sicds), len(data_segment)))

    for i, (the_sicd, segment) in enumerate(zip(the_sicds, data_segment)):
        _validate_segment_and_sicd(the_sicd, segment, 'NITF band {}'.format(i), the_file)
    return reader, data_segment
##############
# Class for meta-data interpretation
class RadarSatDetails(object):
"""
Class for interpreting RadarSat-2 and RadarSat Constellation Mission (RCM)
metadata files, and creating the corresponding sicd structure(s).
"""
__slots__ = (
'_file_name', '_directory_name', '_satellite', '_root_node', '_beams', '_bursts',
'_num_lines_processed', '_polarizations',
'_x_spline', '_y_spline', '_z_spline',
'_state_time', '_state_position', '_state_velocity')
def __init__(self, file_name: str):
"""
Parameters
----------
file_name : str
"""
self._beams = None
self._bursts = None
self._num_lines_processed = None
self._polarizations = None
self._x_spline = None
self._y_spline = None
self._z_spline = None
self._state_time = None
self._state_position = None
self._state_velocity = None
if os.path.isdir(file_name): # it is the directory - point it at the product.xml file
for t_file_name in [
os.path.join(file_name, 'product.xml'),
os.path.join(file_name, 'metadata', 'product.xml')]:
if os.path.exists(t_file_name):
file_name = t_file_name
break
if not os.path.isfile(file_name):
raise SarpyIOError('path {} does not exist or is not a file'.format(file_name))
if os.path.split(file_name)[1] != 'product.xml':
raise SarpyIOError(
'The radarsat or rcm file is expected to be named product.xml,\n\t'
'got path {}'.format(file_name))
self._file_name = file_name
root_node = _parse_xml(file_name, without_ns=True)
sat_node = root_node.find('./sourceAttributes/satellite')
satellite = 'None' if sat_node is None else sat_node.text.upper()
product_node = root_node.find(
'./imageGenerationParameters/generalProcessingInformation/productType')
product_type = 'None' if product_node is None else product_node.text.upper()
if not ((satellite == 'RADARSAT-2' or satellite.startswith('RCM')) and product_type == 'SLC'):
raise SarpyIOError(
'File {} does not appear to be an SLC product\n\t'
'for a RADARSAT-2 or RCM mission.'.format(file_name))
self._root_node = root_node
self._satellite = satellite
absolute_path = os.path.abspath(self._file_name)
parent_dir, _ = os.path.split(absolute_path)
if self.generation == 'RS2':
self._directory_name = parent_dir
else:
self._directory_name, _ = os.path.split(parent_dir)
self._build_location_spline()
self._parse_state_vectors()
self._extract_beams_and_bursts()
@property
def file_name(self) -> str:
"""
str: the file name
"""
return self._file_name
@property
def directory_name(self) -> str:
"""
str: the package directory name
"""
return self._directory_name
@property
def satellite(self) -> str:
"""
str: the satellite name
"""
return self._satellite
@property
def generation(self) -> str:
"""
str: RS2 or RCM
"""
if self._satellite == 'RADARSAT-2':
return 'RS2'
else:
return 'RCM'
@property
def pass_direction(self) -> str:
"""
str: The pass direction
"""
return self._find('./sourceAttributes/orbitAndAttitude/orbitInformation/passDirection').text
def get_symmetry(self) -> Tuple[Optional[Tuple[int, ...]], Optional[Tuple[int, ...]]]:
"""
Get the symmetry transform information.
Returns
-------
reverse_axes : None|Tuple[int, ...]
transpose_axes : None|Tuple[int, ...]
"""
look_dir = self._find('./sourceAttributes/radarParameters/antennaPointing').text.upper()[0]
if self.generation == 'RS2':
line_order = self._find('./imageAttributes/rasterAttributes/lineTimeOrdering').text.upper()
sample_order = self._find('./imageAttributes/rasterAttributes/pixelTimeOrdering').text.upper()
else:
line_order = self._find('./imageReferenceAttributes/rasterAttributes/lineTimeOrdering').text.upper()
sample_order = self._find('./imageReferenceAttributes/rasterAttributes/pixelTimeOrdering').text.upper()
reverse_cols = not (
(line_order == 'DECREASING' and look_dir == 'L') or
(line_order != 'DECREASING' and look_dir != 'L'))
reverse_axes = []
if reverse_cols:
reverse_axes.append(0)
if sample_order == 'DECREASING':
reverse_axes.append(1)
reverse_axes = tuple(reverse_axes) if len(reverse_axes) > 0 else None
transpose_axes = (1, 0, 2)
return reverse_axes, transpose_axes
    def _find(self, tag: str) -> ElementTree.Element:
        # convenience wrapper - first match for `tag` relative to the product root node
        return self._root_node.find(tag)
    def _findall(self, tag: str) -> List[ElementTree.Element]:
        # convenience wrapper - all matches for `tag` relative to the product root node
        return self._root_node.findall(tag)
def _get_tiepoint_nodes(self) -> List[ElementTree.Element]:
"""
Fetch the tie point nodes.
Returns
-------
List[ElementTree.Element]
"""
return self._findall('./imageAttributes/geographicInformation/geolocationGrid/imageTiePoint')
    def _build_location_spline(self) -> None:
        """
        Populates the three (line, sample) -> location coordinate splines. This
        should be done once for all images.

        Returns
        -------
        None
        """

        # the geolocation grid location differs between the generations
        if self.generation == 'RS2':
            tie_points = self._findall('./imageAttributes/geographicInformation/geolocationGrid/imageTiePoint')
        elif self.generation == 'RCM':
            tie_points = self._findall('./imageReferenceAttributes/geographicInformation/geolocationGrid/imageTiePoint')
        else:
            raise ValueError('unexpected generation {}'.format(self.generation))

        # The tie points are assumed to be serialized row-by-row on a regular
        # (line, sample) grid - lines/samples accumulate the distinct grid values,
        # and (grid_row, grid_col) track the expected grid position of each entry.
        lines = []
        samples = []
        llh_coords = numpy.zeros((len(tie_points), 3), dtype='float64')
        grid_row, grid_col = None, None
        for i, entry in enumerate(tie_points):
            img_coords = entry.find('./imageCoordinate')
            geo_coords = entry.find('./geodeticCoordinate')
            # parse lat/lon/hae
            llh_coords[i, :] = [
                float(geo_coords.find('./latitude').text),
                float(geo_coords.find('./longitude').text),
                float(geo_coords.find('./height').text)]
            # parse line/sample
            line = float(img_coords.find('./line').text)
            sample = float(img_coords.find('./pixel').text)
            # verify grid structure
            if i == 0:
                lines.append(line)
                samples.append(sample)
                grid_row = 0
                grid_col = 0
                continue
            if sample == samples[0]:
                # we are starting a new grid column
                grid_row += 1
                grid_col = 0
                lines.append(line)
            else:
                grid_col += 1
                if grid_row == 0:
                    samples.append(sample)
            # verify that the grid assumption is preserved
            if grid_col >= len(samples) or grid_row >= len(lines) or \
                    line != lines[grid_row] or sample != samples[grid_col]:
                logger.error(
                    'Failed parsing grid at\n\t'
                    'grid_col = {}\n\t'
                    'samples = {}\n\t'
                    'grid_row = {}\n\t'
                    'lines = {}\n\t'
                    'line={}, sample={}'.format(grid_col, samples, grid_row, lines, line, sample))
                raise ValueError('The grid assumption is invalid at imageTiePoint entry {}'.format(i))

        lines = numpy.array(lines, dtype='float64')
        samples = numpy.array(samples, dtype='float64')
        # convert geodetic tie points to ECF, then fit one bicubic interpolating
        # spline (s=0) per ECF component over the (line, sample) grid
        ecf_coords = geodetic_to_ecf(llh_coords)
        self._x_spline = RectBivariateSpline(
            lines, samples, numpy.reshape(ecf_coords[:, 0], (lines.size, samples.size)), kx=3, ky=3, s=0)
        self._y_spline = RectBivariateSpline(
            lines, samples, numpy.reshape(ecf_coords[:, 1], (lines.size, samples.size)), kx=3, ky=3, s=0)
        self._z_spline = RectBivariateSpline(
            lines, samples, numpy.reshape(ecf_coords[:, 2], (lines.size, samples.size)), kx=3, ky=3, s=0)
def _get_image_location(self, line: Union[int, float], sample: Union[int, float]) -> numpy.ndarray:
"""
Fetch the image location estimate based on the previously constructed splines.
Parameters
----------
line : int|float
The RadarSat line number.
sample : int|float
The RadarSat sample number.
Returns
-------
numpy.ndarray
"""
return numpy.array(
[float(self._x_spline.ev(line, sample)),
float(self._y_spline.ev(line, sample)),
float(self._z_spline.ev(line, sample))], dtype='float64')
def _parse_state_vectors(self) -> None:
"""
Parses the state vectors.
Returns
-------
None
"""
state_vectors = self._findall(
'./sourceAttributes/orbitAndAttitude/orbitInformation/stateVector')
self._state_time = numpy.zeros((len(state_vectors), ), dtype='datetime64[us]')
self._state_position = numpy.zeros((len(state_vectors), 3), dtype='float64')
self._state_velocity = numpy.zeros((len(state_vectors), 3), dtype='float64')
for i, state_vec in enumerate(state_vectors):
self._state_time[i] = parse_timestring(
state_vec.find('timeStamp').text, precision='us')
self._state_position[i, :] = [
float(state_vec.find('xPosition').text),
float(state_vec.find('yPosition').text),
float(state_vec.find('zPosition').text)]
self._state_velocity[i, :] = [
float(state_vec.find('xVelocity').text),
float(state_vec.find('yVelocity').text),
float(state_vec.find('zVelocity').text)]
def _extract_beams_and_bursts(self) -> None:
"""
Extract the beam and burst and polarization information.
Returns
-------
None
"""
radar_params = self._find('./sourceAttributes/radarParameters')
self._beams = radar_params.find('./beams').text.strip().split()
self._polarizations = radar_params.find('./polarizations').text.strip().split()
if self.generation == 'RCM':
image_attributes = self._findall('./sceneAttributes/imageAttributes')
if 'burst' in image_attributes[0].attrib:
self._bursts = [(entry.attrib['beam'], entry.attrib['burst']) for entry in image_attributes]
num_lines_processed = 0
self._bursts = []
for entry in image_attributes:
self._bursts.append((entry.attrib['beam'], entry.attrib['burst']))
nlines = int(entry.find('./numLines').text)
line_offset = int(entry.find('./lineOffset').text)
num_lines_processed = max(num_lines_processed, nlines+line_offset)
self._num_lines_processed = num_lines_processed
def _get_sicd_radar_mode(self) -> RadarModeType:
"""
Gets the RadarMode information.
Returns
-------
RadarModeType
"""
mode_id = self._find('./sourceAttributes/beamModeMnemonic').text
beam_mode = self._find('./sourceAttributes/beamMode')
acq_type = self._find('./sourceAttributes/radarParameters/acquisitionType')
if (beam_mode is not None and beam_mode.text.upper().startswith("SPOTLIGHT")) \
or (acq_type is not None and acq_type.text.upper().startswith("SPOTLIGHT")) \
or 'SL' in mode_id:
mode_type = 'SPOTLIGHT'
elif mode_id.startswith('SC'):
# ScanSAR modes
mode_type = 'SPOTLIGHT'
else:
mode_type = 'STRIPMAP'
return RadarModeType(ModeID=mode_id, ModeType=mode_type)
    def _get_sicd_collection_info(self, start_time: numpy.datetime64) -> Tuple[dict, CollectionInfoType]:
        """
        Gets the sicd CollectionInfo information.

        Parameters
        ----------
        start_time : numpy.datetime64
            The collect start time, used to build the core name.

        Returns
        -------
        nitf : dict
            The NITF element dictionary - for RCM this may be populated with
            security fields as a side effect of the (optional) add-in below.
        collection_info : CollectionInfoType
        """

        try:
            # optional add-in for translating RCM security markings
            import sarpy.io.complex.radarsat_addin as radarsat_addin
        except ImportError:
            radarsat_addin = None

        collector_name = self.satellite
        # NOTE(review): assumes `datetime` names the datetime.datetime class
        # at module level - confirm against the file's imports
        start_time_dt = start_time.astype(datetime)
        date_str = start_time_dt.strftime('%d%b%y').upper()
        nitf = {}
        if self.generation == 'RS2':
            classification = 'UNCLASSIFIED'
            core_name = '{}{}{}'.format(date_str, self.generation, self._find('./sourceAttributes/imageId').text)
        elif self.generation == 'RCM':
            class_str = self._find('./securityAttributes/securityClassification').text.upper()
            # the add-in (when present) also fills nitf security fields in-place
            classification = _format_class_str(class_str) if radarsat_addin is None else \
                radarsat_addin.extract_radarsat_sec(nitf, class_str)
            core_name = '{}{}{}'.format(date_str, collector_name.replace('-', ''), start_time_dt.strftime('%H%M%S'))
        else:
            raise ValueError(_unhandled_generation_text.format(self.generation))

        return nitf, CollectionInfoType(
            Classification=classification,
            CollectorName=collector_name,
            CoreName=core_name,
            RadarMode=self._get_sicd_radar_mode(),
            CollectType='MONOSTATIC')
def _get_sicd_image_creation(self) -> ImageCreationType:
"""
Gets the ImageCreation metadata.
Returns
-------
ImageCreationType
"""
from sarpy.__about__ import __version__
processing_info = self._find('./imageGenerationParameters/generalProcessingInformation')
return ImageCreationType(
Application=processing_info.find('softwareVersion').text,
DateTime=processing_info.find('processingTime').text,
Site=processing_info.find('processingFacility').text,
Profile='sarpy {}'.format(__version__))
def _get_sicd_position(self, start_time: numpy.datetime64) -> PositionType:
"""
Gets the SICD Position definition, based on the given start time.
Parameters
----------
start_time : numpy.datetime64
Returns
-------
PositionType
"""
# convert to relative time for polynomial fitting
T = numpy.array([get_seconds(entry, start_time, precision='us') for entry in self._state_time], dtype='float64')
P_x, P_y, P_z = fit_position_xvalidation(T, self._state_position, self._state_velocity, max_degree=8)
return PositionType(ARPPoly=XYZPolyType(X=P_x, Y=P_y, Z=P_z))
@staticmethod
def _parse_polarization(str_in: str) -> Tuple[str, str]:
"""
Parses the Radarsat polarization string into it's two SICD components.
Parameters
----------
str_in : str
Returns
-------
(str, str)
"""
if len(str_in) != 2:
raise ValueError('Got input string of unexpected length {}'.format(str_in))
tx_pol = 'RHC' if str_in[0] == 'C' else str_in[0]
rcv_pol = 'RHC' if str_in[1] == 'C' else str_in[1] # probably only H/V
return tx_pol, rcv_pol
def _get_sicd_polarizations(self) -> Tuple[List[str], List[str]]:
tx_pols = []
tx_rcv_pols = []
for entry in self._polarizations:
tx_pol, rcv_pol = self._parse_polarization(entry)
if tx_pol not in tx_pols:
tx_pols.append(tx_pol)
tx_rcv_pols.append('{}:{}'.format(tx_pol, rcv_pol))
return tx_pols, tx_rcv_pols
def _get_side_of_track(self) -> str:
"""
Gets the sicd side of track.
Returns
-------
str
"""
return self._find('./sourceAttributes/radarParameters/antennaPointing').text[0].upper()
    def _get_regular_sicd(self) -> Tuple[List[SICDType], List[str]]:
        """
        Gets the SICD collection. This will return one SICD per polarimetric
        collection. It will also return the data file(s). This is only applicable
        for non-ScanSAR collects.

        Returns
        -------
        sicds: List[SICDType]
        files: List[str]
        """

        def get_image_and_geo_data() -> Tuple[ImageDataType, GeoDataType]:
            # Populate ImageData/GeoData. RadarSat "lines" map to SICD columns
            # (cols is parsed from numberOfLines/numLines below), and the SCP is
            # placed at the image center and located via the tie-point splines.
            if self.generation == 'RS2':
                pixel_type = 'RE16I_IM16I'
                cols = int(self._find('./imageAttributes/rasterAttributes/numberOfLines').text)
                rows = int(self._find('./imageAttributes/rasterAttributes/numberOfSamplesPerLine').text)
            elif self.generation == 'RCM':
                cols = int(self._find('./sceneAttributes/imageAttributes/numLines').text)
                rows = int(self._find('./sceneAttributes/imageAttributes/samplesPerLine').text)
                bits_per_sample = self._find('./imageReferenceAttributes/rasterAttributes/bitsPerSample').text
                if bits_per_sample == '32':
                    pixel_type = 'RE32F_IM32F'
                elif bits_per_sample == '16':
                    pixel_type = 'RE16I_IM16I'
                else:
                    raise ValueError('Got unhandled bites per sample {}'.format(bits_per_sample))
            else:
                raise ValueError(_unhandled_generation_text.format(self.generation))
            scp_rows = int(0.5*rows)
            scp_cols = int(0.5*cols)
            # spline is evaluated as (line, sample) = (scp_cols, scp_rows)
            scp_ecf = self._get_image_location(scp_cols, scp_rows)
            im_data = ImageDataType(
                NumRows=rows, NumCols=cols, FirstRow=0, FirstCol=0, PixelType=pixel_type,
                FullImage=(rows, cols), SCPPixel=(scp_rows, scp_cols))
            t_geo_data = GeoDataType(SCP=SCPType(ECF=scp_ecf))
            return im_data, t_geo_data
        def get_grid_row() -> DirParamType:
            # Range (row) direction grid parameters - sample spacing straight from
            # the metadata, impulse response bandwidth as 2*bandwidth/c.
            if self.generation == 'RS2':
                row_ss = float(self._find('./imageAttributes/rasterAttributes/sampledPixelSpacing').text)
                row_irbw = 2*float(self._find('./imageGenerationParameters'
                                              '/sarProcessingInformation'
                                              '/totalProcessedRangeBandwidth').text)/speed_of_light
            elif self.generation == 'RCM':
                row_ss = float(self._find('./imageReferenceAttributes/rasterAttributes/sampledPixelSpacing').text)
                row_irbw = 2*float(self._find('./sourceAttributes'
                                              '/radarParameters'
                                              '/pulseBandwidth').text)/speed_of_light
            else:
                raise ValueError(_unhandled_generation_text.format(self.generation))
            row_wgt_type = WgtTypeType(
                WindowName=self._find('./imageGenerationParameters'
                                      '/sarProcessingInformation'
                                      '/rangeWindow/windowName').text.upper())
            if row_wgt_type.WindowName == 'KAISER':
                # the Kaiser window carries its beta parameter
                row_wgt_type.Parameters = {
                    'BETA': self._find('./imageGenerationParameters'
                                       '/sarProcessingInformation'
                                       '/rangeWindow/windowCoefficient').text}
            return DirParamType(
                SS=row_ss, ImpRespBW=row_irbw, Sgn=-1, KCtr=2*center_frequency/speed_of_light,
                DeltaKCOAPoly=Poly2DType(Coefs=((0,),)), WgtType=row_wgt_type)
        def get_grid_col() -> DirParamType:
            # Azimuth (column) direction grid parameters - SS/ImpRespBW/DeltaKCOAPoly
            # are filled in later by get_rma_adjust_grid.
            az_win = self._find('./imageGenerationParameters/sarProcessingInformation/azimuthWindow')
            col_wgt_type = WgtTypeType(WindowName=az_win.find('./windowName').text.upper())
            if col_wgt_type.WindowName == 'KAISER':
                col_wgt_type.Parameters = {'BETA': az_win.find('./windowCoefficient').text}
            return DirParamType(Sgn=-1, KCtr=0, WgtType=col_wgt_type)
        def get_radar_collection() -> RadarCollectionType:
            # Waveform definitions, total transmit frequency band, and receive
            # channel / transmit sequence polarization structure.
            radar_params = self._find('./sourceAttributes/radarParameters')
            # Ultrafine and spotlight modes have t pulses, otherwise just one.
            bandwidth_elements = sorted(radar_params.findall('pulseBandwidth'), key=lambda x: x.get('pulse'))
            pulse_length_elements = sorted(radar_params.findall('pulseLength'), key=lambda x: x.get('pulse'))
            adc_elements = sorted(radar_params.findall('adcSamplingRate'), key=lambda x: x.get('pulse'))
            samples_per_echo = float(radar_params.find('samplesPerEchoLine').text)
            wf_params = []
            bandwidths = numpy.empty((len(bandwidth_elements),), dtype=numpy.float64)
            for j, (bwe, ple, adce) in enumerate(zip(bandwidth_elements, pulse_length_elements, adc_elements)):
                bandwidths[j] = float(bwe.text)
                samp_rate = float(adce.text)
                wf_params.append(WaveformParametersType(index=j,
                                                        TxRFBandwidth=float(bwe.text),
                                                        TxPulseLength=float(ple.text),
                                                        ADCSampleRate=samp_rate,
                                                        RcvWindowLength=samples_per_echo / samp_rate,
                                                        RcvDemodType='CHIRP',
                                                        RcvFMRate=0))
            tot_bw = numpy.sum(bandwidths)
            # the pulse bandwidths are stacked end-to-end about the center frequency
            tx_freq = (center_frequency-0.5*tot_bw, center_frequency+0.5*tot_bw)
            t_radar_collection = RadarCollectionType(TxFrequency=tx_freq, Waveform=wf_params)
            t_radar_collection.Waveform[0].TxFreqStart = tx_freq[0]
            for j in range(1, len(bandwidth_elements)):
                t_radar_collection.Waveform[j].TxFreqStart = t_radar_collection.Waveform[j - 1].TxFreqStart + \
                    t_radar_collection.Waveform[j - 1].TxRFBandwidth
            t_radar_collection.RcvChannels = [
                ChanParametersType(TxRcvPolarization=entry, index=j+1) for j, entry in enumerate(tx_rcv_pols)]
            if len(tx_pols) == 1:
                t_radar_collection.TxPolarization = tx_pols[0]
            else:
                # multiple transmit polarizations are interleaved in sequence
                t_radar_collection.TxPolarization = 'SEQUENCE'
                t_radar_collection.TxSequence = [
                    TxStepType(TxPolarization=entry, index=j+1) for j, entry in enumerate(tx_pols)]
            return t_radar_collection
def get_timeline() -> TimelineType:
pulse_parts = len(self._findall('./sourceAttributes/radarParameters/pulseBandwidth'))
if self.generation == 'RS2':
pulse_rep_freq = float(
self._find('./sourceAttributes/radarParameters/pulseRepetitionFrequency').text)
elif self.generation == 'RCM':
pulse_rep_freq = float(
self._find('./sourceAttributes/radarParameters/prfInformation/pulseRepetitionFrequency').text)
else:
raise ValueError(_unhandled_generation_text.format(self.generation))
pulse_rep_freq *= pulse_parts
if pulse_parts == 2 and collection_info.RadarMode.ModeType == 'STRIPMAP':
# it's not completely clear why we need an additional factor of 2 for strip map
pulse_rep_freq *= 2
lines_processed = [
float(entry.text) for entry in
self._findall('./imageGenerationParameters/sarProcessingInformation/numberOfLinesProcessed')]
duration = None
ipp = None
# there should be one entry of num_lines_processed for each transmit/receive polarization
# and they should all be the same. Omit if this is not the case.
if (len(lines_processed) == len(tx_rcv_pols)) and all(x == lines_processed[0] for x in lines_processed):
num_lines_processed = lines_processed[0] * len(tx_pols)
duration = num_lines_processed / pulse_rep_freq
ipp = IPPSetType(
index=0, TStart=0, TEnd=duration, IPPStart=0, IPPEnd=int(num_lines_processed),
IPPPoly=Poly1DType(Coefs=(0, pulse_rep_freq)))
return TimelineType(
CollectStart=collect_start, CollectDuration=duration, IPP=[ipp, ])
        def get_image_formation() -> ImageFormationType:
            # Image formation description - the processing span is copied from the
            # timeline IPP set constructed above.
            pulse_parts = len(self._findall('./sourceAttributes/radarParameters/pulseBandwidth'))
            return ImageFormationType(
                # PRFScaleFactor for either polarimetric or multi-step, but not both.
                RcvChanProc=RcvChanProcType(
                    NumChanProc=1, PRFScaleFactor=1./max(pulse_parts, len(tx_pols))),
                ImageFormAlgo='RMA',
                TStartProc=timeline.IPP[0].TStart,
                TEndProc=timeline.IPP[0].TEnd,
                TxFrequencyProc=(
                    radar_collection.TxFrequency.Min, radar_collection.TxFrequency.Max),
                STBeamComp='GLOBAL',
                ImageBeamComp='SV',
                AzAutofocus='NO',
                RgAutofocus='NO')
        def get_rma_adjust_grid() -> RMAType:
            # Construct the RMA/INCA structure from the doppler rate/centroid
            # metadata. NOTE: this also finishes the azimuth (Col) grid parameters
            # (SS, ImpRespBW, DeltaKCOAPoly, DeltaK1/K2, TimeCOAPoly) as a side
            # effect, so it must run after get_grid_col/get_grid_row.

            # fetch all the things needed below
            # generation agnostic
            doppler_bandwidth = float(
                self._find('./imageGenerationParameters'
                           '/sarProcessingInformation'
                           '/totalProcessedAzimuthBandwidth').text)
            zero_dop_last_line = \
                parse_timestring(
                    self._find('./imageGenerationParameters'
                               '/sarProcessingInformation'
                               '/zeroDopplerTimeLastLine').text,
                    precision='us')
            zero_dop_first_line = parse_timestring(
                self._find('./imageGenerationParameters'
                           '/sarProcessingInformation'
                           '/zeroDopplerTimeFirstLine').text,
                precision='us')
            if self.generation == 'RS2':
                near_range = float(
                    self._find('./imageGenerationParameters'
                               '/sarProcessingInformation'
                               '/slantRangeNearEdge').text)
                doppler_rate_node = self._find('./imageGenerationParameters'
                                               '/dopplerRateValues')
                doppler_rate_coeffs = numpy.array(
                    [float(entry) for entry in doppler_rate_node.find('./dopplerRateValuesCoefficients').text.split()],
                    dtype='float64')
                doppler_centroid_node = self._find('./imageGenerationParameters'
                                                   '/dopplerCentroid')
            elif self.generation == 'RCM':
                near_range = float(
                    self._find('./sceneAttributes/imageAttributes/slantRangeNearEdge').text)
                doppler_rate_node = self._find('./dopplerRate'
                                               '/dopplerRateEstimate')
                doppler_rate_coeffs = numpy.array(
                    [float(entry) for entry in doppler_rate_node.find('./dopplerRateCoefficients').text.split()],
                    dtype='float64')
                doppler_centroid_node = self._find('./dopplerCentroid'
                                                   '/dopplerCentroidEstimate')
            else:
                raise ValueError(_unhandled_generation_text.format(self.generation))
            doppler_rate_ref_time = float(doppler_rate_node.find('./dopplerRateReferenceTime').text)
            doppler_cent_coeffs = numpy.array(
                [float(entry) for entry in
                 doppler_centroid_node.find('./dopplerCentroidCoefficients').text.split()],
                dtype='float64')
            doppler_cent_ref_time = float(
                doppler_centroid_node.find('./dopplerCentroidReferenceTime').text)
            doppler_cent_time_est = parse_timestring(
                doppler_centroid_node.find('./timeOfDopplerCentroidEstimate').text, precision='us')

            # orient the zero doppler times according to the look direction
            look = scpcoa.look
            if look > 0:
                # SideOfTrack == 'L'
                # we explicitly want negative time order
                if zero_dop_first_line < zero_dop_last_line:
                    zero_dop_first_line, zero_dop_last_line = zero_dop_last_line, zero_dop_first_line
            else:
                # we explicitly want positive time order
                if zero_dop_first_line > zero_dop_last_line:
                    zero_dop_first_line, zero_dop_last_line = zero_dop_last_line, zero_dop_first_line
            col_spacing_zd = get_seconds(zero_dop_last_line, zero_dop_first_line, precision='us') / \
                (image_data.NumCols - 1)
            # zero doppler time of SCP relative to collect start
            time_scp_zd = get_seconds(zero_dop_first_line, collect_start, precision='us') + \
                image_data.SCPPixel.Col * col_spacing_zd
            inca = INCAType(
                R_CA_SCP=near_range + (image_data.SCPPixel.Row * grid.Row.SS),
                FreqZero=center_frequency)
            # doppler rate calculations
            velocity = position.ARPPoly.derivative_eval(time_scp_zd, 1)
            vel_ca_squared = numpy.sum(velocity * velocity)
            # polynomial representing range as a function of range distance from SCP
            r_ca = numpy.array([inca.R_CA_SCP, 1], dtype=numpy.float64)
            # the doppler_rate_coeffs represents a polynomial in time, relative to
            # doppler_rate_ref_time.
            # to construct the doppler centroid polynomial, we need to change scales
            # to a polynomial in space, relative to SCP.
            doppler_rate_poly = Poly1DType(Coefs=doppler_rate_coeffs)
            alpha = 2.0 / speed_of_light
            t_0 = doppler_rate_ref_time - alpha * inca.R_CA_SCP
            dop_rate_scaled_coeffs = doppler_rate_poly.shift(t_0, alpha, return_poly=False)
            # DRateSFPoly is then a scaled multiple of this scaled poly and r_ca above
            coeffs = -numpy.convolve(dop_rate_scaled_coeffs, r_ca) / (alpha * center_frequency * vel_ca_squared)
            inca.DRateSFPoly = Poly2DType(Coefs=numpy.reshape(coeffs, (coeffs.size, 1)))
            # modify a few of the other fields
            ss_scale = abs(numpy.sqrt(vel_ca_squared)*inca.DRateSFPoly[0, 0])
            grid.Col.SS = abs(col_spacing_zd*ss_scale)
            grid.Col.ImpRespBW = doppler_bandwidth/ss_scale
            inca.TimeCAPoly = Poly1DType(Coefs=[time_scp_zd, -look/ss_scale])
            # doppler centroid - same time-to-space scale change as the rate above
            doppler_cent_poly = Poly1DType(Coefs=doppler_cent_coeffs)
            alpha = 2.0 / speed_of_light
            t_0 = doppler_cent_ref_time - alpha * inca.R_CA_SCP
            scaled_coeffs = doppler_cent_poly.shift(t_0, alpha, return_poly=False)
            # adjust doppler centroid for spotlight, we need to add a second
            # dimension to DopCentroidPoly
            if collection_info.RadarMode.ModeType == 'SPOTLIGHT':
                doppler_cent_est = get_seconds(doppler_cent_time_est, collect_start, precision='us')
                dop_poly = numpy.zeros((scaled_coeffs.shape[0], 2), dtype=numpy.float64)
                dop_poly[0, 1] = -look*center_frequency*alpha*numpy.sqrt(vel_ca_squared)/inca.R_CA_SCP
                dop_poly[1, 1] = -center_frequency*alpha*numpy.sqrt(vel_ca_squared)/(inca.R_CA_SCP ** 2)
                one_way_time = inca.R_CA_SCP / speed_of_light
                pos = position.ARPPoly(doppler_cent_est + one_way_time)
                vel = position.ARPPoly.derivative_eval(doppler_cent_est + one_way_time)
                los = geo_data.SCP.ECF.get_array() - pos
                vel_hat = vel / numpy.linalg.norm(vel)
                dop_poly[:, 0] += -look*(dop_poly[:, 1]*numpy.dot(los, vel_hat))
                inca.DopCentroidPoly = Poly2DType(Coefs=dop_poly)
            else:
                inca.DopCentroidPoly = Poly2DType(Coefs=numpy.reshape(scaled_coeffs, (scaled_coeffs.size, 1)))
            grid.Col.DeltaKCOAPoly = Poly2DType(Coefs=inca.DopCentroidPoly.get_array()*col_spacing_zd/grid.Col.SS)
            # compute grid.Col.DeltaK1/K2 from DeltaKCOAPoly
            coeffs = grid.Col.DeltaKCOAPoly.get_array()[:, 0]
            # get roots
            roots = polynomial.polyroots(coeffs)
            # construct range bounds (in meters)
            range_bounds = (numpy.array([0, image_data.NumRows - 1], dtype=numpy.float64)
                            - image_data.SCPPixel.Row) * grid.Row.SS
            possible_ranges = numpy.copy(range_bounds)
            useful_roots = ((roots > numpy.min(range_bounds)) & (roots < numpy.max(range_bounds)))
            if numpy.any(useful_roots):
                possible_ranges = numpy.concatenate((possible_ranges, roots[useful_roots]), axis=0)
            azimuth_bounds = (numpy.array([0, (image_data.NumCols - 1)], dtype=numpy.float64)
                              - image_data.SCPPixel.Col) * grid.Col.SS
            coords_az_2d, coords_rg_2d = numpy.meshgrid(azimuth_bounds, possible_ranges)
            possible_bounds_deltak = grid.Col.DeltaKCOAPoly(coords_rg_2d, coords_az_2d)
            grid.Col.DeltaK1 = numpy.min(possible_bounds_deltak) - 0.5 * grid.Col.ImpRespBW
            grid.Col.DeltaK2 = numpy.max(possible_bounds_deltak) + 0.5 * grid.Col.ImpRespBW
            # Wrapped spectrum
            if (grid.Col.DeltaK1 < -0.5 / grid.Col.SS) or (grid.Col.DeltaK2 > 0.5 / grid.Col.SS):
                grid.Col.DeltaK1 = -0.5 / abs(grid.Col.SS)
                grid.Col.DeltaK2 = -grid.Col.DeltaK1
            time_coa_poly = fit_time_coa_polynomial(inca, image_data, grid, dop_rate_scaled_coeffs, poly_order=2)
            if collection_info.RadarMode.ModeType == 'SPOTLIGHT':
                # using above was convenience, but not really sensible in spotlight mode
                grid.TimeCOAPoly = Poly2DType(Coefs=[[time_coa_poly.Coefs[0, 0], ], ])
                inca.DopCentroidPoly = None
            elif collection_info.RadarMode.ModeType == 'STRIPMAP':
                # fit TimeCOAPoly for grid
                grid.TimeCOAPoly = time_coa_poly
                inca.DopCentroidCOA = True
            else:
                raise ValueError('unhandled ModeType {}'.format(collection_info.RadarMode.ModeType))
            return RMAType(RMAlgoType='OMEGA_K', INCA=inca)
        def get_radiometric() -> Optional[RadiometricType]:
            # Construct the Radiometric structure from the calibration lookup
            # tables, or return None when the beta calibration file is missing.

            def perform_radiometric_fit(component_file: str) -> numpy.ndarray:
                # Fit the (inverted, squared) gain values to a polynomial in range,
                # or a constant when the lookup table is flat.
                comp_struct = _parse_xml(component_file, without_ns=(self.generation != 'RS2'))
                comp_values = numpy.array(
                    [float(entry) for entry in comp_struct.find('./gains').text.split()], dtype='float64')
                comp_values = 1. / (comp_values * comp_values)  # adjust for sicd convention
                if numpy.all(comp_values == comp_values[0]):
                    return numpy.array([[comp_values[0], ], ], dtype=numpy.float64)
                else:
                    # fit a 1-d polynomial in range
                    if self.generation == 'RS2':
                        coords_rg = \
                            (numpy.arange(image_data.NumRows) - image_data.SCPPixel.Row + image_data.FirstRow)\
                            * grid.Row.SS
                    elif self.generation == 'RCM':  # the rows are sub-sampled
                        start = int(comp_struct.find('./pixelFirstLutValue').text)
                        num_vs = int(comp_struct.find('./numberOfValues').text)
                        t_step = int(comp_struct.find('./stepSize').text)
                        rng_indices = start + numpy.arange(num_vs)*t_step
                        coords_rg = (rng_indices - image_data.SCPPixel.Row + image_data.FirstRow) * grid.Row.SS
                    else:
                        raise ValueError('Unhandled generation {}'.format(self.generation))
                    return numpy.reshape(polynomial.polyfit(coords_rg, comp_values, 3), (-1, 1))

            base_path = os.path.dirname(self.file_name)
            # locate the beta/sigma/gamma lookup table files
            if self.generation == 'RS2':
                beta_file = os.path.join(
                    base_path,
                    self._find(
                        './imageAttributes/lookupTable[@incidenceAngleCorrection="Beta Nought"]').text)
                sigma_file = os.path.join(
                    base_path,
                    self._find(
                        './imageAttributes/lookupTable[@incidenceAngleCorrection="Sigma Nought"]').text)
                gamma_file = os.path.join(
                    base_path,
                    self._find('./imageAttributes/lookupTable[@incidenceAngleCorrection="Gamma"]').text)
            elif self.generation == 'RCM':
                beta_file = os.path.join(
                    base_path, 'calibration',
                    self._find(
                        './imageReferenceAttributes/lookupTableFileName[@sarCalibrationType="Beta Nought"]').text)
                sigma_file = os.path.join(
                    base_path, 'calibration',
                    self._find(
                        './imageReferenceAttributes/lookupTableFileName[@sarCalibrationType="Sigma Nought"]').text)
                gamma_file = os.path.join(
                    base_path, 'calibration',
                    self._find(
                        './imageReferenceAttributes/lookupTableFileName[@sarCalibrationType="Gamma"]').text)
            else:
                raise ValueError(_unhandled_generation_text.format(self.generation))
            # NOTE(review): only the beta file's existence is checked - a missing
            # sigma/gamma file would raise inside perform_radiometric_fit
            if not os.path.isfile(beta_file):
                logger.error(
                    msg="Beta calibration information should be located in file {}, "
                        "which doesn't exist.".format(beta_file))
                return None

            # perform beta, sigma, gamma fit
            beta_zero_sf_poly = perform_radiometric_fit(beta_file)
            sigma_zero_sf_poly = perform_radiometric_fit(sigma_file)
            gamma_zero_sf_poly = perform_radiometric_fit(gamma_file)
            # construct noise poly
            noise_level = None
            if self.generation == 'RS2':
                # noise is in the main product.xml
                beta0_element = self._find('./sourceAttributes/radarParameters'
                                           '/referenceNoiseLevel[@incidenceAngleCorrection="Beta Nought"]')
            elif self.generation == 'RCM':
                noise_file = os.path.join(
                    base_path, 'calibration',
                    self._find('./imageReferenceAttributes/noiseLevelFileName').text)
                noise_root = _parse_xml(noise_file, without_ns=True)
                noise_levels = noise_root.findall('./referenceNoiseLevel')
                beta0s = [entry for entry in noise_levels if entry.find('sarCalibrationType').text.startswith('Beta')]
                beta0_element = beta0s[0] if len(beta0s) > 0 else None
            else:
                raise ValueError(_unhandled_generation_text.format(self.generation))

            if beta0_element is not None:
                # fit a quadratic absolute noise polynomial in range
                pfv = float(beta0_element.find('pixelFirstNoiseValue').text)
                step = float(beta0_element.find('stepSize').text)
                beta0s = numpy.array(
                    [float(x) for x in beta0_element.find('noiseLevelValues').text.split()])
                range_coords = grid.Row.SS * (numpy.arange(len(beta0s)) * step + pfv - image_data.SCPPixel.Row)
                noise_poly = polynomial.polyfit(
                    range_coords,
                    beta0s - 10 * numpy.log10(polynomial.polyval(range_coords, beta_zero_sf_poly[:, 0])), 2)
                noise_level = NoiseLevelType_(
                    NoiseLevelType='ABSOLUTE',
                    NoisePoly=Poly2DType(Coefs=numpy.reshape(noise_poly, (-1, 1))))
            return RadiometricType(BetaZeroSFPoly=beta_zero_sf_poly,
                                   SigmaZeroSFPoly=sigma_zero_sf_poly,
                                   GammaZeroSFPoly=gamma_zero_sf_poly,
                                   NoiseLevel=noise_level)
        def correct_scp() -> None:
            # refine the SCP location by projecting the SCP pixel to the HAE surface
            scp_pixel = base_sicd.ImageData.SCPPixel.get_array()
            scp_ecf = base_sicd.project_image_to_ground(scp_pixel, projection_type='HAE')
            base_sicd.update_scp(scp_ecf, coord_system='ECF')
        def get_data_file_names() -> List[str]:
            # resolve the image data file for each polarization, in polarization order
            base_path = os.path.dirname(self.file_name)
            image_files = []
            if self.generation == 'RS2':
                for pol in self._polarizations:
                    fname = self._find('./imageAttributes/fullResolutionImageData[@pole="{}"]'.format(pol))
                    if fname is None:
                        raise ValueError('Got unexpected image file structure.')
                    image_files.append(os.path.join(base_path, fname.text))
            else:
                img_attribute_node = self._find('./sceneAttributes/imageAttributes')
                results = img_attribute_node.findall('./ipdf')
                if len(results) == 1:
                    # there's either a single polarization, or it's one of the NITF files
                    image_files.append(os.path.join(base_path, results[0].text))
                else:
                    # match each polarization to its ipdf entry via the pole attribute
                    for pol in self._polarizations:
                        fname = None
                        for entry in results:
                            if entry.attrib.get('pole', None) == pol:
                                fname = entry.text
                        if fname is None:
                            raise ValueError('Got unexpected image file structure.')
                        image_files.append(os.path.join(base_path, fname))
            return image_files
        # fetch the common parameters used by the helpers above
        center_frequency = float(self._find('./sourceAttributes/radarParameters/radarCenterFrequency').text)
        collect_start = parse_timestring(self._find('./sourceAttributes/rawDataStartTime').text)
        tx_pols, tx_rcv_pols = self._get_sicd_polarizations()
        # assemble the polarization independent SICD components, in dependency order
        nitf, collection_info = self._get_sicd_collection_info(collect_start)
        image_creation = self._get_sicd_image_creation()
        position = self._get_sicd_position(collect_start)
        image_data, geo_data = get_image_and_geo_data()
        grid = GridType(ImagePlane='SLANT', Type='RGZERO', Row=get_grid_row(), Col=get_grid_col())
        radar_collection = get_radar_collection()
        timeline = get_timeline()
        image_formation = get_image_formation()
        scpcoa = SCPCOAType(SideOfTrack=self._get_side_of_track())
        rma = get_rma_adjust_grid()  # note: also finishes the Col grid parameters
        radiometric = get_radiometric()
        base_sicd = SICDType(
            CollectionInfo=collection_info,
            ImageCreation=image_creation,
            GeoData=geo_data,
            ImageData=image_data,
            Position=position,
            Grid=grid,
            RadarCollection=radar_collection,
            Timeline=timeline,
            ImageFormation=image_formation,
            SCPCOA=scpcoa,
            RMA=rma,
            Radiometric=radiometric,
            _NITF=nitf)
        correct_scp()
        base_sicd.derive()  # derive all the fields
        base_sicd.populate_rniirs(override=False)
        # now, make one copy per polarimetric entry, as appropriate
        the_files = get_data_file_names()
        the_sicds = []
        for i, (original_pol, sicd_pol) in enumerate(zip(self._polarizations, tx_rcv_pols)):
            this_sicd = base_sicd.copy()
            this_sicd.ImageFormation.RcvChanProc.ChanIndices = [i+1, ]
            this_sicd.ImageFormation.TxRcvPolarizationProc = sicd_pol
            the_sicds.append(this_sicd)
        return the_sicds, the_files
def _get_scansar_sicd(self, beam: str, burst: str) -> Tuple[List[SICDType], List[str]]:
"""
Gets the SICD collection for the given burst. This is only applicable
to ScanSAR collects. This will return one SICD per polarimetric collection.
It will also return the data file(s) for the given beam/burst.
Parameters
----------
beam : str
burst : str
Returns
-------
sicds: List[SICDType]
files: List[str]
"""
        def get_image_and_geo_data() -> Tuple[ImageDataType, GeoDataType, int]:
            # Populate ImageData/GeoData for this burst. The burst is offset inside
            # the full image grid, so the tie-point spline is evaluated at the
            # line/sample offset shifted coordinates; sample_offset is also
            # returned for use by the caller.
            img_attributes = self._find('./sceneAttributes/imageAttributes[@burst="{}"]'.format(burst))
            sample_offset = int(img_attributes.find('./pixelOffset').text)
            line_offset = int(img_attributes.find('./lineOffset').text)
            cols = int(img_attributes.find('./numLines').text)
            rows = int(img_attributes.find('./samplesPerLine').text)
            bits_per_sample = self._find('./imageReferenceAttributes/rasterAttributes/bitsPerSample').text
            if bits_per_sample == '32':
                pixel_type = 'RE32F_IM32F'
            elif bits_per_sample == '16':
                pixel_type = 'RE16I_IM16I'
            else:
                raise ValueError('Got unhandled bites per sample {}'.format(bits_per_sample))
            # SCP at the center of the burst
            scp_rows = int(0.5*rows)
            scp_cols = int(0.5*cols)
            scp_ecf = self._get_image_location(scp_cols+line_offset, scp_rows+sample_offset)
            im_data = ImageDataType(
                NumRows=rows, NumCols=cols, FirstRow=0, FirstCol=0, PixelType=pixel_type,
                FullImage=(rows, cols), SCPPixel=(scp_rows, scp_cols))
            t_geo_data = GeoDataType(SCP=SCPType(ECF=scp_ecf))
            return im_data, t_geo_data, sample_offset
        def get_grid_row() -> DirParamType:
            # Range (row) direction grid parameters for this specific beam
            row_ss = float(self._find('./imageReferenceAttributes/rasterAttributes/sampledPixelSpacing').text)
            row_irbw = 2*float(self._find('./sourceAttributes'
                                          '/radarParameters'
                                          '/pulseBandwidth[@beam="{}"]'.format(beam)).text)/speed_of_light
            row_wgt_type = WgtTypeType(
                WindowName=self._find('./imageGenerationParameters'
                                      '/sarProcessingInformation'
                                      '/rangeWindow'
                                      '/windowName').text.upper())
            if row_wgt_type.WindowName == 'KAISER':
                # the Kaiser window carries its beta parameter
                row_wgt_type.Parameters = {
                    'BETA': self._find('./imageGenerationParameters'
                                       '/sarProcessingInformation'
                                       '/rangeWindow'
                                       '/windowCoefficient').text}
            return DirParamType(
                SS=row_ss, ImpRespBW=row_irbw, Sgn=-1, KCtr=2*center_frequency/speed_of_light,
                DeltaKCOAPoly=Poly2DType(Coefs=((0,),)), WgtType=row_wgt_type)
def get_grid_col() -> DirParamType:
    """
    Construct the partial SICD Grid.Col (azimuth direction) parameters for
    this beam - spacing and bandwidth are filled in later from the doppler
    information.
    """

    window_node = self._find(
        './imageGenerationParameters/sarProcessingInformation/azimuthWindow[@beam="{}"]'.format(beam))
    window_name = window_node.find('./windowName').text.upper()
    weight_type = WgtTypeType(WindowName=window_name)
    if window_name == 'KAISER':
        # the Kaiser window coefficient is its BETA parameter
        weight_type.Parameters = {'BETA': window_node.find('./windowCoefficient').text}
    return DirParamType(Sgn=-1, KCtr=0, WgtType=weight_type)
def get_radar_collection() -> RadarCollectionType:
    """
    Construct the SICD RadarCollection, including one waveform definition
    per transmitted pulse and the transmit/receive polarization layout.
    """

    radar_params = self._find('./sourceAttributes/radarParameters')
    # Ultrafine and spotlight modes have two pulses, otherwise just one.
    bandwidth_elements = sorted(
        radar_params.findall('pulseBandwidth[@beam="{}"]'.format(beam)), key=lambda x: x.get('pulse'))
    pulse_length_elements = sorted(
        radar_params.findall('pulseLength[@beam="{}"]'.format(beam)), key=lambda x: x.get('pulse'))
    adc_elements = sorted(
        radar_params.findall('adcSamplingRate[@beam="{}"]'.format(beam)), key=lambda x: x.get('pulse'))
    samples_per_echo = float(
        radar_params.find('samplesPerEchoLine[@beam="{}"]'.format(beam)).text)
    wf_params = []
    bandwidths = numpy.empty((len(bandwidth_elements),), dtype=numpy.float64)
    for j, (bwe, ple, adce) in enumerate(zip(bandwidth_elements, pulse_length_elements, adc_elements)):
        bandwidths[j] = float(bwe.text)
        samp_rate = float(adce.text)
        wf_params.append(WaveformParametersType(index=j,
                                                TxRFBandwidth=float(bwe.text),
                                                TxPulseLength=float(ple.text),
                                                ADCSampleRate=samp_rate,
                                                RcvWindowLength=samples_per_echo / samp_rate,
                                                RcvDemodType='CHIRP',
                                                RcvFMRate=0))
    tot_bw = numpy.sum(bandwidths)
    # center the total transmitted band on the radar center frequency
    tx_freq = (center_frequency-0.5*tot_bw, center_frequency+0.5*tot_bw)
    t_radar_collection = RadarCollectionType(TxFrequency=tx_freq, Waveform=wf_params)
    # the pulses are stacked contiguously in frequency, starting at the band minimum
    t_radar_collection.Waveform[0].TxFreqStart = tx_freq[0]
    for j in range(1, len(bandwidth_elements)):
        t_radar_collection.Waveform[j].TxFreqStart = t_radar_collection.Waveform[j - 1].TxFreqStart + \
            t_radar_collection.Waveform[j - 1].TxRFBandwidth
    t_radar_collection.RcvChannels = [
        ChanParametersType(TxRcvPolarization=entry, index=j+1) for j, entry in enumerate(tx_rcv_pols)]
    if len(tx_pols) == 1:
        t_radar_collection.TxPolarization = tx_pols[0]
    else:
        # multiple transmit polarizations are represented as a sequence
        t_radar_collection.TxPolarization = 'SEQUENCE'
        t_radar_collection.TxSequence = [
            TxStepType(TxPolarization=entry, index=j+1) for j, entry in enumerate(tx_pols)]
    return t_radar_collection
def get_timeline() -> TimelineType:
    """
    Construct the SICD Timeline - a single IPP set spanning the processed
    dwell at the (constant) pulse repetition frequency.

    NB: modernized from a `# type:` comment to a return annotation, for
    consistency with the sibling helper definitions.
    """

    ipp = IPPSetType(
        index=0, TStart=0, TEnd=processing_time_span,
        IPPStart=0, IPPEnd=num_of_pulses - 1,
        IPPPoly=Poly1DType(Coefs=(0, pulse_rep_freq)))
    return TimelineType(
        CollectStart=collect_start,
        CollectDuration=processing_time_span,
        IPP=[ipp, ])
def get_image_formation() -> ImageFormationType:
    """
    Construct the SICD ImageFormation parameters, using the already
    constructed `timeline` and `radar_collection` for the processed
    time span and frequency band.
    """

    pulse_parts = len(
        self._findall('./sourceAttributes/radarParameters/pulseBandwidth[@beam="{}"]'.format(beam)))
    return ImageFormationType(
        # PRFScaleFactor for either polarimetric or multi-step, but not both.
        RcvChanProc=RcvChanProcType(
            NumChanProc=1, PRFScaleFactor=1./max(pulse_parts, len(tx_pols))),
        ImageFormAlgo='RMA',
        TStartProc=timeline.IPP[0].TStart,
        TEndProc=timeline.IPP[0].TEnd,
        TxFrequencyProc=(
            radar_collection.TxFrequency.Min, radar_collection.TxFrequency.Max),
        STBeamComp='GLOBAL',
        ImageBeamComp='SV',
        AzAutofocus='NO',
        RgAutofocus='NO')
def get_rma_adjust_grid() -> RMAType:
    """
    Construct the RMA/INCA structure from the doppler rate and doppler
    centroid estimates, and complete the remaining `grid.Col` parameters
    (SS, ImpRespBW, DeltaKCOAPoly, DeltaK1/K2) and `grid.TimeCOAPoly`.

    NOTE: this modifies the closure variable `grid` in place, and raises
    ValueError unless the radar mode is SPOTLIGHT.
    """

    sar_processing_info = self._find('./imageGenerationParameters/sarProcessingInformation')
    doppler_bandwidth = float(sar_processing_info.find('./totalProcessedAzimuthBandwidth').text)
    zero_dop_last_line = parse_timestring(
        sar_processing_info.find('./zeroDopplerTimeLastLine').text, precision='us')
    zero_dop_first_line = parse_timestring(
        sar_processing_info.find('./zeroDopplerTimeFirstLine').text, precision='us')

    # doppler rate coefficients
    doppler_rate_coeffs = numpy.array(
        [float(entry) for entry in dop_rate_estimate_node.find('./dopplerRateCoefficients').text.split()],
        dtype='float64')
    doppler_rate_ref_time = float(dop_rate_estimate_node.find('./dopplerRateReferenceTime').text)

    # zero doppler spacing of the columns, in seconds
    col_spacing_zd = \
        get_seconds(zero_dop_last_line, zero_dop_first_line, precision='us')/self._num_lines_processed
    # NB: this will be negative for Ascending
    # zero doppler time of SCP relative to collect start
    time_scp_zd = 0.5*processing_time_span
    r_ca_scp = numpy.linalg.norm(position.ARPPoly(time_scp_zd) - geo_data.SCP.ECF.get_array())
    inca = INCAType(R_CA_SCP=r_ca_scp, FreqZero=center_frequency)
    # doppler rate calculations
    velocity = position.ARPPoly.derivative_eval(time_scp_zd, der_order=1)
    vel_ca_squared = numpy.sum(velocity * velocity)
    # polynomial representing range as a function of range distance from SCP
    r_ca = numpy.array([inca.R_CA_SCP, 1], dtype='float64')
    # the doppler_rate_coeffs represents a polynomial in time, relative to
    # doppler_rate_ref_time.
    # to construct the doppler centroid polynomial, we need to change scales
    # to a polynomial in space, relative to SCP.
    doppler_rate_poly = Poly1DType(Coefs=doppler_rate_coeffs)
    alpha = 2.0 / speed_of_light
    t_0 = doppler_rate_ref_time - alpha*inca.R_CA_SCP
    dop_rate_scaled_coeffs = doppler_rate_poly.shift(t_0, alpha, return_poly=False)
    # DRateSFPoly is then a scaled multiple of this scaled poly and r_ca above
    coeffs = -numpy.convolve(dop_rate_scaled_coeffs, r_ca) / (alpha * center_frequency * vel_ca_squared)
    inca.DRateSFPoly = Poly2DType(Coefs=numpy.reshape(coeffs, (coeffs.size, 1)))
    look = scpcoa.look
    # modify a few of the other fields
    ss_scale = abs(numpy.sqrt(vel_ca_squared)*inca.DRateSFPoly[0, 0])
    grid.Col.SS = abs(col_spacing_zd*ss_scale)
    grid.Col.ImpRespBW = doppler_bandwidth/ss_scale
    inca.TimeCAPoly = Poly1DType(Coefs=[time_scp_zd, -look/ss_scale])

    # doppler centroid
    doppler_cent_coeffs = numpy.array(
        [float(entry) for entry in
         dop_centroid_estimate_node.find('./dopplerCentroidCoefficients').text.split()],
        dtype='float64')
    doppler_cent_ref_time = float(
        dop_centroid_estimate_node.find('./dopplerCentroidReferenceTime').text)
    doppler_cent_poly = Poly1DType(Coefs=doppler_cent_coeffs)
    alpha = 2.0 / speed_of_light
    t_0 = doppler_cent_ref_time - alpha * inca.R_CA_SCP
    scaled_coeffs = doppler_cent_poly.shift(t_0, alpha, return_poly=False)
    # adjust doppler centroid for spotlight, we need to add a second
    # dimension to DopCentroidPoly
    if collection_info.RadarMode.ModeType == 'SPOTLIGHT':
        doppler_cent_est = get_seconds(dop_centroid_est_time, collect_start, precision='us')
        dop_poly = numpy.zeros((scaled_coeffs.shape[0], 2), dtype=numpy.float64)
        dop_poly[0, 1] = -look*center_frequency*alpha*numpy.sqrt(vel_ca_squared)/inca.R_CA_SCP
        dop_poly[1, 1] = -center_frequency*alpha*numpy.sqrt(vel_ca_squared)/(inca.R_CA_SCP**2)
        one_way_time = inca.R_CA_SCP / speed_of_light
        use_time = doppler_cent_est + one_way_time
        pos = position.ARPPoly(use_time)
        vel = position.ARPPoly.derivative_eval(use_time, der_order=1)
        los = geo_data.SCP.ECF.get_array() - pos
        vel_hat = vel / numpy.linalg.norm(vel)
        dop_poly[:, 0] += -look*(dop_poly[:, 1]*numpy.dot(los, vel_hat))
        inca.DopCentroidPoly = Poly2DType(Coefs=dop_poly)  # NB: this is set for use in fit-time_coa_poly
    else:
        raise ValueError('ScanSAR mode data should be SPOTLIGHT mode')
    grid.Col.DeltaKCOAPoly = Poly2DType(Coefs=inca.DopCentroidPoly.get_array()*col_spacing_zd/grid.Col.SS)

    # compute grid.Col.DeltaK1/K2 from DeltaKCOAPoly
    coeffs = grid.Col.DeltaKCOAPoly.get_array()[:, 0]
    # get roots
    roots = polynomial.polyroots(coeffs)
    # construct range bounds (in meters)
    range_bounds = (numpy.array([0, image_data.NumRows - 1], dtype='float64')
                    - image_data.SCPPixel.Row) * grid.Row.SS
    possible_ranges = numpy.copy(range_bounds)
    # interior extrema of the polynomial also bound DeltaK
    useful_roots = ((roots > numpy.min(range_bounds)) & (roots < numpy.max(range_bounds)))
    if numpy.any(useful_roots):
        possible_ranges = numpy.concatenate((possible_ranges, roots[useful_roots]), axis=0)
    azimuth_bounds = (numpy.array([0, (image_data.NumCols - 1)], dtype='float64')
                      - image_data.SCPPixel.Col) * grid.Col.SS
    coords_az_2d, coords_rg_2d = numpy.meshgrid(azimuth_bounds, possible_ranges)
    possible_bounds_deltak = grid.Col.DeltaKCOAPoly(coords_rg_2d, coords_az_2d)
    grid.Col.DeltaK1 = numpy.min(possible_bounds_deltak) - 0.5 * grid.Col.ImpRespBW
    grid.Col.DeltaK2 = numpy.max(possible_bounds_deltak) + 0.5 * grid.Col.ImpRespBW
    # Wrapped spectrum
    if (grid.Col.DeltaK1 < -0.5 / grid.Col.SS) or (grid.Col.DeltaK2 > 0.5 / grid.Col.SS):
        grid.Col.DeltaK1 = -0.5 / abs(grid.Col.SS)
        grid.Col.DeltaK2 = -grid.Col.DeltaK1

    time_coa_poly = fit_time_coa_polynomial(inca, image_data, grid, dop_rate_scaled_coeffs, poly_order=2)
    if collection_info.RadarMode.ModeType == 'SPOTLIGHT':
        # using above was convenience, but not really sensible in spotlight mode
        grid.TimeCOAPoly = Poly2DType(Coefs=[[time_coa_poly.Coefs[0, 0], ], ])
        inca.DopCentroidPoly = None
    else:
        raise ValueError('ScanSAR mode data should be SPOTLIGHT mode')
    return RMAType(RMAlgoType='OMEGA_K', INCA=inca)
def get_radiometric() -> Optional[RadiometricType]:
    """
    Construct the SICD Radiometric parameters (beta/sigma/gamma scale
    factor polynomials and noise level) from the calibration lookup table
    files in the package's `calibration` directory.

    Returns None (with an error logged) if the beta calibration file is
    missing.
    """

    def perform_radiometric_fit(component_file: str) -> numpy.ndarray:
        # fit the (inverted, squared) gain lookup values to a polynomial in range
        comp_struct = _parse_xml(component_file, without_ns=True)
        comp_values = numpy.array(
            [float(entry) for entry in comp_struct.find('./gains').text.split()], dtype=numpy.float64)
        comp_values = 1. / (comp_values * comp_values)  # adjust for sicd convention
        if numpy.all(comp_values == comp_values[0]):
            # constant gain - a degenerate constant polynomial
            return numpy.array([[comp_values[0], ], ], dtype=numpy.float64)
        else:
            # fit a 1-d polynomial in range
            t_pfv = int(comp_struct.find('./pixelFirstLutValue').text)
            t_step = int(comp_struct.find('./stepSize').text)
            num_vs = int(comp_struct.find('./numberOfValues').text)
            t_range_inds = t_pfv + numpy.arange(num_vs)*t_step
            coords_rg = grid.Row.SS * (t_range_inds - (image_data.SCPPixel.Row + row_shift))
            return numpy.reshape(polynomial.polyfit(coords_rg, comp_values, 3), (-1, 1))

    # NB: I am neglecting the difference in polarization for these
    base_path = os.path.dirname(self.file_name)
    beta_file = os.path.join(
        base_path, 'calibration',
        self._find('./imageReferenceAttributes/lookupTableFileName[@sarCalibrationType="Beta Nought"]').text)
    sigma_file = os.path.join(
        base_path, 'calibration',
        self._find('./imageReferenceAttributes/lookupTableFileName[@sarCalibrationType="Sigma Nought"]').text)
    gamma_file = os.path.join(
        base_path, 'calibration',
        self._find('./imageReferenceAttributes/lookupTableFileName[@sarCalibrationType="Gamma"]').text)
    if not os.path.isfile(beta_file):
        logger.error(
            msg="Beta calibration information should be located in file {}, "
                "which doesn't exist.".format(beta_file))
        return None

    # perform beta, sigma, gamma fit
    beta_zero_sf_poly = perform_radiometric_fit(beta_file)
    sigma_zero_sf_poly = perform_radiometric_fit(sigma_file)
    gamma_zero_sf_poly = perform_radiometric_fit(gamma_file)

    # construct noise poly, neglecting the polarization again
    noise_level = None
    noise_stem = self._find('./imageReferenceAttributes/noiseLevelFileName').text
    noise_file = os.path.join(base_path, 'calibration', noise_stem)
    noise_root = _parse_xml(noise_file, without_ns=True)
    noise_levels = noise_root.findall('./perBeamReferenceNoiseLevel')
    # find the Beta-type noise entry for this beam, if any
    beta0s = [entry for entry in noise_levels if (entry.find('./sarCalibrationType').text.startswith('Beta')
                                                  and entry.find('./beam').text == beam)]
    beta0_element = beta0s[0] if len(beta0s) > 0 else None

    if beta0_element is not None:
        pfv = float(beta0_element.find('pixelFirstNoiseValue').text)
        step = float(beta0_element.find('stepSize').text)
        beta0s = numpy.array(
            [float(x) for x in beta0_element.find('noiseLevelValues').text.split()])
        range_inds = pfv + numpy.arange(len(beta0s)) * step
        range_coords = grid.Row.SS * (range_inds - (image_data.SCPPixel.Row + row_shift))
        # noise relative to beta zero, in dB (the 10*log10 factor)
        noise_value = beta0s - 10*numpy.log10(polynomial.polyval(range_coords, beta_zero_sf_poly[:, 0]))
        noise_poly = polynomial.polyfit(range_coords, noise_value, 2)
        noise_level = NoiseLevelType_(
            NoiseLevelType='ABSOLUTE',
            NoisePoly=Poly2DType(Coefs=numpy.reshape(noise_poly, (-1, 1))))
    return RadiometricType(BetaZeroSFPoly=beta_zero_sf_poly,
                           SigmaZeroSFPoly=sigma_zero_sf_poly,
                           GammaZeroSFPoly=gamma_zero_sf_poly,
                           NoiseLevel=noise_level)
def correct_scp() -> None:
    """
    Refine GeoData.SCP in place, by reprojecting the SCP pixel of the
    assembled `base_sicd` to the HAE surface.
    """

    pixel = base_sicd.ImageData.SCPPixel.get_array()
    base_sicd.GeoData.SCP.ECF = base_sicd.project_image_to_ground(pixel, projection_type='HAE')
def get_data_file_names() -> List[str]:
    """
    Return the full path(s) of the data file(s) for this burst, ordered
    to match `self._polarizations`.

    Raises
    ------
    ValueError
        If no ipdf entry matches one of the expected polarizations.
    """

    base_path = os.path.dirname(self.file_name)
    img_attribute_node = self._find('./sceneAttributes/imageAttributes[@burst="{}"]'.format(burst))
    results = img_attribute_node.findall('./ipdf')
    if len(results) == 1:
        # there's either a single polarization, or it's one of the NITF files
        return [os.path.join(base_path, results[0].text), ]

    image_files = []
    for pol in self._polarizations:
        fname = None
        # NB: deliberately scan all entries, keeping the last match
        for entry in results:
            if entry.attrib.get('pole', None) == pol:
                fname = entry.text
        if fname is None:
            raise ValueError('Got unexpected image file structure for burst {}'.format(burst))
        image_files.append(os.path.join(base_path, fname))
    return image_files
# main body - validate the generation, extract shared metadata, then assemble
if self.generation != 'RCM':
    raise ValueError('Unhandled generation {}'.format(self.generation))

center_frequency = float(self._find('./sourceAttributes/radarParameters/radarCenterFrequency').text)
tx_pols, tx_rcv_pols = self._get_sicd_polarizations()

# extract some common use doppler information
num_of_pulses = int(
    self._find(
        './sourceAttributes/radarParameters/numberOfPulseIntervalsPerDwell[@beam="{}"]'.format(beam)).text)
# NB: I am neglecting that prfInformation is provided separately for pols data because
# it appears identical
pulse_rep_freq = float(
    self._find('./sourceAttributes'
               '/radarParameters'
               '/prfInformation[@beam="{}"]'.format(beam) +
               '/pulseRepetitionFrequency').text)

dop_rate_estimate_node = self._find(
    './dopplerRate/dopplerRateEstimate[@burst="{}"]'.format(burst))
dop_centroid_estimate_node = self._find(
    './dopplerCentroid/dopplerCentroidEstimate[@burst="{}"]'.format(burst))
dop_centroid_est_time = parse_timestring(
    dop_centroid_estimate_node.find(
        './timeOfDopplerCentroidEstimate').text, precision='us')
processing_time_span = (num_of_pulses - 1)/pulse_rep_freq  # in seconds
# take the collect start as half the dwell (in microseconds) before the
# doppler centroid estimate time
collect_start = dop_centroid_est_time - int(0.5*(processing_time_span*1000000))

# construct the SICD pieces - NB: the helper closures above share these variables
nitf, collection_info = self._get_sicd_collection_info(collect_start)
image_creation = self._get_sicd_image_creation()
position = self._get_sicd_position(collect_start)
image_data, geo_data, row_shift = get_image_and_geo_data()
grid = GridType(ImagePlane='SLANT', Type='RGZERO', Row=get_grid_row(), Col=get_grid_col())
radar_collection = get_radar_collection()
timeline = get_timeline()
image_formation = get_image_formation()
scpcoa = SCPCOAType(SideOfTrack=self._get_side_of_track())
rma = get_rma_adjust_grid()  # NB: this also completes the grid column parameters
radiometric = get_radiometric()
base_sicd = SICDType(
    CollectionInfo=collection_info,
    ImageCreation=image_creation,
    GeoData=geo_data,
    ImageData=image_data,
    Position=position,
    Grid=grid,
    RadarCollection=radar_collection,
    Timeline=timeline,
    ImageFormation=image_formation,
    SCPCOA=scpcoa,
    RMA=rma,
    Radiometric=radiometric,
    _NITF=nitf)
correct_scp()
base_sicd.derive()  # derive all the fields
base_sicd.populate_rniirs(override=False)

# now, make one copy per polarimetric entry, as appropriate
the_files = get_data_file_names()
the_sicds = []
for i, (original_pol, sicd_pol) in enumerate(zip(self._polarizations, tx_rcv_pols)):
    this_sicd = base_sicd.copy()
    this_sicd.ImageFormation.RcvChanProc.ChanIndices = [i+1, ]
    this_sicd.ImageFormation.TxRcvPolarizationProc = sicd_pol
    the_sicds.append(this_sicd)
return the_sicds, the_files
def get_sicd_collection(self) -> Tuple[List[List[SICDType]], List[List[str]]]:
    """
    Assemble the full collection of sicd objects and their data files -
    one inner list per burst (or a single inner list for non-ScanSAR data).

    Returns
    -------
    sicds: List[List[SICDType]]
    files: List[List[str]]
    """

    if self._bursts is None:
        # not ScanSAR - a single collection of sicds/files
        sicd_list, file_list = self._get_regular_sicd()
        return [sicd_list, ], [file_list, ]

    # ScanSAR - one collection per (beam, burst) pair
    sicds = []
    data_files = []
    for beam, burst in self._bursts:
        sicd_list, file_list = self._get_scansar_sicd(beam, burst)
        sicds.append(sicd_list)
        data_files.append(file_list)
    return sicds, data_files
##############
# reader implementation - really just borrows from tiff or NITF reader
class RadarSatReader(SICDTypeReader):
    """
    A RadarSat-2 and RadarSat Constellation Mission (RCM) SLC file package
    reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_radar_sat_details', '_other_reader')

    def __init__(self, radar_sat_details):
        """
        Parameters
        ----------
        radar_sat_details : str|RadarSatDetails
            file name or RadarSatDetails object

        Raises
        ------
        TypeError
            If the input is neither a string nor a RadarSatDetails object.
        """

        # possible auxiliary NITF reader reference - see _construct_segments
        self._other_reader = None
        if isinstance(radar_sat_details, str):
            radar_sat_details = RadarSatDetails(radar_sat_details)
        if not isinstance(radar_sat_details, RadarSatDetails):
            raise TypeError('The input argument for RadarSatReader must be a '
                            'filename or RadarSatDetails object')
        self._radar_sat_details = radar_sat_details
        # determine symmetry
        reverse_axes, transpose_axes = self._radar_sat_details.get_symmetry()
        # get the sicd collection and data file names
        the_sicds, the_files = self.radarsat_details.get_sicd_collection()
        use_sicds = []
        the_segments = []
        for sicd_entry, file_entry in zip(the_sicds, the_files):
            the_segments.extend(self._construct_segments(sicd_entry, file_entry, reverse_axes, transpose_axes))
            use_sicds.extend(sicd_entry)
        SICDTypeReader.__init__(self, the_segments, use_sicds, close_segments=True)
        self._check_sizes()

    def _construct_segments(
            self,
            sicds: List[SICDType],
            data_files: List[str],
            reverse_axes: Optional[Tuple[int, ...]],
            transpose_axes: Optional[Tuple[int, ...]]) -> List[DataSegment]:
        """
        Construct the data segments.

        Parameters
        ----------
        sicds : List[SICDType]
        data_files : List[str]
        reverse_axes : None|Tuple[int, ...]
        transpose_axes : None|Tuple[int, ...]

        Returns
        -------
        List[DataSegment]
        """

        data_segments = []
        if len(sicds) == len(data_files):
            # one data file per sicd - may be tiff or nitf
            for sicd, data_file in zip(sicds, data_files):
                fext = os.path.splitext(data_file)[1]
                if fext in ['.tiff', '.tif']:
                    data_segments.append(_construct_tiff_segment(sicd, data_file, reverse_axes, transpose_axes))
                elif fext in ['.nitf', '.ntf']:
                    reader, segment = _construct_single_nitf_segment(sicd, data_file, reverse_axes, transpose_axes)
                    self._other_reader = reader  # NB: maintain reference, to keep segment open
                    data_segments.append(segment)
                else:
                    raise ValueError(
                        'The radarsat reader requires image files in tiff or nitf format. '
                        'Uncertain how to interpret file {}'.format(data_file))
        elif len(data_files) == 1:
            # multiple polarizations packed in a single file, which must be nitf
            data_file = data_files[0]
            fext = os.path.splitext(data_file)[1]
            if fext not in ['.nitf', '.ntf']:
                raise ValueError(
                    'The radarsat has image data for multiple polarizations provided in a '
                    'single image file. This requires an image files in nitf format. '
                    'Uncertain how to interpret file {}'.format(data_file))
            reader, segments = _construct_multiple_nitf_segment(sicds, data_file, reverse_axes, transpose_axes)
            self._other_reader = reader  # NB: maintain reference, to keep segments open
            data_segments.extend(segments)
        else:
            raise ValueError(
                'Unclear how to construct chipper elements for {} sicd elements '
                'from {} image files.'.format(len(sicds), len(data_files)))
        return data_segments

    @property
    def radarsat_details(self) -> RadarSatDetails:
        """
        RadarSatDetails: The RadarSat/RCM details object.
        """

        return self._radar_sat_details

    @property
    def file_name(self) -> str:
        # the package directory serves as the file name for this reader
        return self.radarsat_details.directory_name

    def close(self) -> None:
        SICDTypeReader.close(self)
        # release the auxiliary NITF reader reference, if any
        self._other_reader = None
########
# base expected functionality for a module with an implemented Reader
def is_a(file_name: str) -> Optional[RadarSatReader]:
    """
    Check whether `file_name` points at (or into) a RadarSat/RCM product
    package, constructing a reader when it does.

    Parameters
    ----------
    file_name : str
        the path to check

    Returns
    -------
    RadarSatReader|None
        A reader instance on success, `None` otherwise
    """

    if is_file_like(file_name):
        # file-like objects are not supported for this format
        return None

    try:
        the_details = RadarSatDetails(file_name)
        logger.info('Path {} is determined to be or contain a RadarSat or RCM product.xml file.'.format(file_name))
        return RadarSatReader(the_details)
    except SarpyIOError:
        return None
| 80,332 | 46.282519 | 120 | py |
sarpy | sarpy-master/sarpy/io/complex/gff.py | """
Functionality for reading a GFF file into a SICD model.
Note: This has been tested on files of version 1.8 and 2.5, but hopefully works for others.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import os
import struct
from typing import Tuple, Union, BinaryIO, Optional
from datetime import datetime
from tempfile import mkstemp
import zlib
import numpy
from scipy.constants import speed_of_light
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.format_function import ComplexFormatFunction
from sarpy.io.general.data_segment import DataSegment, NumpyMemmapSegment
from sarpy.io.general.utils import is_file_like, MemMap
from sarpy.geometry.geocoords import geodetic_to_ecf, wgs_84_norm, ned_to_ecf
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, \
RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, \
WgtTypeType
from sarpy.io.complex.sicd_elements.SCPCOA import SCPCOAType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
WaveformParametersType, ChanParametersType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, \
RcvChanProcType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType, \
NoiseLevelType_
try:
import PIL
except ImportError:
PIL = None
logger = logging.getLogger(__name__)
_requires_array_text = 'Requires numpy.ndarray, got `{}`'
_requires_3darray_text = 'Requires a three-dimensional numpy.ndarray\n\t' \
'(with band in the last dimension), got shape {}'
####################
# utility functions
def _get_string(bytes_in):
bytes_in = bytes_in.replace(b'\x00', b'')
return bytes_in.decode('utf-8')
def _rescale_float(int_in, scale):
return float(int_in)/scale
####################
# version 1 specific header parsing
class _GFFHeader_1_6(object):
    """
    Interpreter for the GFF version 1.6 header. Fields are read
    sequentially from the file object; fixed-point integer fields are
    rescaled by the appropriate power of two via `_rescale_float`.
    """

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
            The open GFF file object - reading starts at offset 12.
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        self.file_object = fi
        self.estr = estr
        self.version = '1.6'

        fi.seek(12, os.SEEK_SET)
        # starting at line 3 of def
        self.header_length = struct.unpack(estr+'I', fi.read(4))[0]
        if self.header_length < 952:
            raise ValueError(
                'The provided header is apparently too short to be a version 1.6 GFF header')
        fi.read(2)  # redundant
        self.creator = _get_string(fi.read(24))
        self.date_time = struct.unpack(estr+'6H', fi.read(6*2))  # year,month, day, hour, minute, second
        fi.read(2)  # endian, already parsed
        self.bytes_per_pixel, self.frame_count, self.image_type, \
            self.row_major, self.range_count, self.azimuth_count = \
            struct.unpack(estr+'6I', fi.read(6*4))
        self.scale_exponent, self.scale_mantissa, self.offset_exponent, self.offset_mantissa = \
            struct.unpack(estr+'4i', fi.read(4*4))
        # at line 17 of def
        fi.read(2)  # redundant
        self.comment = _get_string(fi.read(166))
        self.image_plane = struct.unpack(estr+'I', fi.read(4))[0]
        range_pixel_size, azimuth_pixel_size, azimuth_overlap = struct.unpack(estr+'3I', fi.read(3*4))
        self.range_pixel_size = _rescale_float(range_pixel_size, 1 << 16)
        self.azimuth_pixel_size = _rescale_float(azimuth_pixel_size, 1 << 16)
        self.azimuth_overlap = _rescale_float(azimuth_overlap, 1 << 16)
        srp_lat, srp_lon, srp_alt, rfoa, x_to_srp = struct.unpack(estr+'5i', fi.read(5*4))
        self.srp_lat = _rescale_float(srp_lat, 1 << 23)
        self.srp_lon = _rescale_float(srp_lon, 1 << 23)
        self.srp_alt = _rescale_float(srp_alt, 1 << 16)
        self.rfoa = _rescale_float(rfoa, 1 << 23)
        self.x_to_srp = _rescale_float(x_to_srp, 1 << 16)
        fi.read(2)
        self.phase_name = _get_string(fi.read(128))
        fi.read(2)
        self.image_name = _get_string(fi.read(128))
        # at line 32 of def
        self.look_count, self.param_ref_ap, self.param_ref_pos = \
            struct.unpack(estr+'3I', fi.read(3*4))
        graze_angle, squint, gta, range_beam_ctr, flight_time = \
            struct.unpack(estr + 'I2i2I', fi.read(5*4))
        self.graze_angle = _rescale_float(graze_angle, 1 << 23)
        self.squint = _rescale_float(squint, 1 << 23)
        self.gta = _rescale_float(gta, 1 << 23)
        self.range_beam_ctr = _rescale_float(range_beam_ctr, 1 << 8)
        self.flight_time = _rescale_float(flight_time, 1000)
        self.range_chirp_rate, x_to_start, self.mo_comp_mode, v_x = \
            struct.unpack(estr+'fi2I', fi.read(4*4))
        self.x_to_start = _rescale_float(x_to_start, 1 << 16)
        self.v_x = _rescale_float(v_x, 1 << 16)
        # at line 44 of def
        apc_lat, apc_lon, apc_alt = struct.unpack(estr+'3i', fi.read(3*4))
        self.apc_lat = _rescale_float(apc_lat, 1 << 23)
        self.apc_lon = _rescale_float(apc_lon, 1 << 23)
        self.apc_alt = _rescale_float(apc_alt, 1 << 16)
        cal_parm, self.logical_block_address = struct.unpack(estr+'2I', fi.read(2*4))
        self.cal_parm = _rescale_float(cal_parm, 1 << 24)
        az_resolution, range_resolution = struct.unpack(estr+'2I', fi.read(2*4))
        self.az_resolution = _rescale_float(az_resolution, 1 << 16)
        self.range_resolution = _rescale_float(range_resolution, 1 << 16)
        des_sigma_n, des_graze, des_squint, des_range, scene_track_angle = \
            struct.unpack(estr+'iIiIi', fi.read(5*4))
        self.des_sigma_n = _rescale_float(des_sigma_n, 1 << 23)
        self.des_graze = _rescale_float(des_graze, 1 << 23)
        self.des_squint = _rescale_float(des_squint, 1 << 23)
        self.des_range = _rescale_float(des_range, 1 << 8)
        self.scene_track_angle = _rescale_float(scene_track_angle, 1 << 23)
        # at line 56 of def
        self.user_param = fi.read(48)  # leave uninterpreted
        self.coarse_snr, self.coarse_azimuth_sub, self.coarse_range_sub, \
            self.max_azimuth_shift, self.max_range_shift, \
            self.coarse_delta_azimuth, self.coarse_delta_range = \
            struct.unpack(estr+'7i', fi.read(7*4))
        self.tot_procs, self.tpt_box_cmode, self.snr_thresh, self.range_size, \
            self.map_box_size, self.box_size, self.box_spc, self.tot_tpts, \
            self.good_tpts, self.range_seed, self.range_shift, self.azimuth_shift = \
            struct.unpack(estr+'12i', fi.read(12*4))
        # at line 76 of def
        self.sum_x_ramp, self.sum_y_ramp = struct.unpack(estr+'2i', fi.read(2*4))
        self.cy9k_tape_block, self.nominal_center_frequency = struct.unpack(estr+'If', fi.read(2*4))
        self.image_flags, self.line_number, self.patch_number = struct.unpack(estr+'3I', fi.read(3*4))
        self.lambda0, self.srange_pix_space = struct.unpack(estr+'2f', fi.read(2*4))
        self.dopp_pix_space, self.dopp_offset, self.dopp_range_scale, self.mux_time_delay = \
            struct.unpack(estr+'4f', fi.read(4*4))
        # at line 89 of def
        self.apc_ecef = struct.unpack(estr+'3d', fi.read(3*8))
        self.vel_ecef = struct.unpack(estr+'3f', fi.read(3*4))
        self.phase_cal = struct.unpack(estr+'f', fi.read(4))[0]
        self.srp_ecef = struct.unpack(estr+'3d', fi.read(3*8))
        self.res5 = fi.read(64)  # leave uninterpreted
class _Radar_1_8(object):
    """
    The radar details, for version 1.8
    """

    def __init__(self, the_bytes, estr):
        """
        Parameters
        ----------
        the_bytes : bytes
            This will be required to have length 76
        estr : str
            The endianness format string
        """

        if not (isinstance(the_bytes, bytes) and len(the_bytes) == 76):
            raise ValueError('Incorrect length input')
        # fixed layout: three text fields, one uint32, then a final text field
        self.platform = _get_string(the_bytes[0:24])
        self.proc_id = _get_string(the_bytes[24:36])
        self.radar_model = _get_string(the_bytes[36:48])
        self.radar_id = struct.unpack_from(estr+'I', the_bytes, 48)[0]
        self.swid = _get_string(the_bytes[52:])
class _GFFHeader_1_8(object):
    """
    Interpreter for the GFF version 1.8 header. Fields are read
    sequentially from the file object; fixed-point integer fields are
    rescaled by the appropriate power of two via `_rescale_float`.
    """

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
            The open GFF file object - reading starts at offset 12.
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        self.file_object = fi
        self.estr = estr
        self.version = '1.8'

        fi.seek(12, os.SEEK_SET)
        # starting at line 3 of def
        self.header_length = struct.unpack(estr+'I', fi.read(4))[0]
        if self.header_length < 2040:
            raise ValueError(
                'The provided header is apparently too short to be a version 1.8 GFF header')
        fi.read(2)  # redundant
        self.creator = _get_string(fi.read(24))
        self.date_time = struct.unpack(estr+'6H', fi.read(6*2))  # year, month, day, hour, minute, second
        fi.read(2)  # endian, already parsed
        # NB: in 1.8 bytes_per_pixel is stored as a float, unlike 1.6
        self.bytes_per_pixel = int(struct.unpack(estr+'f', fi.read(4))[0])
        self.frame_count, self.image_type, self.row_major, self.range_count, \
            self.azimuth_count = struct.unpack(estr+'5I', fi.read(5*4))
        self.scale_exponent, self.scale_mantissa, self.offset_exponent, self.offset_mantissa = \
            struct.unpack(estr+'4i', fi.read(4*4))
        # at line 17 of def
        self.res1 = fi.read(32)  # leave uninterpreted
        fi.read(2)  # redundant
        self.comment = _get_string(fi.read(166))
        self.image_plane = struct.unpack(estr+'I', fi.read(4))[0]
        range_pixel_size, azimuth_pixel_size, azimuth_overlap = struct.unpack(estr+'3I', fi.read(3*4))
        self.range_pixel_size = _rescale_float(range_pixel_size, 1 << 16)
        self.azimuth_pixel_size = _rescale_float(azimuth_pixel_size, 1 << 16)
        self.azimuth_overlap = _rescale_float(azimuth_overlap, 1 << 16)
        srp_lat, srp_lon, srp_alt, rfoa, x_to_srp = struct.unpack(estr+'5i', fi.read(5*4))
        self.srp_lat = _rescale_float(srp_lat, 1 << 23)
        self.srp_lon = _rescale_float(srp_lon, 1 << 23)
        self.srp_alt = _rescale_float(srp_alt, 1 << 16)
        self.rfoa = _rescale_float(rfoa, 1 << 23)
        self.x_to_srp = _rescale_float(x_to_srp, 1 << 16)
        self.res2 = fi.read(32)  # leave uninterpreted
        fi.read(2)
        self.phase_name = _get_string(fi.read(128))
        fi.read(2)
        self.image_name = _get_string(fi.read(128))
        # at line 34 of def
        self.look_count, self.param_ref_ap, self.param_ref_pos = \
            struct.unpack(estr + '3I', fi.read(3*4))
        graze_angle, squint, gta, range_beam_ctr, flight_time = \
            struct.unpack(estr + 'I2i2I', fi.read(5*4))
        self.graze_angle = _rescale_float(graze_angle, 1 << 23)
        self.squint = _rescale_float(squint, 1 << 23)
        self.gta = _rescale_float(gta, 1 << 23)
        self.range_beam_ctr = _rescale_float(range_beam_ctr, 1 << 8)
        self.flight_time = _rescale_float(flight_time, 1000)
        self.range_chirp_rate, x_to_start, self.mo_comp_mode, v_x = \
            struct.unpack(estr + 'fi2I', fi.read(4*4))
        self.x_to_start = _rescale_float(x_to_start, 1 << 16)
        self.v_x = _rescale_float(v_x, 1 << 16)
        # at line 46 of def
        apc_lat, apc_lon, apc_alt = struct.unpack(estr + '3i', fi.read(3*4))
        self.apc_lat = _rescale_float(apc_lat, 1 << 23)
        self.apc_lon = _rescale_float(apc_lon, 1 << 23)
        self.apc_alt = _rescale_float(apc_alt, 1 << 16)
        cal_parm, self.logical_block_address = struct.unpack(estr + '2I', fi.read(2*4))
        self.cal_parm = _rescale_float(cal_parm, 1 << 24)
        az_resolution, range_resolution = struct.unpack(estr + '2I', fi.read(2*4))
        self.az_resolution = _rescale_float(az_resolution, 1 << 16)
        self.range_resolution = _rescale_float(range_resolution, 1 << 16)
        des_sigma_n, des_graze, des_squint, des_range, scene_track_angle = \
            struct.unpack(estr + 'iIiIi', fi.read(5*4))
        self.des_sigma_n = _rescale_float(des_sigma_n, 1 << 23)
        self.des_graze = _rescale_float(des_graze, 1 << 23)
        self.des_squint = _rescale_float(des_squint, 1 << 23)
        self.des_range = _rescale_float(des_range, 1 << 8)
        self.scene_track_angle = _rescale_float(scene_track_angle, 1 << 23)
        # at line 58 of def
        self.user_param = fi.read(48)  # leave uninterpreted
        self.coarse_snr, self.coarse_azimuth_sub, self.coarse_range_sub, \
            self.max_azimuth_shift, self.max_range_shift, \
            self.coarse_delta_azimuth, self.coarse_delta_range = \
            struct.unpack(estr + '7i', fi.read(7*4))
        self.tot_procs, self.tpt_box_cmode, self.snr_thresh, self.range_size, \
            self.map_box_size, self.box_size, self.box_spc, self.tot_tpts, \
            self.good_tpts, self.range_seed, self.range_shift, self.azimuth_shift = \
            struct.unpack(estr + '12i', fi.read(12*4))
        # at line 78 of def
        self.sum_x_ramp, self.sum_y_ramp = struct.unpack(estr + '2i', fi.read(2*4))
        self.cy9k_tape_block, self.nominal_center_frequency = struct.unpack(estr + 'If', fi.read(2*4))
        self.image_flags, self.line_number, self.patch_number = struct.unpack(estr + '3I', fi.read(3*4))
        self.lambda0, self.srange_pix_space = struct.unpack(estr + '2f', fi.read(2*4))
        self.dopp_pix_space, self.dopp_offset, self.dopp_range_scale, self.mux_time_delay = \
            struct.unpack(estr + '4f', fi.read(4*4))
        # at line 91 of def
        self.apc_ecef = struct.unpack(estr+'3d', fi.read(3*8))
        self.vel_ecef = struct.unpack(estr+'3f', fi.read(3*4))
        self.phase_cal = struct.unpack(estr+'f', fi.read(4))[0]
        self.srp_ecef = struct.unpack(estr+'3d', fi.read(3*8))
        self.res5 = fi.read(64)  # leave uninterpreted
        # at line 102 - the fields below are new in version 1.8
        self.header_length1 = struct.unpack(estr+'I', fi.read(4))[0]
        self.image_date = struct.unpack(estr+'6H', fi.read(6*2))  # year,month, day, hour, minute, second
        self.comp_file_name = _get_string(fi.read(128))
        self.ref_file_name = _get_string(fi.read(128))
        # three radar description records (see _Radar_1_8)
        self.IE = _Radar_1_8(fi.read(76), estr)
        self.IF = _Radar_1_8(fi.read(76), estr)
        self.if_algo = _get_string(fi.read(8))
        self.PH = _Radar_1_8(fi.read(76), estr)
        # at line 122 of def
        self.ph_data_rcd, self.proc_product = struct.unpack(estr+'2i', fi.read(2*4))
        self.mission_text = _get_string(fi.read(8))
        self.ph_source, self.gps_week = struct.unpack(estr+'iI', fi.read(2*4))
        self.data_collect_reqh = _get_string(fi.read(14))
        self.res6 = fi.read(2)  # leave uninterpreted
        # at line 129
        self.grid_name = _get_string(fi.read(24))
        self.pix_val_linearity, self.complex_or_real, self.bits_per_magnitude, \
            self.bits_per_phase = struct.unpack(estr+'2i2H', fi.read(2*4+2*2))
        self.complex_order_type, self.pix_data_type, self.image_length, \
            self.image_cmp_scheme = struct.unpack(estr+'4i', fi.read(4*4))
        # at line 138
        self.apbo, self.asa_pitch, self.asa_squint, self.dsa_pitch, self.ira = \
            struct.unpack(estr+'5f', fi.read(5*4))
        self.rx_polarization = struct.unpack(estr+'2f', fi.read(2*4))
        self.tx_polarization = struct.unpack(estr+'2f', fi.read(2*4))
        self.v_avg = struct.unpack(estr+'3f', fi.read(3*4))
        self.apc_avg = struct.unpack(estr+'3f', fi.read(3*4))
        self.averaging_time, self.dgta = struct.unpack(estr+'2f', fi.read(2*4))
        # at line 153
        velocity_y, velocity_z = struct.unpack(estr+'2I', fi.read(2*4))
        self.velocity_y = _rescale_float(velocity_y, 1 << 16)
        self.velocity_z = _rescale_float(velocity_z, 1 << 16)
        self.ba, self.be = struct.unpack(estr+'2f', fi.read(2*4))
        self.az_geom_corr, self.range_geom_corr, self.az_win_fac_bw, \
            self.range_win_fac_bw = struct.unpack(estr+'2i2f', fi.read(4*4))
        self.az_win_id = _get_string(fi.read(48))
        self.range_win_id = _get_string(fi.read(48))
        # at line 163
        self.keep_out_viol_prcnt = struct.unpack(estr+'f', fi.read(4))[0]
        self.az_coeff = struct.unpack(estr+'6f', fi.read(6*4))
        self.pos_uncert = struct.unpack(estr+'3f', fi.read(3*4))
        self.nav_aiding_type = struct.unpack(estr+'i', fi.read(4))[0]
        self.two_dnl_phase_coeffs = struct.unpack(estr+'10f', fi.read(10*4))
        self.clutter_snr_thresh = struct.unpack(estr+'f', fi.read(4))[0]
        # at line 171
        self.elevation_coeff = struct.unpack(estr+'9f', fi.read(9*4))
        self.monopulse_coeff = struct.unpack(estr+'12f', fi.read(12*4))
        self.twist_pt_err_prcnt, self.tilt_pt_err_prcnt, self.az_pt_err_prcnt = \
            struct.unpack(estr+'3f', fi.read(3*4))
        sigma_n, self.take_num = struct.unpack(estr+'Ii', fi.read(2*4))
        self.sigma_n = _rescale_float(sigma_n, 1 << 23)
        self.if_sar_flags = struct.unpack(estr+'5i', fi.read(5*4))
        self.mu_threshold, self.gff_app_type = struct.unpack(estr+'fi', fi.read(2*4))
        self.res7 = fi.read(8)  # leave uninterpreted
#####################
# version 2 specific header parsing
# NB: I am only parsing the GSATIMG, APINFO, IFINFO, and GEOINFO blocks
# because those are the only blocks referenced in the matlab that I
# am mirroring
class _BlockHeader_2(object):
    """
    Read and interpret a block "sub"-header. This generically precedes every version
    2 data block, including the main file header
    """

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        self.name = _get_string(fi.read(16))  # 16-byte block name field
        self.major_version, self.minor_version = struct.unpack(estr+'HH', fi.read(2*2))
        what0 = fi.read(4)  # not sure what this is from looking at the matlab.
        self.size = struct.unpack(estr+'I', fi.read(4))[0]  # serialized size (in bytes) of the block body
        what1 = fi.read(4)  # not sure what this is from looking at the matlab.
        # NOTE: some early version 2 GFF files carry a wrong name for the RADARINFO
        # block - recognize it by its characteristic (version, size) combination
        if (self.version == '2.0' and self.size == 64) or (self.version == '1.0' and self.size == 52):
            self.name = 'RADARINFO' # fix known issue for some early version 2 GFF files

    @property
    def version(self):
        """
        str: The version, formatted as `'<major>.<minor>'`
        """

        return '{}.{}'.format(self.major_version, self.minor_version)
# APINFO definitions
class _APInfo_1_0(object):
    """
    The APINFO block, version 1.0 - aperture/collection parameters.
    """

    serialized_length = 314  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        self.missionText = _get_string(fi.read(8))
        self.swVerNum = _get_string(fi.read(8))
        self.radarSerNum, self.phSource = struct.unpack(estr+'2I', fi.read(2*4))
        fi.read(2)  # skip 2 bytes (contents unused)
        self.phName = _get_string(fi.read(128))
        self.ctrFreq, self.wavelength = struct.unpack(estr+'2f', fi.read(2*4))
        self.rxPolarization, self.txPolarization = struct.unpack(estr+'2I', fi.read(2*4))  # enumeration values, see _get_polarization_string
        self.azBeamWidth, self.elBeamWidth = struct.unpack(estr+'2f', fi.read(2*4))
        self.grazingAngle, self.squintAngle, self.gta, self.rngToBeamCtr = \
            struct.unpack(estr+'4f', fi.read(4*4))
        # line 16
        self.desSquint, self.desRng, self.desGTA, self.antPhaseCtrBear = \
            struct.unpack(estr+'4f', fi.read(4*4))
        self.ApTimeUTC = struct.unpack(estr+'6H', fi.read(6*2))  # six fields passed directly to datetime(*ApTimeUTC) downstream
        self.flightTime, self.flightWeek = struct.unpack(estr+'2I', fi.read(2*4))
        self.chirpRate, self.xDistToStart = struct.unpack(estr+'2f', fi.read(2*4))
        self.momeasMode, self.radarMode = struct.unpack(estr+'2I', fi.read(2*4))
        # line 32
        self.rfoa = struct.unpack(estr+'f', fi.read(4))[0]  # angle wrt True North (degrees), used in get_arp_vel
        self.apcVel = struct.unpack(estr+'3d', fi.read(3*8))  # APC velocity, native (presumably rotated ENU) frame - see get_arp_vel TODO
        self.apcLLH = struct.unpack(estr+'3d', fi.read(3*8))  # APC position, interpreted downstream as (lat, lon, hae)
        self.keepOutViol, self.gimStopTwist, self.gimStopTilt, self.gimStopAz = \
            struct.unpack(estr+'4f', fi.read(4*4))
class _APInfo_2_0(_APInfo_1_0):
    """
    The APINFO block, version 2.0 - version 1.0 plus the `apfdFactor` field.
    """

    serialized_length = 318  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # parse the version 1.0 fields, then the version 2.0 addition
        _APInfo_1_0.__init__(self, fi, estr)
        self.apfdFactor = struct.unpack(estr+'i', fi.read(4))[0]
class _APInfo_3_0(_APInfo_2_0):
    """
    The APINFO block, version 3.0 - version 2.0 plus fast time sampling and
    aperture timing fields.
    """

    serialized_length = 334  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # parse the version 2.0 fields, then the version 3.0 additions
        _APInfo_2_0.__init__(self, fi, estr)
        self.fastTimeSamples, self.adSampleFreq, self.apertureTime, \
            self.numPhaseHistories = struct.unpack(estr+'I2fI', fi.read(4*4))
class _APInfo_4_0(object):
    """
    The APINFO block, version 4.0. Not derived from version 3.0, because the
    leading text fields have different lengths.
    """

    serialized_length = 418  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # essentially the same as version 3, except the first two fields are longer
        self.missionText = _get_string(fi.read(50))
        self.swVerNum = _get_string(fi.read(50))
        self.radarSerNum, self.phSource = struct.unpack(estr+'2I', fi.read(2*4))
        fi.read(2)  # skip 2 bytes (contents unused)
        self.phName = _get_string(fi.read(128))
        self.ctrFreq, self.wavelength = struct.unpack(estr+'2f', fi.read(2*4))
        self.rxPolarization, self.txPolarization = struct.unpack(estr+'2I', fi.read(2*4))  # enumeration values, see _get_polarization_string
        self.azBeamWidth, self.elBeamWidth = struct.unpack(estr+'2f', fi.read(2*4))
        self.grazingAngle, self.squintAngle, self.gta, self.rngToBeamCtr = \
            struct.unpack(estr+'4f', fi.read(4*4))
        # line 16
        self.desSquint, self.desRng, self.desGTA, self.antPhaseCtrBear = \
            struct.unpack(estr+'4f', fi.read(4*4))
        self.ApTimeUTC = struct.unpack(estr+'6H', fi.read(6*2))  # six fields passed directly to datetime(*ApTimeUTC) downstream
        self.flightTime, self.flightWeek = struct.unpack(estr+'2I', fi.read(2*4))
        self.chirpRate, self.xDistToStart = struct.unpack(estr+'2f', fi.read(2*4))
        self.momeasMode, self.radarMode = struct.unpack(estr+'2I', fi.read(2*4))
        # line 32
        self.rfoa = struct.unpack(estr+'f', fi.read(4))[0]  # angle wrt True North (degrees), used in get_arp_vel
        self.apcVel = struct.unpack(estr+'3d', fi.read(3*8))  # APC velocity, native (presumably rotated ENU) frame - see get_arp_vel TODO
        self.apcLLH = struct.unpack(estr+'3d', fi.read(3*8))  # APC position, interpreted downstream as (lat, lon, hae)
        self.keepOutViol, self.gimStopTwist, self.gimStopTilt, self.gimStopAz = \
            struct.unpack(estr+'4f', fi.read(4*4))
        self.apfdFactor = struct.unpack(estr+'i', fi.read(4))[0]
        self.fastTimeSamples, self.adSampleFreq, self.apertureTime, \
            self.numPhaseHistories = struct.unpack(estr+'I2fI', fi.read(4*4))
class _APInfo_5_0(_APInfo_4_0):
    """
    The APINFO block, version 5.0 - version 4.0 plus the recorded speed of light.
    """

    serialized_length = 426  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # parse the version 4.0 fields, then the version 5.0 addition
        _APInfo_4_0.__init__(self, fi, estr)
        self.lightSpeed = struct.unpack(estr+'d', fi.read(8))[0]  # really?
class _APInfo_5_1(_APInfo_5_0):
    """
    The APINFO block, version 5.1 - version 5.0 plus the `delTanApAngle` field.
    """

    serialized_length = 430  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # parse the version 5.0 fields, then the version 5.1 addition
        _APInfo_5_0.__init__(self, fi, estr)
        self.delTanApAngle = struct.unpack(estr+'f', fi.read(4))[0]
class _APInfo_5_2(_APInfo_5_1):
    """
    The APINFO block, version 5.2 - version 5.1 plus the
    `metersInSampledDoppler` field.
    """

    serialized_length = 434  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # parse the version 5.1 fields, then the version 5.2 addition
        _APInfo_5_1.__init__(self, fi, estr)
        self.metersInSampledDoppler = struct.unpack(estr+'f', fi.read(4))[0]
# IFINFO definitions
class _IFInfo_1_0(object):
    """
    Interpreter for IFInfo object, version 1.0 - image formation parameters.
    """

    serialized_length = 514  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        self.procProduct = struct.unpack(estr+'I', fi.read(4))[0]
        fi.read(2)  # skip 2 bytes (contents unused)
        self.imgFileName = _get_string(fi.read(128))
        self.azResolution, self.rngResolution = struct.unpack(estr+'2f', fi.read(2*4))
        self.imgCalParam, self.sigmaN = struct.unpack(estr+'2f', fi.read(2*4))  # used downstream for the Radiometric RCS/noise constants
        self.sampLocDCRow, self.sampLocDCCol = struct.unpack(estr+'2i', fi.read(2*4))  # used downstream for the DeltaKCOAPoly constants
        self.ifAlgo = _get_string(fi.read(8))  # e.g. 'PFA' or 'OSAPF'
        self.imgFlag = struct.unpack(estr+'i', fi.read(4))[0]
        self.azCoeff = struct.unpack(estr+'6f', fi.read(6*4))
        self.elCoeff = struct.unpack(estr+'9f', fi.read(9*4))
        self.azGeoCorrect, self.rngGeoCorrect = struct.unpack(estr+'2i', fi.read(2*4))
        self.wndBwFactAz, self.wndBwFactRng = struct.unpack(estr+'2f', fi.read(2*4))  # used downstream to derive ImpRespBW
        self.wndFncIdAz = _get_string(fi.read(48))  # window identifiers, parsed downstream by _get_wgt
        self.wndFncIdRng = _get_string(fi.read(48))
        fi.read(2)  # skip 2 bytes (contents unused)
        self.cmtText = _get_string(fi.read(166))
        self.autoFocusInfo = struct.unpack(estr+'i', fi.read(4))[0]
class _IFInfo_2_0(_IFInfo_1_0):
    """
    Interpreter for IFInfo object, version 2.0 - identical with version 2.1 and 2.2.
    Adds range FFT size and pre-filter coefficients to version 1.0.
    """

    serialized_length = 582  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # parse the version 1.0 fields, then the version 2.* additions
        _IFInfo_1_0.__init__(self, fi, estr)
        self.rngFFTSize = struct.unpack(estr+'i', fi.read(4))[0]
        self.RangePaneFilterCoeff = struct.unpack(estr+'11f', fi.read(11*4))
        self.AzPreFilterCoeff = struct.unpack(estr+'5f', fi.read(5*4))
class _IFInfo_3_0(_IFInfo_2_0):
    """
    Interpreter for IFInfo object, version 3.0 - version 2.0 plus the
    `afPeakQuadComp` field.
    """

    serialized_length = 586  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # parse the version 2.0 fields, then the version 3.0 addition
        _IFInfo_2_0.__init__(self, fi, estr)
        self.afPeakQuadComp = struct.unpack(estr+'f', fi.read(4))[0]
# GEOINFO definitions
class _GeoInfo_1(object):
    """
    Interpreter for GeoInfo object - note that versions 1.0 and 1.1 are identical
    """

    serialized_length = 52  # expected serialized size (bytes), verified against the block header

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        self.imagePlane = struct.unpack(estr+'i', fi.read(4))[0]  # interpreted downstream as 0 -> GROUND, otherwise SLANT
        self.rangePixSpacing, self.desiredGrazAng, self.azPixSpacing = \
            struct.unpack(estr+'3f', fi.read(3*4))
        self.patchCtrLLH = struct.unpack(estr+'3d', fi.read(3*8))  # patch center, converted downstream to the SCP via geodetic_to_ecf
        self.pixLocImCtrRow, self.pixLocImCtrCol = struct.unpack(estr+'2I', fi.read(2*4))  # used downstream as the SCP pixel
        self.imgRotAngle = struct.unpack(estr+'f', fi.read(4))[0]
# GSATIMG definition
def _get_complex_domain_code(code_int):
# type: (int) -> str
if code_int in [0, 3]:
return 'IQ'
elif code_int in [1, 4]:
return 'QI'
elif code_int in [2, 5]:
return 'MP'
elif code_int == 6:
return 'PM'
elif code_int == 7:
return 'M'
elif code_int == 8:
return 'P'
else:
raise ValueError('Got unexpected code `{}`'.format(code_int))
def _get_band_order(code_int):
# type: (int) -> str
if code_int in [0, 1, 2, 7, 8]:
return 'interleaved'
elif code_int in [3, 4, 5, 6]:
return 'sequential'
else:
raise ValueError('Got unexpected code `{}`'.format(code_int))
class _PixelFormat(object):
    """
    Interpreter for pixel format object - the per-component bit sizes and data
    types, the complex domain enumeration, and the component count.
    """

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`
        """

        # bit size and data type enumeration for each of the two components
        # (data type is interpreted downstream by _get_numpy_dtype)
        self.comp0_bitSize, self.comp0_dataType = struct.unpack(estr+'HI', fi.read(2+4))
        self.comp1_bitSize, self.comp1_dataType = struct.unpack(estr+'HI', fi.read(2+4))
        # complex domain enumeration (see _get_complex_domain_code) and component count
        self.cmplxDomain, self.numComponents = struct.unpack(estr+'Ii', fi.read(2*4))
class _GSATIMG_2(object):
    """
    Interpreter for the GSATIMG object - the top-level image description block
    for version 2 files.
    """

    serialized_length = 82  # expected serialized size (bytes)

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`

        Raises
        ------
        ValueError
            If the pixel format indicates an unsupported complex domain.
        """

        self.endian = struct.unpack(estr+'I', fi.read(4))[0]
        fi.read(2)  # skip 2 bytes (contents unused)
        self.imageCreator = _get_string(fi.read(24))
        self.rangePixels, self.azPixels = struct.unpack(estr+'2I', fi.read(2*4))
        self.pixOrder, self.imageLengthBytes, self.imageCompressionScheme, \
            self.pixDataType = struct.unpack(estr+'4I', fi.read(4*4))
        self.pixelFormat = _PixelFormat(fi, estr)
        self.pixValLin, self.autoScaleFac = struct.unpack(estr+'if', fi.read(2*4))
        # fail fast on complex domains which cannot be interpreted as complex data
        complex_domain = _get_complex_domain_code(self.pixelFormat.cmplxDomain)
        if complex_domain not in ['IQ', 'QI', 'MP', 'PM']:
            raise ValueError('We got unsupported complex domain `{}`'.format(complex_domain))
# combined GFF version 2 header collection
def _check_serialization(
        block_header: _BlockHeader_2,
        expected_length: int) -> None:
    """
    Verify that the block header's recorded size matches the expected
    serialized length, raising a descriptive ValueError on mismatch.
    """

    if block_header.size != expected_length:
        raise ValueError(
            'Got `{}` block of version `{}` and serialized length {},\n\t'
            'but expected serialized length {}'.format(
                block_header.name, block_header.version, block_header.size, expected_length))
class _GFFHeader_2(object):
    """
    Interpreter for the GFF version 2.* header. Walks the sequence of
    sub-blocks up to the image data, parsing the APINFO, IFINFO, and GEOINFO
    blocks and recording the image data block location.
    """

    __slots__ = (
        'file_object', 'estr', '_gsat_img', '_ap_info', '_if_info', '_geo_info',
        '_image_header', '_image_offset')

    def __init__(self, fi, estr):
        """
        Parameters
        ----------
        fi : BinaryIO
        estr : str
            The endianness string for format interpretation, one of `['<', '>']`

        Raises
        ------
        ValueError
            If any of the required APINFO/IFINFO/GEOINFO blocks is absent.
        """

        self._gsat_img = None
        self._ap_info = None
        self._if_info = None
        self._geo_info = None
        self._image_header = None
        self._image_offset = None
        self.file_object = fi
        self.estr = estr
        # extract the initial file location, so it can be restored afterwards
        init_location = fi.tell()
        # go to the beginning of the file
        fi.seek(0, os.SEEK_SET)
        gsat_header = _BlockHeader_2(fi, estr)
        self._gsat_img = _GSATIMG_2(fi, estr)
        # walk the block sequence, parsing only the blocks of interest,
        # and stopping once the image data block is reached
        while True:
            block_header = _BlockHeader_2(fi, estr)
            if block_header.name == 'IMAGEDATA':
                self._image_header = block_header
                self._image_offset = fi.tell()
                break
            elif block_header.name == 'APINFO':
                self._parse_apinfo(fi, estr, block_header)
            elif block_header.name == 'IFINFO':
                self._parse_ifinfo(fi, estr, block_header)
            elif block_header.name == 'GEOINFO':
                self._parse_geoinfo(fi, estr, block_header)
            else:
                # we are not parsing this block, so just skip it
                fi.seek(block_header.size, os.SEEK_CUR)
        # return to the initial file location
        fi.seek(init_location, os.SEEK_SET)
        self._check_valid(gsat_header)

    @property
    def gsat_img(self) -> _GSATIMG_2:
        """
        _GSATIMG_2: The top-level image description block.
        """

        return self._gsat_img

    @property
    def ap_info(self) -> Union[_APInfo_1_0, _APInfo_2_0, _APInfo_3_0, _APInfo_4_0, _APInfo_5_0, _APInfo_5_1, _APInfo_5_2]:
        """
        The parsed APINFO block, of the version-appropriate type.
        """

        return self._ap_info

    @property
    def if_info(self) -> Union[_IFInfo_1_0, _IFInfo_2_0, _IFInfo_3_0]:
        """
        The parsed IFINFO block, of the version-appropriate type.
        """

        return self._if_info

    @property
    def geo_info(self) -> _GeoInfo_1:
        """
        _GeoInfo_1: The parsed GEOINFO block.
        """

        return self._geo_info

    @property
    def image_header(self) -> _BlockHeader_2:
        """
        _BlockHeader_2: The block header for the image data block.
        """

        return self._image_header

    @property
    def image_offset(self) -> int:
        """
        int: The file offset (in bytes) at which the image data begins.
        """

        return self._image_offset

    def _parse_apinfo(self, fi, estr, block_header) -> None:
        # parse the APINFO block into the version-appropriate type
        if block_header.name != 'APINFO':
            return
        if block_header.major_version == 1:
            _check_serialization(block_header, _APInfo_1_0.serialized_length)
            self._ap_info = _APInfo_1_0(fi, estr)
        elif block_header.major_version == 2:
            _check_serialization(block_header, _APInfo_2_0.serialized_length)
            self._ap_info = _APInfo_2_0(fi, estr)
        elif block_header.major_version == 3:
            _check_serialization(block_header, _APInfo_3_0.serialized_length)
            self._ap_info = _APInfo_3_0(fi, estr)
        elif block_header.major_version == 4:
            _check_serialization(block_header, _APInfo_4_0.serialized_length)
            self._ap_info = _APInfo_4_0(fi, estr)
        elif block_header.major_version == 5:
            if block_header.minor_version == 0:
                _check_serialization(block_header, _APInfo_5_0.serialized_length)
                self._ap_info = _APInfo_5_0(fi, estr)
            elif block_header.minor_version == 1:
                _check_serialization(block_header, _APInfo_5_1.serialized_length)
                self._ap_info = _APInfo_5_1(fi, estr)
            elif block_header.minor_version == 2:
                _check_serialization(block_header, _APInfo_5_2.serialized_length)
                self._ap_info = _APInfo_5_2(fi, estr)
        else:
            raise ValueError(
                'Could not parse required `{}` block version `{}`'.format(
                    block_header.name, block_header.version))

    def _parse_ifinfo(self, fi, estr, block_header) -> None:
        # parse the IFINFO block into the version-appropriate type
        if block_header.name != 'IFINFO':
            return
        if block_header.major_version == 1:
            _check_serialization(block_header, _IFInfo_1_0.serialized_length)
            self._if_info = _IFInfo_1_0(fi, estr)
        elif block_header.major_version == 2:
            _check_serialization(block_header, _IFInfo_2_0.serialized_length)
            self._if_info = _IFInfo_2_0(fi, estr)
        elif block_header.major_version == 3:
            _check_serialization(block_header, _IFInfo_3_0.serialized_length)
            self._if_info = _IFInfo_3_0(fi, estr)
        else:
            raise ValueError(
                'Could not parse required `{}` block version `{}`'.format(
                    block_header.name, block_header.version))

    def _parse_geoinfo(self, fi, estr, block_header) -> None:
        # parse the GEOINFO block - versions 1.0 and 1.1 share one layout
        if block_header.name != 'GEOINFO':
            return
        _check_serialization(block_header, _GeoInfo_1.serialized_length)
        self._geo_info = _GeoInfo_1(fi, estr)

    def _check_valid(self, gsat_header) -> None:
        # ensure that the required elements are all set
        valid = True
        if self._ap_info is None:
            valid = False
            logger.error(
                'GFF version {} file did not present APINFO block'.format(
                    gsat_header.version))
        if self._if_info is None:
            valid = False
            logger.error(
                'GFF version {} file did not present IFINFO block'.format(
                    gsat_header.version))
        if self._geo_info is None:
            valid = False
            logger.error(
                'GFF version {} file did not present GEOINFO block'.format(
                    gsat_header.version))
        if not valid:
            raise ValueError('GFF file determined to be invalid')

    def get_arp_vel(self) -> numpy.ndarray:
        """
        Gets the aperture velocity in ECF coordinates

        Returns
        -------
        numpy.ndarray
        """

        # get the aperture velocity in its native frame of reference (rotated ENU)
        arp_vel_orig = numpy.array(self.ap_info.apcVel, dtype='float64')
        # TODO: arp_vel_orig is in what coordinate system? Rick said "rotated ENU", wrt gta?
        # gets the angle wrt to True North for the radar frame of reference
        angle = numpy.deg2rad(self.ap_info.rfoa)
        cosine, sine = numpy.cos(angle), numpy.sin(angle)
        # construct the NED velocity vector
        transform = numpy.array([[cosine, -sine, 0], [sine, cosine, 0], [0, 0, -1]], dtype='float64')
        ned_velocity = transform.dot(arp_vel_orig)
        # convert to ECF, using the aperture position as the frame origin
        orp = geodetic_to_ecf(self.ap_info.apcLLH, ordering='latlon')
        out = ned_to_ecf(ned_velocity, orp, absolute_coords=False)
        return out
####################
# object for creation of sicd structure from GFF header object
def _get_wgt(str_in: str) -> Optional[WgtTypeType]:
    """
    Parse a GFF window function identifier string into a WgtType object.

    Parameters
    ----------
    str_in : str
        The window identifier, e.g. `'TAYLOR -35,4'`. An empty or blank
        string yields `None`.

    Returns
    -------
    None|WgtTypeType

    Raises
    ------
    ValueError
        If a `TAYLOR` definition does not carry exactly two comma-separated
        parameters (`SLL,NBAR`).
    """

    elements = str_in.split()
    if len(elements) == 0:
        # empty or whitespace-only identifier - no window information
        # (checking the split result also guards against a blank, non-empty string)
        return None

    win_name = elements[0].upper()
    parameters = None
    if win_name == 'TAYLOR':
        if len(elements) < 2:
            raise ValueError('Got unparseable window definition `{}`'.format(str_in))
        params = elements[1].split(',')
        if len(params) != 2:
            raise ValueError('Got unparseable window definition `{}`'.format(str_in))
        parameters = {'SLL': params[0].strip(), 'NBAR': params[1].strip()}
    return WgtTypeType(
        WindowName=win_name,
        Parameters=parameters)
def _get_polarization_string(int_value: int) -> Optional[str]:
if int_value == 0:
return 'H'
elif int_value == 1:
return 'V'
elif int_value == 2:
return 'LHC'
elif int_value == 3:
return 'RHC'
elif int_value in [4, 5]:
# TODO: according to their enum, we have 4 -> "T" and 5 -> "P"
# what does that mean?
return 'OTHER'
else:
return 'UNKNOWN'
def _get_tx_rcv_polarization(tx_pol_int: int, rcv_pol_int: int) -> Tuple[str, str]:
    """
    Determine the transmit polarization and the combined transmit/receive
    polarization strings from the GFF enumeration values.
    """

    tx_pol = _get_polarization_string(tx_pol_int)
    rcv_pol = _get_polarization_string(rcv_pol_int)
    if 'OTHER' in (tx_pol, rcv_pol) or 'UNKNOWN' in (tx_pol, rcv_pol):
        # a combined entry cannot be formed from an indeterminate polarization
        return tx_pol, 'OTHER'
    return tx_pol, '{}:{}'.format(tx_pol, rcv_pol)
class _GFFInterpreter(object):
    """
    Abstract base for extraction of the SICD structure and image data from a
    parsed GFF header object.
    """

    def get_sicd(self) -> SICDType:
        """
        Gets the SICD structure.

        Returns
        -------
        SICDType
        """

        raise NotImplementedError

    def get_data_segment(self) -> DataSegment:
        """
        Gets the chipper for reading the data.

        Returns
        -------
        DataSegment
        """

        raise NotImplementedError

    def clean_up(self) -> None:
        """
        Performs any necessary clean up of temporary resources - a no-op here,
        to be overridden by subclasses which require it.
        """

        return
class _GFFInterpreter1(_GFFInterpreter):
    """
    Extractor of SICD structure and parameters from gff_header_1*
    object
    """

    def __init__(self, header: Union[_GFFHeader_1_6, _GFFHeader_1_8]):
        """
        Parameters
        ----------
        header : _GFFHeader_1_6|_GFFHeader_1_8

        Raises
        ------
        ValueError
            If the header indicates a magnitude-only image.
        """

        self.header = header
        if self.header.image_type == 0:
            raise ValueError(
                'ImageType indicates a magnitude only image, which is incompatible with SICD')

    def get_sicd(self) -> SICDType:
        """
        Construct the (partial) SICD structure from the version 1 header fields.

        Returns
        -------
        SICDType
        """

        def get_collection_info() -> CollectionInfoType:
            # colons are not permitted in the core name
            core_name = self.header.image_name.replace(':', '_')
            return CollectionInfoType(
                CoreName=core_name,
                CollectType='MONOSTATIC',
                RadarMode=RadarModeType(
                    ModeType='SPOTLIGHT'),
                Classification='UNCLASSIFIED')

        def get_image_creation() -> ImageCreationType:
            from sarpy.__about__ import __version__
            from datetime import datetime
            return ImageCreationType(
                Application=self.header.creator,
                DateTime=numpy.datetime64(datetime(*self.header.date_time)),
                Site='Unknown',
                Profile='sarpy {}'.format(__version__))

        def get_image_data() -> ImageDataType:
            return ImageDataType(
                PixelType='RE32F_IM32F',
                NumRows=num_rows,
                NumCols=num_cols,
                FullImage=(num_rows, num_cols),
                FirstRow=0,
                FirstCol=0,
                SCPPixel=(scp_row, scp_col))

        def get_geo_data() -> GeoDataType:
            return GeoDataType(
                SCP=SCPType(
                    LLH=[self.header.srp_lat, self.header.srp_lon, self.header.srp_alt]))

        def get_grid() -> GridType:
            # we presume that image_plane in [0, 1]
            image_plane = 'GROUND' if self.header.image_plane == 0 else 'SLANT'
            row_ss = self.header.range_pixel_size
            col_ss = self.header.azimuth_pixel_size
            # default (unweighted) bandwidths, refined below for version 1.8
            row_bw = 1./row_ss
            col_bw = 1./col_ss
            if self.header.version == '1.8':
                # version 1.8 provides window bandwidth factors
                if self.header.range_win_fac_bw > 0:
                    row_bw = self.header.range_win_fac_bw/row_ss
                if self.header.az_win_fac_bw > 0:
                    col_bw = self.header.az_win_fac_bw/col_ss
            row = DirParamType(
                Sgn=-1,
                SS=row_ss,
                ImpRespWid=self.header.range_resolution,
                ImpRespBW=row_bw,
                DeltaK1=0.5*row_bw,
                DeltaK2=-0.5*row_bw,
                WgtType=_get_wgt(
                    self.header.range_win_id if self.header.version == '1.8' else ''),
                DeltaKCOAPoly=[[0, ], ])
            col = DirParamType(
                Sgn=-1,
                SS=col_ss,
                ImpRespWid=self.header.az_resolution,
                ImpRespBW=col_bw,
                DeltaK1=0.5*col_bw,
                DeltaK2=-0.5*col_bw,
                WgtType=_get_wgt(
                    self.header.az_win_id if self.header.version == '1.8' else ''),
                DeltaKCOAPoly=[[0, ], ])
            return GridType(
                ImagePlane=image_plane,
                Type='PLANE',
                Row=row,
                Col=col)

        def get_scpcoa() -> SCPCOAType:
            side_of_track = 'L' if self.header.squint < 0 else 'R'
            apc_llh = numpy.array(
                [self.header.apc_lat, self.header.apc_lon, self.header.apc_alt],
                dtype='float64')
            if numpy.all(apc_llh == 0):
                # the aperture position fields are unpopulated
                arp_pos = None
            else:
                arp_pos = geodetic_to_ecf(apc_llh, ordering='latlon')
            return SCPCOAType(
                ARPPos=arp_pos,
                GrazeAng=self.header.graze_angle,
                SideOfTrack=side_of_track)

        num_rows = self.header.range_count
        num_cols = self.header.azimuth_count
        # place the SCP at the image center
        scp_row = int(0.5*num_rows)
        scp_col = int(0.5*num_cols)

        collection_info = get_collection_info()
        image_creation = get_image_creation()
        image_data = get_image_data()
        geo_data = get_geo_data()
        grid = get_grid()
        scpcoa = get_scpcoa()

        return SICDType(
            CollectionInfo=collection_info,
            ImageCreation=image_creation,
            ImageData=image_data,
            GeoData=geo_data,
            Grid=grid,
            SCPCOA=scpcoa)

    def get_data_segment(self) -> DataSegment:
        """
        Construct the data segment for reading the interleaved phase/magnitude
        pixel data.

        Returns
        -------
        DataSegment

        Raises
        ------
        ValueError
            If the per-component bit depths are unexpected or mismatched, or
            the image type is unsupported.
        """

        if self.header.bits_per_phase not in [8, 16, 32]:
            raise ValueError('Got unexpected bits per phase {}'.format(self.header.bits_per_phase))
        if self.header.bits_per_magnitude not in [8, 16, 32]:
            # NB: previously this error message mistakenly referenced "bits per phase"
            raise ValueError('Got unexpected bits per magnitude {}'.format(self.header.bits_per_magnitude))
        if self.header.bits_per_magnitude != self.header.bits_per_phase:
            # the raw dtype below describes both interleaved components, so
            # differing bit depths cannot be read as a single homogeneous array
            raise ValueError(
                'Got mismatched bits per phase {} and bits per magnitude {}'.format(
                    self.header.bits_per_phase, self.header.bits_per_magnitude))
        raw_dtype = numpy.dtype('{}u{}'.format(self.header.estr, int(self.header.bits_per_phase/8)))
        raw_shape = (self.header.range_count, self.header.azimuth_count, 2)
        reverse_axes = (0, 1)
        if self.header.row_major:
            # stored transposed relative to the SICD convention
            transpose_axes = (1, 0, 2)
            formatted_shape = (raw_shape[1], raw_shape[0])
        else:
            transpose_axes = None
            formatted_shape = raw_shape[:2]
        if self.header.image_type == 1:
            # phase/magnitude which is integer
            return NumpyMemmapSegment(
                self.header.file_object, self.header.header_length,
                raw_dtype, raw_shape, formatted_dtype='complex64', formatted_shape=formatted_shape,
                reverse_axes=reverse_axes, transpose_axes=transpose_axes,
                format_function=ComplexFormatFunction(raw_dtype, order='PM', band_dimension=2),
                close_file=True)
        else:
            raise ValueError('Got unsupported image type `{}`'.format(self.header.image_type))
def _get_numpy_dtype(data_type_int: int) -> str:
if data_type_int == 0:
return 'u1'
elif data_type_int == 1:
return 'u2'
elif data_type_int == 2:
return 'u4'
elif data_type_int == 3:
return 'u8'
elif data_type_int == 4:
return 'i1'
elif data_type_int == 5:
return 'i2'
elif data_type_int == 6:
return 'i4'
elif data_type_int == 7:
return 'i8'
elif data_type_int == 8:
return 'f4'
elif data_type_int == 9:
return 'f8'
else:
raise ValueError('Got unsupported data type code `{}`'.format(data_type_int))
class _GFFInterpreter2(_GFFInterpreter):
"""
Extractor of SICD structure and parameters from GFFHeader_2 object
"""
    def __init__(self, header: _GFFHeader_2):
        """
        Parameters
        ----------
        header : _GFFHeader_2

        Raises
        ------
        ValueError
            If the pixel format does not describe a two-component (complex) image.
        """

        self.header = header
        self._cached_files = []  # paths of temp files created when decompressing image data
        if self.header.gsat_img.pixelFormat.numComponents != 2:
            raise ValueError(
                'The pixel format indicates that the number of components is `{}`, '
                'which is not supported for a complex image'.format(
                    self.header.gsat_img.pixelFormat.numComponents))
    def get_sicd(self) -> SICDType:
        """
        Construct the SICD structure from the parsed version 2 header blocks,
        derive the dependent SICD fields, and populate the radiometric and
        RNIIRS information.

        Returns
        -------
        SICDType
        """

        def get_collection_info() -> CollectionInfoType:
            core_name = self.header.ap_info.phName # TODO: double check this...
            return CollectionInfoType(
                CollectorName=self.header.ap_info.missionText,
                CoreName=core_name,
                CollectType='MONOSTATIC',
                RadarMode=RadarModeType(
                    ModeType='SPOTLIGHT'),
                Classification='UNCLASSIFIED')

        def get_image_creation() -> ImageCreationType:
            from sarpy.__about__ import __version__
            application = '{} {}'.format(self.header.gsat_img.imageCreator, self.header.ap_info.swVerNum)
            date_time = None # todo: really?
            return ImageCreationType(
                Application=application,
                DateTime=date_time,
                Site='Unknown',
                Profile='sarpy {}'.format(__version__))

        def get_image_data() -> ImageDataType:
            # map the GFF pixel data type enumeration onto the SICD pixel type
            pix_data_type = self.header.gsat_img.pixDataType
            amp_table = None
            if pix_data_type == 12:
                pixel_type = 'AMP8I_PHS8I'
                # identity amplitude lookup table
                amp_table = numpy.arange(256, dtype='float64')
            elif pix_data_type in [1, 3, 4, 6, 8, 9, 10, 11]:
                pixel_type = 'RE32F_IM32F'
            elif pix_data_type in [2, 7]:
                pixel_type = 'RE16I_IM16I'
            else:
                raise ValueError('Unhandled pixTypeData value `{}`'.format(pix_data_type))
            return ImageDataType(
                PixelType=pixel_type,
                AmpTable=amp_table,
                NumRows=num_rows,
                NumCols=num_cols,
                FullImage=(num_rows, num_cols),
                FirstRow=0,
                FirstCol=0,
                SCPPixel=(scp_row, scp_col))

        def get_geo_data() -> GeoDataType:
            return GeoDataType(SCP=SCPType(ECF=scp))

        def get_grid() -> GridType:
            image_plane = 'GROUND' if self.header.geo_info.imagePlane == 0 else 'SLANT'
            # we presume that image_plane in [0, 1]

            # derive row/col uvect
            ground_uvec = wgs_84_norm(scp)
            urng = scp - arp_pos # unit vector for row in the slant plane
            urng /= numpy.linalg.norm(urng)
            if image_plane == 'GROUND':
                # project the range vector into the ground plane, and re-normalize
                row_uvec = urng - numpy.dot(urng, ground_uvec)*ground_uvec
                row_uvec /= numpy.linalg.norm(row_uvec)
            else:
                row_uvec = urng

            col_uvec = arp_vel/numpy.linalg.norm(arp_vel)
            if self.header.ap_info.squintAngle < 0:
                col_uvec *= -1
            # verify that my orientation makes some sense
            dumb_check = ground_uvec.dot(numpy.cross(row_uvec, col_uvec))
            if dumb_check <= 0:
                raise ValueError(
                    'The range vector, velocity vector, and squint angle have '
                    'incompatible orientations')

            # orthogonalize the column vector against the row vector
            parallel_component = numpy.dot(row_uvec, col_uvec)
            if numpy.abs(parallel_component) > 1e-7:
                col_uvec = col_uvec - parallel_component*row_uvec
                col_uvec /= numpy.linalg.norm(col_uvec)

            row_ss = self.header.geo_info.rangePixSpacing
            row_bw = self.header.if_info.wndBwFactRng/self.header.if_info.rngResolution
            row_delta_kcoa_constant = 0.5*(1 - (self.header.if_info.sampLocDCRow/int(0.5*num_rows)))/row_ss
            row = DirParamType(
                Sgn=-1,
                SS=row_ss,
                UVectECF=row_uvec,
                ImpRespWid=self.header.if_info.rngResolution,
                ImpRespBW=row_bw,
                KCtr=2*center_frequency/speed_of_light,
                DeltaK1=0.5*row_bw,
                DeltaK2=-0.5*row_bw,
                WgtType=_get_wgt(self.header.if_info.wndFncIdRng),
                DeltaKCOAPoly=[[row_delta_kcoa_constant, ], ])

            col_ss = self.header.geo_info.azPixSpacing
            col_bw = self.header.if_info.wndBwFactAz/self.header.if_info.azResolution
            col_delta_kcoa_constant = 0.5*(1 - (self.header.if_info.sampLocDCCol/int(0.5*num_cols)))/col_ss
            col = DirParamType(
                Sgn=-1,
                SS=col_ss,
                UVectECF=col_uvec,
                ImpRespWid=self.header.if_info.azResolution,
                ImpRespBW=col_bw,
                KCtr=0,
                DeltaK1=0.5*col_bw,
                DeltaK2=-0.5*col_bw,
                WgtType=_get_wgt(self.header.if_info.wndFncIdAz),
                DeltaKCOAPoly=[[col_delta_kcoa_constant, ], ])

            return GridType(
                ImagePlane=image_plane,
                Type=grid_type,
                Row=row,
                Col=col)

        def get_scpcoa() -> SCPCOAType:
            return SCPCOAType(
                ARPPos=arp_pos,
                ARPVel=arp_vel,
                SCPTime=0.5*collect_duration)

        def get_timeline() -> TimelineType:
            try:
                # only exists for APINFO version 3 and above
                ipp_end = self.header.ap_info.numPhaseHistories
                ipp = [IPPSetType(
                    TStart=0,
                    TEnd=collect_duration,
                    IPPStart=0,
                    IPPEnd=ipp_end,
                    IPPPoly=[0, (ipp_end + 1)/collect_duration]), ]
            except AttributeError:
                ipp = None
            return TimelineType(
                CollectStart=start_time,
                CollectDuration=collect_duration,
                IPP=ipp)

        def get_radar_collection() -> RadarCollectionType:
            try:
                # these fields only exist for APINFO version 3 and above
                sample_rate = self.header.ap_info.adSampleFreq
                pulse_length = float(self.header.ap_info.fastTimeSamples)/sample_rate
                waveform = [
                    WaveformParametersType(ADCSampleRate=sample_rate, TxPulseLength=pulse_length), ]
            except AttributeError:
                waveform = None

            rcv_channels = [ChanParametersType(TxRcvPolarization=tx_rcv_pol, index=1), ]
            return RadarCollectionType(
                TxFrequency=(center_frequency-0.5*band_width, center_frequency+0.5*band_width),
                Waveform=waveform,
                TxPolarization=tx_pol,
                RcvChannels=rcv_channels)

        def get_image_formation() -> ImageFormationType:
            return ImageFormationType(
                RcvChanProc=RcvChanProcType(ChanIndices=[1, ]),
                TxRcvPolarizationProc=tx_rcv_pol,
                TxFrequencyProc=(
                    center_frequency-0.5*band_width,
                    center_frequency+0.5*band_width),
                TStartProc=0,
                TEndProc=collect_duration,
                ImageFormAlgo=image_form_algo,
                STBeamComp='NO',
                ImageBeamComp='NO',
                AzAutofocus='NO',
                RgAutofocus='NO')

        def repair_scpcoa() -> None:
            # call after deriving the sicd fields, to fill in anything
            # the derivation could not determine
            if out_sicd.SCPCOA.GrazeAng is None:
                out_sicd.SCPCOA.GrazeAng = self.header.ap_info.grazingAngle
            if out_sicd.SCPCOA.IncidenceAng is None:
                out_sicd.SCPCOA.IncidenceAng = 90 - out_sicd.SCPCOA.GrazeAng
            if out_sicd.SCPCOA.SideOfTrack is None:
                out_sicd.SCPCOA.SideOfTrack = 'L' if self.header.ap_info.squintAngle < 0 else 'R'

        def populate_radiometric() -> None:
            # call after deriving the sicd fields
            rcs_constant = self.header.if_info.imgCalParam**2
            radiometric = RadiometricType(RCSSFPoly=[[rcs_constant, ]])
            # noinspection PyProtectedMember
            radiometric._derive_parameters(out_sicd.Grid, out_sicd.SCPCOA)
            if radiometric.SigmaZeroSFPoly is not None:
                # populate the noise level from the recorded sigmaN value
                noise_constant = self.header.if_info.sigmaN - 10*numpy.log10(radiometric.SigmaZeroSFPoly[0, 0])
                radiometric.NoiseLevel = NoiseLevelType_(
                    NoiseLevelType='ABSOLUTE',
                    NoisePoly=[[noise_constant, ]])
            out_sicd.Radiometric = radiometric

        num_rows = self.header.gsat_img.rangePixels
        num_cols = self.header.gsat_img.azPixels
        scp_row = self.header.geo_info.pixLocImCtrRow
        scp_col = self.header.geo_info.pixLocImCtrCol

        collect_duration = self.header.ap_info.apertureTime
        # the recorded UTC time is treated as the SCP (mid-collect) time,
        # so back up half the collect duration for the collect start
        scp_time_utc_us = numpy.datetime64(datetime(*self.header.ap_info.ApTimeUTC), 'us').astype('int64')
        start_time = (scp_time_utc_us - int(0.5*collect_duration*1e6)).astype('datetime64[us]')

        tx_pol, tx_rcv_pol = _get_tx_rcv_polarization(
            self.header.ap_info.txPolarization, self.header.ap_info.rxPolarization)
        center_frequency = self.header.ap_info.ctrFreq
        band_width = 0.0 # TODO: is this defined anywhere?
        scp = geodetic_to_ecf(self.header.geo_info.patchCtrLLH)
        arp_llh = self.header.ap_info.apcLLH
        arp_pos = geodetic_to_ecf(arp_llh, ordering='latlon')
        arp_vel = self.header.get_arp_vel()

        if self.header.if_info.ifAlgo in ['PFA', 'OSAPF']:
            # if self.header.if_info.ifAlgo == 'PFA':
            image_form_algo = 'PFA'
            grid_type = 'RGAZIM'
        else:
            image_form_algo = 'OTHER'
            grid_type = 'PLANE'

        collection_info = get_collection_info()
        image_creation = get_image_creation()
        image_data = get_image_data()
        geo_data = get_geo_data()
        # rebind scp as the numpy array from the populated structure
        scp = geo_data.SCP.ECF.get_array()
        grid = get_grid()
        scpcoa = get_scpcoa()
        timeline = get_timeline()
        radar_collection = get_radar_collection()
        image_formation = get_image_formation()

        out_sicd = SICDType(
            CollectionInfo=collection_info,
            ImageCreation=image_creation,
            ImageData=image_data,
            GeoData=geo_data,
            Grid=grid,
            SCPCOA=scpcoa,
            Timeline=timeline,
            RadarCollection=radar_collection,
            ImageFormation=image_formation)

        out_sicd.derive()
        repair_scpcoa()
        populate_radiometric()
        out_sicd.populate_rniirs(override=False)
        return out_sicd
def _get_size_and_symmetry(self) -> Tuple[Tuple[int, int], Tuple[int, ...], bool]:
if self.header.gsat_img.pixOrder == 0:
# in range consecutive order, opposite from a SICD
data_size = (self.header.gsat_img.azPixels, self.header.gsat_img.rangePixels)
reverse_axes = (0, 1)
transpose_axes = True
elif self.header.gsat_img.pixOrder == 1:
# in azimuth consecutive order, like a SICD
data_size = (self.header.gsat_img.rangePixels, self.header.gsat_img.azPixels)
reverse_axes = (0, 1)
transpose_axes = False
else:
raise ValueError('Got unexpected pixel order `{}`'.format(self.header.gsat_img.pixOrder))
return data_size, reverse_axes, transpose_axes
def _check_image_validity(self, band_order: str) -> None:
if self.header.gsat_img.pixelFormat.numComponents != 2:
raise ValueError(
'Got unexpected number of components `{}`'.format(
self.header.gsat_img.pixelFormat.numComponents))
image_compression_scheme = self.header.gsat_img.imageCompressionScheme
if image_compression_scheme in [1, 3]:
if band_order == 'sequential':
raise ValueError(
'GFF with sequential bands and jpeg or jpeg 2000 compression currently unsupported.')
if PIL is None:
raise ValueError(
'The GFF image is compressed using jpeg or jpeg 2000 compression, '
'and decompression requires the PIL library')
def _extract_zlib_image(self) -> str:
if self.header.gsat_img.imageCompressionScheme != 2:
raise ValueError('The image is not zlib compressed')
self.header.file_object.seek(self.header.image_offset, os.SEEK_SET)
data_bytes = zlib.decompress(self.header.file_object.read(self.header.image_header.size))
fi, path_name = mkstemp(suffix='.sarpy_cache', text=False)
os.close(fi)
self._cached_files.append(path_name)
logger.info('Created cached file {} for decompressed data'.format(path_name))
with open(path_name, 'wb') as the_file:
the_file.write(data_bytes)
logger.info('Filled cached file {}'.format(path_name))
return path_name
    def _extract_pil_image(
            self,
            band_order: str,
            data_size: Tuple[int, int]) -> str:
        """
        Decompress jpeg/jpeg 2000 image data via PIL into a temporary cache file.

        Parameters
        ----------
        band_order : str
            One of `'interleaved'` or `'sequential'` - only interleaved is supported.
        data_size : Tuple[int, int]
            The expected first two dimensions of the decompressed array.

        Returns
        -------
        str
            Path of the cache file holding the raw decompressed array contents.
        """

        if band_order == 'sequential':
            raise ValueError(
                'GFF with sequential bands and jpeg or jpeg 2000 compression currently unsupported.')

        # present the compressed bytes region as a file-like view, without copying
        our_memmap = MemMap(self.header.file_object.name, self.header.image_header.size, self.header.image_offset)
        # noinspection PyUnresolvedReferences
        img = PIL.Image.open(our_memmap)  # this is a lazy operation

        # dump the extracted image data out to a temp file
        fi, path_name = mkstemp(suffix='.sarpy_cache', text=False)
        os.close(fi)
        self._cached_files.append(path_name)
        logger.info('Created cached file {} for decompressed data'.format(path_name))
        data = numpy.asarray(img)  # create our numpy array from the PIL Image
        if data.shape[:2] != data_size:
            raise ValueError(
                'Naively decompressed data of shape {}, but expected ({}, {}, {}).'.format(
                    data.shape, data_size[0], data_size[1], 2))

        # copy the decompressed data into the cache file via a writable memmap
        mem_map = numpy.memmap(path_name, dtype=data.dtype, mode='w+', offset=0, shape=data.shape)
        mem_map[:] = data
        # clean up this memmap and file overhead
        del mem_map
        logger.info('Filled cached file {}'.format(path_name))
        return path_name
def _get_interleaved_segment(self) -> DataSegment:
complex_domain = _get_complex_domain_code(self.header.gsat_img.pixelFormat.cmplxDomain)
if complex_domain not in ['IQ', 'QI', 'MP', 'PM']:
raise ValueError('Got unsupported complex domain `{}`'.format(complex_domain))
if self.header.gsat_img.pixelFormat.comp0_dataType != self.header.gsat_img.pixelFormat.comp1_dataType:
raise ValueError(
'GFF with interleaved bands with the two components of different data types.\n\t'
'This is not currently unsupported.')
raw_dtype = _get_numpy_dtype(self.header.gsat_img.pixelFormat.comp0_dataType)
data_size, reverse_axes, transpose = self._get_size_and_symmetry()
raw_shape = data_size + (2, )
if transpose:
transpose_axes = (1, 0, 2)
formatted_shape = (data_size[1], data_size[0])
else:
transpose_axes = None
formatted_shape = data_size
image_compression_scheme = self.header.gsat_img.imageCompressionScheme
if image_compression_scheme == 0:
# no compression
the_file = self.header.file_object
data_offset = self.header.image_offset
elif image_compression_scheme in [1, 3]:
# jpeg or jpeg 2000 compression
the_file = self._extract_pil_image('interleaved', data_size)
data_offset = 0
elif image_compression_scheme == 2:
# zlib compression
the_file = self._extract_zlib_image()
data_offset = 0
else:
raise ValueError('Got unhandled image compression scheme code `{}`'.format(image_compression_scheme))
return NumpyMemmapSegment(
the_file, data_offset, raw_dtype, raw_shape,
formatted_dtype='complex64', formatted_shape=formatted_shape,
reverse_axes=reverse_axes, transpose_axes=transpose_axes,
format_function=ComplexFormatFunction(raw_dtype, complex_domain, band_dimension=2))
def _get_sequential_segment(self) -> DataSegment:
complex_domain = _get_complex_domain_code(self.header.gsat_img.pixelFormat.cmplxDomain)
if complex_domain not in ['IQ', 'QI', 'MP', 'PM']:
raise ValueError('Got unsupported complex domain `{}`'.format(complex_domain))
if self.header.gsat_img.pixelFormat.comp0_dataType != self.header.gsat_img.pixelFormat.comp1_dataType:
raise ValueError(
'GFF with sequential bands has the two components with different data types.\n\t'
'This is not currently unsupported.')
raw_dtype = numpy.dtype(_get_numpy_dtype(self.header.gsat_img.pixelFormat.comp0_dataType))
data_size, reverse_axes, transpose = self._get_size_and_symmetry()
raw_shape = (2, ) + data_size
if reverse_axes is not None:
reverse_axes = tuple(1+entry for entry in reverse_axes)
if transpose:
transpose_axes = (2, 1, 0)
formatted_shape = (data_size[1], data_size[0])
else:
transpose_axes = (1, 2, 0)
formatted_shape = data_size
image_compression_scheme = self.header.gsat_img.imageCompressionScheme
if image_compression_scheme == 0:
# no compression
the_file = self.header.file_object
main_offset = self.header.image_offset
elif image_compression_scheme == 2:
the_file = self._extract_zlib_image()
main_offset = 0
else:
raise ValueError('Unhandled image compression scheme `{}`'.format(image_compression_scheme))
return NumpyMemmapSegment(
the_file, main_offset, raw_dtype, raw_shape,
formatted_dtype='complex64', formatted_shape=formatted_shape,
reverse_axes=reverse_axes, transpose_axes=transpose_axes,
format_function=ComplexFormatFunction(raw_dtype, complex_domain, band_dimension=0))
def get_data_segment(self) -> DataSegment:
band_order = _get_band_order(self.header.gsat_img.pixelFormat.cmplxDomain)
self._check_image_validity(band_order)
if band_order == 'interleaved':
return self._get_interleaved_segment()
elif band_order == 'sequential':
return self._get_sequential_segment()
else:
raise ValueError('Unhandled band order `{}`'.format(band_order))
def clean_up(self) -> None:
try:
if self._cached_files is not None:
for fil in self._cached_files:
if os.path.exists(fil):
# noinspection PyBroadException
try:
os.remove(fil)
logger.info('Deleted cached file {}'.format(fil))
except Exception:
logger.error(
'Error in attempt to delete cached file {}.\n\t'
'Manually delete this file'.format(fil), exc_info=True)
self._cached_files = None
except AttributeError:
return
def __del__(self):
"""
Clean up any cached files.
Returns
-------
None
"""
self.clean_up()
####################
# the actual reader implementation

class GFFDetails(object):
    """
    Parses the GFF header information, determines the file version, and
    establishes the appropriate header/interpreter objects for reading.
    """

    __slots__ = (
        '_file_name', '_file_object', '_close_after',
        '_endianness', '_major_version', '_minor_version',
        '_header', '_interpreter')
    def __init__(self, file_name: str):
        """
        Parameters
        ----------
        file_name : str
            Path to the candidate GFF file.

        Raises
        ------
        SarpyIOError
            If the path is not a file, or the file lacks the GFF magic number.
        """

        self._endianness = None
        self._major_version = None
        self._minor_version = None
        self._header = None
        self._close_after = True
        self._interpreter = None

        if not os.path.isfile(file_name):
            raise SarpyIOError('Path {} is not a file'.format(file_name))
        self._file_name = file_name
        self._file_object = open(self._file_name, 'rb')
        # the first 7 bytes of a GFF file are the magic number
        check = self._file_object.read(7)
        if check != b'GSATIMG':
            self._file_object.close()
            # NOTE(review): _close_after is set False only after the file has
            # already been closed - presumably to guard close() against a
            # double close; confirm the intended order here
            self._close_after = False
            raise SarpyIOError('file {} is not a GFF file'.format(self._file_name))

        # initialize things
        self._initialize()
    @property
    def file_name(self) -> str:
        """
        str: the file name
        """

        return self._file_name

    @property
    def endianness(self) -> str:
        """
        str: The endian format of the GFF storage. Returns '<' if little-endian
        or '>' if big endian.
        """

        return self._endianness

    @property
    def major_version(self) -> int:
        """
        int: The major GFF version number
        """

        return self._major_version

    @property
    def minor_version(self) -> int:
        """
        int: The minor GFF version number
        """

        return self._minor_version

    @property
    def version(self) -> str:
        """
        str: The GFF version number, formatted as `major.minor`.
        """

        return '{}.{}'.format(self._major_version, self._minor_version)

    @property
    def header(self) -> Union[_GFFHeader_1_6, _GFFHeader_1_8, _GFFHeader_2]:
        """
        The GFF header object, of the type matching the detected version.

        Returns
        -------
        _GFFHeader_1_6|_GFFHeader_1_8|_GFFHeader_2
        """

        return self._header

    @property
    def interpreter(self) -> _GFFInterpreter:
        """
        The GFF interpreter object, of the type matching the detected version.

        Returns
        -------
        _GFFInterpreter
        """

        return self._interpreter
    def _initialize(self) -> None:
        """
        Initialize the various elements - detect the endianness and version,
        then construct the matching header and interpreter objects.
        """

        # byte 7 (after the b'GSATIMG' magic number) discriminates version 1.* from 2.*
        self._file_object.seek(7, os.SEEK_SET)
        check = self._file_object.read(1)
        if check == b'\x20':
            # this should be version 1.*, but we will verify
            # NOTE(review): offset 54 presumably holds the endianness flag per
            # the GFF 1.x header layout - confirm against the format document
            self._file_object.seek(54, os.SEEK_SET)
            endianness = struct.unpack('H', self._file_object.read(2))[0]  # 0 if little endian
            estr = '<' if endianness == 0 else '>'

            # minor version precedes major version at offset 8
            self._file_object.seek(8, os.SEEK_SET)
            self._minor_version, self._major_version = struct.unpack(
                '{}HH'.format(estr), self._file_object.read(4))
        elif check == b'\x00':
            # this should be a version 2.*, but we will verify
            estr = '<'
            self._file_object.seek(16, os.SEEK_SET)
            self._major_version, self._minor_version = struct.unpack(
                '{}HH'.format(estr), self._file_object.read(4))
        else:
            raise ValueError('Got unexpected check byte')
        # rewind for the header parsers, which read from the start of the file
        self._file_object.seek(0, os.SEEK_SET)
        self._endianness = estr
        version = self.version
        if version == '1.6':
            self._header = _GFFHeader_1_6(self._file_object, self.endianness)
            self._interpreter = _GFFInterpreter1(self._header)
        elif version == '1.8':
            self._header = _GFFHeader_1_8(self._file_object, self.endianness)
            self._interpreter = _GFFInterpreter1(self._header)
        elif self.major_version == 2:
            # any 2.* minor version is handled by the version 2 header
            self._header = _GFFHeader_2(self._file_object, self.endianness)
            self._interpreter = _GFFInterpreter2(self._header)
        else:
            raise ValueError('Got unhandled GFF version `{}`'.format(version))
    def get_sicd(self) -> SICDType:
        """
        Gets the sicd structure, delegating to the version-specific interpreter.

        Returns
        -------
        SICDType
        """

        return self._interpreter.get_sicd()

    def get_data_segment(self) -> DataSegment:
        """
        Gets the data segment, delegating to the version-specific interpreter.

        Returns
        -------
        DataSegment
        """

        return self._interpreter.get_data_segment()

    def close(self):
        # release the file object (if owned here) and any interpreter cache files;
        # AttributeError is swallowed to tolerate partially constructed instances
        try:
            if self._close_after:
                self._file_object.close()
            if self._interpreter is not None:
                self._interpreter.clean_up()
            self._interpreter = None
        except AttributeError:
            pass

    def __del__(self):
        # destructor - ensure resources are released
        self.close()
class GFFReader(SICDTypeReader):
    """
    A GFF (Sandia format) reader implementation.

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_gff_details', )

    def __init__(self, gff_details: Union[str, GFFDetails]):
        """
        Parameters
        ----------
        gff_details : str|GFFDetails
            file name or GFFDetails object
        """

        details = GFFDetails(gff_details) if isinstance(gff_details, str) else gff_details
        if not isinstance(details, GFFDetails):
            raise TypeError('The input argument for a GFFReader must be a '
                            'filename or GFFDetails object')
        self._gff_details = details

        the_sicd = details.get_sicd()
        the_segment = details.get_data_segment()
        SICDTypeReader.__init__(self, the_segment, the_sicd, close_segments=True)
        self._check_sizes()

    @property
    def gff_details(self) -> GFFDetails:
        """
        GFFDetails: The details object.
        """

        return self._gff_details

    @property
    def file_name(self):
        return self.gff_details.file_name

    def close(self) -> None:
        SICDTypeReader.close(self)
        if self._gff_details is not None:
            self._gff_details.close()
            self._gff_details = None

    def __del__(self):
        self.close()
########
# base expected functionality for a module with an implemented Reader

def is_a(file_name: str) -> Optional[GFFReader]:
    """
    Tests whether a given file_name corresponds to a GFF (Sandia format) file.
    Returns a reader instance, if so.

    NB: the previous docstring referenced Cosmo Skymed/CSKReader - that was a
    copy-paste error; this function handles GFF files.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    GFFReader|None
        `GFFReader` instance if GFF file, `None` otherwise
    """

    if is_file_like(file_name):
        return None

    try:
        gff_details = GFFDetails(file_name)
        logger.info('File {} is determined to be a GFF version {} file.'.format(
            file_name, gff_details.version))
        return GFFReader(gff_details)
    except SarpyIOError:
        return None
| 74,129 | 35.862258 | 122 | py |
sarpy | sarpy-master/sarpy/io/complex/capella.py | """
Functionality for reading Capella SAR data into a SICD model.
**This functionality is really onl partially complete**
"""
__classification__ = "UNCLASSIFIED"
__author__ = ("Thomas McCullough", "Wade Schwartzkopf")
import logging
import json
from typing import Dict, Any, Tuple, Union, Optional
from collections import OrderedDict
from scipy.constants import speed_of_light
import numpy
from numpy.polynomial import polynomial
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.tiff import TiffDetails, NativeTiffDataSegment
from sarpy.io.general.utils import parse_timestring, get_seconds, is_file_like
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.utils import fit_position_xvalidation
from sarpy.io.complex.sicd_elements.blocks import XYZPolyType, Poly2DType
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType, RadarModeType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Position import PositionType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
WaveformParametersType, ChanParametersType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, \
RcvChanProcType, ProcessingType
from sarpy.io.complex.sicd_elements.RMA import RMAType, INCAType
from sarpy.io.complex.sicd_elements.Radiometric import RadiometricType, NoiseLevelType_
logger = logging.getLogger(__name__)
#########
# helper functions

def avci_nacaroglu_window(M, alpha=1.25):
    """
    Avci-Nacaroglu Exponential window. See Doerry '17 paper window 4.40 p 154

    Parameters
    ----------
    M : int
        The number of window samples.
    alpha : float
        The window shape parameter.
    """

    # normalized sample positions on [-0.5, 0.5)
    samples = (numpy.arange(M) - 0.5*M)/M
    return numpy.exp(numpy.pi*alpha*(numpy.sqrt(1 - (2*samples)**2) - 1))
###########
# parser and interpreter for tiff attributes
class CapellaDetails(object):
    """
    Parses and converts the Capella image metadata from the tiff tags.
    (The previous docstring said "Cosmo Skymed" - a copy-paste error.)
    """

    __slots__ = ('_tiff_details', '_img_desc_tags')
    def __init__(self, file_name: str):
        """
        Parameters
        ----------
        file_name : str
            Path to the candidate Capella tiff file.

        Raises
        ------
        SarpyIOError
            If the tiff lacks the ImageDescription tag, or that tag is not
            deserializable as json.
        """

        # verify that the file is a tiff file
        self._tiff_details = TiffDetails(file_name)
        # verify that ImageDescription tiff tag exists
        if 'ImageDescription' not in self._tiff_details.tags:
            raise SarpyIOError('No "ImageDescription" tag in the tiff.')

        img_format = self._tiff_details.tags['ImageDescription']
        # verify that ImageDescription has a reasonable format
        try:
            self._img_desc_tags = json.loads(img_format)  # type: Dict[str, Any]
        except Exception as e:
            msg = 'Failed deserializing the ImageDescription tag as json with error {}'.format(e)
            logger.info(msg)
            raise SarpyIOError(msg)
        # verify the file is not compressed
        self._tiff_details.check_compression()
        # verify the file is not tiled
        self._tiff_details.check_tiled()
    @property
    def file_name(self) -> str:
        """
        str: the file name of the underlying tiff file
        """

        return self._tiff_details.file_name

    @property
    def tiff_details(self) -> TiffDetails:
        """
        TiffDetails: The tiff details object.
        """

        return self._tiff_details
def get_symmetry(self) -> (Union[None, Tuple[int, ...]], Tuple[int, ...]):
"""
Gets the symmetry operations definition.
Returns
-------
reverse_axes : None|Tuple[int, ...]
transpose_axes : Tuple[int, ...]
"""
pointing = self._img_desc_tags['collect']['radar']['pointing'].lower()
if pointing == 'left':
return (0, ), (1, 0, 2)
elif pointing == 'right':
return None, (1, 0, 2)
else:
raise ValueError('Got unhandled pointing value {}'.format(pointing))
def get_sicd(self) -> SICDType:
"""
Get the SICD metadata for the image.
Returns
-------
SICDType
"""
def convert_string_dict(dict_in: dict) -> dict:
dict_out = OrderedDict()
for key, val in dict_in.items():
if isinstance(val, str):
dict_out[key] = val
elif isinstance(val, int):
dict_out[key] = str(val)
elif isinstance(val, float):
dict_out[key] = '{0:0.17G}'.format(val)
else:
raise TypeError('Got unhandled type {}'.format(type(val)))
return dict_out
        def extract_state_vector() -> (numpy.ndarray, numpy.ndarray, numpy.ndarray):
            # unpack the platform state vectors (times, positions, velocities)
            # from the collect metadata; times are seconds relative to start_time
            vecs = collect['state']['state_vectors']
            times = numpy.zeros((len(vecs), ), dtype=numpy.float64)
            positions = numpy.zeros((len(vecs), 3), dtype=numpy.float64)
            velocities = numpy.zeros((len(vecs), 3), dtype=numpy.float64)
            for i, entry in enumerate(vecs):
                times[i] = get_seconds(parse_timestring(entry['time'], precision='ns'), start_time, precision='ns')
                positions[i, :] = entry['position']
                velocities[i, :] = entry['velocity']
            return times, positions, velocities
        def get_radar_parameter(name) -> Any:
            # fetch a radar parameter directly, falling back to the first entry
            # of the time varying parameters list
            # NOTE(review): only the first time-varying element is consulted -
            # presumably the parameter is constant across elements; confirm
            if name in radar:
                return radar[name]
            if len(radar_time_varying) > 0:
                element = radar_time_varying[0]
                if name in element:
                    return element[name]
            raise ValueError('Unable to determine radar parameter `{}`'.format(name))
        def get_collection_info() -> CollectionInfoType:
            # map the capella mode string onto the SICD radar mode enumeration
            coll_name = collect['platform']
            mode = collect['mode'].strip().lower()
            if mode == 'stripmap':
                radar_mode = RadarModeType(ModeType='STRIPMAP', ModeID=mode)
            elif mode == 'spotlight':
                radar_mode = RadarModeType(ModeType='SPOTLIGHT', ModeID=mode)
            elif mode == 'sliding_spotlight':
                radar_mode = RadarModeType(ModeType='DYNAMIC STRIPMAP', ModeID=mode)
            else:
                raise ValueError('Got unhandled radar mode {}'.format(mode))

            return CollectionInfoType(
                CollectorName=coll_name,
                CoreName=collect['collect_id'],
                RadarMode=radar_mode,
                Classification='UNCLASSIFIED',
                CollectType='MONOSTATIC')
        def get_image_creation() -> ImageCreationType:
            from sarpy.__about__ import __version__
            # the processing application comes from the tiff Software tag
            return ImageCreationType(
                Application=self._tiff_details.tags['Software'],
                DateTime=parse_timestring(self._img_desc_tags['processing_time'], precision='us'),
                Profile='sarpy {}'.format(__version__),
                Site='Unknown')
        def get_image_data() -> ImageDataType:
            rows = int(img['columns'])  # capella uses flipped row/column definition?
            cols = int(img['rows'])
            if img['data_type'] == 'CInt16':
                pixel_type = 'RE16I_IM16I'
            else:
                raise ValueError('Got unhandled data_type {}'.format(img['data_type']))

            # the scp pixel is the image center, mirrored for left-looking
            # collects (consistent with the axis reversal from get_symmetry)
            scp_pixel = (int(0.5 * rows), int(0.5 * cols))
            if radar['pointing'] == 'left':
                scp_pixel = (rows - scp_pixel[0] - 1, cols - scp_pixel[1] - 1)

            return ImageDataType(
                NumRows=rows,
                NumCols=cols,
                FirstRow=0,
                FirstCol=0,
                PixelType=pixel_type,
                FullImage=(rows, cols),
                SCPPixel=scp_pixel)
        def get_geo_data() -> GeoDataType:
            # the scene center point, in ECF coordinates
            return GeoDataType(SCP=SCPType(ECF=img['center_pixel']['target_position']))
        def get_position() -> PositionType:
            # fit the aperture position polynomial from the state vectors
            px, py, pz = fit_position_xvalidation(state_time, state_position, state_velocity, max_degree=8)
            return PositionType(ARPPoly=XYZPolyType(X=px, Y=py, Z=pz))
        def get_grid() -> GridType:
            def get_weight(window_dict: dict) -> (WgtTypeType, Union[None, numpy.ndarray]):
                # translate the capella window description into the SICD weight
                # type, sampling the known avci-nacaroglu window at 64 points
                window_name = window_dict['name']
                if window_name.lower() == 'rectangular':
                    return WgtTypeType(WindowName='UNIFORM'), None
                elif window_name.lower() == 'avci-nacaroglu':
                    return WgtTypeType(
                        WindowName=window_name.upper(),
                        Parameters=convert_string_dict(window_dict['parameters'])), \
                        avci_nacaroglu_window(64, alpha=window_dict['parameters']['alpha'])
                else:
                    # unknown window - record name/parameters without samples
                    return WgtTypeType(
                        WindowName=window_name,
                        Parameters=convert_string_dict(window_dict['parameters'])), None

            image_plane = 'SLANT'
            grid_type = 'RGZERO'

            coa_time = parse_timestring(img['center_pixel']['center_time'], precision='ns')

            # row (range) direction parameters
            row_bw = img.get('processed_range_bandwidth', bw)
            row_imp_rsp_bw = 2*row_bw/speed_of_light
            row_wgt, row_wgt_funct = get_weight(img['range_window'])
            row = DirParamType(
                SS=img['image_geometry']['delta_range_sample'],
                Sgn=-1,
                ImpRespBW=row_imp_rsp_bw,
                ImpRespWid=img['range_resolution'],
                KCtr=2*fc/speed_of_light,
                DeltaK1=-0.5*row_imp_rsp_bw,
                DeltaK2=0.5*row_imp_rsp_bw,
                DeltaKCOAPoly=[[0.0, ], ],
                WgtFunct=row_wgt_funct,
                WgtType=row_wgt)

            # get timecoa value
            timecoa_value = get_seconds(coa_time, start_time)
            # find an approximation for zero doppler spacing - necessarily rough for backprojected images
            col_ss = img['pixel_spacing_row']
            dop_bw = img['processed_azimuth_bandwidth']
            col_wgt, col_wgt_funct = get_weight(img['azimuth_window'])
            col = DirParamType(
                SS=col_ss,
                Sgn=-1,
                ImpRespWid=img['azimuth_resolution'],
                ImpRespBW=dop_bw*abs(ss_zd_s)/col_ss,
                KCtr=0,
                WgtFunct=col_wgt_funct,
                WgtType=col_wgt)

            # TODO:
            # column deltakcoa poly - it's in there at ["image"]["frequency_doppler_centroid_polynomial"]

            return GridType(
                ImagePlane=image_plane,
                Type=grid_type,
                TimeCOAPoly=[[timecoa_value, ], ],
                Row=row,
                Col=col)
        def get_radar_collection():
            # type: () -> RadarCollectionType
            # the transmit band is inferred from center frequency and bandwidth
            freq_min = fc - 0.5*bw
            return RadarCollectionType(
                TxPolarization=radar['transmit_polarization'],
                TxFrequency=(freq_min, freq_min + bw),
                Waveform=[WaveformParametersType(
                    TxRFBandwidth=bw,
                    TxPulseLength=get_radar_parameter('pulse_duration'),
                    RcvDemodType='CHIRP',
                    ADCSampleRate=radar['sampling_frequency'],
                    TxFreqStart=freq_min)],
                RcvChannels=[ChanParametersType(
                    TxRcvPolarization='{}:{}'.format(radar['transmit_polarization'],
                                                     radar['receive_polarization']))])
        def get_timeline() -> TimelineType:
            # a single IPP set covering the whole collect, using the first prf entry
            prf = radar['prf'][0]['prf']
            return TimelineType(
                CollectStart=start_time,
                CollectDuration=duration,
                IPP=[
                    IPPSetType(
                        TStart=0,
                        TEnd=duration,
                        IPPStart=0,
                        IPPEnd=round(duration*prf) - 1,
                        IPPPoly=(0, prf)), ])
        def get_image_formation() -> ImageFormationType:
            # map the capella algorithm string onto the SICD enumeration,
            # falling back to OTHER for anything unrecognized
            algo = img['algorithm'].upper()
            processings = None
            if algo == 'BACKPROJECTION':
                processings = [ProcessingType(Type='Backprojected to DEM', Applied=True), ]
            else:
                logger.warning(
                    'Got unexpected algorithm, the results for the '
                    'sicd structure might be unexpected')

            if algo not in ('PFA', 'RMA', 'RGAZCOMP'):
                logger.warning(
                    'Image formation algorithm {} not one of the recognized SICD options, '
                    'being set to "OTHER".'.format(algo))
                algo = 'OTHER'

            return ImageFormationType(
                RcvChanProc=RcvChanProcType(NumChanProc=1, PRFScaleFactor=1),
                ImageFormAlgo=algo,
                TStartProc=0,
                TEndProc=duration,
                TxRcvPolarizationProc='{}:{}'.format(radar['transmit_polarization'], radar['receive_polarization']),
                TxFrequencyProc=(
                    radar_collection.TxFrequency.Min,
                    radar_collection.TxFrequency.Max),
                STBeamComp='NO',
                ImageBeamComp='NO',
                AzAutofocus='NO',
                RgAutofocus='NO',
                Processings=processings)
        def get_rma() -> RMAType:
            img_geometry = img['image_geometry']
            near_range = img_geometry['range_to_first_sample']
            center_time = parse_timestring(img['center_pixel']['center_time'], precision='us')
            first_time = parse_timestring(img_geometry['first_line_time'], precision='us')
            # zero doppler time of the scp, relative to the first line time
            zd_time_scp = get_seconds(center_time, first_time, 'us')

            r_ca_scp = near_range + image_data.SCPPixel.Row*grid.Row.SS
            time_ca_poly = numpy.array([zd_time_scp, -look*ss_zd_s/grid.Col.SS], dtype='float64')

            # platform speed at the center of aperture time
            timecoa_value = get_seconds(center_time, start_time)
            arp_velocity = position.ARPPoly.derivative_eval(timecoa_value, der_order=1)
            vm_ca = numpy.linalg.norm(arp_velocity)

            inca = INCAType(
                R_CA_SCP=r_ca_scp,
                FreqZero=fc,
                TimeCAPoly=time_ca_poly,
                DRateSFPoly=[[1/(vm_ca*ss_zd_s/grid.Col.SS)], ]
            )
            return RMAType(
                RMAlgoType='RG_DOP',
                INCA=inca)
        def get_radiometric() -> Union[None, RadiometricType]:
            # only beta nought scaled products get radiometric metadata
            if img['radiometry'].lower() != 'beta_nought':
                logger.warning(
                    'Got unrecognized Capella radiometry {},\n\t'
                    'skipping the radiometric metadata'.format(img['radiometry']))
                return None

            return RadiometricType(BetaZeroSFPoly=[[img['scale_factor']**2, ], ])
        def add_noise() -> None:
            # attach the noise polynomial, re-centered at the scp range and
            # shifted from nesz units to absolute units
            if sicd.Radiometric is None:
                return

            nesz_raw = numpy.array(img['nesz_polynomial']['coefficients'], dtype='float64')
            test_value = polynomial.polyval(rma.INCA.R_CA_SCP, nesz_raw)
            if abs(test_value - img['nesz_peak']) > 100:
                # this polynomial reversed in early versions, so reverse if evaluated results are nonsense
                nesz_raw = nesz_raw[::-1]
            nesz_poly_raw = Poly2DType(Coefs=numpy.reshape(nesz_raw, (-1, 1)))
            noise_coeffs = nesz_poly_raw.shift(-rma.INCA.R_CA_SCP, 1, 0, 1, return_poly=False)
            # this is in nesz units, so shift to absolute units
            noise_coeffs[0] -= 10*numpy.log10(sicd.Radiometric.SigmaZeroSFPoly[0, 0])
            sicd.Radiometric.NoiseLevel = NoiseLevelType_(NoiseLevelType='ABSOLUTE', NoisePoly=noise_coeffs)
        # extract general use information
        collect = self._img_desc_tags['collect']
        img = collect['image']
        radar = collect['radar']
        radar_time_varying = radar.get('time_varying_parameters', [])

        start_time = parse_timestring(collect['start_timestamp'], precision='ns')
        end_time = parse_timestring(collect['stop_timestamp'], precision='ns')
        duration = get_seconds(end_time, start_time, precision='ns')
        state_time, state_position, state_velocity = extract_state_vector()
        bw = get_radar_parameter('pulse_bandwidth')
        fc = get_radar_parameter('center_frequency')
        ss_zd_s = img['image_geometry']['delta_line_time']
        # look direction sign: -1 for right looking, +1 otherwise
        look = -1 if radar['pointing'] == 'right' else 1

        # define the sicd elements
        collection_info = get_collection_info()
        image_creation = get_image_creation()
        image_data = get_image_data()
        geo_data = get_geo_data()
        position = get_position()
        grid = get_grid()
        radar_collection = get_radar_collection()
        timeline = get_timeline()
        image_formation = get_image_formation()
        rma = get_rma()
        radiometric = get_radiometric()

        sicd = SICDType(
            CollectionInfo=collection_info,
            ImageCreation=image_creation,
            ImageData=image_data,
            GeoData=geo_data,
            Position=position,
            Grid=grid,
            RadarCollection=radar_collection,
            Timeline=timeline,
            ImageFormation=image_formation,
            RMA=rma,
            Radiometric=radiometric)
        # derive remaining fields, then attach noise and rniirs estimates
        sicd.derive()
        add_noise()
        sicd.populate_rniirs(override=False)
        return sicd
class CapellaReader(SICDTypeReader):
    """
    The Capella SLC reader implementation. **This is only partially complete.**

    **Changed in version 1.3.0** for reading changes.
    """

    __slots__ = ('_capella_details', )

    def __init__(self, capella_details):
        """
        Parameters
        ----------
        capella_details : str|CapellaDetails
            The file name, or the parsed details object.
        """

        details = CapellaDetails(capella_details) if isinstance(capella_details, str) else capella_details
        if not isinstance(details, CapellaDetails):
            raise TypeError('The input argument for capella_details must be a '
                            'filename or CapellaDetails object')
        self._capella_details = details

        the_sicd = details.get_sicd()
        reverse_axes, transpose_axes = details.get_symmetry()
        the_segment = NativeTiffDataSegment(
            details.tiff_details, reverse_axes=reverse_axes, transpose_axes=transpose_axes)
        SICDTypeReader.__init__(self, the_segment, the_sicd, close_segments=True)
        self._check_sizes()

    @property
    def capella_details(self) -> CapellaDetails:
        """
        CapellaDetails: The capella details object.
        """

        return self._capella_details

    @property
    def file_name(self) -> str:
        return self.capella_details.file_name
########
# base expected functionality for a module with an implemented Reader

def is_a(file_name: str) -> Optional[CapellaReader]:
    """
    Tests whether a given file_name corresponds to a Capella SAR file.
    Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str
        the file_name to check

    Returns
    -------
    CapellaReader|None
        `CapellaReader` instance if Capella file, `None` otherwise
    """

    if is_file_like(file_name):
        return None

    try:
        details = CapellaDetails(file_name)
        logger.info('File {} is determined to be a Capella file.'.format(file_name))
        return CapellaReader(details)
    except SarpyIOError:
        return None
| 20,008 | 37.552987 | 116 | py |
sarpy | sarpy-master/sarpy/io/complex/other_nitf.py | """
Work in progress for reading some other kind of complex NITF.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union, Tuple, List, Optional, Callable, Sequence
import copy
from datetime import datetime
import numpy
from scipy.constants import foot
from sarpy.geometry.geocoords import geodetic_to_ecf, ned_to_ecf
from sarpy.geometry.latlon import num as lat_lon_parser
from sarpy.io.general.base import SarpyIOError
from sarpy.io.general.data_segment import DataSegment, SubsetSegment
from sarpy.io.general.format_function import FormatFunction, ComplexFormatFunction
from sarpy.io.general.nitf import extract_image_corners, NITFDetails, NITFReader
from sarpy.io.general.nitf_elements.security import NITFSecurityTags
from sarpy.io.general.nitf_elements.image import ImageSegmentHeader, ImageSegmentHeader0
from sarpy.io.general.nitf_elements.nitf_head import NITFHeader, NITFHeader0
from sarpy.io.general.nitf_elements.base import TREList
from sarpy.io.general.nitf_elements.tres.unclass.CMETAA import CMETAA
from sarpy.io.general.utils import is_file_like
from sarpy.io.complex.base import SICDTypeReader
from sarpy.io.complex.sicd_elements.SICD import SICDType
from sarpy.io.complex.sicd_elements.CollectionInfo import CollectionInfoType
from sarpy.io.complex.sicd_elements.ImageData import ImageDataType
from sarpy.io.complex.sicd_elements.GeoData import GeoDataType, SCPType
from sarpy.io.complex.sicd_elements.Grid import GridType, DirParamType, WgtTypeType
from sarpy.io.complex.sicd_elements.Timeline import TimelineType, IPPSetType
from sarpy.io.complex.sicd_elements.RadarCollection import RadarCollectionType, \
TxFrequencyType, WaveformParametersType, ChanParametersType
from sarpy.io.complex.sicd_elements.SCPCOA import SCPCOAType
from sarpy.io.complex.sicd_elements.ImageFormation import ImageFormationType, TxFrequencyProcType
from sarpy.io.complex.sicd_elements.ImageCreation import ImageCreationType
from sarpy.io.complex.sicd_elements.PFA import PFAType
logger = logging.getLogger(__name__)
_iso_date_format = '{}-{}-{}T{}:{}:{}'
# NB: DO NOT implement is_a() here.
# This will explicitly happen after other readers
########
# Define sicd structure from image sub-header information
def extract_sicd(
img_header: Union[ImageSegmentHeader, ImageSegmentHeader0],
transpose: True,
nitf_header: Optional[Union[NITFHeader, NITFHeader0]] = None) -> SICDType:
"""
Extract the best available SICD structure from relevant nitf header structures.
Parameters
----------
img_header : ImageSegmentHeader|ImageSegmentHeader0
transpose : bool
nitf_header : None|NITFHeader|NITFHeader0
Returns
-------
SICDType
"""
def get_collection_info() -> CollectionInfoType:
isorce = img_header.ISORCE.strip()
collector_name = None if len(isorce) < 1 else isorce
iid2 = img_header.IID2.strip()
core_name = img_header.IID1.strip() if len(iid2) < 1 else iid2
class_str = img_header.Security.CLAS
if class_str == 'T':
classification = 'TOPSECRET'
elif class_str == 'S':
classification = 'SECRET'
elif class_str == 'C':
classification = 'CONFIDENTIAL'
elif class_str == 'U':
classification = 'UNCLASSIFIED'
else:
classification = ''
ctlh = img_header.Security.CTLH.strip()
if len(ctlh) < 1:
classification += '//' + ctlh
code = img_header.Security.CODE.strip()
if len(code) < 1:
classification += '//' + code
return CollectionInfoType(
CollectorName=collector_name,
CoreName=core_name,
Classification=classification)
    def get_image_data() -> ImageDataType:
        # map the NITF pixel value type/size onto the closest SICD pixel type
        pvtype = img_header.PVTYPE
        if pvtype == 'C':
            if img_header.NBPP != 64:
                logger.warning(
                    'This NITF has complex bands that are not 64-bit.\n\t'
                    'This is not currently supported.')
            pixel_type = 'RE32F_IM32F'
        elif pvtype == 'R':
            if img_header.NBPP == 64:
                logger.warning(
                    'The real/imaginary data in the NITF are stored as 64-bit floating point.\n\t'
                    'The closest Pixel Type, RE32F_IM32F, will be used,\n\t'
                    'but there may be overflow issues if converting this file.')
            pixel_type = 'RE32F_IM32F'
        elif pvtype == 'SI':
            pixel_type = 'RE16I_IM16I'
        else:
            raise ValueError('Got unhandled PVTYPE {}'.format(pvtype))

        # swap rows/columns when the transpose symmetry operation applies
        if transpose:
            rows = img_header.NCOLS
            cols = img_header.NROWS
        else:
            rows = img_header.NROWS
            cols = img_header.NCOLS
        return ImageDataType(
            PixelType=pixel_type,
            NumRows=rows,
            NumCols=cols,
            FirstRow=0,
            FirstCol=0,
            FullImage=(rows, cols),
            SCPPixel=(0.5 * rows, 0.5 * cols))
    def append_country_code(cc) -> None:
        # add the country code to the collection info, creating structure as
        # needed and avoiding duplicates
        if len(cc) > 0:
            if the_sicd.CollectionInfo is None:
                the_sicd.CollectionInfo = CollectionInfoType(CountryCodes=[cc, ])
            elif the_sicd.CollectionInfo.CountryCodes is None:
                the_sicd.CollectionInfo.CountryCodes = [cc, ]
            elif cc not in the_sicd.CollectionInfo.CountryCodes:
                the_sicd.CollectionInfo.CountryCodes.append(cc)
    def set_image_corners(icps: numpy.ndarray, override: bool = False) -> None:
        # set the image corner points, unless already set and override is False
        if the_sicd.GeoData is None:
            the_sicd.GeoData = GeoDataType(ImageCorners=icps)
        elif the_sicd.GeoData.ImageCorners is None or override:
            the_sicd.GeoData.ImageCorners = icps
    def set_arp_position(arp_ecf: numpy.ndarray, override: bool = False) -> None:
        # set the aperture reference point position, optionally overriding
        if the_sicd.SCPCOA is None:
            the_sicd.SCPCOA = SCPCOAType(ARPPos=arp_ecf)
        elif override:
            # prioritize this information first - it should be more reliable than other sources
            the_sicd.SCPCOA.ARPPos = arp_ecf
def set_scp(scp_ecf: numpy.ndarray, scp_pixel: Union[numpy.ndarray, list, tuple], override: bool = False) -> None:
def set_scppixel():
if the_sicd.ImageData is None:
the_sicd.ImageData = ImageDataType(SCPPixel=scp_pixel)
else:
the_sicd.ImageData.SCPPixel = scp_pixel
if the_sicd.GeoData is None:
the_sicd.GeoData = GeoDataType(SCP=SCPType(ECF=scp_ecf))
set_scppixel()
elif the_sicd.GeoData.SCP is None or override:
the_sicd.GeoData.SCP = SCPType(ECF=scp_ecf)
set_scppixel()
def set_collect_start(
collect_start: Union[str, datetime, numpy.datetime64], override: bool = False) -> None:
if the_sicd.Timeline is None:
the_sicd.Timeline = TimelineType(CollectStart=collect_start)
elif the_sicd.Timeline.CollectStart is None or override:
the_sicd.Timeline.CollectStart = collect_start
def set_uvects(row_unit: numpy.ndarray, col_unit: numpy.ndarray) -> None:
if the_sicd.Grid is None:
the_sicd.Grid = GridType(
Row=DirParamType(UVectECF=row_unit),
Col=DirParamType(UVectECF=col_unit))
return
if the_sicd.Grid.Row is None:
the_sicd.Grid.Row = DirParamType(UVectECF=row_unit)
elif the_sicd.Grid.Row.UVectECF is None:
the_sicd.Grid.Row.UVectECF = row_unit
if the_sicd.Grid.Col is None:
the_sicd.Grid.Col = DirParamType(UVectECF=col_unit)
elif the_sicd.Grid.Col.UVectECF is None:
the_sicd.Grid.Col.UVectECF = col_unit
    def try_CMETAA() -> None:
        """
        Populate the SICD structure from the CMETAA TRE, when present.
        This is the richest of the handled TREs, touching geometry, grid,
        timeline, radar collection, and image formation fields.
        """
        # noinspection PyTypeChecker
        tre = None if tres is None else tres['CMETAA']  # type: CMETAA
        if tre is None:
            return
        cmetaa = tre.DATA
        # ensure that the top-level structures to be populated all exist
        if the_sicd.GeoData is None:
            the_sicd.GeoData = GeoDataType()
        if the_sicd.SCPCOA is None:
            the_sicd.SCPCOA = SCPCOAType()
        if the_sicd.Grid is None:
            the_sicd.Grid = GridType()
        if the_sicd.Timeline is None:
            the_sicd.Timeline = TimelineType()
        if the_sicd.RadarCollection is None:
            the_sicd.RadarCollection = RadarCollectionType()
        if the_sicd.ImageFormation is None:
            the_sicd.ImageFormation = ImageFormationType()
        # collection geometry fields
        the_sicd.SCPCOA.SCPTime = 0.5*float(cmetaa.WF_CDP)
        the_sicd.GeoData.SCP = SCPType(ECF=tre.get_scp())
        the_sicd.SCPCOA.ARPPos = tre.get_arp()
        the_sicd.SCPCOA.SideOfTrack = cmetaa.CG_LD.strip().upper()
        the_sicd.SCPCOA.SlantRange = float(cmetaa.CG_SRAC)
        the_sicd.SCPCOA.DopplerConeAng = float(cmetaa.CG_CAAC)
        the_sicd.SCPCOA.GrazeAng = float(cmetaa.CG_GAAC)
        the_sicd.SCPCOA.IncidenceAng = 90 - float(cmetaa.CG_GAAC)
        if hasattr(cmetaa, 'CG_TILT'):
            the_sicd.SCPCOA.TwistAng = float(cmetaa.CG_TILT)
        if hasattr(cmetaa, 'CG_SLOPE'):
            the_sicd.SCPCOA.SlopeAng = float(cmetaa.CG_SLOPE)
        the_sicd.ImageData.SCPPixel = [int(cmetaa.IF_DC_IS_COL), int(cmetaa.IF_DC_IS_ROW)]
        img_corners = tre.get_image_corners()
        if img_corners is not None:
            the_sicd.GeoData.ImageCorners = img_corners
        if cmetaa.CMPLX_SIGNAL_PLANE.upper() == 'S':
            the_sicd.Grid.ImagePlane = 'SLANT'
        elif cmetaa.CMPLX_SIGNAL_PLANE.upper() == 'G':
            the_sicd.Grid.ImagePlane = 'GROUND'
        else:
            logger.warning(
                'Got unexpected CMPLX_SIGNAL_PLANE value {},\n\t'
                'setting ImagePlane to SLANT'.format(cmetaa.CMPLX_SIGNAL_PLANE))
        # grid parameters
        the_sicd.Grid.Row = DirParamType(
            SS=float(cmetaa.IF_RSS),
            ImpRespWid=float(cmetaa.IF_RGRES),
            Sgn=1 if cmetaa.IF_RFFTS.strip() == '-' else -1,  # opposite sign convention
            ImpRespBW=float(cmetaa.IF_RFFT_SAMP)/(float(cmetaa.IF_RSS)*float(cmetaa.IF_RFFT_TOT)))
        the_sicd.Grid.Col = DirParamType(
            SS=float(cmetaa.IF_AZSS),
            ImpRespWid=float(cmetaa.IF_AZRES),
            Sgn=1 if cmetaa.IF_AFFTS.strip() == '-' else -1,  # opposite sign convention
            ImpRespBW=float(cmetaa.IF_AZFFT_SAMP)/(float(cmetaa.IF_AZSS)*float(cmetaa.IF_AZFFT_TOT)))
        # weight function details, when the window is recognized
        cmplx_weight = cmetaa.CMPLX_WEIGHT.strip().upper()
        if cmplx_weight == 'UWT':
            the_sicd.Grid.Row.WgtType = WgtTypeType(WindowName='UNIFORM')
            the_sicd.Grid.Col.WgtType = WgtTypeType(WindowName='UNIFORM')
        elif cmplx_weight == 'HMW':
            the_sicd.Grid.Row.WgtType = WgtTypeType(WindowName='HAMMING')
            the_sicd.Grid.Col.WgtType = WgtTypeType(WindowName='HAMMING')
        elif cmplx_weight == 'HNW':
            the_sicd.Grid.Row.WgtType = WgtTypeType(WindowName='HANNING')
            the_sicd.Grid.Col.WgtType = WgtTypeType(WindowName='HANNING')
        elif cmplx_weight == 'TAY':
            the_sicd.Grid.Row.WgtType = WgtTypeType(
                WindowName='TAYLOR',
                Parameters={
                    'SLL': '-{0:d}'.format(int(cmetaa.CMPLX_RNG_SLL)),
                    'NBAR': '{0:d}'.format(int(cmetaa.CMPLX_RNG_TAY_NBAR))})
            the_sicd.Grid.Col.WgtType = WgtTypeType(
                WindowName='TAYLOR',
                Parameters={
                    'SLL': '-{0:d}'.format(int(cmetaa.CMPLX_AZ_SLL)),
                    'NBAR': '{0:d}'.format(int(cmetaa.CMPLX_AZ_TAY_NBAR))})
        else:
            logger.warning(
                'Got unsupported CMPLX_WEIGHT value {}.\n\tThe resulting SICD will '
                'not have valid weight array populated'.format(cmplx_weight))
        the_sicd.Grid.Row.define_weight_function()
        the_sicd.Grid.Col.define_weight_function()
        # timeline fields
        # noinspection PyBroadException
        try:
            date_str = cmetaa.T_UTC_YYYYMMMDD
            time_str = cmetaa.T_HHMMSSUTC
            date_time = _iso_date_format.format(
                date_str[:4], date_str[4:6], date_str[6:8],
                time_str[:2], time_str[2:4], time_str[4:6])
            the_sicd.Timeline.CollectStart = numpy.datetime64(date_time, 'us')
        except Exception:
            logger.info('Failed extracting start time from CMETAA')
            pass
        the_sicd.Timeline.CollectDuration = float(cmetaa.WF_CDP)
        the_sicd.Timeline.IPP = [
            IPPSetType(TStart=0,
                       TEnd=float(cmetaa.WF_CDP),
                       IPPStart=0,
                       IPPEnd=numpy.floor(float(cmetaa.WF_CDP)*float(cmetaa.WF_PRF)),
                       IPPPoly=[0, float(cmetaa.WF_PRF)])]
        # radar collection fields
        the_sicd.RadarCollection.TxFrequency = TxFrequencyType(
            Min=float(cmetaa.WF_SRTFR),
            Max=float(cmetaa.WF_ENDFR))
        the_sicd.RadarCollection.TxPolarization = cmetaa.POL_TR.upper()
        the_sicd.RadarCollection.Waveform = [WaveformParametersType(
            TxPulseLength=float(cmetaa.WF_WIDTH),
            TxRFBandwidth=float(cmetaa.WF_BW),
            TxFreqStart=float(cmetaa.WF_SRTFR),
            TxFMRate=float(cmetaa.WF_CHRPRT)*1e12)]
        tx_rcv_pol = '{}:{}'.format(cmetaa.POL_TR.upper(), cmetaa.POL_RE.upper())
        the_sicd.RadarCollection.RcvChannels = [
            ChanParametersType(TxRcvPolarization=tx_rcv_pol)]
        # image formation fields
        the_sicd.ImageFormation.TxRcvPolarizationProc = tx_rcv_pol
        if_process = cmetaa.IF_PROCESS.strip().upper()
        if if_process == 'PF':
            the_sicd.ImageFormation.ImageFormAlgo = 'PFA'
            # the focus/image plane normal vectors are given in NED relative to the scp
            scp_ecf = tre.get_scp()
            fpn_ned = numpy.array(
                [float(cmetaa.CG_FPNUV_X), float(cmetaa.CG_FPNUV_Y), float(cmetaa.CG_FPNUV_Z)], dtype='float64')
            ipn_ned = numpy.array(
                [float(cmetaa.CG_IDPNUVX), float(cmetaa.CG_IDPNUVY), float(cmetaa.CG_IDPNUVZ)], dtype='float64')
            fpn_ecf = ned_to_ecf(fpn_ned, scp_ecf, absolute_coords=False)
            ipn_ecf = ned_to_ecf(ipn_ned, scp_ecf, absolute_coords=False)
            the_sicd.PFA = PFAType(FPN=fpn_ecf, IPN=ipn_ecf)
        elif if_process in ['RM', 'CD']:
            the_sicd.ImageFormation.ImageFormAlgo = 'RMA'
        # the remainder of this is guesswork to define required fields
        the_sicd.ImageFormation.TStartProc = 0  # guess work
        the_sicd.ImageFormation.TEndProc = float(cmetaa.WF_CDP)
        the_sicd.ImageFormation.TxFrequencyProc = TxFrequencyProcType(
            MinProc=float(cmetaa.WF_SRTFR), MaxProc=float(cmetaa.WF_ENDFR))
        # all remaining guess work
        the_sicd.ImageFormation.STBeamComp = 'NO'
        the_sicd.ImageFormation.ImageBeamComp = 'SV' if cmetaa.IF_BEAM_COMP[0] == 'Y' else 'NO'
        the_sicd.ImageFormation.AzAutofocus = 'NO' if cmetaa.AF_TYPE[0] == 'N' else 'SV'
        the_sicd.ImageFormation.RgAutofocus = 'NO'
def try_AIMIDA() -> None:
tre = None if tres is None else tres['AIMIDA']
if tre is None:
return
aimida = tre.DATA
append_country_code(aimida.COUNTRY.strip())
create_time = datetime.strptime(aimida.CREATION_DATE, '%d%b%y')
if the_sicd.ImageCreation is None:
the_sicd.ImageCreation = ImageCreationType(DateTime=create_time)
elif the_sicd.ImageCreation.DateTime is None:
the_sicd.ImageCreation.DateTime = create_time
collect_start = datetime.strptime(aimida.MISSION_DATE+aimida.TIME, '%d%b%y%H%M')
set_collect_start(collect_start, override=False)
def try_AIMIDB() -> None:
tre = None if tres is None else tres['AIMIDB']
if tre is None:
return
aimidb = tre.DATA
append_country_code(aimidb.COUNTRY.strip())
if the_sicd.ImageFormation is not None and the_sicd.ImageFormation.SegmentIdentifier is None:
the_sicd.ImageFormation.SegmentIdentifier = aimidb.CURRENT_SEGMENT.strip()
date_str = aimidb.ACQUISITION_DATE
collect_start = numpy.datetime64(_iso_date_format.format(
date_str[:4], date_str[4:6], date_str[6:8],
date_str[8:10], date_str[10:12], date_str[12:14]), 'us')
set_collect_start(collect_start, override=False)
    def try_ACFT() -> None:
        """
        Populate collector name and grid sample spacing from the ACFTA TRE,
        falling back to ACFTB, when present.
        """
        if tres is None:
            return
        tre = tres['ACFTA']
        if tre is None:
            # fall back to the ACFTB variant
            tre = tres['ACFTB']
        if tre is None:
            return
        acft = tre.DATA
        sensor_id = acft.SENSOR_ID.strip()
        if len(sensor_id) > 1:
            if the_sicd.CollectionInfo is None:
                the_sicd.CollectionInfo = CollectionInfoType(CollectorName=sensor_id)
            elif the_sicd.CollectionInfo.CollectorName is None:
                the_sicd.CollectionInfo.CollectorName = sensor_id
        row_ss = float(acft.ROW_SPACING)
        col_ss = float(acft.COL_SPACING)
        # spacing given in feet ('f' units) is converted to meters
        if hasattr(acft, 'ROW_SPACING_UNITS') and acft.ROW_SPACING_UNITS.strip().lower() == 'f':
            row_ss *= foot
        if hasattr(acft, 'COL_SPACING_UNITS') and acft.COL_SPACING_UNITS.strip().lower() == 'f':
            col_ss *= foot
        # NB: these values are actually ground plane values, and should be
        # corrected to slant plane if possible
        if the_sicd.SCPCOA is not None:
            if the_sicd.SCPCOA.GrazeAng is not None:
                col_ss *= numpy.cos(numpy.deg2rad(the_sicd.SCPCOA.GrazeAng))
            if the_sicd.SCPCOA.TwistAng is not None:
                row_ss *= numpy.cos(numpy.deg2rad(the_sicd.SCPCOA.TwistAng))
        # populate the grid spacing, never overwriting existing values
        if the_sicd.Grid is None:
            the_sicd.Grid = GridType(Row=DirParamType(SS=row_ss), Col=DirParamType(SS=col_ss))
            return
        if the_sicd.Grid.Row is None:
            the_sicd.Grid.Row = DirParamType(SS=row_ss)
        elif the_sicd.Grid.Row.SS is None:
            the_sicd.Grid.Row.SS = row_ss
        if the_sicd.Grid.Col is None:
            the_sicd.Grid.Col = DirParamType(SS=col_ss)
        elif the_sicd.Grid.Col.SS is None:
            the_sicd.Grid.Col.SS = col_ss
def try_BLOCKA() -> None:
tre = None if tres is None else tres['BLOCKA']
if tre is None:
return
blocka = tre.DATA
icps = []
for fld_name in ['FRFC_LOC', 'FRLC_LOC', 'LRLC_LOC', 'LRFC_LOC']:
value = getattr(blocka, fld_name)
# noinspection PyBroadException
try:
lat_val = float(value[:10])
lon_val = float(value[10:21])
except ValueError:
lat_val = lat_lon_parser(value[:10])
lon_val = lat_lon_parser(value[10:21])
icps.append([lat_val, lon_val])
set_image_corners(numpy.array(icps, dtype='float64'), override=False)
    def try_MPDSRA() -> None:
        """
        Populate SCP, ARP position/velocity/acceleration, and (possibly) the
        PFA focus plane normal from the MPDSRA TRE, when present.
        """
        def valid_array(arr):
            # an array is usable when all entries are finite and not all zero
            return numpy.all(numpy.isfinite(arr)) and numpy.any(arr != 0)
        tre = None if tres is None else tres['MPDSRA']
        if tre is None:
            return
        mpdsra = tre.DATA
        # the reference point, converted from feet (note the one-based row/column)
        scp_ecf = foot*numpy.array(
            [float(mpdsra.ORO_X), float(mpdsra.ORO_Y), float(mpdsra.ORO_Z)], dtype='float64')
        if valid_array(scp_ecf):
            set_scp(scp_ecf, (int(mpdsra.ORP_COLUMN) - 1, int(mpdsra.ORP_ROW) - 1), override=False)
        # aperture position/velocity/acceleration, converted from feet, given in NED
        arp_pos_ned = foot*numpy.array(
            [float(mpdsra.ARP_POS_N), float(mpdsra.ARP_POS_E), float(mpdsra.ARP_POS_D)], dtype='float64')
        arp_vel_ned = foot*numpy.array(
            [float(mpdsra.ARP_VEL_N), float(mpdsra.ARP_VEL_E), float(mpdsra.ARP_VEL_D)], dtype='float64')
        arp_acc_ned = foot*numpy.array(
            [float(mpdsra.ARP_ACC_N), float(mpdsra.ARP_ACC_E), float(mpdsra.ARP_ACC_D)], dtype='float64')
        arp_pos = ned_to_ecf(arp_pos_ned, scp_ecf, absolute_coords=True) if valid_array(arp_pos_ned) else None
        set_arp_position(arp_pos, override=False)
        arp_vel = ned_to_ecf(arp_vel_ned, scp_ecf, absolute_coords=False) if valid_array(arp_vel_ned) else None
        if the_sicd.SCPCOA.ARPVel is None:
            the_sicd.SCPCOA.ARPVel = arp_vel
        arp_acc = ned_to_ecf(arp_acc_ned, scp_ecf, absolute_coords=False) if valid_array(arp_acc_ned) else None
        if the_sicd.SCPCOA.ARPAcc is None:
            the_sicd.SCPCOA.ARPAcc = arp_acc
        if the_sicd.PFA is not None and the_sicd.PFA.FPN is None:
            # TODO: is this already in meters?
            fpn_ecf = numpy.array(
                [float(mpdsra.FOC_X), float(mpdsra.FOC_Y), float(mpdsra.FOC_Z)], dtype='float64')  # *foot
            if valid_array(fpn_ecf):
                the_sicd.PFA.FPN = fpn_ecf
    def try_MENSRB() -> None:
        """
        Populate ARP position, SCP, and grid unit vectors from the MENSRB TRE,
        when present.
        """
        tre = None if tres is None else tres['MENSRB']
        if tre is None:
            return
        mensrb = tre.DATA
        # aircraft and reference point locations, lat/lon strings with altitude in feet
        arp_llh = numpy.array(
            [lat_lon_parser(mensrb.ACFT_LOC[:12]),
             lat_lon_parser(mensrb.ACFT_LOC[12:25]),
             foot*float(mensrb.ACFT_ALT)], dtype='float64')
        scp_llh = numpy.array(
            [lat_lon_parser(mensrb.RP_LOC[:12]),
             lat_lon_parser(mensrb.RP_LOC[12:25]),
             foot*float(mensrb.RP_ELV)], dtype='float64')
        # TODO: handle the conversion from msl to hae
        arp_ecf = geodetic_to_ecf(arp_llh)
        scp_ecf = geodetic_to_ecf(scp_llh)
        set_arp_position(arp_ecf, override=True)
        # note the conversion from one-based to zero-based pixel indexing
        set_scp(scp_ecf, (int(mensrb.RP_COL)-1, int(mensrb.RP_ROW)-1), override=False)
        # the grid unit vectors are given in NED relative to the scp
        row_unit_ned = numpy.array(
            [float(mensrb.C_R_NC), float(mensrb.C_R_EC), float(mensrb.C_R_DC)], dtype='float64')
        col_unit_ned = numpy.array(
            [float(mensrb.C_AZ_NC), float(mensrb.C_AZ_EC), float(mensrb.C_AZ_DC)], dtype='float64')
        set_uvects(ned_to_ecf(row_unit_ned, scp_ecf, absolute_coords=False),
                   ned_to_ecf(col_unit_ned, scp_ecf, absolute_coords=False))
    def try_MENSRA() -> None:
        """
        Populate ARP position, SCP, and grid unit vectors from the MENSRA TRE,
        when present.
        """
        tre = None if tres is None else tres['MENSRA']
        if tre is None:
            return
        mensra = tre.DATA
        # aircraft and center point locations, lat/lon strings with altitude in feet
        arp_llh = numpy.array(
            [lat_lon_parser(mensra.ACFT_LOC[:10]),
             lat_lon_parser(mensra.ACFT_LOC[10:21]),
             foot*float(mensra.ACFT_ALT)], dtype='float64')
        scp_llh = numpy.array(
            [lat_lon_parser(mensra.CP_LOC[:10]),
             lat_lon_parser(mensra.CP_LOC[10:21]),
             foot*float(mensra.CP_ALT)], dtype='float64')
        # TODO: handle the conversion from msl to hae
        arp_ecf = geodetic_to_ecf(arp_llh)
        scp_ecf = geodetic_to_ecf(scp_llh)
        set_arp_position(arp_ecf, override=True)
        # TODO: is this already zero based?
        set_scp(geodetic_to_ecf(scp_llh), (int(mensra.CCRP_COL), int(mensra.CCRP_ROW)), override=False)
        # the grid unit vectors are given in NED relative to the scp
        row_unit_ned = numpy.array(
            [float(mensra.C_R_NC), float(mensra.C_R_EC), float(mensra.C_R_DC)], dtype='float64')
        col_unit_ned = numpy.array(
            [float(mensra.C_AZ_NC), float(mensra.C_AZ_EC), float(mensra.C_AZ_DC)], dtype='float64')
        set_uvects(ned_to_ecf(row_unit_ned, scp_ecf, absolute_coords=False),
                   ned_to_ecf(col_unit_ned, scp_ecf, absolute_coords=False))
def extract_corners() -> None:
icps = extract_image_corners(img_header)
if icps is None:
return
# TODO: include symmetry transform issue
set_image_corners(icps, override=False)
def extract_start() -> None:
# noinspection PyBroadException
try:
date_str = img_header.IDATIM
collect_start = numpy.datetime64(
_iso_date_format.format(
date_str[:4], date_str[4:6], date_str[6:8],
date_str[8:10], date_str[10:12], date_str[12:14]), 'us')
except Exception:
logger.info('failed extracting start time from IDATIM tre')
return
set_collect_start(collect_start, override=False)
    # fetch the TRE collection from the extended header, if any
    # noinspection PyUnresolvedReferences
    tres = None if img_header.ExtendedHeader.data is None \
        else img_header.ExtendedHeader.data  # type: Union[None, TREList]
    # initialize the sicd from the basic image segment header information
    collection_info = get_collection_info()
    image_data = get_image_data()
    the_sicd = SICDType(
        CollectionInfo=collection_info,
        ImageData=image_data)
    # apply the various tres and associated logic
    # NB: this should generally be in order of preference
    try_CMETAA()
    try_AIMIDB()
    try_AIMIDA()
    try_ACFT()
    try_BLOCKA()
    try_MPDSRA()
    try_MENSRA()
    try_MENSRB()
    extract_corners()
    extract_start()
    return the_sicd
# Helper methods for transforming data
def get_linear_magnitude_scaling(scale_factor: float):
    """
    Get a linear magnitude scaling function, to correct magnitude by
    dividing out the provided scale factor.

    Parameters
    ----------
    scale_factor : float
        The scale factor, according to the definition given in STDI-0002.

    Returns
    -------
    callable
        Maps raw data to `data/scale_factor`.
    """

    return lambda data: data / scale_factor
def get_linear_power_scaling(scale_factor):
    """
    Get a linear power scaling function, to derive correct magnitude as the
    square root of the (descaled) power.

    Parameters
    ----------
    scale_factor : float
        The scale factor, according to the definition given in STDI-0002.

    Returns
    -------
    callable
    """

    def power_to_magnitude(data):
        # magnitude is the square root of the descaled power
        return numpy.sqrt(data/scale_factor)

    return power_to_magnitude
def get_log_magnitude_scaling(scale_factor, db_per_step):
    """
    Gets the log magnitude scaling function, to derive correct magnitude by
    inverting the db step quantization and removing the scale factor.

    Parameters
    ----------
    scale_factor : float
        The scale factor, according to the definition given in STDI-0002.
    db_per_step : float
        The db_per_step factor, according to the definition given in STDI-0002.

    Returns
    -------
    callable
    """

    lin_scaler = get_linear_magnitude_scaling(scale_factor)
    # precompute the constant converting db steps to a natural exponent
    exponent_scale = 0.05*numpy.log(10)*db_per_step

    def scaler(data):
        return lin_scaler(numpy.exp(exponent_scale*data))

    return scaler
def get_log_power_scaling(scale_factor, db_per_step):
    """
    Gets the log power scaling function, to derive correct magnitude by
    inverting the db step quantization and converting power to magnitude.

    Parameters
    ----------
    scale_factor : float
        The scale factor, according to the definition given in STDI-0002.
    db_per_step : float
        The db_per_step factor, according to the definition given in STDI-0002.

    Returns
    -------
    callable
    """

    power_scaler = get_linear_power_scaling(scale_factor)
    # precompute the constant converting db steps to a natural exponent
    exponent_scale = 0.1*numpy.log(10)*db_per_step

    def scaler(data):
        return power_scaler(numpy.exp(exponent_scale*data))

    return scaler
def get_linlog_magnitude_scaling(scale_factor, tipping_point):
    """
    Gets the magnitude scaling function for the model which is linear below
    a fixed tipping point, and logarithmic above it.

    Parameters
    ----------
    scale_factor : float
        The scale factor, according to the definition given in STDI-0002.
    tipping_point : float
        The tipping point between the two models.

    Returns
    -------
    callable
        Expects (and mutates a descaled copy of) an array argument.
    """

    # the db step is chosen so that the two models agree at the tipping point
    db_per_step = 20*numpy.log10(tipping_point)/tipping_point
    log_scaler = get_log_magnitude_scaling(scale_factor, db_per_step)

    def scaler(data):
        corrected = data/scale_factor
        log_region = (corrected > tipping_point)
        corrected[log_region] = log_scaler(data[log_region])
        return corrected

    return scaler
class ApplyAmplitudeScalingFunction(ComplexFormatFunction):
    """
    Extension of the magnitude/phase complex format function which applies a
    provided magnitude scaling function to the magnitude band before the
    conversion to complex values.
    """

    __slots__ = ('_scaling_function', )
    # only magnitude/phase orderings are supported here
    _allowed_ordering = ('MP', 'PM')
    # no inverse transformation is provided
    has_inverse = False

    def __init__(
            self,
            raw_dtype: Union[str, numpy.dtype],
            order: str,
            scaling_function: Optional[Callable] = None,
            raw_shape: Optional[Tuple[int, ...]] = None,
            formatted_shape: Optional[Tuple[int, ...]] = None,
            reverse_axes: Optional[Tuple[int, ...]] = None,
            transpose_axes: Optional[Tuple[int, ...]] = None,
            band_dimension: int = -1):
        """
        Parameters
        ----------
        raw_dtype : str|numpy.dtype
            The raw datatype. Valid options dependent on the value of order.
        order : str
            One of `('MP', 'PM')`, with allowable raw_dtype
            `('uint8', 'uint16', 'uint32', 'float32', 'float64')`.
        scaling_function : Optional[Callable]
            The magnitude scaling function to apply, if any.
        raw_shape : None|Tuple[int, ...]
        formatted_shape : None|Tuple[int, ...]
        reverse_axes : None|Tuple[int, ...]
        transpose_axes : None|Tuple[int, ...]
        band_dimension : int
            Which band is the complex dimension, **after** the transpose operation.
        """

        self._scaling_function = None
        ComplexFormatFunction.__init__(
            self, raw_dtype, order, raw_shape=raw_shape, formatted_shape=formatted_shape,
            reverse_axes=reverse_axes, transpose_axes=transpose_axes, band_dimension=band_dimension)
        self._set_scaling_function(scaling_function)

    @property
    def scaling_function(self) -> Optional[Callable]:
        """
        The magnitude scaling function.

        Returns
        -------
        None|Callable
        """

        return self._scaling_function

    def _set_scaling_function(self, value: Optional[Callable]):
        # validate and set the scaling function - None disables scaling
        if value is None:
            self._scaling_function = None
            return
        if not isinstance(value, Callable):
            raise TypeError('scaling_function must be callable')
        self._scaling_function = value

    def _forward_magnitude_theta(
            self,
            data: numpy.ndarray,
            out: numpy.ndarray,
            magnitude: numpy.ndarray,
            theta: numpy.ndarray,
            subscript: Tuple[slice, ...]) -> None:
        # correct the magnitude first, then delegate to the standard
        # magnitude/theta handling of the parent class
        if self._scaling_function is not None:
            magnitude = self._scaling_function(magnitude)
        ComplexFormatFunction._forward_magnitude_theta(
            self, data, out, magnitude, theta, subscript)
def _extract_transform_data(
        image_header: Union[ImageSegmentHeader, ImageSegmentHeader0],
        band_dimension: int):
    """
    Helper function for defining necessary transform_data definition for
    interpreting image segment data.

    Parameters
    ----------
    image_header : ImageSegmentHeader|ImageSegmentHeader0
        The image segment header, which must have exactly two bands.
    band_dimension : int
        The band dimension, passed through to the format function.

    Returns
    -------
    None|str|callable

    Raises
    ------
    ValueError
        For unhandled band structure, pixel type, or CMETAA remap entries.
    """

    if len(image_header.Bands) != 2:
        raise ValueError('Got unhandled case of {} image bands'.format(len(image_header.Bands)))

    complex_order = image_header.Bands[0].ISUBCAT+image_header.Bands[1].ISUBCAT
    if complex_order not in ['IQ', 'QI', 'MP', 'PM']:
        raise ValueError('Got unhandled complex order `{}`'.format(complex_order))

    # determine the (big-endian) raw datatype from pixel type and bits per pixel
    bpp = int(image_header.NBPP/8)
    pv_type = image_header.PVTYPE
    if pv_type == 'INT':
        raw_dtype = '>u{}'.format(bpp)
    elif pv_type == 'SI':
        raw_dtype = '>i{}'.format(bpp)
    elif pv_type == 'R':
        raw_dtype = '>f{}'.format(bpp)
    else:
        raise ValueError('Got unhandled PVTYPE {}'.format(pv_type))

    # NB: corrected from the previously undefined name `img_header`, which
    # raised a NameError at runtime - the parameter is `image_header`
    # noinspection PyUnresolvedReferences
    tre = None if image_header.ExtendedHeader.data is None else \
        image_header.ExtendedHeader.data['CMETAA']  # type: Optional[CMETAA]

    if tre is None:
        return ComplexFormatFunction(raw_dtype, complex_order, band_dimension=band_dimension)

    cmetaa = tre.DATA
    if cmetaa.CMPLX_PHASE_SCALING_TYPE.strip() != 'NS':
        raise ValueError(
            'Got unsupported CMPLX_PHASE_SCALING_TYPE {}'.format(
                cmetaa.CMPLX_PHASE_SCALING_TYPE))

    remap_type = cmetaa.CMPLX_MAG_REMAP_TYPE.strip()
    if remap_type == 'NS':
        if complex_order in ['IQ', 'QI']:
            return ComplexFormatFunction(raw_dtype, complex_order, band_dimension=band_dimension)
        else:
            raise ValueError(
                'Got unexpected state where cmetaa.CMPLX_MAG_REMAP_TYPE is "NS",\n\t '
                'but Band[0].ISUBCAT/Band[1].ISUBCAT = `{}`'.format(complex_order))
    elif remap_type not in ['LINM', 'LINP', 'LOGM', 'LOGP', 'LLM']:
        raise ValueError('Got unsupported CMETAA.CMPLX_MAG_REMAP_TYPE {}'.format(remap_type))

    if complex_order not in ['MP', 'PM']:
        raise ValueError(
            'Got unexpected state where cmetaa.CMPLX_MAG_REMAP_TYPE is `{}`,\n\t'
            'but Band[0].ISUBCAT/Band[1].ISUBCAT = `{}`'.format(
                remap_type, complex_order))

    # determine the magnitude scaling function from the remap type
    scale_factor = float(cmetaa.CMPLX_LIN_SCALE)
    if remap_type == 'LINM':
        scaling_function = get_linear_magnitude_scaling(scale_factor)
    elif remap_type == 'LINP':
        scaling_function = get_linear_power_scaling(scale_factor)
    elif remap_type == 'LOGM':
        # NB: there is nowhere in the CMETAA structure to define
        # the db_per_step value. Strangely, the use of this value is laid
        # out in the STDI-0002 standards document, which defines CMETAA
        # structure. We will generically use a value which maps the
        # max uint8 value to the max int16 value.
        db_per_step = 300*numpy.log(2)/255.0
        scaling_function = get_log_magnitude_scaling(scale_factor, db_per_step)
    elif remap_type == 'LOGP':
        db_per_step = 300*numpy.log(2)/255.0
        scaling_function = get_log_power_scaling(scale_factor, db_per_step)
    elif remap_type == 'LLM':
        scaling_function = get_linlog_magnitude_scaling(
            scale_factor, int(cmetaa.CMPLX_LINLOG_TP))
    else:
        raise ValueError('Got unhandled CMETAA.CMPLX_MAG_REMAP_TYPE {}'.format(remap_type))
    return ApplyAmplitudeScalingFunction(raw_dtype, complex_order, scaling_function, band_dimension=band_dimension)
######
# The interpreter and reader objects
class ComplexNITFDetails(NITFDetails):
    """
    Details object for NITF file containing complex data.
    """

    __slots__ = (
        '_segment_status', '_segment_bands', '_sicd_meta', '_reverse_axes', '_transpose_axes')

    def __init__(
            self,
            file_name: str,
            reverse_axes: Union[None, int, Sequence[int]] = None,
            transpose_axes: Optional[Tuple[int, ...]] = None):
        """
        Parameters
        ----------
        file_name : str
            file name for a NITF file containing a complex SICD
        reverse_axes : None|Sequence[int]
            Any entries should be restricted to `{0, 1}`. The presence of
            `0` means to reverse the rows (in the raw sense), and the presence
            of `1` means to reverse the columns (in the raw sense).
        transpose_axes : None|Tuple[int, ...]
            If presented this should be only `(1, 0)`.

        Raises
        ------
        SarpyIOError
            If no complex valued image segments are found in the file.
        """

        self._reverse_axes = reverse_axes
        self._transpose_axes = transpose_axes
        self._segment_status = None
        self._sicd_meta = None
        self._segment_bands = None
        NITFDetails.__init__(self, file_name)
        self._find_complex_image_segments()
        if len(self.sicd_meta) == 0:
            raise SarpyIOError(
                'No complex valued image segments found in file {}'.format(file_name))

    @property
    def reverse_axes(self) -> Union[None, int, Sequence[int]]:
        """
        None|int|Sequence[int]: The reverse axes configuration.
        """

        return self._reverse_axes

    @property
    def transpose_axes(self) -> Optional[Tuple[int, ...]]:
        """
        None|Tuple[int, ...]: The transpose axes configuration.
        """

        return self._transpose_axes

    @property
    def segment_status(self) -> Tuple[bool, ...]:
        """
        Tuple[bool, ...]: Where each image segment is viable for use.
        """

        return self._segment_status

    @property
    def sicd_meta(self) -> Tuple[SICDType, ...]:
        """
        Tuple[SICDType, ...]: The best inferred sicd structures.
        """

        return self._sicd_meta

    @property
    def segment_bands(self) -> Tuple[Tuple[int, Optional[int]], ...]:
        """
        This describes the structure for the output data segments from the NITF,
        with each entry of the form `(image_segment, output_band)`, where
        `output_band` will be `None` if the image segment has exactly one
        complex band.

        Returns
        -------
        Tuple[Tuple[int, Optional[int]], ...]
            The band details for use.
        """

        return self._segment_bands

    def _check_band_details(
            self,
            index: int,
            sicd_meta: List,
            segment_status: List,
            segment_bands: List):
        """
        Inspect the image segment at the given index for complex viability,
        appending the discovered details to the (in progress) lists.

        Exactly one boolean is appended to `segment_status` per call; a sicd
        structure and a `(segment index, complex band count)` entry are
        appended to `sicd_meta`/`segment_bands` only for viable segments.
        """

        if len(segment_status) != index:
            raise ValueError('Inconsistent status checking state')
        image_header = self.img_headers[index]
        if image_header.ICAT.strip() not in ['SAR', 'SARIQ']:
            segment_status.append(False)
            return

        # construct a preliminary sicd
        sicd = extract_sicd(image_header, self._transpose_axes is not None)
        bands = image_header.Bands
        pvtype = image_header.PVTYPE

        # handle odd bands
        if (len(bands) % 2) == 1:
            if image_header.PVTYPE != 'C':
                # it's not complex, so we're done
                segment_status.append(False)
                return
            segment_status.append(True)
            sicd_meta.append(sicd)
            segment_bands.append((index, len(bands)))
            return

        # we have an even number of bands - ensure that the bands are marked
        # IQ/QI/MP/PM
        order = bands[0].ISUBCAT + bands[1].ISUBCAT
        if order not in ['IQ', 'QI', 'MP', 'PM']:
            segment_status.append(False)
            return
        if len(bands) == 2:
            # this should be the most common by far
            segment_status.append(True)
            sicd_meta.append(sicd)
            segment_bands.append((index, 1))
            return
        for i in range(2, len(bands), 2):
            if order != bands[i].ISUBCAT + bands[i+1].ISUBCAT:
                logging.error(
                    'Image segment appears to multiband with switch complex ordering')
                segment_status.append(False)
                return
        if order in ['IQ', 'QI']:
            if pvtype not in ['SI', 'R']:
                logging.error(
                    'Image segment appears to be complex of order `{}`, \n\t'
                    'but PVTYPE is `{}`'.format(order, pvtype))
                segment_status.append(False)
                # NB: this return was previously missing, which permitted a
                # second (contradictory) status append below and corrupted
                # the per-segment status alignment
                return
        if order in ['MP', 'PM']:
            if pvtype not in ['INT', 'R']:
                logging.error(
                    'Image segment appears to be complex of order `{}`, \n\t'
                    'but PVTYPE is `{}`'.format(order, pvtype))
                segment_status.append(False)
                # NB: this return was also previously missing, see above
                return
        segment_status.append(True)
        sicd_meta.append(sicd)
        segment_bands.append((index, int(len(bands)/2)))

    def _find_complex_image_segments(self):
        """
        Find complex image segments, populating the segment status, sicd, and
        band details collections.

        Returns
        -------
        None
        """

        sicd_meta = []
        segment_status = []
        segment_bands = []
        for index in range(len(self.img_headers)):
            self._check_band_details(index, sicd_meta, segment_status, segment_bands)
        self._segment_status = tuple(segment_status)

        # flatten the per-segment band details so there is one entry per output sicd
        use_sicd_meta = []
        use_segment_bands = []
        for (the_index, out_bands), sicd in zip(segment_bands, sicd_meta):
            if out_bands == 1:
                use_sicd_meta.append(sicd)
                use_segment_bands.append((the_index, None))
            else:
                for j in range(out_bands):
                    use_sicd_meta.append(sicd.copy())
                    use_segment_bands.append((the_index, j))
        self._sicd_meta = tuple(use_sicd_meta)
        self._segment_bands = tuple(use_segment_bands)
class ComplexNITFReader(NITFReader, SICDTypeReader):
    """
    A reader for complex valued NITF elements, this should be explicitly tried AFTER
    the SICDReader.
    """

    def __init__(
            self,
            nitf_details: Union[str, ComplexNITFDetails],
            reverse_axes: Union[None, int, Sequence[int]] = None,
            transpose_axes: Optional[Tuple[int, ...]] = None):
        """
        Parameters
        ----------
        nitf_details : str|ComplexNITFDetails
        reverse_axes : None|Sequence[int]
            Any entries should be restricted to `{0, 1}`. The presence of
            `0` means to reverse the rows (in the raw sense), and the presence
            of `1` means to reverse the columns (in the raw sense).
        transpose_axes : None|Tuple[int, ...]
            If presented this should be only `(1, 0)`.
        """

        if isinstance(nitf_details, str):
            nitf_details = ComplexNITFDetails(
                nitf_details, reverse_axes=reverse_axes, transpose_axes=transpose_axes)
        if not isinstance(nitf_details, ComplexNITFDetails):
            raise TypeError('The input argument for ComplexNITFReader must be a filename or '
                            'ComplexNITFDetails object.')
        # initialize both parents - the sicd collection comes from the details object
        SICDTypeReader.__init__(self, None, nitf_details.sicd_meta)
        NITFReader.__init__(
            self,
            nitf_details,
            reader_type="SICD",
            reverse_axes=nitf_details.reverse_axes,
            transpose_axes=nitf_details.transpose_axes)
        self._check_sizes()

    @property
    def nitf_details(self) -> ComplexNITFDetails:
        """
        ComplexNITFDetails: The NITF details object.
        """

        # noinspection PyTypeChecker
        return self._nitf_details

    def get_nitf_dict(self):
        """
        Populate a dictionary with the pertinent NITF header information. This
        is for use in more faithful preservation of NITF header information
        in copying or rewriting sicd files.

        Returns
        -------
        dict
        """

        out = {}
        security = {}
        security_obj = self.nitf_details.nitf_header.Security
        # noinspection PyProtectedMember
        for field in NITFSecurityTags._ordering:
            value = getattr(security_obj, field).strip()
            if value != '':
                security[field] = value
        if len(security) > 0:
            out['Security'] = security
        out['OSTAID'] = self.nitf_details.nitf_header.OSTAID
        out['FTITLE'] = self.nitf_details.nitf_header.FTITLE
        return out

    def populate_nitf_information_into_sicd(self):
        """
        Populate some pertinent NITF header information into the SICD structure.
        This provides more faithful copying or rewriting options.
        """

        nitf_dict = self.get_nitf_dict()
        for sicd_meta in self._sicd_meta:
            # each sicd gets an independent copy of the header information
            sicd_meta.NITF = copy.deepcopy(nitf_dict)

    def depopulate_nitf_information(self):
        """
        Eliminates the NITF information dict from the SICD structure.
        """

        for sicd_meta in self._sicd_meta:
            sicd_meta.NITF = {}

    def get_format_function(
            self,
            raw_dtype: numpy.dtype,
            complex_order: Optional[str],
            lut: Optional[numpy.ndarray],
            band_dimension: int,
            image_segment_index: Optional[int] = None,
            **kwargs) -> Optional[FormatFunction]:
        """
        Gets the format function for the indicated image segment, using the
        CMETAA-derived amplitude scaling for two-band complex segments, and
        otherwise deferring to the parent implementation.
        """

        image_header = self.nitf_details.img_headers[image_segment_index]
        bands = len(image_header.Bands)
        if complex_order is not None and bands == 2:
            return _extract_transform_data(image_header, band_dimension)
        # TODO: strange nonstandard float16 handling?
        return NITFReader.get_format_function(
            self, raw_dtype, complex_order, lut, band_dimension, image_segment_index, **kwargs)

    def _check_image_segment_for_compliance(
            self,
            index: int,
            img_header: Union[ImageSegmentHeader, ImageSegmentHeader0]) -> bool:
        # viability was already determined during details construction
        return self.nitf_details.segment_status[index]

    def find_image_segment_collections(self) -> Tuple[Tuple[int, ...]]:
        # each output collection maps to a single image segment
        return tuple((entry[0], ) for entry in self.nitf_details.segment_bands)

    def create_data_segment_for_collection_element(self, collection_index: int) -> DataSegment:
        """
        Creates the data segment for the given collection element, reusing any
        cached image segment data segment and subsetting to a single complex
        band when required.
        """

        the_index, the_band = self.nitf_details.segment_bands[collection_index]
        if the_index not in self._image_segment_data_segments:
            data_segment = self.create_data_segment_for_image_segment(the_index, apply_format=True)
        else:
            data_segment = self._image_segment_data_segments[the_index]

        if the_band is None:
            return data_segment
        else:
            return SubsetSegment(
                data_segment, (slice(None, None, 1), slice(None, None, 1), slice(the_band, the_band+1, 1)),
                'formatted', close_parent=True)
def final_attempt(file_name: str) -> Optional[ComplexNITFReader]:
    """
    Contingency check to open for some other complex NITF type file.
    Returns a reader instance, if so.

    Parameters
    ----------
    file_name : str|BinaryIO
        the file_name to check

    Returns
    -------
    ComplexNITFReader|None
        The reader, or None when the input is file-like or is not an
        appropriate complex NITF.
    """

    if is_file_like(file_name):
        return None

    try:
        nitf_details = ComplexNITFDetails(file_name)
        # NB: previously the file name argument was omitted here, so the
        # literal '{}' placeholder was logged
        logger.info(
            'File {} is determined to be some other format complex NITF.'.format(file_name))
        return ComplexNITFReader(nitf_details)
    except (SarpyIOError, ValueError):
        return None
| 45,508 | 36.987479 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/naming/utils.py | """
This module provide utilities for extracting a suggested name for a SICD.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import pkgutil
from importlib import import_module
from datetime import datetime
logger = logging.getLogger(__name__)
###########
# Module variables
_name_functions = []  # the registered collection of name function callables
_parsed_name_functions = False  # guard flag, so automatic discovery only happens once
def register_name_function(name_func):
    """
    Provide a new name function.

    Parameters
    ----------
    name_func : callable
        The callable to register; it must not already be registered.

    Returns
    -------
    None

    Raises
    ------
    TypeError
        If `name_func` is not callable.
    """

    if not callable(name_func):
        raise TypeError('name_func must be a callable')

    if name_func in _name_functions:
        return
    _name_functions.append(name_func)
def parse_name_functions():
    """
    Automatically find the viable name functions in the top-level modules.

    Walks the `sarpy.io.complex.naming` package once, registering the
    `get_commercial_id` callable from every non-package submodule found.
    """

    global _parsed_name_functions
    if _parsed_name_functions:
        return  # already scanned, nothing to do
    _parsed_name_functions = True

    base_package = 'sarpy.io.complex.naming'
    base_module = import_module(base_package)
    for _, found_name, found_is_package in pkgutil.walk_packages(
            base_module.__path__, base_package + '.'):
        if found_is_package:
            # don't bother checking for packages
            continue
        candidate = import_module(found_name)
        if hasattr(candidate, 'get_commercial_id'):
            register_name_function(candidate.get_commercial_id)
def get_sicd_name(the_sicd, product_number=1):
    """
    Gets the suggested name.
    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
        The sicd structure.
    product_number : int
        The index of the product from the present file.
    Returns
    -------
    str
    """
    def get_commercial_id():
        # try each registered naming function in turn; the first non-None result wins
        commercial_id = None
        for entry in _name_functions:
            commercial_id = entry(collector, cdate_str, cdate_mins, product_number)
            if commercial_id is not None:
                break
        if commercial_id is None:
            # fall back to CoreName with the zero-padded product number appended
            return '{0:s}_{1:03d}'.format(the_sicd.CollectionInfo.CoreName, product_number)
        return commercial_id
    def get_vendor_id():
        # assemble the vendor suffix: time, mode/resolution/side abbreviations,
        # image center coordinates, frequency band and polarization codes
        _time_str = 'HHMMSS' if cdate is None else cdate.strftime('%H%M%S')
        _mode = '{}{}{}'.format(the_sicd.CollectionInfo.RadarMode.get_mode_abbreviation(),
                                the_sicd.Grid.get_resolution_abbreviation(),
                                the_sicd.SCPCOA.SideOfTrack)
        _coords = the_sicd.GeoData.SCP.get_image_center_abbreviation()
        _freq_band = the_sicd.RadarCollection.TxFrequency.get_band_abbreviation()
        _pol = '{}{}'.format(
            the_sicd.RadarCollection.get_polarization_abbreviation(),
            the_sicd.ImageFormation.get_polarization_abbreviation())
        return '_{}_{}_{}_001{}_{}_0101_SPY'.format(_time_str, _mode, _coords, _freq_band, _pol)
    # parse name function, if not already done
    parse_name_functions()
    # extract the common use variables
    if the_sicd.Timeline.CollectStart is None:
        # no collection start time available - use placeholder values
        cdate = None
        cdate_str = "DATE"
        cdate_mins = 0
    else:
        # truncate to whole seconds, then convert numpy datetime64 -> datetime
        start_time = the_sicd.Timeline.CollectStart.astype('datetime64[s]')
        cdate = start_time.astype(datetime)
        cdate_str = cdate.strftime('%d%b%y')
        cdate_mins = cdate.hour * 60 + cdate.minute + cdate.second / 60.
    if the_sicd.CollectionInfo.CollectorName is None:
        collector = 'Unknown'
    else:
        collector = the_sicd.CollectionInfo.CollectorName.strip()
    # noinspection PyBroadException
    try:
        return get_commercial_id() + get_vendor_id()
    except Exception:
        # best effort only - any failure in the metadata chain yields no name
        logger.error('Failed to construct suggested name.')
        return None
def get_pass_number(minutes, orbits_per_day):
    """
    Gets appropriately formatted pass number string.

    Parameters
    ----------
    minutes : float
        Minutes elapsed in the day since midnight UTC.
    orbits_per_day : float
        The number of orbits per day, around 15 for vehicles in low earth orbit.

    Returns
    -------
    str
        The pass number, zero-padded to two digits.
    """

    # fraction of the day elapsed, scaled by orbits per day, rounded to the
    # nearest whole pass (1440 minutes per day)
    pass_index = int(round(minutes*orbits_per_day/1440.))
    return format(pass_index, '02d')
| 4,173 | 27.986111 | 96 | py |
sarpy | sarpy-master/sarpy/io/complex/naming/__init__.py |
__classification__ = 'UNCLASSIFIED'
| 37 | 11.666667 | 35 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/MatchInfo.py | """
The MatchInfoType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from xml.etree import ElementTree
from typing import List, Union, Dict, Optional
from sarpy.io.xml.base import Serializable, ParametersCollection, \
get_node_value, find_first_child, find_children
from sarpy.io.xml.descriptors import StringDescriptor, IntegerDescriptor, \
SerializableListDescriptor, ParametersDescriptor
from .base import DEFAULT_STRICT
class MatchCollectionType(Serializable):
    """The match collection type."""
    # serialization contract: field ordering, required fields, list child tags
    _fields = ('CoreName', 'MatchIndex', 'Parameters')
    _required = ('CoreName', )
    _collections_tags = {'Parameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    CoreName = StringDescriptor(
        'CoreName', _required, strict=DEFAULT_STRICT,
        docstring='Unique identifier for the match type.')  # type: str
    MatchIndex = IntegerDescriptor(
        'MatchIndex', _required, strict=DEFAULT_STRICT,
        docstring='Collection sequence index for the match collection, assuming '
                  'that this makes sense.')  # type: int
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The match parameters.')  # type: ParametersCollection
    def __init__(
            self,
            CoreName: str = None,
            MatchIndex: Optional[int] = None,
            Parameters: Union[None, ParametersCollection, Dict] = None,
            **kwargs):
        """
        Parameters
        ----------
        CoreName : str
        MatchIndex : int
        Parameters : ParametersCollection|dict
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CoreName = CoreName
        self.MatchIndex = MatchIndex
        self.Parameters = Parameters
        super(MatchCollectionType, self).__init__(**kwargs)
class MatchType(Serializable):
    """The is an array element for match information."""
    # NumMatchCollections is derived (see property below), not a stored field
    _fields = ('TypeID', 'CurrentIndex', 'NumMatchCollections', 'MatchCollections')
    _required = ('TypeID',)
    _collections_tags = {'MatchCollections': {'array': False, 'child_tag': 'MatchCollection'}}
    # descriptors
    TypeID = StringDescriptor(
        'TypeID', _required, strict=DEFAULT_STRICT,
        docstring='The match type identifier. *Examples - "MULTI-IMAGE", "COHERENT" or "STEREO"*')  # type: str
    CurrentIndex = IntegerDescriptor(
        'CurrentIndex', _required, strict=DEFAULT_STRICT,
        docstring='Collection sequence index for the current collection. That is, which collection in the '
                  'collection series (defined in MatchCollections) is this collection? '
                  '(1-based enumeration).')  # type: Optional[int]
    MatchCollections = SerializableListDescriptor(
        'MatchCollections', MatchCollectionType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The match collections.')  # type: Optional[List[MatchCollectionType]]
    def __init__(
            self,
            TypeID: str = None,
            CurrentIndex: Optional[int] = None,
            MatchCollections: Optional[List[MatchCollectionType]] = None,
            **kwargs):
        """
        Parameters
        ----------
        TypeID : str
        CurrentIndex : None|int
        MatchCollections : None|List[MatchCollectionType]
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TypeID = TypeID
        self.CurrentIndex = CurrentIndex
        self.MatchCollections = MatchCollections
        super(MatchType, self).__init__(**kwargs)
    @property
    def NumMatchCollections(self):
        """int: The number of match collections for this match type."""
        # derived from the list length; 0 when unset
        if self.MatchCollections is None:
            return 0
        else:
            return len(self.MatchCollections)
class MatchInfoType(Serializable):
    """
    The match information container. This contains data for multiple collection taskings.
    """
    # NumMatchTypes is derived (see property below), not a stored field
    _fields = ('NumMatchTypes', 'MatchTypes')
    _required = ('MatchTypes', )
    _collections_tags = {'MatchTypes': {'array': False, 'child_tag': 'MatchType'}}
    # descriptors
    MatchTypes = SerializableListDescriptor(
        'MatchTypes', MatchType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The match types list.')  # type: List[MatchType]
    def __init__(
            self,
            MatchTypes: List[MatchType] = None,
            **kwargs):
        """
        Parameters
        ----------
        MatchTypes : List[MatchType]
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.MatchTypes = MatchTypes
        super(MatchInfoType, self).__init__(**kwargs)
    @property
    def NumMatchTypes(self) -> int:
        """int: The number of types of matched collections."""
        # derived from the list length; 0 when unset
        if self.MatchTypes is None:
            return 0
        else:
            return len(self.MatchTypes)
    @classmethod
    def _from_node_0_5(cls, node, xml_ns, ns_key):
        """
        Helper method, not really for public usage. For XML deserialization from SICD version prior to 1.0.
        Parameters
        ----------
        node : ElementTree.Element
            dom element for serialized class instance
        xml_ns : dict
            The xml namespace dictionary
        ns_key : str
            The namespace key in the dictionary
        Returns
        -------
        Serializable
            corresponding class instance
        """
        def get_element(tid, cid, cname, params):
            # build the kwargs dict for a single MatchType entry
            return {
                'TypeID': tid,
                'CurrentIndex': cid,
                'MatchCollections': [{'CoreName': cname, 'Parameters': params}, ]}
        # Note that this is NOT converting the MatchType.MatchCollection in spirit.
        # There isn't enough structure to guarantee that you actually can. This will
        # always yield MatchType.MatchCollection length 1, because the collection details are stuffed
        # into the parameters free form, while CurrentIndex is extracted and actually yields the
        # collection index number (likely larger than 1). This is at least confusing, but more likely
        # completely misleading.
        match_types = []
        coll_key = cls._child_xml_ns_key.get('Collect', ns_key)
        cnodes = find_children(node, 'Collect', xml_ns, coll_key)
        for cnode in cnodes:  # assumed non-empty
            # this describes one series of collects, possibly with more than one MatchType = TypeID
            # It is not clear how it would be possible to deconflict a repeat of MatchType between
            # Collect tags, so I will not.
            core_key = cls._child_xml_ns_key.get('CoreName', ns_key)
            core_name = get_node_value(find_first_child(cnode, 'CoreName', xml_ns, core_key))
            current_index = None
            parameters = []
            pkey = cls._child_xml_ns_key.get('Parameters', ns_key)
            pnodes = find_children(cnode, 'Parameter', xml_ns, pkey)
            for pnode in pnodes:
                name = pnode.attrib['name']
                value = get_node_value(pnode)
                if name == 'CURRENT_INSTANCE':
                    current_index = int(value)  # extract the current index (and exclude)
                else:
                    parameters.append({'name': name, 'value': value})  # copy the parameter
            if current_index is None:
                continue  # I don't know what we would do?
            mt_key = cls._child_xml_ns_key.get('MatchType', ns_key)
            mtypes = find_children(cnode, 'MatchType', xml_ns, mt_key)
            for tnode in mtypes:
                # one MatchType entry per MatchType tag, all sharing this collect's details
                type_id = get_node_value(tnode)
                match_types.append(get_element(type_id, current_index, core_name, parameters))
        if len(match_types) > 0:
            # noinspection PyTypeChecker
            return cls(MatchTypes=match_types)
        else:
            return None
    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        # the presence of a `Collect` child marks the pre-1.0 SICD layout
        coll_key = cls._child_xml_ns_key.get('Collect', ns_key)
        coll = find_first_child(node, 'Collect', xml_ns, coll_key)
        if coll is not None:
            # This is from SICD version prior to 1.0, so handle manually.
            return cls._from_node_0_5(node, xml_ns, ns_key)
        else:
            return super(MatchInfoType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)
| 8,888 | 37.647826 | 111 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/base.py | """
This module contains the base objects for use in the SICD elements, and the base serializable functionality.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Dict, Tuple, Optional
import numpy
from sarpy.io.xml.base import SerializableArray, create_new_node
from sarpy.io.xml.descriptors import BasicDescriptor
logger = logging.getLogger(__name__)
DEFAULT_STRICT = False
FLOAT_FORMAT = '0.17G'
class SerializableCPArrayDescriptor(BasicDescriptor):
    """A descriptor for properties of a list or array of specified extension of Serializable"""
    # corner point arrays have exactly four entries (FRFC, FRLC, LRLC, LRFC)
    minimum_length = 4
    maximum_length = 4
    def __init__(
            self,
            name: str,
            child_type,
            tag_dict: Dict,
            required: Tuple[str, ...],
            strict: bool = DEFAULT_STRICT,
            docstring: Optional[str] = None):
        self.child_type = child_type
        tags = tag_dict[name]
        self.array = tags.get('array', False)
        if not self.array:
            # this descriptor only makes sense for array-typed collections
            raise ValueError(
                'Attribute {} is populated in the `_collection_tags` dictionary without `array`=True. '
                'This is inconsistent with using SerializableCPArrayDescriptor.'.format(name))
        self.child_tag = tags['child_tag']
        # e.g. "<class 'pkg.XYZType'>" -> "numpy.ndarray[XYZType]:" for docstring typing
        self._typ_string = 'numpy.ndarray[{}]:'.format(str(child_type).strip().split('.')[-1][:-2])
        super(SerializableCPArrayDescriptor, self).__init__(name, required, strict=strict, docstring=docstring)
    def __set__(self, instance, value):
        if super(SerializableCPArrayDescriptor, self).__set__(instance, value):  # the None handler...kinda hacky
            return
        if isinstance(value, SerializableCPArray):
            # already the proper wrapper type - store directly
            self.data[instance] = value
        else:
            # resolve any xml namespace details from the owning instance
            xml_ns = getattr(instance, '_xml_ns', None)
            # noinspection PyProtectedMember, PyUnresolvedReferences
            if hasattr(instance, '_child_xml_ns_key') and self.name in instance._child_xml_ns_key:
                # noinspection PyProtectedMember
                xml_ns_key = instance._child_xml_ns_key[self.name]
            else:
                xml_ns_key = getattr(instance, '_xml_ns_key', None)
            the_inst = self.data.get(instance, None)
            if the_inst is None:
                # no existing wrapper - construct a new SerializableCPArray around the value
                self.data[instance] = SerializableCPArray(
                    coords=value, name=self.name, child_tag=self.child_tag,
                    child_type=self.child_type, _xml_ns=xml_ns, _xml_ns_key=xml_ns_key)
            else:
                # reuse the existing wrapper, replacing its contents
                the_inst.set_array(value)
class SerializableCPArray(SerializableArray):
    # Corner-point specialization of SerializableArray: always four points, with
    # named accessors and corner-labeled (or plain integer) index attributes.
    __slots__ = (
        '_child_tag', '_child_type', '_array', '_name', '_minimum_length',
        '_maximum_length', '_index_as_string', '_xml_ns', '_xml_ns_key')
    def __init__(
            self,
            coords=None,
            name: str = None,
            child_tag: str = None,
            child_type=None,
            _xml_ns: Optional[Dict[str, str]] = None,
            _xml_ns_key: Optional[str] = None):
        # child types exposing _CORNER_VALUES serialize their index as '1:FRFC' etc.
        if hasattr(child_type, '_CORNER_VALUES'):
            self._index_as_string = True
        else:
            self._index_as_string = False
        super(SerializableCPArray, self).__init__(
            coords=coords, name=name, child_tag=child_tag, child_type=child_type,
            _xml_ns=_xml_ns, _xml_ns_key=_xml_ns_key)
        # corner point arrays are always exactly length 4
        self._minimum_length = 4
        self._maximum_length = 4
    @property
    def FRFC(self) -> Optional[numpy.ndarray]:
        # First Row, First Column corner (entry 0)
        if self._array is None:
            return None
        return self._array[0].get_array()
    @property
    def FRLC(self) -> Optional[numpy.ndarray]:
        # First Row, Last Column corner (entry 1)
        if self._array is None:
            return None
        return self._array[1].get_array()
    @property
    def LRLC(self) -> Optional[numpy.ndarray]:
        # Last Row, Last Column corner (entry 2)
        if self._array is None:
            return None
        return self._array[2].get_array()
    @property
    def LRFC(self) -> Optional[numpy.ndarray]:
        # Last Row, First Column corner (entry 3)
        if self._array is None:
            return None
        return self._array[3].get_array()
    def _check_indices(self):
        # stamp the (1-based) index attribute on each entry, with corner labels
        # when the child type supports them
        if not self._index_as_string:
            self._array[0].index = 1
            self._array[1].index = 2
            self._array[2].index = 3
            self._array[3].index = 4
        else:
            self._array[0].index = '1:FRFC'
            self._array[1].index = '2:FRLC'
            self._array[2].index = '3:LRLC'
            self._array[3].index = '4:LRFC'
    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT):
        if self.size == 0:
            return None  # nothing to be done
        # create the wrapping node (namespace-qualified when ns_key is given)
        if ns_key is None:
            anode = create_new_node(doc, tag, parent=parent)
        else:
            anode = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)
        for i, entry in enumerate(self._array):
            entry.to_node(doc, self._child_tag, ns_key=ns_key, parent=anode,
                          check_validity=check_validity, strict=strict)
        return anode
| 5,105 | 34.706294 | 113 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/CollectionInfo.py | """
The CollectionInfo object definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List, Union, Dict, Optional
from sarpy.io.xml.base import Serializable, ParametersCollection
from sarpy.io.xml.descriptors import StringDescriptor, StringEnumDescriptor, \
StringListDescriptor, SerializableDescriptor, ParametersDescriptor
from .base import DEFAULT_STRICT
class RadarModeType(Serializable):
    """
    Radar mode type container class
    """
    _fields = ('ModeType', 'ModeID')
    _required = ('ModeType', )
    # other class variable
    _MODE_TYPE_VALUES = ('SPOTLIGHT', 'STRIPMAP', 'DYNAMIC STRIPMAP')
    # descriptors
    ModeType = StringEnumDescriptor(
        'ModeType', _MODE_TYPE_VALUES, _required, strict=True,
        docstring="The Radar imaging mode.")  # type: str
    ModeID = StringDescriptor(
        'ModeID', _required, strict=DEFAULT_STRICT,
        docstring='Radar imaging mode per Program Specific Implementation Document.')  # type: str
    def __init__(
            self,
            ModeType: str = None,
            ModeID: Optional[str] = None,
            **kwargs):
        """
        Parameters
        ----------
        ModeType : str
        ModeID : None|str
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.ModeID = ModeID
        self.ModeType = ModeType
        super(RadarModeType, self).__init__(**kwargs)
    def get_mode_abbreviation(self) -> str:
        """
        Get the two-character mode abbreviation for the suggested name.

        Returns
        -------
        str
            One of 'SL', 'ST', 'DS', or 'UN' when the mode is unset or unrecognized.
        """
        mode = self.ModeType
        if mode == 'SPOTLIGHT':
            return 'SL'
        elif mode == 'STRIPMAP':
            return 'ST'
        elif mode == 'DYNAMIC STRIPMAP':
            return 'DS'
        else:
            # BUG FIX: previously fell through and implicitly returned None,
            # violating the declared `-> str` return and breaking downstream
            # string formatting of the suggested name
            return 'UN'
class CollectionInfoType(Serializable):
    """General information about the collection."""
    # serialization contract: list child tags, field ordering, required fields
    _collections_tags = {
        'Parameters': {'array': False, 'child_tag': 'Parameter'},
        'CountryCodes': {'array': False, 'child_tag': 'CountryCode'},
    }
    _fields = (
        'CollectorName', 'IlluminatorName', 'CoreName', 'CollectType',
        'RadarMode', 'Classification', 'CountryCodes', 'Parameters')
    _required = ('CollectorName', 'CoreName', 'RadarMode', 'Classification')
    # other class variable
    _COLLECT_TYPE_VALUES = ('MONOSTATIC', 'BISTATIC')
    # descriptors
    CollectorName = StringDescriptor(
        'CollectorName', _required, strict=DEFAULT_STRICT,
        docstring='Radar platform identifier. For Bistatic collections, list the Receive platform.')  # type: str
    IlluminatorName = StringDescriptor(
        'IlluminatorName', _required, strict=DEFAULT_STRICT,
        docstring='Radar platform identifier that provided the illumination. For Bistatic collections, '
                  'list the transmit platform.')  # type: str
    CoreName = StringDescriptor(
        'CoreName', _required, strict=DEFAULT_STRICT,
        docstring='Collection and imaging data set identifier. Uniquely identifies imaging collections per '
                  'Program Specific Implementation Doc.')  # type: str
    CollectType = StringEnumDescriptor(
        'CollectType', _COLLECT_TYPE_VALUES, _required,
        docstring="Collection type identifier. Monostatic collections include single platform collections with "
                  "unique transmit and receive apertures.")  # type: str
    RadarMode = SerializableDescriptor(
        'RadarMode', RadarModeType, _required, strict=DEFAULT_STRICT,
        docstring='The radar mode.')  # type: RadarModeType
    Classification = StringDescriptor(
        'Classification', _required, strict=DEFAULT_STRICT, default_value='UNCLASSIFIED',
        docstring='Contains the human-readable banner. Contains classification, file control and handling, '
                  'file releasing, and/or proprietary markings. Specified per Program Specific '
                  'Implementation Document.')  # type: str
    CountryCodes = StringListDescriptor(
        'CountryCodes', _required, strict=DEFAULT_STRICT,
        docstring="List of country codes for region covered by the image.")  # type: List[str]
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Free form parameters object collection.')  # type: ParametersCollection
    def __init__(
            self,
            CollectorName: str = None,
            IlluminatorName: Optional[str] = None,
            CoreName: str = None,
            CollectType: Optional[str] = None,
            RadarMode: RadarModeType = None,
            Classification: str = "UNCLASSIFIED",
            CountryCodes: Union[str, List[str]] = None,
            Parameters: Union[ParametersCollection, Dict] = None,
            **kwargs):
        """
        Parameters
        ----------
        CollectorName : str
        IlluminatorName : str
        CoreName : str
        CollectType : str
        RadarMode : RadarModeType
        Classification : str
        CountryCodes : list|str
        Parameters : ParametersCollection|dict
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CollectorName = CollectorName
        self.IlluminatorName = IlluminatorName
        self.CoreName = CoreName
        self.CollectType = CollectType
        self.RadarMode = RadarMode
        self.Classification = Classification
        self.CountryCodes = CountryCodes
        self.Parameters = Parameters
        super(CollectionInfoType, self).__init__(**kwargs)
| 5,859 | 36.564103 | 113 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/Antenna.py | """
The AntennaType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Optional
import numpy
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import BooleanDescriptor, FloatDescriptor, \
SerializableDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import Poly1DType, XYZPolyType, GainPhasePolyType
class EBType(Serializable):
    """
    Electrical boresight (EB) steering directions for an electronically steered array.
    """
    _fields = ('DCXPoly', 'DCYPoly')
    _required = _fields
    # descriptors
    DCXPoly = SerializableDescriptor(
        'DCXPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Electrical boresight steering *X-axis direction cosine (DCX)* as a function of '
                  'slow time ``(variable 1)``.')  # type: Poly1DType
    DCYPoly = SerializableDescriptor(
        'DCYPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Electrical boresight steering *Y-axis direction cosine (DCY)* as a function of '
                  'slow time ``(variable 1)``.')  # type: Poly1DType
    def __init__(
            self,
            DCXPoly: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            DCYPoly: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        DCXPoly : Poly1DType|numpy.ndarray|list|tuple
        DCYPoly : Poly1DType|numpy.ndarray|list|tuple
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.DCXPoly = DCXPoly
        self.DCYPoly = DCYPoly
        super(EBType, self).__init__(**kwargs)
    def __call__(
            self,
            t: Union[float, int, numpy.ndarray]):
        """
        Evaluate the polynomial at points `t`. This passes `t` straight through
        to :func:`polyval` of `numpy.polynomial.polynomial` for each of
        `DCXPoly,DCYPoly` components. If any of `DCXPoly,DCYPoly` is not populated,
        then `None` is returned.
        Parameters
        ----------
        t : float|int|numpy.ndarray
            The point(s) at which to evaluate.
        Returns
        -------
        None|numpy.ndarray
        """
        if self.DCXPoly is None or self.DCYPoly is None:
            return None
        # stack the two direction cosine evaluations as [DCX, DCY]
        return numpy.array([self.DCXPoly(t), self.DCYPoly(t)])
class AntParamType(Serializable):
    """
    The antenna parameters container.
    """
    _fields = (
        'XAxisPoly', 'YAxisPoly', 'FreqZero', 'EB', 'Array', 'Elem', 'GainBSPoly', 'EBFreqShift', 'MLFreqDilation')
    _required = ('XAxisPoly', 'YAxisPoly', 'FreqZero', 'Array')
    _numeric_format = {'FreqZero': FLOAT_FORMAT}
    # descriptors
    XAxisPoly = SerializableDescriptor(
        'XAxisPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Antenna X-Axis unit vector in ECF coordinates as a function of time ``(variable 1)``.'
    )  # type: XYZPolyType
    YAxisPoly = SerializableDescriptor(
        'YAxisPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Antenna Y-Axis unit vector in ECF coordinates as a function of time ``(variable 1)``.'
    )  # type: XYZPolyType
    FreqZero = FloatDescriptor(
        'FreqZero', _required, strict=DEFAULT_STRICT,
        docstring='RF frequency *(f0)* used to specify the array pattern and electrical boresight *(EB)* '
                  'steering direction cosines.')  # type: float
    EB = SerializableDescriptor(
        'EB', EBType, _required, strict=DEFAULT_STRICT,
        docstring='Electrical boresight *(EB)* steering directions for an electronically '
                  'steered array.')  # type: EBType
    Array = SerializableDescriptor(
        'Array', GainPhasePolyType, _required, strict=DEFAULT_STRICT,
        docstring='Array pattern polynomials that define the shape of the main-lobe.')  # type: GainPhasePolyType
    Elem = SerializableDescriptor(
        'Elem', GainPhasePolyType, _required, strict=DEFAULT_STRICT,
        docstring='Element array pattern polynomials for electronically steered arrays.')  # type: GainPhasePolyType
    GainBSPoly = SerializableDescriptor(
        'GainBSPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Gain polynomial *(in dB)* as a function of frequency for boresight *(BS)* at :math:`DCX=0, DCY=0`. '
                  'Frequency ratio :math:`(f-f0)/f0` is the input variable ``(variable 1)``, and the constant '
                  'coefficient is always `0.0`.')  # type: Poly1DType
    EBFreqShift = BooleanDescriptor(
        'EBFreqShift', _required, strict=DEFAULT_STRICT,
        docstring="""
        Parameter indicating whether the electronic boresite shifts with frequency for an electronically steered array.
        * `False` - No shift with frequency.
        * `True` - Shift with frequency per ideal array theory.
        """)  # type: bool
    MLFreqDilation = BooleanDescriptor(
        'MLFreqDilation', _required, strict=DEFAULT_STRICT,
        docstring="""
        Parameter indicating the mainlobe (ML) width changes with frequency.
        * `False` - No change with frequency.
        * `True` - Change with frequency per ideal array theory.
        """)  # type: bool
    def __init__(
            self,
            XAxisPoly: XYZPolyType = None,
            YAxisPoly: XYZPolyType = None,
            FreqZero: float = None,
            EB: Optional[EBType] = None,
            Array: GainPhasePolyType = None,
            Elem: Optional[GainPhasePolyType] = None,
            GainBSPoly: Union[None, Poly1DType, numpy.ndarray, list, tuple] = None,
            EBFreqShift: Optional[bool] = None,
            MLFreqDilation: Optional[bool] = None,
            **kwargs):
        """
        Parameters
        ----------
        XAxisPoly : XYZPolyType
        YAxisPoly : XYZPolyType
        FreqZero : float
        EB : EBType
        Array : GainPhasePolyType
        Elem : GainPhasePolyType
        GainBSPoly : Poly1DType|numpy.ndarray|list|tuple
        EBFreqShift : bool
        MLFreqDilation : bool
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.XAxisPoly, self.YAxisPoly = XAxisPoly, YAxisPoly
        self.FreqZero = FreqZero
        self.EB = EB
        self.Array, self.Elem = Array, Elem
        self.GainBSPoly = GainBSPoly
        self.EBFreqShift, self.MLFreqDilation = EBFreqShift, MLFreqDilation
        super(AntParamType, self).__init__(**kwargs)
    def _apply_reference_frequency(self, reference_frequency):
        # shift FreqZero by the (deferred) reference frequency offset, if populated
        if self.FreqZero is not None:
            self.FreqZero += reference_frequency
class AntennaType(Serializable):
    """Parameters that describe the antenna illumination patterns during the collection."""
    # all three antenna parameter sets are optional
    _fields = ('Tx', 'Rcv', 'TwoWay')
    _required = ()
    # descriptors
    Tx = SerializableDescriptor(
        'Tx', AntParamType, _required, strict=DEFAULT_STRICT,
        docstring='The transmit antenna parameters.')  # type: AntParamType
    Rcv = SerializableDescriptor(
        'Rcv', AntParamType, _required, strict=DEFAULT_STRICT,
        docstring='The receive antenna parameters.')  # type: AntParamType
    TwoWay = SerializableDescriptor(
        'TwoWay', AntParamType, _required, strict=DEFAULT_STRICT,
        docstring='The bidirectional transmit/receive antenna parameters.')  # type: AntParamType
    def __init__(
            self,
            Tx: Optional[AntParamType] = None,
            Rcv: Optional[AntParamType] = None,
            TwoWay: Optional[AntParamType] = None,
            **kwargs):
        """
        Parameters
        ----------
        Tx : AntParamType
        Rcv : AntParamType
        TwoWay : AntParamType
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Tx, self.Rcv, self.TwoWay = Tx, Rcv, TwoWay
        super(AntennaType, self).__init__(**kwargs)
    def _apply_reference_frequency(self, reference_frequency):
        """
        If the reference frequency is used, adjust the necessary fields accordingly.
        Expected to be called by SICD parent.
        Parameters
        ----------
        reference_frequency : float
            The reference frequency.
        Returns
        -------
        None
        """
        # delegate to each populated antenna parameter set
        if self.Tx is not None:
            # noinspection PyProtectedMember
            self.Tx._apply_reference_frequency(reference_frequency)
        if self.Rcv is not None:
            # noinspection PyProtectedMember
            self.Rcv._apply_reference_frequency(reference_frequency)
        if self.TwoWay is not None:
            # noinspection PyProtectedMember
            self.TwoWay._apply_reference_frequency(reference_frequency)
| 9,216 | 36.46748 | 119 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/Position.py | """
The PositionType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List, Union, Optional
import numpy
from sarpy.io.xml.base import Serializable, SerializableArray
from sarpy.io.xml.descriptors import SerializableDescriptor, SerializableArrayDescriptor
from .base import DEFAULT_STRICT
from .blocks import XYZType, XYZPolyType, XYZPolyAttributeType
class PositionType(Serializable):
    """The details for platform and ground reference positions as a function of time since collection start."""
    _fields = ('ARPPoly', 'GRPPoly', 'TxAPCPoly', 'RcvAPC')
    _required = ('ARPPoly',)
    _collections_tags = {'RcvAPC': {'array': True, 'child_tag': 'RcvAPCPoly'}}
    # descriptors
    ARPPoly = SerializableDescriptor(
        'ARPPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Aperture Reference Point (ARP) position polynomial in ECF as a function of elapsed '
                  'seconds since start of collection.')  # type: XYZPolyType
    GRPPoly = SerializableDescriptor(
        'GRPPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Ground Reference Point (GRP) position polynomial in ECF as a function of elapsed '
                  'seconds since start of collection.')  # type: XYZPolyType
    TxAPCPoly = SerializableDescriptor(
        'TxAPCPoly', XYZPolyType, _required, strict=DEFAULT_STRICT,
        docstring='Transmit Aperture Phase Center (APC) position polynomial in ECF as a function of '
                  'elapsed seconds since start of collection.')  # type: XYZPolyType
    RcvAPC = SerializableArrayDescriptor(
        'RcvAPC', XYZPolyAttributeType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Receive Aperture Phase Center polynomials array. '
                  'Each polynomial has output in ECF, and represents a function of elapsed seconds since start of '
                  'collection.')  # type: Union[SerializableArray, List[XYZPolyAttributeType]]
    def __init__(
            self,
            ARPPoly: XYZPolyType = None,
            GRPPoly: Optional[XYZPolyType] = None,
            TxAPCPoly: Optional[XYZPolyType] = None,
            RcvAPC=None,
            **kwargs):
        """
        Parameters
        ----------
        ARPPoly : XYZPolyType
        GRPPoly : None|XYZPolyType
        TxAPCPoly : None|XYZPolyType
        RcvAPC : SerializableArray|List[XYZPolyAttributeType]|list|tuple
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.ARPPoly = ARPPoly
        self.GRPPoly = GRPPoly
        self.TxAPCPoly = TxAPCPoly
        self.RcvAPC = RcvAPC
        super(PositionType, self).__init__(**kwargs)
    def _derive_arp_poly(self, SCPCOA):
        """
        Expected to be called from SICD parent. Set the aperture position polynomial from position, time,
        acceleration at scptime, if necessary.
        .. Note::
            This assumes constant velocity and acceleration.
        Parameters
        ----------
        SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
        Returns
        -------
        None
        """
        if self.ARPPoly is not None:
            return  # nothing to be done
        if SCPCOA is None or SCPCOA.ARPPos is None or SCPCOA.ARPVel is None or SCPCOA.SCPTime is None:
            return  # not enough information to derive
        if SCPCOA.ARPAcc is None:
            # assume zero acceleration when unspecified
            SCPCOA.ARPAcc = XYZType.from_array((0, 0, 0))
        # define the polynomial
        coefs = numpy.zeros((3, 3), dtype=numpy.float64)
        scptime = SCPCOA.SCPTime
        pos = SCPCOA.ARPPos.get_array()
        vel = SCPCOA.ARPVel.get_array()
        acc = SCPCOA.ARPAcc.get_array()
        # quadratic expansion about t=0 such that position/velocity/acceleration
        # evaluate to the given values at t=scptime
        coefs[:, 0] = pos - vel*scptime + 0.5*acc*scptime*scptime
        coefs[:, 1] = vel - acc*scptime
        coefs[:, 2] = acc
        self.ARPPoly = XYZPolyType(X=coefs[0, :], Y=coefs[1, :], Z=coefs[2, :])
    def _basic_validity_check(self) -> bool:
        condition = super(PositionType, self)._basic_validity_check()
        # a constant ARP position is not physically sensible - require at least
        # linear (order 1) polynomials in each component
        if self.ARPPoly is not None and \
                (self.ARPPoly.X.order1 < 1 or self.ARPPoly.Y.order1 < 1 or self.ARPPoly.Z.order1 < 1):
            self.log_validity_error(
                'ARPPoly should be order at least 1 in each component. '
                'Got X.order1 = {}, Y.order1 = {}, and Z.order1 = {}'.format(self.ARPPoly.X.order1,
                                                                             self.ARPPoly.Y.order1,
                                                                             self.ARPPoly.Z.order1))
            condition = False
        return condition
| 4,818 | 38.826446 | 115 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/RgAzComp.py | """
The RgAzCompType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union
import numpy
from numpy.linalg import norm
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import FloatDescriptor, SerializableDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import Poly1DType
logger = logging.getLogger(__name__)
class RgAzCompType(Serializable):
    """
    Parameters included for a Range, Doppler image.
    """
    _fields = ('AzSF', 'KazPoly')
    _required = _fields
    _numeric_format = {'AzSF': FLOAT_FORMAT}
    # descriptors
    AzSF = FloatDescriptor(
        'AzSF', _required, strict=DEFAULT_STRICT,
        docstring='Scale factor that scales image coordinate az = ycol (meters) to a delta cosine of the '
                  'Doppler Cone Angle at COA, *(in 1/m)*')  # type: float
    KazPoly = SerializableDescriptor(
        'KazPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial function that yields azimuth spatial frequency *(Kaz = Kcol)* as a function of '
                  'slow time ``(variable 1)``. That is '
                  r':math:`\text{Slow Time (sec)} \to \text{Azimuth spatial frequency (cycles/meter)}`. '
                  'Time relative to collection start.')  # type: Poly1DType
    def __init__(
            self,
            AzSF: float = None,
            KazPoly: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        AzSF : float
        KazPoly : Poly1DType|numpy.ndarray|list|tuple
        kwargs
        """
        # pass through any externally supplied xml namespace details
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.AzSF = AzSF
        self.KazPoly = KazPoly
        super(RgAzCompType, self).__init__(**kwargs)
    def _derive_parameters(self, Grid, Timeline, SCPCOA):
        """
        Expected to be called by the SICD object.
        Parameters
        ----------
        Grid : sarpy.io.complex.sicd_elements.GridType
        Timeline : sarpy.io.complex.sicd_elements.TimelineType
        SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
        Returns
        -------
        None
        """
        look = SCPCOA.look
        # derived azimuth scale factor from the Doppler cone angle and slant range
        az_sf = -look*numpy.sin(numpy.deg2rad(SCPCOA.DopplerConeAng))/SCPCOA.SlantRange
        if self.AzSF is None:
            self.AzSF = az_sf
        elif abs(self.AzSF - az_sf) > 1e-3:
            # keep the populated value, but flag the disagreement
            logger.warning(
                'The derived value for RgAzComp.AzSF is {},\n\t'
                'while the current setting is {}.'.format(az_sf, self.AzSF))
        if self.KazPoly is None:
            # derive KazPoly only when a single IPP polynomial and the
            # necessary grid/geometry parameters are available
            if Grid.Row.KCtr is not None and Timeline is not None and Timeline.IPP is not None and \
                    Timeline.IPP.size == 1 and Timeline.IPP[0].IPPPoly is not None and SCPCOA.SCPTime is not None:
                # pulse rate at the SCP center of aperture time
                st_rate_coa = Timeline.IPP[0].IPPPoly.derivative_eval(SCPCOA.SCPTime, 1)
                krg_coa = Grid.Row.KCtr
                if Grid.Row is not None and Grid.Row.DeltaKCOAPoly is not None:
                    krg_coa += Grid.Row.DeltaKCOAPoly.Coefs[0, 0]
                # Scale factor described in SICD spec
                delta_kaz_per_delta_v = \
                    look*krg_coa*norm(SCPCOA.ARPVel.get_array()) * \
                    numpy.sin(numpy.deg2rad(SCPCOA.DopplerConeAng))/(SCPCOA.SlantRange*st_rate_coa)
                self.KazPoly = Poly1DType(Coefs=delta_kaz_per_delta_v*Timeline.IPP[0].IPPPoly.Coefs)
| 3,652 | 33.462264 | 114 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/RMA.py | """
The RMAType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Optional
import numpy
from numpy.linalg import norm
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import StringEnumDescriptor, FloatDescriptor, \
BooleanDescriptor, SerializableDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import XYZType, Poly1DType, Poly2DType
from .utils import _get_center_frequency
class RMRefType(Serializable):
    """
    The range migration reference parameters - the platform position/velocity
    and Doppler cone angle used to establish the reference slant plane.
    """

    _fields = ('PosRef', 'VelRef', 'DopConeAngRef')
    _required = _fields
    _numeric_format = {'DopConeAngRef': FLOAT_FORMAT, }
    # descriptors
    PosRef = SerializableDescriptor(
        'PosRef', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Platform reference position in ECF coordinates used to establish '
                  'the reference slant plane.')  # type: XYZType
    VelRef = SerializableDescriptor(
        'VelRef', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Platform reference velocity vector in ECF coordinates used to establish '
                  'the reference slant plane.')  # type: XYZType
    DopConeAngRef = FloatDescriptor(
        'DopConeAngRef', _required, strict=DEFAULT_STRICT,
        docstring='Reference Doppler Cone Angle in degrees.')  # type: float

    def __init__(
            self,
            PosRef: Union[XYZType, numpy.ndarray, list, tuple] = None,
            VelRef: Union[XYZType, numpy.ndarray, list, tuple] = None,
            DopConeAngRef: float = None,
            **kwargs):
        """
        Parameters
        ----------
        PosRef : XYZType|numpy.ndarray|list|tuple
        VelRef : XYZType|numpy.ndarray|list|tuple
        DopConeAngRef : float
        kwargs
        """

        # carry the optional xml namespace details through, when supplied
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.PosRef = PosRef
        self.VelRef = VelRef
        self.DopConeAngRef = DopConeAngRef
        super(RMRefType, self).__init__(**kwargs)
class INCAType(Serializable):
    """Parameters for Imaging Near Closest Approach (INCA) image description."""

    _fields = (
        'TimeCAPoly', 'R_CA_SCP', 'FreqZero', 'DRateSFPoly', 'DopCentroidPoly', 'DopCentroidCOA')
    _required = ('TimeCAPoly', 'R_CA_SCP', 'FreqZero', 'DRateSFPoly')
    _numeric_format = {'R_CA_SCP': '0.17E', 'FreqZero': '0.17E'}
    # descriptors
    TimeCAPoly = SerializableDescriptor(
        'TimeCAPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial function that yields *Time of Closest Approach* as function of '
                  'image column *(azimuth)* coordinate in meters. Time relative to '
                  'collection start in seconds.')  # type: Poly1DType
    R_CA_SCP = FloatDescriptor(
        'R_CA_SCP', _required, strict=DEFAULT_STRICT,
        docstring='*Range at Closest Approach (R_CA)* for the *Scene Center Point (SCP)* in meters.')  # type: float
    # NOTE: the `:math:` roles below previously read `:\math:` / `:math` which are
    # not valid reStructuredText roles and rendered literally in generated docs.
    FreqZero = FloatDescriptor(
        'FreqZero', _required, strict=DEFAULT_STRICT,
        docstring=r'*RF frequency* :math:`(f_0)` in Hz used for computing '
                  r'Doppler Centroid values. Typical :math:`f_0` '
                  r'set equal to center transmit frequency.')  # type: float
    DRateSFPoly = SerializableDescriptor(
        'DRateSFPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial function that yields *Doppler Rate scale factor (DRSF)* '
                  'as a function of image location. Yields `DRSF` as a function of image '
                  'range coordinate ``(variable 1)`` and azimuth coordinate ``(variable 2)``. '
                  'Used to compute Doppler Rate at closest approach.')  # type: Poly2DType
    DopCentroidPoly = SerializableDescriptor(
        'DopCentroidPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial function that yields Doppler Centroid value as a '
                  'function of image location *(fdop_DC)*. The *fdop_DC* is the '
                  'Doppler frequency at the peak signal response. The polynomial is a function '
                  'of image range coordinate ``(variable 1)`` and azimuth coordinate ``(variable 2)``. '
                  '*Note: Only used for Stripmap and Dynamic Stripmap collections.*')  # type: Poly2DType
    DopCentroidCOA = BooleanDescriptor(
        'DopCentroidCOA', _required, strict=DEFAULT_STRICT,
        docstring="""Flag indicating that the COA is at the peak signal :math:`fdop_COA = fdop_DC`.

        * `True` - if Pixel COA at peak signal for all pixels.

        * `False` otherwise.

        *Note:* Only used for Stripmap and Dynamic Stripmap.""")  # type: bool

    def __init__(
            self,
            TimeCAPoly: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            R_CA_SCP: float = None,
            FreqZero: float = None,
            DRateSFPoly: Union[Poly2DType, numpy.ndarray, list, tuple] = None,
            DopCentroidPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            DopCentroidCOA: bool = None,
            **kwargs):
        """
        Parameters
        ----------
        TimeCAPoly : Poly1DType|numpy.ndarray|list|tuple
        R_CA_SCP : float
        FreqZero : float
        DRateSFPoly : Poly2DType|numpy.ndarray|list|tuple
        DopCentroidPoly : Poly2DType|numpy.ndarray|list|tuple
        DopCentroidCOA : bool
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TimeCAPoly = TimeCAPoly
        self.R_CA_SCP = R_CA_SCP
        self.FreqZero = FreqZero
        self.DRateSFPoly = DRateSFPoly
        self.DopCentroidPoly = DopCentroidPoly
        self.DopCentroidCOA = DopCentroidCOA
        super(INCAType, self).__init__(**kwargs)

    def _apply_reference_frequency(self, reference_frequency: float):
        """
        Shift `FreqZero` by the given reference frequency offset.
        Expected to be called by the parent structure.

        Parameters
        ----------
        reference_frequency : float

        Returns
        -------
        None
        """

        if self.FreqZero is not None:
            self.FreqZero += reference_frequency
class RMAType(Serializable):
    """Parameters included when the image is formed using the Range Migration Algorithm."""

    _fields = ('RMAlgoType', 'ImageType', 'RMAT', 'RMCR', 'INCA')
    _required = ('RMAlgoType', 'ImageType')
    # exactly one of RMAT/RMCR/INCA is expected to be populated
    _choice = ({'required': True, 'collection': ('RMAT', 'RMCR', 'INCA')}, )
    # class variables
    _RM_ALGO_TYPE_VALUES = ('OMEGA_K', 'CSA', 'RG_DOP')
    # descriptors
    RMAlgoType = StringEnumDescriptor(
        'RMAlgoType', _RM_ALGO_TYPE_VALUES, _required, strict=DEFAULT_STRICT,
        docstring=r"""
        Identifies the type of migration algorithm used:

        * `OMEGA_K` - Algorithms that employ Stolt interpolation of the Kxt dimension. :math:`Kx = \sqrt{Kf^2 - Ky^2}`

        * `CSA` - Wave number algorithm that process two-dimensional chirp signals.

        * `RG_DOP` - Range-Doppler algorithms that employ *RCMC* in the compressed range domain.
        """)  # type: str
    RMAT = SerializableDescriptor(
        'RMAT', RMRefType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters for *RMA with Along Track (RMAT)* motion compensation.')  # type: RMRefType
    RMCR = SerializableDescriptor(
        'RMCR', RMRefType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters for *RMA with Cross Range (RMCR)* motion compensation.')  # type: RMRefType
    INCA = SerializableDescriptor(
        'INCA', INCAType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters for *Imaging Near Closest Approach (INCA)* image description.')  # type: INCAType

    def __init__(
            self,
            RMAlgoType: str = None,
            RMAT: Optional[RMRefType] = None,
            RMCR: Optional[RMRefType] = None,
            INCA: Optional[INCAType] = None,
            **kwargs):
        """
        Parameters
        ----------
        RMAlgoType : str
        RMAT : RMRefType
        RMCR : RMRefType
        INCA : INCAType
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.RMAlgoType = RMAlgoType
        self.RMAT = RMAT
        self.RMCR = RMCR
        self.INCA = INCA
        super(RMAType, self).__init__(**kwargs)

    @property
    def ImageType(self) -> Optional[str]:
        """
        str: READ ONLY attribute. Identifies the specific RM image type / metadata type supplied. This is determined by
        returning the (first) attribute among `'RMAT', 'RMCR', 'INCA'` which is populated. `None` will be returned if
        none of them are populated.
        """

        for attribute in self._choice[0]['collection']:
            if getattr(self, attribute) is not None:
                return attribute
        return None

    def _derive_parameters(self, SCPCOA, Position, RadarCollection, ImageFormation):
        """
        Derive the reference position/velocity/cone-angle (RMAT/RMCR) or the
        INCA range and frequency values, when not already populated.

        Expected to be called from SICD parent.

        Parameters
        ----------
        SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
        Position : sarpy.io.complex.sicd_elements.Position.PositionType
        RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
        ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType

        Returns
        -------
        None
        """

        if SCPCOA is None:
            return

        # NOTE(review): `scp` here is populated from SCPCOA.ARPPos (the aperture
        # reference position) - GeoData is not available in this signature.
        # Confirm that this is the intended reference point.
        scp = None if SCPCOA.ARPPos is None else SCPCOA.ARPPos.get_array()

        im_type = self.ImageType
        if im_type in ['RMAT', 'RMCR']:
            rm_ref = getattr(self, im_type)  # type: RMRefType
            # default the reference position/velocity to the SCPCOA values
            if rm_ref.PosRef is None and SCPCOA.ARPPos is not None:
                rm_ref.PosRef = SCPCOA.ARPPos.copy()
            if rm_ref.VelRef is None and SCPCOA.ARPVel is not None:
                rm_ref.VelRef = SCPCOA.ARPVel.copy()
            if scp is not None and rm_ref.PosRef is not None and rm_ref.VelRef is not None:
                pos_ref = rm_ref.PosRef.get_array()
                vel_ref = rm_ref.VelRef.get_array()
                uvel_ref = vel_ref/norm(vel_ref)
                ulos = (scp - pos_ref)  # it absolutely could be that scp = pos_ref
                ulos_norm = norm(ulos)
                # only derive the cone angle when the line of sight is non-degenerate
                if ulos_norm > 0:
                    ulos /= ulos_norm
                    if rm_ref.DopConeAngRef is None:
                        rm_ref.DopConeAngRef = numpy.rad2deg(numpy.arccos(numpy.dot(uvel_ref, ulos)))
        elif im_type == 'INCA':
            if scp is not None and self.INCA.TimeCAPoly is not None and \
                    Position is not None and Position.ARPPoly is not None:
                # range at closest approach, from the platform position at the
                # constant term of the TimeCA polynomial
                t_zero = self.INCA.TimeCAPoly.Coefs[0]
                ca_pos = Position.ARPPoly(t_zero)
                if self.INCA.R_CA_SCP is None:
                    self.INCA.R_CA_SCP = norm(ca_pos - scp)
            if self.INCA.FreqZero is None:
                self.INCA.FreqZero = _get_center_frequency(RadarCollection, ImageFormation)

    def _apply_reference_frequency(self, reference_frequency: float):
        """
        If the reference frequency is used, adjust the necessary fields accordingly.
        Expected to be called by SICD parent.

        Parameters
        ----------
        reference_frequency : float
            The reference frequency.

        Returns
        -------
        None
        """

        if self.INCA is not None:
            # noinspection PyProtectedMember
            self.INCA._apply_reference_frequency(reference_frequency)
| 11,787 | 39.788927 | 119 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/utils.py | """
Common use sicd_elements methods.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Optional, Tuple
import numpy
from sarpy.io.general.utils import get_seconds
logger = logging.getLogger(__name__)
def _get_center_frequency(
RadarCollection,
ImageFormation) -> Optional[float]:
"""
Helper method.
Parameters
----------
RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
Returns
-------
None|float
The center processed frequency, in the event that RadarCollection.RefFreqIndex is `None` or `0`.
"""
if RadarCollection is None or ImageFormation is None or ImageFormation.TxFrequencyProc is None:
return None
if RadarCollection.RefFreqIndex is None or RadarCollection.RefFreqIndex == 0:
return ImageFormation.TxFrequencyProc.center_frequency
else:
return None
def polstring_version_required(str_in: Optional[str]) -> Optional[Tuple[int, int, int]]:
    """
    What SICD version does the pol string require?

    Parameters
    ----------
    str_in : None|str
        The tx/rcv polarization string.

    Returns
    -------
    None|tuple
        One of `(1, 1, 0)`, `(1, 2, 1)`, `(1, 3, 0)`, or `None` for a
        malformed (not two-part) polarization string.
        *NB: the return annotation was previously `Tuple[int, int, int]`, which
        did not account for the `None` return on the malformed branch.*
    """

    if str_in is None or str_in in ('OTHER', 'UNKNOWN'):
        return 1, 1, 0

    parts = str_in.split(':')
    if len(parts) != 2:
        logger.error('Expected polarization string of length 2, '
                     'but populated as `{}`'.format(len(parts)))
        return None

    part1, part2 = parts
    # OTHER* text or the extended single-letter codes require SICD 1.3.0
    if part1.startswith('OTHER') or part2.startswith('OTHER'):
        return 1, 3, 0
    if part1 in ('S', 'E', 'X', 'Y') or part2 in ('S', 'E', 'X', 'Y'):
        return 1, 3, 0
    # mixed linear/circular combinations require SICD 1.2.1
    if (part1 in ('V', 'H') and part2 in ('RHC', 'LHC')) or \
            (part2 in ('V', 'H') and part1 in ('RHC', 'LHC')):
        return 1, 2, 1
    return 1, 1, 0
################
# SICD comparison and matching methods
def is_same_size(sicd1, sicd2) -> bool:
    """
    Are the two SICD structures the same size in pixels?

    Parameters
    ----------
    sicd1 : sarpy.io.complex.sicd_elements.SICD.SICDType
    sicd2 : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    if sicd1 is sicd2:
        return True

    try:
        rows_match = (sicd1.ImageData.NumRows == sicd2.ImageData.NumRows)
        cols_match = (sicd1.ImageData.NumCols == sicd2.ImageData.NumCols)
    except AttributeError:
        # missing metadata - cannot be established as the same size
        return False
    return rows_match and cols_match
def is_same_sensor(sicd1, sicd2) -> bool:
    """
    Are the two SICD structures from the same sensor?

    Parameters
    ----------
    sicd1 : sarpy.io.complex.sicd_elements.SICD.SICDType
    sicd2 : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    if sicd1 is sicd2:
        return True

    try:
        name1 = sicd1.CollectionInfo.CollectorName
        name2 = sicd2.CollectionInfo.CollectorName
    except AttributeError:
        # missing metadata - cannot be established as the same sensor
        return False
    return name1 == name2
def is_same_start_time(sicd1, sicd2) -> bool:
    """
    Do the two SICD structures have the same start time with millisecond resolution?

    Parameters
    ----------
    sicd1 : sarpy.io.complex.sicd_elements.SICD.SICDType
    sicd2 : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    if sicd1 is sicd2:
        return True

    try:
        delta = get_seconds(
            sicd1.Timeline.CollectStart, sicd2.Timeline.CollectStart, precision='ms')
    except AttributeError:
        # missing metadata - cannot be established as the same start time
        return False
    return abs(delta) < 2e-3
def is_same_duration(sicd1, sicd2) -> bool:
    """
    Do the two SICD structures have the same duration, with millisecond resolution?

    Parameters
    ----------
    sicd1 : sarpy.io.complex.sicd_elements.SICD.SICDType
    sicd2 : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    if sicd1 is sicd2:
        return True

    try:
        duration1 = sicd1.Timeline.CollectDuration
        duration2 = sicd2.Timeline.CollectDuration
    except AttributeError:
        # missing metadata - cannot be established as the same duration
        return False
    return abs(duration1 - duration2) < 2e-3
def is_same_band(sicd1, sicd2) -> bool:
    """
    Are the two SICD structures the same band?

    Parameters
    ----------
    sicd1 : sarpy.io.complex.sicd_elements.SICD.SICDType
    sicd2 : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    if sicd1 is sicd2:
        return True

    try:
        delta_kctr = abs(sicd1.Grid.Row.KCtr - sicd2.Grid.Row.KCtr)
        # tolerance is one spatial frequency sample of the first image
        tolerance = 1./(sicd1.Grid.Row.SS*sicd1.ImageData.NumRows)
    except AttributeError:
        # missing metadata - cannot be established as the same band
        return False
    return delta_kctr <= tolerance
def is_same_scp(sicd1, sicd2) -> bool:
    """
    Do the two SICD structures share the same SCP, with resolution of one meter
    in each ECF coordinate?

    Parameters
    ----------
    sicd1 : sarpy.io.complex.sicd_elements.SICD.SICDType
    sicd2 : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    if sicd1 is sicd2:
        return True

    try:
        scp1 = sicd1.GeoData.SCP.ECF.get_array()
        scp2 = sicd2.GeoData.SCP.ECF.get_array()
    except AttributeError:
        # missing metadata - cannot be established as the same SCP
        return False
    return numpy.all(numpy.abs(scp1 - scp2) < 1)
def is_general_match(sicd1, sicd2) -> bool:
    """
    Do the two SICD structures seem to form a basic match? This necessarily
    establishes and equivalence relation between sicds.

    Parameters
    ----------
    sicd1 : sarpy.io.complex.sicd_elements.SICD.SICDType
    sicd2 : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    if sicd1 is sicd2:
        return True

    # every individual comparison must agree (short-circuits on first failure)
    return (is_same_size(sicd1, sicd2)
            and is_same_sensor(sicd1, sicd2)
            and is_same_start_time(sicd1, sicd2)
            and is_same_duration(sicd1, sicd2)
            and is_same_band(sicd1, sicd2)
            and is_same_scp(sicd1, sicd2))
| 5,931 | 23.411523 | 112 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/PFA.py | """
The PFAType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Optional, Tuple
import numpy
from numpy.linalg import norm
from numpy.polynomial import polynomial
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import BooleanDescriptor, FloatDescriptor, \
SerializableDescriptor, UnitVectorDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import Poly1DType, Poly2DType, XYZType
from sarpy.geometry import geocoords
class STDeskewType(Serializable):
    """
    Describes image domain slow time (ST) deskew processing.
    """

    _fields = ('Applied', 'STDSPhasePoly')
    _required = _fields
    # descriptors
    Applied = BooleanDescriptor(
        'Applied', _required, strict=DEFAULT_STRICT,
        docstring='Parameter indicating if slow time *(ST)* Deskew Phase function has been applied.')  # type: bool
    STDSPhasePoly = SerializableDescriptor(
        'STDSPhasePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Slow time deskew phase function to perform the *ST/Kaz* shift. Two-dimensional phase '
                  '(cycles) polynomial function of image range coordinate *(variable 1)* and '
                  'azimuth coordinate *(variable 2)*.')  # type: Poly2DType

    def __init__(
            self,
            Applied: bool = None,
            STDSPhasePoly: Union[Poly2DType, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        Applied : bool
        STDSPhasePoly : Poly2DType|numpy.ndarray|list|tuple
        kwargs
        """

        # carry the optional xml namespace details through, when supplied
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.Applied = Applied
        # noinspection PyTypeChecker
        self.STDSPhasePoly = STDSPhasePoly
        super(STDeskewType, self).__init__(**kwargs)
class PFAType(Serializable):
    """Parameters for the Polar Formation Algorithm."""

    _fields = (
        'FPN', 'IPN', 'PolarAngRefTime', 'PolarAngPoly', 'SpatialFreqSFPoly', 'Krg1', 'Krg2', 'Kaz1', 'Kaz2',
        'STDeskew')
    _required = ('FPN', 'IPN', 'PolarAngRefTime', 'PolarAngPoly', 'SpatialFreqSFPoly', 'Krg1', 'Krg2', 'Kaz1', 'Kaz2')
    _numeric_format = {
        'PolarAngRefTime': FLOAT_FORMAT, 'Krg1': FLOAT_FORMAT, 'Krg2': FLOAT_FORMAT,
        'Kaz1': FLOAT_FORMAT, 'Kaz2': FLOAT_FORMAT}
    # descriptors
    FPN = UnitVectorDescriptor(
        'FPN', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Focus Plane unit normal in ECF coordinates. Unit vector FPN points away from the center of '
                  'the Earth.')  # type: XYZType
    IPN = UnitVectorDescriptor(
        'IPN', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Image Formation Plane unit normal in ECF coordinates. Unit vector IPN points away from the '
                  'center of the Earth.')  # type: XYZType
    PolarAngRefTime = FloatDescriptor(
        'PolarAngRefTime', _required, strict=DEFAULT_STRICT,
        docstring='Polar image formation reference time *(in seconds)*. Polar Angle = 0 at the reference time. '
                  'Measured relative to collection start. *Note: Reference time is typically set equal to the SCP '
                  'COA time but may be different.*')  # type: float
    PolarAngPoly = SerializableDescriptor(
        'PolarAngPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial function that yields Polar Angle *(in radians)* as function of time '
                  'relative to Collection Start.')  # type: Poly1DType
    SpatialFreqSFPoly = SerializableDescriptor(
        'SpatialFreqSFPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial that yields the *Spatial Frequency Scale Factor (KSF)* as a function of Polar '
                  r'Angle. That is, :math:`Polar Angle[radians] \to KSF[dimensionless]`. Used to scale RF '
                  'frequency *(fx, Hz)* to aperture spatial frequency *(Kap, cycles/m)*. Where,'
                  r':math:`Kap = fx\cdot (2/c)\cdot KSF`, and `Kap` is the effective spatial '
                  'frequency in the polar aperture.')  # type: Poly1DType
    Krg1 = FloatDescriptor(
        'Krg1', _required, strict=DEFAULT_STRICT,
        docstring='Minimum *range spatial frequency (Krg)* output from the polar to rectangular '
                  'resampling.')  # type: float
    Krg2 = FloatDescriptor(
        'Krg2', _required, strict=DEFAULT_STRICT,
        docstring='Maximum *range spatial frequency (Krg)* output from the polar to rectangular '
                  'resampling.')  # type: float
    Kaz1 = FloatDescriptor(
        'Kaz1', _required, strict=DEFAULT_STRICT,
        docstring='Minimum *azimuth spatial frequency (Kaz)* output from the polar to rectangular '
                  'resampling.')  # type: float
    Kaz2 = FloatDescriptor(
        'Kaz2', _required, strict=DEFAULT_STRICT,
        docstring='Maximum *azimuth spatial frequency (Kaz)* output from the polar to rectangular '
                  'resampling.')  # type: float
    STDeskew = SerializableDescriptor(
        'STDeskew', STDeskewType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters to describe image domain slow time *(ST)* Deskew processing.')  # type: STDeskewType

    def __init__(
            self,
            FPN: Union[XYZType, numpy.ndarray, list, tuple] = None,
            IPN: Union[XYZType, numpy.ndarray, list, tuple] = None,
            PolarAngRefTime: float = None,
            PolarAngPoly: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            SpatialFreqSFPoly: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            Krg1: float = None,
            Krg2: float = None,
            Kaz1: float = None,
            Kaz2: float = None,
            STDeskew: Optional[STDeskewType] = None,
            **kwargs):
        """
        Parameters
        ----------
        FPN : XYZType|numpy.ndarray|list|tuple
        IPN : XYZType|numpy.ndarray|list|tuple
        PolarAngRefTime : float
        PolarAngPoly : Poly1DType|numpy.ndarray|list|tuple
        SpatialFreqSFPoly : Poly1DType|numpy.ndarray|list|tuple
        Krg1 : float
        Krg2 : float
        Kaz1 : float
        Kaz2 : float
        STDeskew : STDeskewType
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.FPN = FPN
        self.IPN = IPN
        self.PolarAngRefTime = PolarAngRefTime
        self.PolarAngPoly = PolarAngPoly
        self.SpatialFreqSFPoly = SpatialFreqSFPoly
        self.Krg1, self.Krg2 = Krg1, Krg2
        self.Kaz1, self.Kaz2 = Kaz1, Kaz2
        self.STDeskew = STDeskew
        super(PFAType, self).__init__(**kwargs)

    def pfa_polar_coords(
            self,
            Position,
            SCP: numpy.ndarray,
            times: Union[float, int, numpy.ndarray]) -> Tuple[
                Union[None, float, numpy.ndarray], Union[None, float, numpy.ndarray]]:
        """
        Calculate the PFA parameters necessary for mapping phase history to polar coordinates.

        Parameters
        ----------
        Position : sarpy.io.complex.sicd_elements.Position.PositionType
        SCP : numpy.ndarray
        times : numpy.ndarray|float|int

        Returns
        -------
        k_a : None|float|numpy.ndarray
            The polar angle
        k_sf : None|float|numpy.ndarray
            The spatial frequency scale factor. The shape of the return arrays
            will match the shape of the `times` array (or scalar).
        """

        def project_to_image_plane(points):
            # type: (numpy.ndarray) -> numpy.ndarray
            # project into the image plane along line normal to the focus plane
            offset = (SCP - points).dot(ipn)/fpn.dot(ipn)
            if offset.ndim == 0:
                return points + offset*fpn
            else:
                return points + numpy.outer(offset, fpn)

        # both plane normals are required for the projection
        if self.IPN is None or self.FPN is None:
            return None, None

        ipn = self.IPN.get_array(dtype='float64')
        fpn = self.FPN.get_array(dtype='float64')
        # normalize times to a one-dimensional array, remembering the
        # original shape (or scalar-ness) for the return
        if isinstance(times, (float, int)) or times.ndim == 0:
            o_shape = None
            times = numpy.array([times, ], dtype='float64')
        else:
            o_shape = times.shape
            times = numpy.reshape(times, (-1, ))
        positions = Position.ARPPoly(times)
        reference_position = Position.ARPPoly(self.PolarAngRefTime)
        image_plane_positions = project_to_image_plane(positions)
        image_plane_coa = project_to_image_plane(reference_position)

        # establish image plane coordinate system
        ip_x = image_plane_coa - SCP
        ip_x /= numpy.linalg.norm(ip_x)
        ip_y = numpy.cross(ip_x, ipn)

        # compute polar angle of sensor position in image plane
        ip_range = image_plane_positions - SCP
        ip_range /= numpy.linalg.norm(ip_range, axis=1)[:, numpy.newaxis]
        k_a = -numpy.arctan2(ip_range.dot(ip_y), ip_range.dot(ip_x))

        # compute the spatial frequency scale factor
        range_vectors = positions - SCP
        range_vectors /= numpy.linalg.norm(range_vectors, axis=1)[:, numpy.newaxis]
        sin_graze = range_vectors.dot(fpn)
        sin_graze_ip = ip_range.dot(fpn)
        k_sf = numpy.sqrt((1 - sin_graze*sin_graze)/(1 - sin_graze_ip*sin_graze_ip))
        # restore the shape of the input times
        if o_shape is None:
            return k_a[0], k_sf[0]
        elif len(o_shape) > 1:
            return numpy.reshape(k_a, o_shape), numpy.reshape(k_sf, o_shape)
        else:
            return k_a, k_sf

    def _derive_parameters(self, Grid, SCPCOA, GeoData, Position, Timeline):
        """
        Derive the unpopulated PFA fields (reference time, plane normals,
        polar angle and scale factor polynomials, spatial frequency bounds)
        from the other SICD metadata.

        Expected to be called from SICD parent.

        Parameters
        ----------
        Grid : sarpy.io.complex.sicd_elements.Grid.GridType
        SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
        Position : sarpy.io.complex.sicd_elements.Position.PositionType
        Timeline : sarpy.io.complex.sicd_elements.Timeline.TimelineType

        Returns
        -------
        None
        """

        # default the polar angle reference time to the SCP COA time
        if self.PolarAngRefTime is None and SCPCOA.SCPTime is not None:
            self.PolarAngRefTime = SCPCOA.SCPTime

        # nothing further can be derived without the scene center point
        if GeoData is None or GeoData.SCP is None or GeoData.SCP.ECF is None:
            return

        scp = GeoData.SCP.ECF.get_array()

        if SCPCOA.ARPPos is not None and SCPCOA.ARPVel is not None:
            # NOTE(review): this re-assignment of scp is redundant with the line above
            scp = GeoData.SCP.ECF.get_array()
            etp = geocoords.wgs_84_norm(scp)  # earth tangent plane normal at SCP

            arp = SCPCOA.ARPPos.get_array()
            los = (scp - arp)
            ulos = los/norm(los)

            # slant plane unit normal - orientation depends on look direction
            look = SCPCOA.look
            arp_vel = SCPCOA.ARPVel.get_array()
            uspz = look*numpy.cross(arp_vel, ulos)
            uspz /= norm(uspz)
            if Grid is not None and Grid.ImagePlane is not None:
                if self.IPN is None:
                    if Grid.ImagePlane == 'SLANT':
                        self.IPN = XYZType.from_array(uspz)
                    elif Grid.ImagePlane == 'GROUND':
                        self.IPN = XYZType.from_array(etp)
            elif self.IPN is None:
                self.IPN = XYZType.from_array(uspz)  # assuming slant -> most common
            if self.FPN is None:
                self.FPN = XYZType.from_array(etp)

        if Position is not None and \
                Timeline is not None and Timeline.CollectDuration is not None and \
                (self.PolarAngPoly is None or self.SpatialFreqSFPoly is None):
            # NOTE(review): pol_ref_pos is computed but never used below -
            # candidate for removal
            pol_ref_pos = Position.ARPPoly(self.PolarAngRefTime)
            # fit the PFA polynomials
            times = numpy.linspace(0, Timeline.CollectDuration, 15)
            k_a, k_sf = self.pfa_polar_coords(Position, scp, times)

            self.PolarAngPoly = Poly1DType(Coefs=polynomial.polyfit(times, k_a, 5, full=False))
            self.SpatialFreqSFPoly = Poly1DType(Coefs=polynomial.polyfit(k_a, k_sf, 5, full=False))

        # default the spatial frequency bounds from the grid center/bandwidth
        if Grid is not None and Grid.Row is not None and \
                Grid.Row.KCtr is not None and Grid.Row.ImpRespBW is not None:
            if self.Krg1 is None:
                self.Krg1 = Grid.Row.KCtr - 0.5*Grid.Row.ImpRespBW
            if self.Krg2 is None:
                self.Krg2 = Grid.Row.KCtr + 0.5*Grid.Row.ImpRespBW
        if Grid is not None and Grid.Col is not None and \
                Grid.Col.KCtr is not None and Grid.Col.ImpRespBW is not None:
            if self.Kaz1 is None:
                self.Kaz1 = Grid.Col.KCtr - 0.5*Grid.Col.ImpRespBW
            if self.Kaz2 is None:
                self.Kaz2 = Grid.Col.KCtr + 0.5*Grid.Col.ImpRespBW

    def _check_polar_ang_ref(self) -> bool:
        """
        Checks the polar angle origin makes sense.

        Returns
        -------
        bool
        """

        if self.PolarAngPoly is None or self.PolarAngRefTime is None:
            return True

        cond = True
        # the polar angle is zero, by definition, at the reference time
        polar_angle_ref = self.PolarAngPoly(self.PolarAngRefTime)
        if abs(polar_angle_ref) > 1e-4:
            self.log_validity_error(
                'The PolarAngPoly evaluated at PolarAngRefTime yields {}, which should be 0'.format(polar_angle_ref))
            cond = False
        return cond

    def _basic_validity_check(self) -> bool:
        # extend the base validity check with the polar angle origin constraint
        condition = super(PFAType, self)._basic_validity_check()
        condition &= self._check_polar_ang_ref()
        return condition
| 13,752 | 40.424699 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/ImageFormation.py | """
The ImageFormationType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List, Union, Optional, Dict, Tuple
from datetime import datetime, date
import numpy
from sarpy.io.xml.base import Serializable, Arrayable, ParametersCollection
from sarpy.io.xml.descriptors import StringDescriptor, StringEnumDescriptor, \
FloatDescriptor, IntegerDescriptor, IntegerListDescriptor, BooleanDescriptor, \
ComplexDescriptor, DateTimeDescriptor, SerializableDescriptor, \
SerializableListDescriptor, ParametersDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import DUAL_POLARIZATION_VALUES
from .RadarCollection import get_band_name
from .utils import polstring_version_required
class RcvChanProcType(Serializable):
    """
    Parameters describing the collection of receive data channels that were
    processed to form the image.
    """

    _fields = ('NumChanProc', 'PRFScaleFactor', 'ChanIndices')
    _required = ('NumChanProc', 'ChanIndices')
    _collections_tags = {
        'ChanIndices': {'array': False, 'child_tag': 'ChanIndex'}}
    _numeric_format = {'PRFScaleFactor': FLOAT_FORMAT}
    # descriptors
    NumChanProc = IntegerDescriptor(
        'NumChanProc', _required, strict=DEFAULT_STRICT,
        docstring='Number of receive data channels processed to form the image.')  # type: int
    PRFScaleFactor = FloatDescriptor(
        'PRFScaleFactor', _required, strict=DEFAULT_STRICT,
        docstring='Factor indicating the ratio of the effective PRF '
                  'to the actual PRF.')  # type: Optional[float]
    ChanIndices = IntegerListDescriptor(
        'ChanIndices', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Index of a data channel that was processed.')  # type: List[int]

    def __init__(
            self,
            NumChanProc: int = None,
            PRFScaleFactor: Optional[float] = None,
            ChanIndices: List[int] = None,
            **kwargs):
        """
        Parameters
        ----------
        NumChanProc : int
        PRFScaleFactor : float
        ChanIndices : List[int]
        kwargs
        """

        # carry the optional xml namespace details through, when supplied
        for ns_key in ('_xml_ns', '_xml_ns_key'):
            if ns_key in kwargs:
                setattr(self, ns_key, kwargs[ns_key])
        self.NumChanProc = NumChanProc
        self.PRFScaleFactor = PRFScaleFactor
        self.ChanIndices = ChanIndices
        super(RcvChanProcType, self).__init__(**kwargs)
class TxFrequencyProcType(Serializable, Arrayable):
    """The transmit frequency range."""

    _fields = ('MinProc', 'MaxProc')
    _required = _fields
    _numeric_format = {'MinProc': '0.17E', 'MaxProc': '0.17E'}
    # descriptors
    MinProc = FloatDescriptor(
        'MinProc', _required, strict=DEFAULT_STRICT,
        docstring='The minimum transmit frequency processed to form the image, in Hz.')  # type: float
    MaxProc = FloatDescriptor(
        'MaxProc', _required, strict=DEFAULT_STRICT,
        docstring='The maximum transmit frequency processed to form the image, in Hz.')  # type: float

    def __init__(
            self,
            MinProc: float = None,
            MaxProc: float = None,
            **kwargs):
        """
        Parameters
        ----------
        MinProc : float
        MaxProc : float
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.MinProc, self.MaxProc = MinProc, MaxProc
        super(TxFrequencyProcType, self).__init__(**kwargs)

    @property
    def center_frequency(self) -> Optional[float]:
        """
        None|float: The center frequency, or `None` if either bound is unpopulated.
        """

        if self.MinProc is None or self.MaxProc is None:
            return None
        return 0.5*(self.MinProc + self.MaxProc)

    @property
    def bandwidth(self) -> Optional[float]:
        """
        None|float: The bandwidth in Hz, or `None` if either bound is unpopulated.
        """

        if self.MinProc is None or self.MaxProc is None:
            return None
        return self.MaxProc - self.MinProc

    def _apply_reference_frequency(
            self,
            reference_frequency: float):
        # shift both frequency bounds by the reference frequency offset
        if self.MinProc is not None:
            self.MinProc += reference_frequency
        if self.MaxProc is not None:
            self.MaxProc += reference_frequency

    def _basic_validity_check(self) -> bool:
        # extend the base check: the frequency bounds must be properly ordered
        condition = super(TxFrequencyProcType, self)._basic_validity_check()
        if self.MinProc is not None and self.MaxProc is not None and self.MaxProc < self.MinProc:
            self.log_validity_error(
                'Invalid frequency bounds MinProc ({}) > MaxProc ({})'.format(self.MinProc, self.MaxProc))
            condition = False
        return condition

    def get_band_name(self) -> str:
        """
        Gets the band name.

        Returns
        -------
        str
        """

        return get_band_name(self.center_frequency)

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Gets an array representation of the data.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            data type of the return

        Returns
        -------
        numpy.ndarray
            data array with appropriate entry order
        """

        return numpy.array([self.MinProc, self.MaxProc], dtype=dtype)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [MinProc, MaxProc]

        Returns
        -------
        TxFrequencyProcType
        """

        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError('Expected array to be of length 2, and received {}'.format(array))
            return cls(MinProc=array[0], MaxProc=array[1])
        raise ValueError('Expected array to be numpy.ndarray, list, or tuple, got {}'.format(type(array)))
class ProcessingType(Serializable):
    """A single (possibly applied) processing step used during image formation."""
    _fields = ('Type', 'Applied', 'Parameters')
    _required = ('Type', 'Applied')
    _collections_tags = {'Parameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    Type = StringDescriptor(
        'Type', _required, strict=DEFAULT_STRICT,
        docstring='The processing type identifier.')  # type: str
    Applied = BooleanDescriptor(
        'Applied', _required, strict=DEFAULT_STRICT,
        docstring='Indicates whether the given processing type has been applied.')  # type: bool
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The parameters collection.')  # type: ParametersCollection
    def __init__(
            self,
            Type: str = None,
            Applied: bool = None,
            Parameters: Union[None, ParametersCollection, Dict] = None,
            **kwargs):
        """
        Parameters
        ----------
        Type : str
        Applied : bool
        Parameters : ParametersCollection|dict
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Type = Type
        self.Applied = Applied
        self.Parameters = Parameters
        super(ProcessingType, self).__init__(**kwargs)
class DistortionType(Serializable):
    """The polarization distortion parameters (2x2 transmit/receive distortion matrices)."""
    _fields = (
        'CalibrationDate', 'A', 'F1', 'Q1', 'Q2', 'F2', 'Q3', 'Q4',
        'GainErrorA', 'GainErrorF1', 'GainErrorF2', 'PhaseErrorF1', 'PhaseErrorF2')
    _required = ('A', 'F1', 'Q1', 'Q2', 'F2', 'Q3', 'Q4')
    _numeric_format = {key: FLOAT_FORMAT for key in _fields[1:]}
    # descriptors
    CalibrationDate = DateTimeDescriptor(
        'CalibrationDate', _required, strict=DEFAULT_STRICT,
        docstring='The calibration date.')  # type: numpy.datetime64
    A = FloatDescriptor(
        'A', _required, strict=DEFAULT_STRICT,
        docstring='Absolute amplitude scale factor.')  # type: float
    # receive distortion matrix
    F1 = ComplexDescriptor(
        'F1', _required, strict=DEFAULT_STRICT,
        docstring='Receive distortion element (2,2).')  # type: complex
    Q1 = ComplexDescriptor(
        'Q1', _required, strict=DEFAULT_STRICT,
        docstring='Receive distortion element (1,2).')  # type: complex
    Q2 = ComplexDescriptor(
        'Q2', _required, strict=DEFAULT_STRICT,
        docstring='Receive distortion element (2,1).')  # type: complex
    # transmit distortion matrix
    F2 = ComplexDescriptor(
        'F2', _required, strict=DEFAULT_STRICT,
        docstring='Transmit distortion element (2,2).')  # type: complex
    Q3 = ComplexDescriptor(
        'Q3', _required, strict=DEFAULT_STRICT,
        docstring='Transmit distortion element (2,1).')  # type: complex
    Q4 = ComplexDescriptor(
        'Q4', _required, strict=DEFAULT_STRICT,
        docstring='Transmit distortion element (1,2).')  # type: complex
    # gain estimation error
    GainErrorA = FloatDescriptor(
        'GainErrorA', _required, strict=DEFAULT_STRICT,
        docstring='Gain estimation error standard deviation (in dB) for parameter A.')  # type: float
    GainErrorF1 = FloatDescriptor(
        'GainErrorF1', _required, strict=DEFAULT_STRICT,
        docstring='Gain estimation error standard deviation (in dB) for parameter F1.')  # type: float
    GainErrorF2 = FloatDescriptor(
        'GainErrorF2', _required, strict=DEFAULT_STRICT,
        docstring='Gain estimation error standard deviation (in dB) for parameter F2.')  # type: float
    # NOTE(review): "(in dB)" for the phase error descriptors below looks suspect -
    # confirm the units for PhaseErrorF1/F2 against the SICD specification.
    PhaseErrorF1 = FloatDescriptor(
        'PhaseErrorF1', _required, strict=DEFAULT_STRICT,
        docstring='Phase estimation error standard deviation (in dB) for parameter F1.')  # type: float
    PhaseErrorF2 = FloatDescriptor(
        'PhaseErrorF2', _required, strict=DEFAULT_STRICT,
        docstring='Phase estimation error standard deviation (in dB) for parameter F2.')  # type: float
    def __init__(
            self,
            CalibrationDate: Union[None, numpy.datetime64, datetime, date, str] = None,
            A: float = None,
            F1: complex = None,
            Q1: complex = None,
            Q2: complex = None,
            F2: complex = None,
            Q3: complex = None,
            Q4: complex = None,
            GainErrorA: Optional[float] = None,
            GainErrorF1: Optional[float] = None,
            GainErrorF2: Optional[float] = None,
            PhaseErrorF1: Optional[float] = None,
            PhaseErrorF2: Optional[float] = None,
            **kwargs):
        """
        Parameters
        ----------
        CalibrationDate : numpy.datetime64|datetime|date|str
        A : float
        F1 : complex
        Q1 : complex
        Q2 : complex
        F2 : complex
        Q3 : complex
        Q4 : complex
        GainErrorA : float
        GainErrorF1 : float
        GainErrorF2 : float
        PhaseErrorF1 : float
        PhaseErrorF2 : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CalibrationDate = CalibrationDate
        self.A = A
        # receive distortion matrix entries
        self.F1, self.Q1, self.Q2 = F1, Q1, Q2
        # transmit distortion matrix entries
        self.F2, self.Q3, self.Q4 = F2, Q3, Q4
        self.GainErrorA = GainErrorA
        self.GainErrorF1, self.GainErrorF2 = GainErrorF1, GainErrorF2
        self.PhaseErrorF1, self.PhaseErrorF2 = PhaseErrorF1, PhaseErrorF2
        super(DistortionType, self).__init__(**kwargs)
class PolarizationCalibrationType(Serializable):
    """The polarization calibration details - whether distortion correction was applied, and the distortion parameters."""
    _fields = ('DistortCorrectApplied', 'Distortion')
    _required = _fields
    # descriptors
    DistortCorrectApplied = BooleanDescriptor(
        'DistortCorrectApplied', _required, strict=DEFAULT_STRICT,
        docstring='Indicates whether the polarization calibration has been applied.')  # type: bool
    Distortion = SerializableDescriptor(
        'Distortion', DistortionType, _required, strict=DEFAULT_STRICT,
        docstring='The distortion parameters.')  # type: DistortionType
    def __init__(
            self,
            DistortCorrectApplied: bool = None,
            Distortion: DistortionType = None,
            **kwargs):
        """
        Parameters
        ----------
        DistortCorrectApplied : bool
        Distortion : DistortionType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.DistortCorrectApplied = DistortCorrectApplied
        self.Distortion = Distortion
        super(PolarizationCalibrationType, self).__init__(**kwargs)
class ImageFormationType(Serializable):
    """The image formation process parameters."""
    _fields = (
        'RcvChanProc', 'TxRcvPolarizationProc', 'TStartProc', 'TEndProc',
        'TxFrequencyProc', 'SegmentIdentifier', 'ImageFormAlgo', 'STBeamComp',
        'ImageBeamComp', 'AzAutofocus', 'RgAutofocus', 'Processings',
        'PolarizationCalibration')
    _required = (
        'RcvChanProc', 'TxRcvPolarizationProc', 'TStartProc', 'TEndProc', 'TxFrequencyProc',
        'ImageFormAlgo', 'STBeamComp', 'ImageBeamComp', 'AzAutofocus', 'RgAutofocus')
    _collections_tags = {'Processings': {'array': False, 'child_tag': 'Processing'}}
    # fixed key 'EndProc' -> 'TEndProc': the old key was not a field name, so the
    # numeric format never actually applied to the TEndProc field
    _numeric_format = {'TStartProc': FLOAT_FORMAT, 'TEndProc': FLOAT_FORMAT}
    # class variables
    _IMG_FORM_ALGO_VALUES = ('PFA', 'RMA', 'RGAZCOMP', 'OTHER')
    _ST_BEAM_COMP_VALUES = ('NO', 'GLOBAL', 'SV')
    _IMG_BEAM_COMP_VALUES = ('NO', 'SV')
    _AZ_AUTOFOCUS_VALUES = _ST_BEAM_COMP_VALUES
    _RG_AUTOFOCUS_VALUES = _ST_BEAM_COMP_VALUES
    # descriptors
    RcvChanProc = SerializableDescriptor(
        'RcvChanProc', RcvChanProcType, _required, strict=DEFAULT_STRICT,
        docstring='The received processed channels.')  # type: RcvChanProcType
    TxRcvPolarizationProc = StringEnumDescriptor(
        'TxRcvPolarizationProc', DUAL_POLARIZATION_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='The combined transmit/receive polarization processed to form the image.')  # type: str
    TStartProc = FloatDescriptor(
        'TStartProc', _required, strict=DEFAULT_STRICT,
        docstring='Earliest slow time value for data processed to form the image '
                  'from `CollectionStart`.')  # type: float
    TEndProc = FloatDescriptor(
        'TEndProc', _required, strict=DEFAULT_STRICT,
        docstring='Latest slow time value for data processed to form the image from `CollectionStart`.')  # type: float
    TxFrequencyProc = SerializableDescriptor(
        'TxFrequencyProc', TxFrequencyProcType, _required, strict=DEFAULT_STRICT,
        docstring='The range of transmit frequency processed to form the image.')  # type: TxFrequencyProcType
    SegmentIdentifier = StringDescriptor(
        'SegmentIdentifier', _required, strict=DEFAULT_STRICT,
        docstring='Identifier that describes the image that was processed. '
                  'Must be included when `SICD.RadarCollection.Area.Plane.SegmentList` is included.')  # type: str
    ImageFormAlgo = StringEnumDescriptor(
        'ImageFormAlgo', _IMG_FORM_ALGO_VALUES, _required, strict=DEFAULT_STRICT,
        docstring="""
        The image formation algorithm used:
        * `PFA` - Polar Format Algorithm
        * `RMA` - Range Migration (Omega-K, Chirp Scaling, Range-Doppler)
        * `RGAZCOMP` - Simple range, Doppler compression.
        """)  # type: str
    STBeamComp = StringEnumDescriptor(
        'STBeamComp', _ST_BEAM_COMP_VALUES, _required, strict=DEFAULT_STRICT,
        docstring="""
        Indicates if slow time beam shape compensation has been applied.
        * `NO` - No ST beam shape compensation.
        * `GLOBAL` - Global ST beam shape compensation applied.
        * `SV` - Spatially variant beam shape compensation applied.
        """)  # type: str
    ImageBeamComp = StringEnumDescriptor(
        'ImageBeamComp', _IMG_BEAM_COMP_VALUES, _required, strict=DEFAULT_STRICT,
        docstring="""
        Indicates if image domain beam shape compensation has been applied.
        * `NO` - No image domain beam shape compensation.
        * `SV` - Spatially variant image domain beam shape compensation applied.
        """)  # type: str
    AzAutofocus = StringEnumDescriptor(
        'AzAutofocus', _AZ_AUTOFOCUS_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Indicates if azimuth autofocus correction has been applied, with similar '
                  'interpretation as `STBeamComp`.')  # type: str
    RgAutofocus = StringEnumDescriptor(
        'RgAutofocus', _RG_AUTOFOCUS_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Indicates if range autofocus correction has been applied, with similar '
                  'interpretation as `STBeamComp`.')  # type: str
    Processings = SerializableListDescriptor(
        'Processings', ProcessingType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Parameters to describe types of specific processing that may have been applied '
                  'such as additional compensations.')  # type: Optional[List[ProcessingType]]
    PolarizationCalibration = SerializableDescriptor(
        'PolarizationCalibration', PolarizationCalibrationType, _required, strict=DEFAULT_STRICT,
        docstring='The polarization calibration details.')  # type: Optional[PolarizationCalibrationType]
    def __init__(
            self,
            RcvChanProc: RcvChanProcType = None,
            TxRcvPolarizationProc: str = None,
            TStartProc: float = None,
            TEndProc: float = None,
            TxFrequencyProc: Union[TxFrequencyProcType, numpy.ndarray, list, tuple] = None,
            SegmentIdentifier: Optional[str] = None,
            ImageFormAlgo: str = None,
            STBeamComp: str = None,
            ImageBeamComp: str = None,
            AzAutofocus: str = None,
            RgAutofocus: str = None,
            Processings: Union[None, List[ProcessingType]] = None,
            PolarizationCalibration: Optional[PolarizationCalibrationType] = None,
            **kwargs):
        """
        Parameters
        ----------
        RcvChanProc : RcvChanProcType
        TxRcvPolarizationProc : str
        TStartProc : float
        TEndProc : float
        TxFrequencyProc : TxFrequencyProcType|numpy.ndarray|list|tuple
        SegmentIdentifier : None|str
        ImageFormAlgo : str
        STBeamComp : str
        ImageBeamComp : str
        AzAutofocus : str
        RgAutofocus : str
        Processings : None|List[ProcessingType]
        PolarizationCalibration : None|PolarizationCalibrationType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.RcvChanProc = RcvChanProc
        self.TxRcvPolarizationProc = TxRcvPolarizationProc
        self.TStartProc, self.TEndProc = TStartProc, TEndProc
        # permit array-like shorthand [MinProc, MaxProc] for the frequency bounds
        if isinstance(TxFrequencyProc, (numpy.ndarray, list, tuple)) and len(TxFrequencyProc) >= 2:
            self.TxFrequencyProc = TxFrequencyProcType(MinProc=TxFrequencyProc[0], MaxProc=TxFrequencyProc[1])
        else:
            self.TxFrequencyProc = TxFrequencyProc
        self.SegmentIdentifier = SegmentIdentifier
        self.ImageFormAlgo = ImageFormAlgo
        self.STBeamComp, self.ImageBeamComp = STBeamComp, ImageBeamComp
        self.AzAutofocus, self.RgAutofocus = AzAutofocus, RgAutofocus
        self.Processings = Processings
        self.PolarizationCalibration = PolarizationCalibration
        super(ImageFormationType, self).__init__(**kwargs)
    def _basic_validity_check(self) -> bool:
        condition = super(ImageFormationType, self)._basic_validity_check()
        if self.TStartProc is not None and self.TEndProc is not None and self.TEndProc < self.TStartProc:
            self.log_validity_error(
                'Invalid time processing bounds TStartProc ({}) > TEndProc ({})'.format(
                    self.TStartProc, self.TEndProc))
            condition = False
        return condition
    def _derive_tx_frequency_proc(self, RadarCollection):
        """
        Populate a default for processed frequency values, based on the assumption that the entire
        transmitted bandwidth was processed. This is expected to be called by SICD parent.
        Parameters
        ----------
        RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
        Returns
        -------
        None
        """
        if RadarCollection is not None and RadarCollection.TxFrequency is not None and \
                RadarCollection.TxFrequency.Min is not None and RadarCollection.TxFrequency.Max is not None:
            # this is based on the assumption that the entire transmitted bandwidth was processed.
            if self.TxFrequencyProc is None:
                self.TxFrequencyProc = TxFrequencyProcType(
                    MinProc=RadarCollection.TxFrequency.Min, MaxProc=RadarCollection.TxFrequency.Max)
            # how would it make sense to set only one end?
            elif self.TxFrequencyProc.MinProc is None:
                self.TxFrequencyProc.MinProc = RadarCollection.TxFrequency.Min
            elif self.TxFrequencyProc.MaxProc is None:
                self.TxFrequencyProc.MaxProc = RadarCollection.TxFrequency.Max
    def _apply_reference_frequency(self, reference_frequency: float):
        """
        If the reference frequency is used, adjust the necessary fields accordingly.
        Expected to be called by SICD parent.
        Parameters
        ----------
        reference_frequency : float
            The reference frequency.
        Returns
        -------
        None
        """
        if self.TxFrequencyProc is not None:
            # noinspection PyProtectedMember
            self.TxFrequencyProc._apply_reference_frequency(reference_frequency)
    def get_polarization(self) -> str:
        """
        Gets the transmit/receive polarization.
        Returns
        -------
        str
        """
        return self.TxRcvPolarizationProc if self.TxRcvPolarizationProc is not None else 'UNKNOWN'
    def get_polarization_abbreviation(self) -> str:
        """
        Gets the transmit/receive polarization abbreviation for the suggested name.
        Returns
        -------
        str
        """
        pol = self.TxRcvPolarizationProc
        if pol is None or pol in ('OTHER', 'UNKNOWN'):
            return 'UN'
        # e.g. 'V:H' -> 'VH', first character of each of transmit/receive
        fp, sp = pol.split(':')
        return fp[0]+sp[0]
    def get_transmit_band_name(self) -> str:
        """
        Gets the transmit band name.
        Returns
        -------
        str
        """
        if self.TxFrequencyProc is not None:
            return self.TxFrequencyProc.get_band_name()
        else:
            return 'UN'
    def version_required(self) -> Tuple[int, int, int]:
        """
        What SICD version is required?
        Returns
        -------
        tuple
        """
        return polstring_version_required(self.TxRcvPolarizationProc)
| 23,582 | 37.098546 | 119 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/validation_checks.py | """
The detailed and involved validity checks for the sicd structure.
Note: These checks were originally implemented in the SICD component objects,
but separating this implementation is probably less confusing in the long run.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import numpy
from scipy.constants import speed_of_light
from sarpy.geometry import geocoords
from sarpy.geometry.geometry_elements import LinearRing
##############
# RgAzComp image formation parameter checks
def _rgazcomp_check_kaz_poly(
        RgAzComp,
        Timeline,
        Grid,
        SCPCOA,
        look,
        ARP_Vel) -> bool:
    """
    Check that the populated RgAzComp.KazPoly agrees with the polynomial
    derived from the (single set) IPP polynomial and the COA geometry.

    Parameters
    ----------
    RgAzComp : sarpy.io.complex.sicd_elements.RgAzComp.RgAzCompType
    Timeline : sarpy.io.complex.sicd_elements.Timeline.TimelineType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType
    SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
    look : int
        Look direction sign, as derived by the caller.
    ARP_Vel : numpy.ndarray
        Aperture reference point velocity at COA (ECF).

    Returns
    -------
    bool
        False if an inconsistency was detected and logged, True otherwise
        (including when the check is not applicable).
    """
    cond = True
    # this derivation is only defined when there is exactly one IPP set
    if Timeline.IPP is not None and len(Timeline.IPP) == 1:
        try:
            # slow time rate (first derivative of the IPP polynomial) at the COA time
            st_rate_coa = Timeline.IPP[0].IPPPoly.derivative_eval(SCPCOA.SCPTime, der_order=1)
            # range spatial frequency at COA, including any DeltaKCOA offset
            if Grid.Row.DeltaKCOAPoly is not None:
                krg_coa = Grid.Row.KCtr + Grid.Row.DeltaKCOAPoly.get_array(dtype='float64')
            else:
                krg_coa = Grid.Row.KCtr
            delta_kaz_per_deltav = look * krg_coa * numpy.linalg.norm(ARP_Vel) * numpy.sin(
                numpy.deg2rad(SCPCOA.DopplerConeAng)) / (SCPCOA.SlantRange * st_rate_coa)
            # scale the IPP polynomial into the expected Kaz polynomial - the scale
            # may be an array (when DeltaKCOAPoly is populated) or a scalar
            if isinstance(delta_kaz_per_deltav, numpy.ndarray):
                derived_kaz_poly = delta_kaz_per_deltav.dot(Timeline.IPP[0].IPPPoly.get_array(dtype='float64'))
            else:
                derived_kaz_poly = delta_kaz_per_deltav * Timeline.IPP[0].IPPPoly.get_array(dtype='float64')
            kaz_populated = RgAzComp.KazPoly.get_array(dtype='float64')
            if numpy.linalg.norm(kaz_populated - derived_kaz_poly) > 1e-3:
                RgAzComp.log_validity_error(
                    'Timeline.IPP has one element, the RgAzComp.KazPoly populated as\n\t{}\n\t'
                    'but the expected value is\n\t{}'.format(kaz_populated, derived_kaz_poly))
                cond = False
        except (AttributeError, ValueError, TypeError):
            # incompletely populated metadata - nothing to check
            pass
    return cond
def _rgazcomp_check_row_deltakcoa(
        RgAzComp,
        Grid,
        RadarCollection,
        ImageFormation) -> bool:
    """
    Check that the Grid.Row.DeltaKCOAPoly (when populated) is constant, and
    consistent with Grid.Row.KCtr and the processed center frequency.

    Parameters
    ----------
    RgAzComp : sarpy.io.complex.sicd_elements.RgAzComp.RgAzCompType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType
    RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
    ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType

    Returns
    -------
    bool
        False if an inconsistency was detected and logged, True otherwise.
    """
    if Grid.Row.DeltaKCOAPoly is None:
        return True
    cond = True
    row_deltakcoa = Grid.Row.DeltaKCOAPoly.get_array(dtype='float64')
    # for RGAZCOMP image formation the row DeltaKCOA must be constant
    if numpy.any(row_deltakcoa != row_deltakcoa[0, 0]):
        RgAzComp.log_validity_error(
            'Grid.Row.DeltaKCOAPoly is defined, '
            'but all entries are not constant\n\t{}'.format(row_deltakcoa))
        cond = False
    # the absolute comparison is only meaningful without a reference frequency offset
    if RadarCollection.RefFreqIndex is None:
        try:
            fc_proc = ImageFormation.TxFrequencyProc.center_frequency
            # spatial frequency corresponding to the processed center frequency
            k_f_c = fc_proc*2/speed_of_light
            if row_deltakcoa.shape == (1, 1):
                if abs(Grid.Row.KCtr - (k_f_c - row_deltakcoa[0, 0])) > 1e-6:
                    RgAzComp.log_validity_error(
                        'the Grid.Row.DeltaCOAPoly is scalar, '
                        'and not in agreement with Grid.Row.KCtr and center frequency')
                    cond = False
            else:
                if abs(Grid.Row.KCtr - k_f_c) > 1e-6:
                    RgAzComp.log_validity_error(
                        'the Grid.Row.DeltaCOAPoly is not scalar, '
                        'and Grid.Row.KCtr not in agreement with center frequency')
                    cond = False
        except (AttributeError, ValueError, TypeError):
            # incompletely populated metadata - nothing to check
            pass
    return cond
def _rgazcomp_check_col_deltacoa(
RgAzComp,
Grid) -> bool:
"""
Parameters
----------
RgAzComp : sarpy.io.complex.sicd_elements.RgAzComp.RgAzCompType
Grid : sarpy.io.complex.sicd_elements.Grid.GridType
Returns
-------
bool
"""
cond = True
if Grid.Col.DeltaKCOAPoly is None:
if Grid.Col.KCtr != 0:
RgAzComp.log_validity_error(
'the Grid.Col.DeltaKCOAPoly is not defined, '
'and Grid.Col.KCtr is non-zero.')
cond = False
else:
col_deltakcoa = Grid.Col.DeltaKCOAPoly.get_array(dtype='float64')
if numpy.any(col_deltakcoa != col_deltakcoa[0, 0]):
RgAzComp.log_validity_error(
'the Grid.Col.DeltaKCOAPoly is defined, '
'but all entries are not constant\n\t{}'.format(col_deltakcoa))
cond = False
if col_deltakcoa.shape == (1, 1) and abs(Grid.Col.KCtr + col_deltakcoa[0, 0]) > 1e-6:
RgAzComp.log_validity_error(
'the Grid.Col.DeltaCOAPoly is scalar, '
'and not in agreement with Grid.Col.KCtr')
cond = False
return cond
def _rgazcomp_checks(the_sicd) -> bool:
    """
    Perform the RgAzComp structure validation checks - grid metadata, unit
    vectors, AzSF, and the various polynomial consistency checks.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
        False if any inconsistency was detected and logged, True otherwise.
    """
    RgAzComp = the_sicd.RgAzComp
    Grid = the_sicd.Grid
    GeoData = the_sicd.GeoData
    SCPCOA = the_sicd.SCPCOA
    cond = True
    if Grid.ImagePlane != 'SLANT':
        the_sicd.log_validity_error(
            'The image formation algorithm is RGAZCOMP,\n\t'
            'and Grid.ImagePlane is populated as "{}",\n\t'
            'but should be "SLANT"'.format(Grid.ImagePlane))
        cond = False
    if Grid.Type != 'RGAZIM':
        the_sicd.log_validity_error(
            'The image formation algorithm is RGAZCOMP, and Grid.Type is populated as "{}",\n\t'
            'but should be "RGAZIM"'.format(Grid.Type))
        cond = False
    try:
        # derive the slant-plane geometry at COA from the populated positions
        SCP = GeoData.SCP.ECF.get_array(dtype='float64')
        row_uvect = Grid.Row.UVectECF.get_array(dtype='float64')
        col_uvect = Grid.Col.UVectECF.get_array(dtype='float64')
        ARP_Pos = SCPCOA.ARPPos.get_array(dtype='float64')
        ARP_Vel = SCPCOA.ARPVel.get_array(dtype='float64')
        # unit vector from the ARP towards the SCP (range direction)
        uRG = SCP - ARP_Pos
        uRG /= numpy.linalg.norm(uRG)
        # look direction sign from position/velocity handedness
        left = numpy.cross(ARP_Pos / numpy.linalg.norm(ARP_Pos), ARP_Vel / numpy.linalg.norm(ARP_Vel))
        look = numpy.sign(numpy.dot(left, uRG))
        # slant plane unit normal and the implied column unit vector
        Spn = -look * numpy.cross(uRG, ARP_Vel)
        uSpn = Spn / numpy.linalg.norm(Spn)
        derived_col_vec = numpy.cross(uSpn, uRG)
    except (AttributeError, ValueError, TypeError):
        # incompletely populated metadata - the remaining checks cannot be performed
        return cond
    derived_AzSF = -look * numpy.sin(numpy.deg2rad(SCPCOA.DopplerConeAng)) / SCPCOA.SlantRange
    if abs(RgAzComp.AzSF - derived_AzSF) > 1e-6:
        RgAzComp.log_validity_error(
            'AzSF is populated as {}, '
            'but expected value is {}'.format(RgAzComp.AzSF, derived_AzSF))
        cond = False
    if numpy.linalg.norm(uRG - row_uvect) > 1e-3:
        RgAzComp.log_validity_error(
            'Grid.Row.UVectECF is populated as \n\t{}\n\t'
            'which should agree with the unit range vector\n\t{}'.format(row_uvect, uRG))
        cond = False
    if numpy.linalg.norm(derived_col_vec - col_uvect) > 1e-3:
        RgAzComp.log_validity_error(
            'Grid.Col.UVectECF is populated as \n\t{}\n\t'
            'which should agree with derived vector\n\t{}'.format(col_uvect, derived_col_vec))
        cond = False
    cond &= _rgazcomp_check_kaz_poly(RgAzComp, the_sicd.Timeline, Grid, SCPCOA, look, ARP_Vel)
    cond &= _rgazcomp_check_row_deltakcoa(RgAzComp, Grid, the_sicd.RadarCollection, the_sicd.ImageFormation)
    cond &= _rgazcomp_check_col_deltacoa(RgAzComp, Grid)
    return cond
##############
# PFA image formation parameter checks
def _pfa_check_kaz_krg(PFA, Grid) -> bool:
"""
Check the validity of the Kaz and Krg values.
Parameters
----------
PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
Grid : sarpy.io.complex.sicd_elements.Grid.GridType
Returns
-------
bool
"""
cond = True
if PFA.STDeskew is None or not PFA.STDeskew.Applied:
try:
if PFA.Kaz2 - Grid.Col.KCtr > 0.5/Grid.Col.SS + 1e-10:
PFA.log_validity_error(
'PFA.Kaz2 - Grid.Col.KCtr ({}) > 0.5/Grid.Col.SS ({})'.format(
PFA.Kaz2 - Grid.Col.KCtr, 0.5/Grid.Col.SS))
cond = False
except (AttributeError, TypeError, ValueError):
pass
try:
if PFA.Kaz1 - Grid.Col.KCtr < -0.5/Grid.Col.SS - 1e-10:
PFA.log_validity_error(
'PFA.Kaz1 - Grid.Col.KCtr ({}) < -0.5/Grid.Col.SS ({})'.format(
PFA.Kaz1 - Grid.Col.KCtr, -0.5/Grid.Col.SS))
cond = False
except (AttributeError, TypeError, ValueError):
pass
try:
if PFA.Krg2 - Grid.Row.KCtr > 0.5/Grid.Row.SS + 1e-10:
PFA.log_validity_error(
'PFA.Krg2 - Grid.Row.KCtr ({}) > 0.5/Grid.Row.SS ({})'.format(
PFA.Krg2 - Grid.Row.KCtr, 0.5/Grid.Row.SS))
cond = False
except (AttributeError, TypeError, ValueError):
pass
try:
if PFA.Krg1 - Grid.Row.KCtr < -0.5/Grid.Row.SS - 1e-10:
PFA.log_validity_error(
'PFA.Krg1 - Grid.Row.KCtr ({}) < -0.5/Grid.Row.SS ({})'.format(
PFA.Krg1 - Grid.Row.KCtr, -0.5/Grid.Row.SS))
cond = False
except (AttributeError, TypeError, ValueError):
pass
try:
if Grid.Row.ImpRespBW > (PFA.Krg2 - PFA.Krg1) + 1e-10:
PFA.log_validity_error(
'Grid.Row.ImpRespBW ({}) > PFA.Krg2 - PFA.Krg1 ({})'.format(
Grid.Row.ImpRespBW, PFA.Krg2 - PFA.Krg1))
cond = False
except (AttributeError, TypeError, ValueError):
pass
try:
if abs(Grid.Col.KCtr) > 1e-5 and abs(Grid.Col.KCtr - 0.5*(PFA.Kaz2 + PFA.Kaz1)) > 1e-5:
PFA.log_validity_error(
'Grid.Col.KCtr ({}) not within 1e-5 of 0.5*(PFA.Kaz2 + PFA.Kaz1) ({})'.format(
Grid.Col.KCtr, 0.5*(PFA.Kaz2 + PFA.Kaz1)))
cond = False
except (AttributeError, TypeError, ValueError):
pass
return cond
def _pfa_check_polys(PFA, Position, Timeline, SCP) -> bool:
    """
    Check that the PolarAngPoly and SpatialFreqSFPoly evaluations agree with
    the polar coordinates computed directly from the collection geometry.

    Parameters
    ----------
    PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
    Position : sarpy.io.complex.sicd_elements.Position.PositionType
    Timeline : sarpy.io.complex.sicd_elements.Timeline.TimelineType
    SCP : numpy.ndarray
        Scene center point (ECF).

    Returns
    -------
    bool
        False if either polynomial disagrees with the direct calculation.
    """
    cond = True
    # sample at least 40 times across the collect (more for high-order polynomials)
    num_samples = max(PFA.PolarAngPoly.Coefs.size, 40)
    times = numpy.linspace(0, Timeline.CollectDuration, num_samples)
    k_a, k_sf = PFA.pfa_polar_coords(Position, SCP, times)
    if k_a is None:
        # the polar coordinates could not be computed - nothing to check
        return True
    # check for agreement with k_a and k_sf derived from the polynomials
    k_a_derived = PFA.PolarAngPoly(times)
    k_sf_derived = PFA.SpatialFreqSFPoly(k_a)
    k_a_diff = numpy.amax(numpy.abs(k_a_derived - k_a))
    k_sf_diff = numpy.amax(numpy.abs(k_sf_derived - k_sf))
    if k_a_diff > 5e-3:
        PFA.log_validity_error(
            'the PolarAngPoly evaluated values do not agree with actual calculated values')
        cond = False
    if k_sf_diff > 5e-3:
        PFA.log_validity_error(
            'the SpatialFreqSFPoly evaluated values do not agree with actual calculated values')
        cond = False
    return cond
def _pfa_check_uvects(PFA, Position, Grid, SCP) -> bool:
    """
    Check that the Grid row/column unit vectors agree with those derived from
    the PFA image and focus plane normals.

    Parameters
    ----------
    PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
    Position : sarpy.io.complex.sicd_elements.Position.PositionType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType
    SCP : numpy.ndarray
        Scene center point (ECF).

    Returns
    -------
    bool
        False if either unit vector disagrees with the derived value.
    """
    if PFA.IPN is None or PFA.FPN is None:
        return True
    cond = True
    ipn = PFA.IPN.get_array(dtype='float64')
    fpn = PFA.FPN.get_array(dtype='float64')
    row_uvect = Grid.Row.UVectECF.get_array(dtype='float64')
    col_uvect = Grid.Col.UVectECF.get_array(dtype='float64')
    # project the ARP position at the polar angle reference time along the
    # focus plane normal onto the image plane containing the SCP
    pol_ref_point = Position.ARPPoly(PFA.PolarAngRefTime)
    offset = (SCP - pol_ref_point).dot(ipn)/(fpn.dot(ipn))
    ref_position_ipn = pol_ref_point + offset*fpn
    # the row unit vector should point from the SCP towards that reference position
    slant_range = ref_position_ipn - SCP
    u_slant_range = slant_range/numpy.linalg.norm(slant_range)
    derived_row_vector = -u_slant_range
    if numpy.linalg.norm(derived_row_vector - row_uvect) > 1e-3:
        PFA.log_validity_error(
            'the Grid.Row.UVectECF ({}) is not in good agreement with\n\t'
            'the expected value derived from PFA parameters ({})'.format(row_uvect, derived_row_vector))
        cond = False
    # the column unit vector completes the right-handed in-plane system
    derived_col_vector = numpy.cross(ipn, derived_row_vector)
    if numpy.linalg.norm(derived_col_vector - col_uvect) > 1e-3:
        PFA.log_validity_error(
            'the Grid.Col.UVectECF ({}) is not in good agreement with\n\t'
            'the expected value derived from the PFA parameters ({})'.format(col_uvect, derived_col_vector))
        cond = False
    return cond
def _pfa_check_stdeskew(PFA, Grid) -> bool:
    """
    When slow-time deskew has been applied, check the Grid TimeCOAPoly and the
    agreement between Grid.Row.DeltaKCOAPoly and the STDSPhasePoly.

    Parameters
    ----------
    PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType

    Returns
    -------
    bool
        False if an inconsistency was detected and logged, True otherwise.
    """
    if PFA.STDeskew is None or not PFA.STDeskew.Applied:
        return True
    cond = True
    if Grid.TimeCOAPoly is not None:
        timecoa_poly = Grid.TimeCOAPoly.get_array(dtype='float64')
        # with deskew applied the COA time should vary across the image
        if timecoa_poly.shape == (1, 1) or numpy.all(timecoa_poly.flatten()[1:] < 1e-6):
            PFA.log_validity_error(
                'PFA.STDeskew.Applied is True, and the Grid.TimeCOAPoly is essentially constant.')
            cond = False
    # the Row DeltaKCOAPoly and STDSPhasePoly should be essentially identical
    if Grid.Row is not None and Grid.Row.DeltaKCOAPoly is not None and \
            PFA.STDeskew.STDSPhasePoly is not None:
        stds_phase_poly = PFA.STDeskew.STDSPhasePoly.get_array(dtype='float64')
        delta_kcoa = Grid.Row.DeltaKCOAPoly.get_array(dtype='float64')
        # zero-pad both coefficient arrays to a common shape before comparing
        rows = max(stds_phase_poly.shape[0], delta_kcoa.shape[0])
        cols = max(stds_phase_poly.shape[1], delta_kcoa.shape[1])
        exp_stds_phase_poly = numpy.zeros((rows, cols), dtype='float64')
        exp_delta_kcoa = numpy.zeros((rows, cols), dtype='float64')
        exp_stds_phase_poly[:stds_phase_poly.shape[0], :stds_phase_poly.shape[1]] = stds_phase_poly
        exp_delta_kcoa[:delta_kcoa.shape[0], :delta_kcoa.shape[1]] = delta_kcoa
        if numpy.max(numpy.abs(exp_delta_kcoa - exp_stds_phase_poly)) > 1e-6:
            PFA.log_validity_warning(
                'PFA.STDeskew.Applied is True,\n\t'
                'and the Grid.Row.DeltaKCOAPoly ({}) and PFA.STDeskew.STDSPhasePoly ({})\n\t'
                'are not in good agreement.'.format(delta_kcoa, stds_phase_poly))
            cond = False
    return cond
def _pfa_check_kctr(
PFA,
RadarCollection,
ImageFormation,
Grid) -> bool:
"""
Parameters
----------
PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
Grid : sarpy.io.complex.sicd_elements.Grid.GridType
Returns
-------
bool
"""
if RadarCollection.RefFreqIndex is not None:
return True
cond = True
try:
center_freq = ImageFormation.TxFrequencyProc.center_frequency
kap_ctr = center_freq*PFA.SpatialFreqSFPoly.Coefs[0]*2/speed_of_light
theta = numpy.arctan(0.5*Grid.Col.ImpRespBW/Grid.Row.KCtr) # aperture angle
kctr_total = max(1e-2, 1 - numpy.cos(theta)) # difference between Krg and Kap
if abs(Grid.Row.KCtr/kap_ctr - 1) > kctr_total:
PFA.log_validity_error(
'the Grid.Row.KCtr value ({}) is not in keeping with\n\t'
'the expected derived from PFA parameters ({})'.format(Grid.Row.KCtr, kap_ctr))
cond = False
except (AttributeError, ValueError, TypeError):
pass
return cond
def _pfa_check_image_plane(PFA, Grid, SCPCOA, SCP) -> bool:
    """
    Check that the populated image plane and focus plane normals agree with
    the declared Grid.ImagePlane and the SCP geometry.

    Parameters
    ----------
    PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType
    SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
    SCP : numpy.ndarray
        Scene center point (ECF).

    Returns
    -------
    bool
        False if an inconsistency was detected and logged, True otherwise.
    """
    if PFA.IPN is None or PFA.FPN is None:
        return True
    cond = True
    ipn = PFA.IPN.get_array(dtype='float64')
    fpn = PFA.FPN.get_array(dtype='float64')
    # WGS-84 surface normal at the SCP (Earth Tangent Plane normal)
    ETP = geocoords.wgs_84_norm(SCP)
    if Grid.ImagePlane == 'SLANT':
        try:
            # derive the COA slant plane normal and compare against the populated IPN
            ARP_Pos = SCPCOA.ARPPos.get_array(dtype='float64')
            ARP_Vel = SCPCOA.ARPVel.get_array(dtype='float64')
            uRG = SCP - ARP_Pos
            uRG /= numpy.linalg.norm(uRG)
            left = numpy.cross(ARP_Pos/numpy.linalg.norm(ARP_Pos), ARP_Vel/numpy.linalg.norm(ARP_Vel))
            look = numpy.sign(numpy.dot(left, uRG))
            Spn = -look*numpy.cross(uRG, ARP_Vel)
            uSpn = Spn/numpy.linalg.norm(Spn)
            if numpy.arccos(ipn.dot(uSpn)) > numpy.deg2rad(1):
                PFA.log_validity_error(
                    'the Grid.ImagePlane is "SLANT",\n\t'
                    'but COA slant plane and provided IPN are not within one degree of each other')
                cond = False
        except (AttributeError, ValueError, TypeError):
            # incompletely populated metadata - skip the slant plane comparison
            pass
    elif Grid.ImagePlane == 'GROUND':
        if numpy.arccos(ipn.dot(ETP)) > numpy.deg2rad(3):
            PFA.log_validity_error(
                'the Grid.ImagePlane is "Ground",\n\t'
                'but the Earth Tangent Plane at SCP and provided IPN\n\t'
                'are not within three degrees of each other.')
            cond = False
    # verify that fpn points outwards
    if fpn.dot(SCP) < 0:
        PFA.log_validity_error(
            'the focus plane unit normal does not appear to be outward pointing')
        cond = False
    # check agreement between focus plane and ground plane (warning only)
    if numpy.arccos(fpn.dot(ETP)) > numpy.deg2rad(3):
        PFA.log_validity_warning(
            'the focus plane unit normal is not within three degrees of the earth Tangent Plane')
    return cond
def _pfa_check_polar_angle_consistency(
PFA,
CollectionInfo,
ImageFormation) -> bool:
"""
Parameters
----------
PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
CollectionInfo : sarpy.io.complex.sicd_elements.CollectionInfo.CollectionInfoType
ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
Returns
-------
bool
"""
if CollectionInfo.RadarMode is None or CollectionInfo.RadarMode.ModeType != 'SPOTLIGHT':
return True
cond = True
if PFA.Kaz1 is not None and PFA.Kaz2 is not None and PFA.Krg1 is not None:
polar_angle_bounds = numpy.sort(PFA.PolarAngPoly(numpy.array([ImageFormation.TStartProc, ImageFormation.TEndProc], dtype='float64')))
derived_pol_angle_bounds = numpy.arctan(numpy.array([PFA.Kaz1, PFA.Kaz2], dtype='float64')/PFA.Krg1)
pol_angle_bounds_diff = numpy.rad2deg(numpy.amax(numpy.abs(polar_angle_bounds - derived_pol_angle_bounds)))
if pol_angle_bounds_diff > 0.1:
PFA.log_validity_warning(
'the derived polar angle bounds ({})\n\t'
'are not consistent with the provided ImageFormation processing times\n\t'
'(expected bounds {}).'.format(polar_angle_bounds, derived_pol_angle_bounds))
cond = False
return cond
def _pfa_checks(the_sicd) -> bool:
    """
    Perform the PFA structure validation checks.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    pfa = the_sicd.PFA
    grid = the_sicd.Grid
    scpcoa = the_sicd.SCPCOA

    cond = True
    # PFA image formation should produce an RGAZIM grid
    if grid.Type != 'RGAZIM':
        grid.log_validity_warning(
            'The image formation algorithm is PFA,\n\t'
            'and Grid.Type is populated as "{}",\n\t'
            'but should be "RGAZIM"'.format(grid.Type))
        cond = False
    # the polar angle reference time should coincide with the SCP COA time
    if abs(pfa.PolarAngRefTime - scpcoa.SCPTime) > 1e-6:
        pfa.log_validity_warning(
            'the PFA.PolarAngRefTime ({})\n\t'
            'does not agree with the SCPCOA.SCPTime ({})'.format(
                pfa.PolarAngRefTime, scpcoa.SCPTime))
        cond = False

    # checks which do not require the SCP location
    for check_result in (
            _pfa_check_kaz_krg(pfa, grid),
            _pfa_check_stdeskew(pfa, grid),
            _pfa_check_kctr(pfa, the_sicd.RadarCollection, the_sicd.ImageFormation, grid)):
        cond &= check_result

    try:
        scp = the_sicd.GeoData.SCP.ECF.get_array(dtype='float64')
    except (AttributeError, ValueError, TypeError):
        # the SCP is not available - the remaining checks cannot be performed
        return cond

    # checks which require the SCP location
    for check_result in (
            _pfa_check_polys(pfa, the_sicd.Position, the_sicd.Timeline, scp),
            _pfa_check_uvects(pfa, the_sicd.Position, grid, scp),
            _pfa_check_image_plane(pfa, grid, scpcoa, scp),
            _pfa_check_polar_angle_consistency(pfa, the_sicd.CollectionInfo, the_sicd.ImageFormation)):
        cond &= check_result
    return cond
##############
# RMA image formation parameter checks
def _rma_check_rmat(
        RMA,
        Grid,
        GeoData,
        RadarCollection,
        ImageFormation) -> bool:
    """
    Consistency checks for RMA with image type RMAT (range migration along track).

    Verifies the grid type, the row/column unit vectors, the reference doppler
    cone angle, and the row/column KCtr values against values derived from the
    RMAT reference position and velocity.

    Parameters
    ----------
    RMA : sarpy.io.complex.sicd_elements.RMA.RMAType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType
    GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
    RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
    ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType

    Returns
    -------
    bool
        True if all performed checks pass, False otherwise.
    """

    cond = True
    RMAT = RMA.RMAT
    # RMA/RMAT image formation should produce a cross track/along track grid
    if Grid.Type != 'XCTYAT':
        Grid.log_validity_error(
            'The image formation algorithm is RMA/RMAT, which should yield '
            'Grid.Type == "XCTYAT", but Grid.Type is populated as "{}"'.format(Grid.Type))
        cond = False

    try:
        SCP = GeoData.SCP.ECF.get_array(dtype='float64')
        row_uvect = Grid.Row.UVectECF.get_array(dtype='float64')
        col_uvect = Grid.Col.UVectECF.get_array(dtype='float64')
        position_ref = RMAT.PosRef.get_array(dtype='float64')
        velocity_ref = RMAT.VelRef.get_array(dtype='float64')
        # unit line-of-sight vector from the reference position to the SCP
        LOS = (SCP - position_ref)
        uLOS = LOS/numpy.linalg.norm(LOS)
        # determine the look side from the sign of uLOS against the "left" direction
        left = numpy.cross(
            position_ref/numpy.linalg.norm(position_ref),
            velocity_ref/numpy.linalg.norm(velocity_ref))
        look = numpy.sign(left.dot(uLOS))
        # expected along track unit vector
        uYAT = -look*velocity_ref/numpy.linalg.norm(velocity_ref)
        # slant plane unit normal
        uSPN = numpy.cross(uLOS, uYAT)
        uSPN /= numpy.linalg.norm(uSPN)
        # expected cross track unit vector
        uXCT = numpy.cross(uYAT, uSPN)
    except (AttributeError, ValueError, TypeError):
        # the required parameters are not completely populated - nothing more to check
        return cond

    if numpy.linalg.norm(row_uvect - uXCT) > 1e-3:
        RMAT.log_validity_error(
            'the Grid.Row.UVectECF is populated as {},\n\t'
            'but derived from the RMAT parameters is expected to be {}'.format(row_uvect, uXCT))
        cond = False
    if numpy.linalg.norm(col_uvect - uYAT) > 1e-3:
        RMAT.log_validity_error(
            'the Grid.Col.UVectECF is populated as {},\n\t'
            'but derived from the RMAT parameters is expected to be {}'.format(col_uvect, uYAT))
        cond = False
    # doppler cone angle from the line-of-sight/velocity geometry at the reference
    exp_doppler_cone = numpy.rad2deg(numpy.arccos(uLOS.dot(velocity_ref/numpy.linalg.norm(velocity_ref))))
    if abs(exp_doppler_cone - RMAT.DopConeAngRef) > 1e-6:
        RMAT.log_validity_error(
            'the RMAT.DopConeAngRef is populated as {},\n\t'
            'but derived from the RMAT parameters is expected to be {}'.format(RMAT.DopConeAngRef, exp_doppler_cone))
        cond = False

    if RadarCollection.RefFreqIndex is None:
        # expected spatial frequency centers from the processed center frequency
        center_freq = ImageFormation.TxFrequencyProc.center_frequency
        k_f_c = center_freq*2/speed_of_light
        exp_row_kctr = k_f_c*numpy.sin(numpy.deg2rad(RMAT.DopConeAngRef))
        exp_col_kctr = k_f_c*numpy.cos(numpy.deg2rad(RMAT.DopConeAngRef))
        try:
            if abs(exp_row_kctr/Grid.Row.KCtr - 1) > 1e-3:
                RMAT.log_validity_warning(
                    'the Grid.Row.KCtr is populated as {},\n\t'
                    'and derived from the RMAT parameters is expected to be {}'.format(Grid.Row.KCtr, exp_row_kctr))
                cond = False
            if abs(exp_col_kctr/Grid.Col.KCtr - 1) > 1e-3:
                RMAT.log_validity_warning(
                    'the Grid.Col.KCtr is populated as {},\n\t'
                    'and derived from the RMAT parameters is expected to be {}'.format(Grid.Col.KCtr, exp_col_kctr))
                cond = False
        except (AttributeError, ValueError, TypeError):
            # KCtr values not populated - skip this comparison
            pass
    return cond
def _rma_check_rmcr(
        RMA,
        Grid,
        GeoData,
        RadarCollection,
        ImageFormation) -> bool:
    """
    Consistency checks for RMA with image type RMCR (range migration cross range).

    Verifies the grid type, the row/column unit vectors, the reference doppler
    cone angle, and the row KCtr value against values derived from the RMCR
    reference position and velocity.

    Parameters
    ----------
    RMA : sarpy.io.complex.sicd_elements.RMA.RMAType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType
    GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
    RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
    ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType

    Returns
    -------
    bool
        True if all performed checks pass, False otherwise.
    """

    cond = True
    RMCR = RMA.RMCR
    # RMA/RMCR image formation should produce a range/cross range grid
    if Grid.Type != 'XRGYCR':
        Grid.log_validity_error(
            'The image formation algorithm is RMA/RMCR, which should yield '
            'Grid.Type == "XRGYCR", but Grid.Type is populated as "{}"'.format(Grid.Type))
        cond = False

    try:
        SCP = GeoData.SCP.ECF.get_array(dtype='float64')
        row_uvect = Grid.Row.UVectECF.get_array(dtype='float64')
        col_uvect = Grid.Col.UVectECF.get_array(dtype='float64')
        position_ref = RMCR.PosRef.get_array(dtype='float64')
        velocity_ref = RMCR.VelRef.get_array(dtype='float64')
        # expected range direction - unit vector from reference position to SCP
        uXRG = SCP - position_ref
        uXRG /= numpy.linalg.norm(uXRG)
        # determine the look side from the sign of uXRG against the "left" direction
        left = numpy.cross(
            position_ref/numpy.linalg.norm(position_ref),
            velocity_ref/numpy.linalg.norm(velocity_ref))
        look = numpy.sign(left.dot(uXRG))
        # slant plane unit normal
        uSPN = look*numpy.cross(velocity_ref/numpy.linalg.norm(velocity_ref), uXRG)
        uSPN /= numpy.linalg.norm(uSPN)
        # expected cross range direction
        uYCR = numpy.cross(uSPN, uXRG)
    except (AttributeError, ValueError, TypeError):
        # the required parameters are not completely populated - nothing more to check
        return cond

    if numpy.linalg.norm(row_uvect - uXRG) > 1e-3:
        RMCR.log_validity_error(
            'the Grid.Row.UVectECF is populated as {},\n\t'
            'but derived from the RMCR parameters is expected to be {}'.format(row_uvect, uXRG))
        cond = False
    if numpy.linalg.norm(col_uvect - uYCR) > 1e-3:
        RMCR.log_validity_error(
            'the Grid.Col.UVectECF is populated as {},\n\t'
            'but derived from the RMCR parameters is expected to be {}'.format(col_uvect, uYCR))
        cond = False
    # doppler cone angle from the range/velocity geometry at the reference
    exp_doppler_cone = numpy.rad2deg(numpy.arccos(uXRG.dot(velocity_ref/numpy.linalg.norm(velocity_ref))))
    if abs(exp_doppler_cone - RMCR.DopConeAngRef) > 1e-6:
        RMCR.log_validity_error(
            'the RMCR.DopConeAngRef is populated as {},\n\t'
            'but derived from the RMCR parameters is expected to be {}'.format(RMCR.DopConeAngRef, exp_doppler_cone))
        cond = False
    # the cross range spatial frequency center must be zero for this grid type
    if abs(Grid.Col.KCtr) > 1e-6:
        Grid.log_validity_error(
            'The image formation algorithm is RMA/RMCR,\n\t'
            'but Grid.Col.KCtr is non-zero ({}).'.format(Grid.Col.KCtr))
        cond = False

    if RadarCollection.RefFreqIndex is None:
        # expected row spatial frequency center from the processed center frequency
        center_freq = ImageFormation.TxFrequencyProc.center_frequency
        k_f_c = center_freq*2/speed_of_light
        try:
            if abs(k_f_c/Grid.Row.KCtr - 1) > 1e-3:
                RMCR.log_validity_warning(
                    'the Grid.Row.KCtr is populated as {},\n\t'
                    'and derived from the RMCR parameters is expected to be {}'.format(Grid.Row.KCtr, k_f_c))
                cond = False
        except (AttributeError, ValueError, TypeError):
            # KCtr value not populated - skip this comparison
            pass
    return cond
def _rma_check_inca(
        RMA,
        Grid,
        GeoData,
        RadarCollection,
        CollectionInfo,
        Position) -> bool:
    """
    Consistency checks for RMA with image type INCA (imaging near closest approach).

    Verifies the grid type, doppler centroid population as appropriate for the
    radar mode, the DeltaKCOAPoly/DopCentroidPoly/TimeCAPoly relationship, the
    FreqZero and KCtr values, the row/column unit vectors, R_CA_SCP, and the
    constant term of DRateSFPoly.

    Parameters
    ----------
    RMA : sarpy.io.complex.sicd_elements.RMA.RMAType
    Grid : sarpy.io.complex.sicd_elements.Grid.GridType
    GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
    RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
    CollectionInfo : sarpy.io.complex.sicd_elements.CollectionInfo.CollectionInfoType
    Position : sarpy.io.complex.sicd_elements.Position.PositionType

    Returns
    -------
    bool
        True if all performed checks pass, False otherwise.
    """

    cond = True
    INCA = RMA.INCA
    # RMA/INCA image formation should produce a range zero doppler grid
    if Grid.Type != 'RGZERO':
        Grid.log_validity_warning(
            'The image formation algorithm is RMA/INCA, which should yield '
            'Grid.Type == "RGZERO", but Grid.Type is populated as "{}"'.format(Grid.Type))
        cond = False

    # the doppler centroid fields apply only to non-SPOTLIGHT collects
    if CollectionInfo.RadarMode.ModeType == 'SPOTLIGHT':
        if INCA.DopCentroidPoly is not None:
            INCA.log_validity_error(
                'the CollectionInfo.RadarMode.ModeType == "SPOTLIGHT",\n\t'
                'and INCA.DopCentroidPoly is populated.')
            cond = False
        if INCA.DopCentroidCOA is True:
            INCA.log_validity_error(
                'the CollectionInfo.RadarMode.ModeType == "SPOTLIGHT",\n\t'
                'and INCA.DopCentroidCOA == True.')
            cond = False
    else:
        if INCA.DopCentroidPoly is None:
            INCA.log_validity_error(
                'the CollectionInfo.RadarMode.ModeType == "{}",\n\t'
                'and INCA.DopCentroidPoly is not populated.'.format(CollectionInfo.RadarMode.ModeType))
            cond = False
        if INCA.DopCentroidCOA is not True:
            INCA.log_validity_error(
                'the CollectionInfo.RadarMode.ModeType == "{}",\n\t'
                'and INCA.DopCentroidCOA is not True.'.format(CollectionInfo.RadarMode.ModeType))
            cond = False
        # Grid.Col.DeltaKCOAPoly should equal DopCentroidPoly scaled by the
        # linear TimeCAPoly coefficient - compare after padding both
        # coefficient arrays to a common shape
        if Grid.Col.DeltaKCOAPoly is not None and INCA.DopCentroidPoly is not None:
            col_deltakcoa = Grid.Col.DeltaKCOAPoly.get_array(dtype='float64')
            dop_centroid = INCA.DopCentroidPoly.get_array(dtype='float64')
            rows = max(col_deltakcoa.shape[0], dop_centroid.shape[0])
            cols = max(col_deltakcoa.shape[1], dop_centroid.shape[1])
            exp_deltakcoa1 = numpy.zeros((rows, cols), dtype='float64')
            exp_deltakcoa2 = numpy.zeros((rows, cols), dtype='float64')
            exp_deltakcoa1[:col_deltakcoa.shape[0], :col_deltakcoa.shape[1]] = col_deltakcoa
            exp_deltakcoa2[:dop_centroid.shape[0], :dop_centroid.shape[1]] = dop_centroid*INCA.TimeCAPoly[1]
            if numpy.max(numpy.abs(exp_deltakcoa1 - exp_deltakcoa2)) > 1e-6:
                INCA.log_validity_error(
                    'the Grid.Col.DeltaKCOAPoly ({}),\n\t'
                    'INCA.DopCentroidPoly ({}), and INCA.TimeCAPoly ({}) '
                    'are inconsistent.'.format(col_deltakcoa,
                                               dop_centroid,
                                               INCA.TimeCAPoly.get_array(dtype='float64')))
                cond = False

    # FreqZero should agree with the center transmit frequency
    center_freq = RadarCollection.TxFrequency.center_frequency
    if abs(center_freq/INCA.FreqZero - 1) > 1e-5:
        INCA.log_validity_error(
            'the INCA.FreqZero ({}) should typically agree with center '
            'transmit frequency ({})'.format(INCA.FreqZero, center_freq))
        cond = False
    # the column spatial frequency center must be zero for this grid type
    if abs(Grid.Col.KCtr) > 1e-8:
        Grid.log_validity_error(
            'The image formation algorithm is RMA/INCA, but Grid.Col.KCtr is '
            'non-zero ({})'.format(Grid.Col.KCtr))
        cond = False
    if RadarCollection.RefFreqIndex is None:
        # the row spatial frequency center is determined by FreqZero
        exp_row_kctr = INCA.FreqZero*2/speed_of_light
        if abs(exp_row_kctr/Grid.Row.KCtr - 1) > 1e-8:
            INCA.log_validity_error(
                'the Grid.Row.KCtr is populated as ({}),\n\t'
                'which is not consistent with INCA.FreqZero ({})'.format(Grid.Row.KCtr, INCA.FreqZero))
            cond = False

    try:
        SCP = GeoData.SCP.ECF.get_array(dtype='float64')
        row_uvect = Grid.Row.UVectECF.get_array(dtype='float64')
        col_uvect = Grid.Col.UVectECF.get_array(dtype='float64')
        # platform position/velocity at the SCP closest approach time
        scp_time = INCA.TimeCAPoly[0]
        ca_pos = Position.ARPPoly(scp_time)
        ca_vel = Position.ARPPoly.derivative_eval(scp_time, der_order=1)
        # expected range direction - unit vector from closest approach position to SCP
        RG = SCP - ca_pos
        uRG = RG/numpy.linalg.norm(RG)
        # determine the look side from the sign of uRG against the "left" direction
        left = numpy.cross(ca_pos/numpy.linalg.norm(ca_pos), ca_vel/numpy.linalg.norm(ca_vel))
        look = numpy.sign(left.dot(uRG))
        # slant plane unit normal and expected azimuth direction
        uSPN = -look*numpy.cross(uRG, ca_vel)
        uSPN /= numpy.linalg.norm(uSPN)
        uAZ = numpy.cross(uSPN, uRG)
    except (AttributeError, ValueError, TypeError):
        # the required parameters are not completely populated - nothing more to check
        return cond

    if numpy.linalg.norm(row_uvect - uRG) > 1e-3:
        INCA.log_validity_error(
            'the Grid.Row.UVectECF is populated as {},\n\t'
            'but derived from the INCA parameters is expected to be {}'.format(row_uvect, uRG))
        cond = False
    if numpy.linalg.norm(col_uvect - uAZ) > 1e-3:
        INCA.log_validity_error(
            'the Col.UVectECF is populated as {},\n\t'
            'but derived from the INCA parameters is expected to be {}'.format(col_uvect, uAZ))
        cond = False
    # range at closest approach for the SCP
    exp_R_CA_SCP = numpy.linalg.norm(RG)
    if abs(exp_R_CA_SCP - INCA.R_CA_SCP) > 1e-2:
        INCA.log_validity_error(
            'the INCA.R_CA_SCP is populated as {},\n\t'
            'but derived from the INCA parameters is expected to be {}'.format(INCA.R_CA_SCP, exp_R_CA_SCP))
        cond = False
    # constant doppler rate scale factor from the time and speed at closest approach
    drate_const = INCA.DRateSFPoly[0, 0]
    exp_drate_const = 1./abs(INCA.TimeCAPoly[1]*numpy.linalg.norm(ca_vel))
    if abs(exp_drate_const - drate_const) > 1e-3:
        INCA.log_validity_error(
            'the populated INCA.DRateSFPoly constant term ({})\n\t'
            'and expected constant term ({}) are not consistent.'.format(drate_const, exp_drate_const))
        cond = False
    return cond
def _rma_checks(the_sicd) -> bool:
    """
    Perform the RMA structure validation checks, dispatching on the
    populated RMA image type.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    rma = the_sicd.RMA
    image_type = rma.ImageType
    if image_type == 'RMAT':
        return _rma_check_rmat(
            rma, the_sicd.Grid, the_sicd.GeoData,
            the_sicd.RadarCollection, the_sicd.ImageFormation)
    if image_type == 'RMCR':
        return _rma_check_rmcr(
            rma, the_sicd.Grid, the_sicd.GeoData,
            the_sicd.RadarCollection, the_sicd.ImageFormation)
    if image_type == 'INCA':
        return _rma_check_inca(
            rma, the_sicd.Grid, the_sicd.GeoData,
            the_sicd.RadarCollection, the_sicd.CollectionInfo, the_sicd.Position)
    # no recognized image type is populated - nothing to check
    return True
##############
# SICD checks
def _validate_scp_time(the_sicd) -> bool:
"""
Validate the SCPTime.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.SCPCOA is None or the_sicd.SCPCOA.SCPTime is None or \
the_sicd.Grid is None or the_sicd.Grid.TimeCOAPoly is None:
return True
cond = True
val1 = the_sicd.SCPCOA.SCPTime
val2 = the_sicd.Grid.TimeCOAPoly[0, 0]
if abs(val1 - val2) > 1e-6:
the_sicd.log_validity_error(
'SCPTime populated as {},\n\t'
'and constant term of TimeCOAPoly populated as {}'.format(val1, val2))
cond = False
return cond
def _validate_image_form_parameters(
        the_sicd,
        alg_type: str) -> bool:
    """
    Validate the image formation parameter specifics.

    Checks that ImageFormation.ImageFormAlgo agrees with the populated
    algorithm structure, verifies that the referenced receive channels are
    consistent with RadarCollection.RcvChannels, and dispatches to the
    algorithm specific checks.

    Note: this may set `the_sicd.ImageFormation.ImageFormAlgo` in place when
    it is missing or does not match the populated structure.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
    alg_type : str
        One of 'RgAzComp', 'PFA', or 'RMA' - the populated algorithm structure.

    Returns
    -------
    bool
    """

    cond = True
    if the_sicd.ImageFormation.ImageFormAlgo is None:
        # ImageFormAlgo is missing - set it to match the populated structure
        the_sicd.log_validity_warning(
            'Image formation algorithm(s) `{}` populated,\n\t'
            'but ImageFormation.ImageFormAlgo was not set.\n\t'
            'ImageFormation.ImageFormAlgo has been set HERE,\n\t'
            'but the incoming structure was incorrect.'.format(alg_type))
        the_sicd.ImageFormation.ImageFormAlgo = alg_type.upper()
        cond = False
    elif the_sicd.ImageFormation.ImageFormAlgo == 'OTHER':
        # "OTHER" is permitted, but the algorithm specific checks may not apply
        the_sicd.log_validity_warning(
            'Image formation algorithm `{0:s}` populated,\n\t'
            'but ImageFormation.ImageFormAlgo populated as `OTHER`.\n\t'
            'Possibly non-applicable validity checks will '
            'be performed using the `{0:s}` object'.format(alg_type))
    elif the_sicd.ImageFormation.ImageFormAlgo != alg_type:
        # mismatch - correct ImageFormAlgo to match the populated structure
        the_sicd.log_validity_warning(
            'Image formation algorithm {} populated,\n\t'
            'but ImageFormation.ImageFormAlgo populated as {}.\n\t'
            'ImageFormation.ImageFormAlgo has been set properly HERE,\n\t'
            'but the incoming structure was incorrect.'.format(alg_type, the_sicd.ImageFormation.ImageFormAlgo))
        the_sicd.ImageFormation.ImageFormAlgo = alg_type.upper()
        cond = False

    # verify that the referenced received channels are consistent with radar collection
    if the_sicd.ImageFormation.RcvChanProc is not None:
        channels = the_sicd.ImageFormation.RcvChanProc.ChanIndices
        if channels is None or len(channels) < 1:
            the_sicd.ImageFormation.RcvChanProc.log_validity_error('No ChanIndex values populated')
            cond = False
        else:
            rcv_channels = the_sicd.RadarCollection.RcvChannels
            if rcv_channels is None:
                the_sicd.ImageFormation.RcvChanProc.log_validity_error(
                    'Some ChanIndex values are populated,\n\t'
                    'but no RadarCollection.RcvChannels is populated.')
                cond = False
            else:
                # each index is one-based into the RcvChannels collection
                for i, entry in enumerate(channels):
                    if not (0 < entry <= len(rcv_channels)):
                        the_sicd.ImageFormation.RcvChanProc.log_validity_error(
                            'ChanIndex entry {} is populated as {},\n\tbut must be in '
                            'the range [1, {}]'.format(i, entry, len(rcv_channels)))
                        cond = False
    if the_sicd.Grid is None:
        # the algorithm specific checks all require the Grid structure
        return cond

    # dispatch to the algorithm specific checks
    if alg_type == 'RgAzComp':
        cond &= _rgazcomp_checks(the_sicd)
    elif alg_type == 'PFA':
        cond &= _pfa_checks(the_sicd)
    elif alg_type == 'RMA':
        cond &= _rma_checks(the_sicd)
    elif the_sicd.ImageFormation.ImageFormAlgo == 'OTHER':
        the_sicd.log_validity_warning(
            'Image formation algorithm populated as "OTHER", which inherently limits SICD analysis capability')
        cond = False
    return cond
def _validate_image_formation(the_sicd) -> bool:
    """
    Validate the image formation - exactly one of the RgAzComp, PFA, or RMA
    structures should be populated, in agreement with ImageFormAlgo.

    Note: this may set `the_sicd.ImageFormation.ImageFormAlgo` in place when
    no algorithm structure is populated and ImageFormAlgo is missing.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    image_formation = the_sicd.ImageFormation
    if image_formation is None:
        the_sicd.log_validity_error(
            'ImageFormation attribute is not populated, and ImageFormType is {}. This '
            'cannot be valid.'.format(the_sicd.ImageFormType))
        return False  # nothing more to be done.

    populated_algs = [
        alg for alg in ('RgAzComp', 'PFA', 'RMA') if getattr(the_sicd, alg) is not None]

    if len(populated_algs) > 1:
        the_sicd.log_validity_error(
            'ImageFormation.ImageFormAlgo is set as {}, and multiple SICD image formation parameters {} are set.\n\t'
            'Only one image formation algorithm should be set, and ImageFormation.ImageFormAlgo '
            'should match.'.format(image_formation.ImageFormAlgo, populated_algs))
        return False

    if not populated_algs:
        algo = image_formation.ImageFormAlgo
        if algo is None:
            the_sicd.log_validity_warning(
                'ImageFormation.ImageFormAlgo is not set, and there is no corresponding\n\t'
                'RgAzComp, PFA, or RMA SICD parameters set. Setting ImageFormAlgo to "OTHER".')
            the_sicd.ImageFormation.ImageFormAlgo = 'OTHER'
            return True
        if algo == 'OTHER':
            the_sicd.log_validity_warning(
                'Image formation algorithm populated as "OTHER", which inherently limits SICD analysis capability')
            return True
        the_sicd.log_validity_error(
            'No RgAzComp, PFA, or RMA SICD parameters populated, but ImageFormation.ImageFormAlgo '
            'is set as {}.'.format(algo))
        return False

    # there is exactly one algorithm type populated
    return _validate_image_form_parameters(the_sicd, populated_algs[0])
def _validate_antenna(the_sicd) -> bool:
"""
Validate the Antenna Node
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.Antenna is None:
return True
valid = True
for branch_name in ['Tx', 'Rcv', 'TwoWay']:
branch = getattr(the_sicd.Antenna, branch_name)
if branch is None:
continue
for pattern_name in ['Array', 'Elem']:
pattern = getattr(branch, pattern_name)
if pattern is not None:
if pattern.GainPoly is not None:
gbs = pattern.GainPoly(0, 0)
if gbs != 0.0:
branch.log_validity_error(f'The constant coefficient of the {branch_name}.{pattern_name}.GainPoly is {gbs} and should be 0.0')
valid = False
if pattern.PhasePoly is not None:
pbs = pattern.PhasePoly(0, 0)
if pbs != 0.0:
branch.log_validity_error(f'The constant coefficient of the {branch_name}.{pattern_name}.PhasePoly is {pbs} and should be 0.0')
valid = False
if branch.GainBSPoly is not None:
gz = branch.GainBSPoly(0)
if gz != 0.0:
branch.log_validity_error(f'The constant coefficient of the {branch_name}.GainBSPoly is {gz} and should be 0.0')
return valid
def _validate_ippsets(the_sicd) -> bool:
"""
Validate IPP sets
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.Timeline is None:
return True
if the_sicd.Timeline.IPP is None:
return True
ippsets = the_sicd.Timeline.IPP
tstarts = [x.TStart for x in ippsets]
tends = [x.TEnd for x in ippsets]
ippstarts = [x.IPPStart for x in ippsets]
ippends = [x.IPPEnd for x in ippsets]
ipppolys = [x.IPPPoly for x in ippsets]
ippstart_from_poly = [round(x.IPPPoly(x.TStart)) for x in ippsets]
ippend_from_poly = [round(x.IPPPoly(x.TEnd) - 1) for x in ippsets]
valid = True
if tstarts != sorted(tstarts):
the_sicd.Timeline.log_validity_error(f'The IPPSets are not in start time order. TStart: {tstarts}')
valid = False
if ippstarts != ippstart_from_poly:
the_sicd.Timeline.log_validity_error(f'The IPPSet IPPStart do not match the polynomials. IPPStart: {ippstarts}'
f'IPPPoly(TStart): {ippstart_from_poly}')
if ippends != ippend_from_poly:
the_sicd.Timeline.log_validity_error(f'The IPPSet IPPEnd do not match the polynomials. IPPEnd: {ippends} '
f'IPPPoly(TEnd) - 1: {ippend_from_poly}')
if tends != sorted(tends):
the_sicd.Timeline.log_validity_error(f'The IPPSets are not in end time order. TEnd: {tends}')
valid = False
for iset in range(len(ippsets)):
if tstarts[iset] > tends[iset]:
the_sicd.Timeline.log_validity_error(f'IPPSet[index={iset+1}] ends ({tends[iset]}) '
f'before it starts ({tstarts[iset]}) in time')
valid = False
if ippstarts[iset] > ippends[iset]:
the_sicd.Timeline.log_validity_error(f'IPPSet[index={iset+1}] ends ({ippends[iset]}) '
f'before it starts ({ippstarts[iset]}) in index')
valid = False
prf = ipppolys[iset].derivative_eval((tstarts[iset] + tends[iset])/2)
if prf < 0:
the_sicd.Timeline.log_validity_error(f'IPPSet[index={iset+1}] has a negative PRF: {prf}')
valid = False
if prf > 100e3:
the_sicd.Timeline.log_validity_warning(f'IPPSet[index={iset+1}] has an unreasonable PRF: {prf}')
valid = False
if len(ippsets) > 1:
tgaps = [ts - te for ts, te in zip(tstarts[1:], tends[:-1])]
for ig, g in enumerate(tgaps):
if g > 0:
the_sicd.Timeline.log_validity_error(f'There is a gap between IPPSet[index={ig+1}] and '
f'IPPSet[index={ig+2}] of {g} seconds')
valid = False
if g < 0:
the_sicd.Timeline.log_validity_error(f'There is overlap between IPPSet[index={ig+1}] and '
f'IPPSet[index={ig+2}] of {-g} seconds')
valid = False
igaps = [i_s - i_e for i_s, i_e in zip(ippstarts[1:], ippends[:-1])]
for ig, g in enumerate(igaps):
if g > 1:
the_sicd.Timeline.log_validity_error(f'There is a gap between IPPSet[index={ig+1}] and '
f'IPPSet[index={ig+2}] of {g-1} IPPs')
valid = False
if g < 1:
the_sicd.Timeline.log_validity_error(f'There is overlap between IPPSet[index={ig+1}] and '
f'IPPSet[index={ig+2}] of {1-g} IPPs')
valid = False
return valid
def _validate_acp(the_sicd) -> bool:
    """
    Validate the RadarCollection/Area/Corner/ACP nodes - the corner points
    must enclose a non-trivial area, traversed in clockwise order.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    radar_collection = the_sicd.RadarCollection
    if radar_collection is None \
            or radar_collection.Area is None \
            or radar_collection.Area.Corner is None:
        return False

    corner_points = radar_collection.Area.Corner.get_array(dtype='float64')
    # signed area is 0 for a degenerate ring, negative for counter-clockwise traversal
    ring_area = LinearRing(coordinates=corner_points).get_area()
    if ring_area == 0:
        the_sicd.GeoData.log_validity_error(
            'Corner encloses no area.\n\t'
            '**disregard if crosses the +/-180 boundary')
        return False
    if ring_area < 0:
        the_sicd.GeoData.log_validity_error(
            "Corner must be traversed in clockwise direction.\n\t"
            "**disregard if crosses the +/-180 boundary")
        return False
    return True
def _validate_image_segment_id(the_sicd) -> bool:
"""
Validate the image segment id.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.ImageFormation is None or the_sicd.RadarCollection is None:
return False
# get the segment identifier
seg_id = the_sicd.ImageFormation.SegmentIdentifier
# get the segment list
try:
seg_list = the_sicd.RadarCollection.Area.Plane.SegmentList
except AttributeError:
seg_list = None
if seg_id is None:
if seg_list is not None:
the_sicd.log_validity_error(
'ImageFormation.SegmentIdentifier is not populated, but\n\t'
'RadarCollection.Area.Plane.SegmentList is populated.\n\t'
'ImageFormation.SegmentIdentifier should be set to identify the appropriate segment.')
return False
return True
if seg_list is None:
the_sicd.log_validity_error(
'ImageFormation.SegmentIdentifier is populated as {},\n\t'
'but RadarCollection.Area.Plane.SegmentList is not populated.'.format(seg_id))
return False
# let's double-check that seg_id is sensibly populated
the_ids = [entry.Identifier for entry in seg_list]
if seg_id not in the_ids:
the_sicd.log_validity_error(
'ImageFormation.SegmentIdentifier is populated as {},\n\t'
'but this is not one of the possible identifiers in the\n\t'
'RadarCollection.Area.Plane.SegmentList definition {}.\n\t'
'ImageFormation.SegmentIdentifier should be set to identify the '
'appropriate segment.'.format(seg_id, the_ids))
return False
return True
def _validate_spotlight_mode(the_sicd) -> bool:
"""
Validate the spotlight mode situation.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.CollectionInfo is None or the_sicd.CollectionInfo.RadarMode is None \
or the_sicd.CollectionInfo.RadarMode.ModeType is None:
return True
if the_sicd.Grid is None or the_sicd.Grid.TimeCOAPoly is None:
return True
if the_sicd.CollectionInfo.RadarMode.ModeType == 'SPOTLIGHT' and \
the_sicd.Grid.TimeCOAPoly.Coefs.shape != (1, 1):
the_sicd.log_validity_error(
'CollectionInfo.RadarMode.ModeType is SPOTLIGHT,\n\t'
'but the Grid.TimeCOAPoly is not scalar - {}.\n\t'
'This cannot be valid.'.format(the_sicd.Grid.TimeCOAPoly.Coefs))
return False
elif the_sicd.Grid.TimeCOAPoly.Coefs.shape == (1, 1) and \
the_sicd.CollectionInfo.RadarMode.ModeType != 'SPOTLIGHT':
the_sicd.log_validity_warning(
'The Grid.TimeCOAPoly is scalar,\n\t'
'but the CollectionInfo.RadarMode.ModeType is not SPOTLIGHT - {}.\n\t'
'This is likely not valid.'.format(the_sicd.CollectionInfo.RadarMode.ModeType))
return True
return True
def _validate_valid_data(the_sicd) -> bool:
"""
Check that either both ValidData fields are populated, or neither.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.ImageData is None or the_sicd.GeoData is None:
return True
if the_sicd.ImageData.ValidData is not None and the_sicd.GeoData.ValidData is None:
the_sicd.log_validity_error('ValidData is populated in ImageData, but not GeoData')
return False
if the_sicd.GeoData.ValidData is not None and the_sicd.ImageData.ValidData is None:
the_sicd.log_validity_error('ValidData is populated in GeoData, but not ImageData')
return False
if the_sicd.GeoData.ValidData is not None and the_sicd.ImageData.ValidData is not None:
num_geo_vert = the_sicd.GeoData.ValidData.get_array().shape[0]
num_image_vert = the_sicd.ImageData.ValidData.get_array().shape[-1]
if num_geo_vert != num_image_vert:
the_sicd.log_validity_error('ValidData has different number of vertices in '
f'the ImageData ({num_image_vert}) '
f'and GeoData ({num_geo_vert})')
return True
def _validate_polygons(the_sicd) -> bool:
    """
    Checks that the ImageCorners and ValidData polygons appear to be
    appropriate - finite valued, clockwise oriented, with ValidData contained
    in ImageCorners, and the [0, 0] pixel projecting near the first corner.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
    """

    def orientation(linear_ring, the_name):
        # a non-degenerate clockwise ring has positive signed area
        area = linear_ring.get_area()
        if area == 0:
            the_sicd.GeoData.log_validity_error(
                '{} encloses no area.\n\t'
                '**disregard if crosses the +/-180 boundary'.format(the_name))
            return False
        elif area < 0:
            the_sicd.GeoData.log_validity_error(
                "{} must be traversed in clockwise direction.\n\t"
                "**disregard if crosses the +/-180 boundary".format(the_name))
            return False
        return True

    if the_sicd.GeoData is None:
        return True
    if the_sicd.GeoData.ImageCorners is None:
        return True  # checked elsewhere

    image_corners = the_sicd.GeoData.ImageCorners.get_array(dtype='float64')
    if numpy.any(~numpy.isfinite(image_corners)):
        the_sicd.GeoData.log_validity_error('ImageCorners populated with some infinite or NaN values')
        return False

    value = True
    lin_ring = LinearRing(coordinates=image_corners)
    value &= orientation(lin_ring, 'ImageCorners')

    if the_sicd.GeoData.ValidData is not None:
        valid_data = the_sicd.GeoData.ValidData.get_array(dtype='float64')
        # NOTE(review): a duplicated first/last point logs only a warning yet
        # returns False and skips the remaining checks - confirm intended
        if numpy.all(valid_data[0] == valid_data[-1]):
            the_sicd.GeoData.log_validity_warning(
                'ValidData has the same value for the first and last point')
            return False
        if numpy.any(~numpy.isfinite(valid_data)):
            the_sicd.GeoData.log_validity_error(
                'ValidData populated with some infinite or NaN values')
            value = False
        else:
            value &= orientation(LinearRing(coordinates=valid_data), 'ValidData')
            # every ValidData vertex should lie inside (or on) the ImageCorners ring
            for i, entry in enumerate(valid_data):
                contained = lin_ring.contain_coordinates(entry[0], entry[1])
                close = (lin_ring.get_minimum_distance(entry[:2]) < 1e-7)
                if not (contained or close):
                    the_sicd.GeoData.log_validity_error(
                        'ValidData entry {} is not contained ImageCorners.\n\t'
                        '**disregard if crosses the +/-180 boundary'.format(i))
                    value = False

    if not the_sicd.can_project_coordinates():
        the_sicd.log_validity_warning(
            'This sicd does not permit coordinate projection,\n\t'
            'and image corner points can not be evaluated')
        return False

    # the [0, 0] pixel should project very near the first corner point
    origin_loc = the_sicd.project_image_to_ground_geo([0, 0])
    if numpy.abs(origin_loc[0] - image_corners[0, 0]) > 1e-3 or numpy.abs(origin_loc[1] - image_corners[0, 1]) > 1e-3:
        the_sicd.GeoData.log_validity_error(
            'The pixel coordinate [0, 0] projects to {},\n\t'
            'which is not in good agreement with the first corner point {}'.format(origin_loc, image_corners[0]))
        value = False
    return value
def _validate_polarization(the_sicd) -> bool:
"""
Validate the polarization.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.ImageFormation is None or the_sicd.ImageFormation.TxRcvPolarizationProc is None:
return True
if the_sicd.RadarCollection is None or the_sicd.RadarCollection.RcvChannels is None:
return True
pol_form = the_sicd.ImageFormation.TxRcvPolarizationProc
rcv_pols = [entry.TxRcvPolarization for entry in the_sicd.RadarCollection.RcvChannels]
if pol_form not in rcv_pols:
the_sicd.log_validity_error(
'ImageFormation.TxRcvPolarizationProc is populated as {},\n\t'
'but it not one of the tx/rcv polarizations populated for '
'the collect {}'.format(pol_form, rcv_pols))
return False
return True
def _check_deltak(the_sicd) -> bool:
"""
Checks the deltak parameters.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.Grid is None:
return True
x_coords, y_coords = None, None
try:
valid_vertices = the_sicd.ImageData.get_valid_vertex_data()
if valid_vertices is None:
valid_vertices = the_sicd.ImageData.get_full_vertex_data()
x_coords = the_sicd.Grid.Row.SS * (
valid_vertices[:, 0] - (the_sicd.ImageData.SCPPixel.Row - the_sicd.ImageData.FirstRow))
y_coords = the_sicd.Grid.Col.SS * (
valid_vertices[:, 1] - (the_sicd.ImageData.SCPPixel.Col - the_sicd.ImageData.FirstCol))
except (AttributeError, ValueError):
pass
return the_sicd.Grid.check_deltak(x_coords, y_coords)
def _check_projection(the_sicd) -> None:
"""
Checks the projection ability.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
"""
if not the_sicd.can_project_coordinates():
the_sicd.log_validity_warning(
'No projection can be performed for this SICD.\n'
'In particular, no derived products can be produced.')
def _validate_radiometric(
Radiometric,
Grid,
SCPCOA) -> bool:
"""
Validate the radiometric parameters.
Parameters
----------
Radiometric : sarpy.io.complex.sicd_elements.Radiometric.RadiometricType
Grid : sarpy.io.complex.sicd_elements.Grid.GridType
SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
Returns
-------
bool
"""
if Grid is None or Grid.Row is None or Grid.Col is None:
return True
cond = True
area_sp = Grid.get_slant_plane_area()
if Radiometric.RCSSFPoly is not None:
rcs_sf = Radiometric.RCSSFPoly.get_array(dtype='float64')
if Radiometric.BetaZeroSFPoly is not None:
beta_sf = Radiometric.BetaZeroSFPoly.get_array(dtype='float64')
if abs(rcs_sf[0, 0] / (beta_sf[0, 0] * area_sp) - 1) > 5e-2:
Radiometric.log_validity_error(
'The BetaZeroSFPoly and RCSSFPoly are not consistent.')
cond = False
if SCPCOA is not None:
if Radiometric.SigmaZeroSFPoly is not None:
sigma_sf = Radiometric.SigmaZeroSFPoly.get_array(dtype='float64')
mult = area_sp / numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng))
if (rcs_sf[0, 0] / (sigma_sf[0, 0] * mult) - 1) > 5e-2:
Radiometric.log_validity_error('The SigmaZeroSFPoly and RCSSFPoly are not consistent.')
cond = False
if Radiometric.GammaZeroSFPoly is not None:
gamma_sf = Radiometric.GammaZeroSFPoly.get_array(dtype='float64')
mult = area_sp / (numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng)) * numpy.sin(numpy.deg2rad(SCPCOA.GrazeAng)))
if (rcs_sf[0, 0] / (gamma_sf[0, 0] * mult) - 1) > 5e-2:
Radiometric.log_validity_error('The GammaZeroSFPoly and RCSSFPoly are not consistent.')
cond = False
if Radiometric.BetaZeroSFPoly is not None:
beta_sf = Radiometric.BetaZeroSFPoly.get_array(dtype='float64')
if SCPCOA is not None:
if Radiometric.SigmaZeroSFPoly is not None:
sigma_sf = Radiometric.SigmaZeroSFPoly.get_array(dtype='float64')
mult = 1. / numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng))
if (beta_sf[0, 0] / (sigma_sf[0, 0] * mult) - 1) > 5e-2:
Radiometric.log_validity_error('The SigmaZeroSFPoly and BetaZeroSFPoly are not consistent.')
cond = False
if Radiometric.GammaZeroSFPoly is not None:
gamma_sf = Radiometric.GammaZeroSFPoly.get_array(dtype='float64')
mult = 1. / (numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng)) * numpy.sin(numpy.deg2rad(SCPCOA.GrazeAng)))
if (beta_sf[0, 0] / (gamma_sf[0, 0] * mult) - 1) > 5e-2:
Radiometric.log_validity_error('The GammaZeroSFPoly and BetaZeroSFPoly are not consistent.')
cond = False
return cond
def _check_radiometric_recommended(radiometric) -> None:
    """
    Emit warnings for recommended-but-missing fields on the radiometric object.

    Parameters
    ----------
    radiometric : sarpy.io.complex.sicd_elements.Radiometric.RadiometricType
    """
    # Each absent scale-factor polynomial removes one class of RCS measurement.
    sf_attributes = ('RCSSFPoly', 'BetaZeroSFPoly', 'SigmaZeroSFPoly', 'GammaZeroSFPoly')
    for attribute in sf_attributes:
        if getattr(radiometric, attribute) is None:
            radiometric.log_validity_warning(
                'No {} field provided, and associated RCS measurements '
                'will not be possible'.format(attribute))

    noise_level = radiometric.NoiseLevel
    if noise_level is None:
        radiometric.log_validity_warning('No Radiometric.NoiseLevel provided, so noise estimates will not be possible.')
    elif noise_level.NoiseLevelType != 'ABSOLUTE':
        # Relative noise levels cannot be turned into absolute estimates directly.
        radiometric.log_validity_warning(
            'Radiometric.NoiseLevel provided are not ABSOLUTE, so noise estimates '
            'are not easily available.')
def _check_recommended_attributes(the_sicd) -> None:
    """
    Emit warnings/info messages for recommended (but optional) SICD attributes.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
    """
    # NB: message order is part of the observable behavior - keep it stable.
    radiometric = the_sicd.Radiometric
    if radiometric is None:
        the_sicd.log_validity_warning('No Radiometric data provided.')
    else:
        _check_radiometric_recommended(radiometric)

    timeline = the_sicd.Timeline
    if timeline is not None and timeline.IPP is None:
        the_sicd.log_validity_warning(
            'No Timeline.IPP provided, so no PRF/PRI available '
            'for analysis of ambiguities.')

    radar_collection = the_sicd.RadarCollection
    if radar_collection is not None and radar_collection.Area is None:
        the_sicd.log_validity_info(
            'No RadarCollection.Area provided, and some tools prefer using\n\t'
            'a pre-determined output plane for consistent product definition.')

    image_data = the_sicd.ImageData
    if image_data is not None and image_data.ValidData is None:
        the_sicd.log_validity_info(
            'No ImageData.ValidData is defined. It is recommended to populate\n\t'
            'this data, if validity of pixels/areas is known.')

    if radar_collection is not None and radar_collection.RefFreqIndex is not None:
        the_sicd.log_validity_warning(
            'A reference frequency is being used. This may affect the results of\n\t'
            'this validation test, because a number tests could not be performed.')
def detailed_validation_checks(the_sicd) -> bool:
    """
    Run the full suite of detailed SICD validation checks.

    Parameters
    ----------
    the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType

    Returns
    -------
    bool
        True if and only if every check passed.
    """
    # Every check is executed unconditionally (`&=` never short-circuits),
    # so that all validity findings get logged in a single pass.
    checkers = (
        _validate_scp_time, _validate_image_formation, _validate_image_segment_id,
        _validate_spotlight_mode, _validate_valid_data, _validate_polygons,
        _validate_polarization, _check_deltak, _validate_acp, _validate_ippsets,
        _validate_antenna)
    out = True
    for checker in checkers:
        out &= checker(the_sicd)
    if the_sicd.SCPCOA is not None:
        out &= the_sicd.SCPCOA.check_values(the_sicd.GeoData)
    if the_sicd.Radiometric is not None:
        out &= _validate_radiometric(the_sicd.Radiometric, the_sicd.Grid, the_sicd.SCPCOA)
    # These two only log findings; they do not affect the overall result.
    _check_projection(the_sicd)
    _check_recommended_attributes(the_sicd)
    return out
| 64,276 | 36.94392 | 151 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/Timeline.py | """
The TimelineType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List, Union, Optional
from datetime import datetime, date
import numpy
from sarpy.io.xml.base import Serializable, SerializableArray
from sarpy.io.xml.descriptors import FloatDescriptor, IntegerDescriptor, \
DateTimeDescriptor, SerializableDescriptor, SerializableArrayDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import Poly1DType
class IPPSetType(Serializable):
    """
    The Inter-Pulse Parameter array element container.
    """

    # NOTE that this is simply defined as a child class ("Set") of the TimelineType in the SICD standard
    # Defining it at root level clarifies the documentation, and giving it a more descriptive name is
    # appropriate.
    _fields = ('TStart', 'TEnd', 'IPPStart', 'IPPEnd', 'IPPPoly', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    _numeric_format = {'TStart': FLOAT_FORMAT, 'TEnd': FLOAT_FORMAT, }
    # descriptors
    TStart = FloatDescriptor(
        'TStart', _required, strict=DEFAULT_STRICT,
        docstring='IPP start time relative to collection start time, i.e. offsets in seconds.')  # type: float
    TEnd = FloatDescriptor(
        'TEnd', _required, strict=DEFAULT_STRICT,
        docstring='IPP end time relative to collection start time, i.e. offsets in seconds.')  # type: float
    IPPStart = IntegerDescriptor(
        'IPPStart', _required, strict=True, docstring='Starting IPP index for the period described.')  # type: int
    IPPEnd = IntegerDescriptor(
        'IPPEnd', _required, strict=True, docstring='Ending IPP index for the period described.')  # type: int
    IPPPoly = SerializableDescriptor(
        'IPPPoly', Poly1DType, _required, strict=DEFAULT_STRICT,
        docstring='IPP index polynomial coefficients yield IPP index as a function of time.')  # type: Poly1DType
    index = IntegerDescriptor(
        'index', _required, strict=DEFAULT_STRICT, docstring='The element array index.')  # type: int

    def __init__(
            self,
            TStart: float = None,
            TEnd: float = None,
            IPPStart: int = None,
            IPPEnd: int = None,
            IPPPoly: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        TStart : float
        TEnd : float
        IPPStart : int
        IPPEnd : int
        IPPPoly : Poly1DType|numpy.ndarray|list|tuple
        index : int
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TStart, self.TEnd = TStart, TEnd
        self.IPPStart, self.IPPEnd = IPPStart, IPPEnd
        self.IPPPoly = IPPPoly
        self.index = index
        super(IPPSetType, self).__init__(**kwargs)

    def _basic_validity_check(self) -> bool:
        """
        Validate internal consistency of the IPP set: ordering of the time and
        index bounds, and agreement of the index bounds with IPPPoly.

        Returns
        -------
        bool
            False if any validity error was found.
        """
        condition = super(IPPSetType, self)._basic_validity_check()
        if self.TStart >= self.TEnd:
            self.log_validity_error(
                'TStart ({}) >= TEnd ({})'.format(self.TStart, self.TEnd))
            condition = False
        if self.IPPStart >= self.IPPEnd:
            self.log_validity_error(
                'IPPStart ({}) >= IPPEnd ({})'.format(self.IPPStart, self.IPPEnd))
            condition = False
        exp_ipp_start = self.IPPPoly(self.TStart)
        exp_ipp_end = self.IPPPoly(self.TEnd)
        # allow one IPP index of slack between the populated bounds and the
        # values predicted by the IPP polynomial
        if abs(exp_ipp_start - self.IPPStart) > 1:
            # FIX: format arguments were swapped - the populated value is
            # IPPStart, and the derived value is exp_ipp_start (this now
            # matches the parallel IPPEnd message below). Also, an error here
            # must invalidate the check, as in the other branches.
            self.log_validity_error(
                'IPPStart populated as {}, inconsistent with value ({}) '
                'derived from IPPPoly and TStart'.format(self.IPPStart, exp_ipp_start))
            condition = False
        if abs(exp_ipp_end - self.IPPEnd) > 1:
            self.log_validity_error(
                'IPPEnd populated as {}, inconsistent with value ({}) '
                'derived from IPPPoly and TEnd'.format(self.IPPEnd, exp_ipp_end))
            condition = False
        return condition
class TimelineType(Serializable):
    """
    The details for the imaging collection timeline.
    """

    _fields = ('CollectStart', 'CollectDuration', 'IPP')
    _required = ('CollectStart', 'CollectDuration', )
    _collections_tags = {'IPP': {'array': True, 'child_tag': 'Set'}}
    _numeric_format = {'CollectDuration': FLOAT_FORMAT, }
    # descriptors
    CollectStart = DateTimeDescriptor(
        'CollectStart', _required, strict=DEFAULT_STRICT, numpy_datetime_units='us',
        docstring='The collection start time. The default precision will be microseconds.')  # type: numpy.datetime64
    CollectDuration = FloatDescriptor(
        'CollectDuration', _required, strict=DEFAULT_STRICT,
        docstring='The duration of the collection in seconds.')  # type: float
    IPP = SerializableArrayDescriptor(
        'IPP', IPPSetType, _collections_tags, _required, strict=DEFAULT_STRICT, minimum_length=1,
        docstring="The Inter-Pulse Period (IPP) parameters array.")  # type: Union[SerializableArray, List[IPPSetType]]

    def __init__(
            self,
            CollectStart: Union[numpy.datetime64, datetime, date, str] = None,
            CollectDuration: float = None,
            IPP: Union[None, SerializableArray, List[IPPSetType]] = None,
            **kwargs):
        """
        Parameters
        ----------
        CollectStart : numpy.datetime64|datetime|date|str
        CollectDuration : float
        IPP : None|List[IPPSetType]
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CollectStart = CollectStart
        self.CollectDuration = CollectDuration
        self.IPP = IPP
        super(TimelineType, self).__init__(**kwargs)

    @property
    def CollectEnd(self) -> Optional[numpy.datetime64]:
        """
        None|numpy.datetime64: The collection end time, inferred from `CollectEnd` and `CollectDuration`,
        provided that both are populated.
        """
        if self.CollectStart is None or self.CollectDuration is None:
            return None
        return self.CollectStart + numpy.timedelta64(int(self.CollectDuration*1e6), 'us')

    def _check_ipp_consecutive(self) -> bool:
        """
        Verify that consecutive IPP sets are contiguous in index and
        non-overlapping in time. Assumes the IPP array is in order.
        """
        if self.IPP is None or len(self.IPP) < 2:
            return True
        cond = True
        for i in range(len(self.IPP)-1):
            el1 = self.IPP[i]
            el2 = self.IPP[i+1]
            if el1.IPPEnd + 1 != el2.IPPStart:
                self.log_validity_error(
                    'IPP entry {} IPPEnd ({}) is not consecutive with '
                    'entry {} IPPStart ({})'.format(i, el1.IPPEnd, i+1, el2.IPPStart))
                cond = False
            if el1.TEnd >= el2.TStart:
                self.log_validity_error(
                    'IPP entry {} TEnd ({}) is greater than entry {} TStart ({})'.format(i, el1.TEnd, i+1, el2.TStart))
                # FIX: a time overlap is logged as a validity error, so it must
                # also invalidate the check (previously cond was left True here)
                cond = False
        return cond

    def _check_ipp_times(self) -> bool:
        """
        Verify that the IPP time spans are non-negative and consistent with
        the populated CollectDuration.
        """
        if self.IPP is None:
            return True
        cond = True
        min_time = self.IPP[0].TStart
        max_time = self.IPP[0].TEnd
        for i in range(len(self.IPP)):
            entry = self.IPP[i]
            if entry.TStart < 0:
                self.log_validity_error('IPP entry {} has negative TStart ({})'.format(i, entry.TStart))
                cond = False
            # FIX: guard CollectDuration - it is required, but may be
            # unpopulated here, and the comparison raised TypeError on None
            if self.CollectDuration is not None and entry.TEnd > self.CollectDuration + 1e-2:
                self.log_validity_error(
                    'IPP entry {} has TEnd ({}) appreciably larger than '
                    'CollectDuration ({})'.format(i, entry.TEnd, self.CollectDuration))
                cond = False
            min_time = min(min_time, entry.TStart)
            max_time = max(max_time, entry.TEnd)
        if self.CollectDuration is not None and \
                abs(max_time - min_time - self.CollectDuration) > 1e-2:
            self.log_validity_error(
                'time range in IPP entries ({}) not in keeping with populated '
                'CollectDuration ({})'.format(max_time-min_time, self.CollectDuration))
            cond = False
        return cond

    def _basic_validity_check(self) -> bool:
        condition = super(TimelineType, self)._basic_validity_check()
        condition &= self._check_ipp_consecutive()
        condition &= self._check_ipp_times()
        return condition
| 8,454 | 38.694836 | 119 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/Grid.py | """
The GridType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union, Optional, Dict, Tuple
import numpy
from numpy.linalg import norm
from scipy.constants import speed_of_light
from sarpy.processing.sicd.windows import general_hamming, taylor, kaiser, find_half_power, \
get_hamming_broadening_factor
from sarpy.io.xml.base import Serializable, ParametersCollection, find_first_child
from sarpy.io.xml.descriptors import StringDescriptor, StringEnumDescriptor, \
FloatDescriptor, FloatArrayDescriptor, IntegerEnumDescriptor, \
SerializableDescriptor, UnitVectorDescriptor, ParametersDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import XYZType, Poly2DType
from .utils import _get_center_frequency
logger = logging.getLogger(__name__)
# module variable
DEFAULT_WEIGHT_SIZE = 512
"""
int: the default size when generating WgtFunct from a named WgtType.
"""
class WgtTypeType(Serializable):
    """
    The weight type parameters of the direction parameters.
    """

    _fields = ('WindowName', 'Parameters')
    _required = ('WindowName',)
    _collections_tags = {'Parameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    WindowName = StringDescriptor(
        'WindowName', _required, strict=DEFAULT_STRICT,
        docstring='Type of aperture weighting applied in the spatial frequency domain (Krow) to yield '
                  'the impulse response in the row direction. '
                  '*Example values - "UNIFORM", "TAYLOR", "UNKNOWN", "HAMMING"*')  # type: str
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Free form parameters list.')  # type: ParametersCollection

    def __init__(
            self,
            WindowName: str = None,
            Parameters: Union[ParametersCollection, Dict] = None,
            **kwargs):
        """
        Parameters
        ----------
        WindowName : str
        Parameters : ParametersCollection|dict
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.WindowName = WindowName
        self.Parameters = Parameters
        super(WgtTypeType, self).__init__(**kwargs)

    def get_parameter_value(
            self,
            param_name: str,
            default: Optional[str] = None) -> Optional[str]:
        """
        Gets the value (first value found) associated with a given parameter name.
        Returns `default` if not found.

        Parameters
        ----------
        param_name : str
            the parameter name for which to search.
        default : None|str
            the default value to return if lookup fails.

        Returns
        -------
        str
            the associated parameter value, or `default`.
        """
        if self.Parameters is None:
            return default
        the_dict = self.Parameters.get_collection()
        if len(the_dict) == 0:
            return default
        if param_name is None:
            # get the first value - this is dumb, but appears a use case. Leaving undocumented.
            return list(the_dict.values())[0]
        return the_dict.get(param_name, default)

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        # A WindowName child element indicates a modern (SICD 0.5+) layout;
        # its absence triggers the legacy SICD 0.4 text parsing below.
        win_key = cls._child_xml_ns_key.get('WindowName', ns_key)
        win_name = find_first_child(node, 'WindowName', xml_ns, win_key)
        if win_name is None:
            # SICD 0.4 standard compliance, this could just be a space delimited string of the form
            #   "<WindowName> <name1>=<value1> <name2>=<value2> ..."
            if kwargs is None:
                kwargs = {}
            values = node.text.strip().split()
            kwargs['WindowName'] = values[0]
            params = {}
            for entry in values[1:]:
                try:
                    name, val = entry.split('=')
                    params[name] = val
                except ValueError:
                    # malformed "name=value" tokens (no '=', or multiple '=')
                    # are silently skipped - best-effort legacy parsing
                    continue
            kwargs['Parameters'] = params
            return cls.from_dict(kwargs)
        else:
            return super(WgtTypeType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)
class DirParamType(Serializable):
    """The direction parameters container"""

    _fields = (
        'UVectECF', 'SS', 'ImpRespWid', 'Sgn', 'ImpRespBW', 'KCtr', 'DeltaK1', 'DeltaK2', 'DeltaKCOAPoly',
        'WgtType', 'WgtFunct')
    _required = ('UVectECF', 'SS', 'ImpRespWid', 'Sgn', 'ImpRespBW', 'KCtr', 'DeltaK1', 'DeltaK2')
    _numeric_format = {
        'SS': FLOAT_FORMAT, 'ImpRespWid': FLOAT_FORMAT, 'Sgn': '+d',
        'ImpRespBW': FLOAT_FORMAT, 'KCtr': FLOAT_FORMAT,
        'DeltaK1': FLOAT_FORMAT, 'DeltaK2': FLOAT_FORMAT, 'WgtFunct': FLOAT_FORMAT}
    _collections_tags = {'WgtFunct': {'array': True, 'child_tag': 'Wgt'}}
    # descriptors
    UVectECF = UnitVectorDescriptor(
        'UVectECF', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Unit vector in the increasing ``(row/col)`` direction *(ECF)* at '
                  'the SCP pixel.')  # type: XYZType
    SS = FloatDescriptor(
        'SS', _required, strict=DEFAULT_STRICT,
        docstring='Sample spacing in the increasing ``(row/col)`` direction. Precise spacing '
                  'at the SCP.')  # type: float
    ImpRespWid = FloatDescriptor(
        'ImpRespWid', _required, strict=DEFAULT_STRICT,
        docstring='Half power impulse response width in the increasing ``(row/col)`` direction. '
                  'Measured at the scene center point.')  # type: float
    Sgn = IntegerEnumDescriptor(
        'Sgn', (1, -1), _required, strict=DEFAULT_STRICT,
        docstring='Sign for exponent in the DFT to transform the ``(row/col)`` dimension to '
                  'spatial frequency dimension.')  # type: int
    ImpRespBW = FloatDescriptor(
        'ImpRespBW', _required, strict=DEFAULT_STRICT,
        docstring='Spatial bandwidth in ``(row/col)`` used to form the impulse response in '
                  'the ``(row/col)`` direction. Measured at the center of '
                  'support for the SCP.')  # type: float
    KCtr = FloatDescriptor(
        'KCtr', _required, strict=DEFAULT_STRICT,
        docstring='Center spatial frequency in the given dimension. '
                  'Corresponds to the zero frequency of the DFT in the given ``(row/col)`` '
                  'direction.')  # type: float
    DeltaK1 = FloatDescriptor(
        'DeltaK1', _required, strict=DEFAULT_STRICT,
        docstring='Minimum ``(row/col)`` offset from KCtr of the spatial frequency support '
                  'for the image.')  # type: float
    DeltaK2 = FloatDescriptor(
        'DeltaK2', _required, strict=DEFAULT_STRICT,
        docstring='Maximum ``(row/col)`` offset from KCtr of the spatial frequency '
                  'support for the image.')  # type: float
    DeltaKCOAPoly = SerializableDescriptor(
        'DeltaKCOAPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Offset from KCtr of the center of support in the given ``(row/col)`` spatial frequency. '
                  'The polynomial is a function of image given ``(row/col)`` coordinate ``(variable 1)`` and '
                  'column coordinate ``(variable 2)``.')  # type: Poly2DType
    WgtType = SerializableDescriptor(
        'WgtType', WgtTypeType, _required, strict=DEFAULT_STRICT,
        docstring='Parameters describing aperture weighting type applied in the spatial frequency domain '
                  'to yield the impulse response in the given ``(row/col)`` direction.')  # type: WgtTypeType
    WgtFunct = FloatArrayDescriptor(
        'WgtFunct', _collections_tags, _required, strict=DEFAULT_STRICT, minimum_length=2,
        docstring='Sampled aperture amplitude weighting function (array) applied to form the SCP impulse '
                  'response in the given ``(row/col)`` direction.')  # type: numpy.ndarray

    def __init__(
            self,
            UVectECF: Union[XYZType, numpy.ndarray, list, tuple] = None,
            SS: float = None,
            ImpRespWid: float = None,
            Sgn: int = None,
            ImpRespBW: float = None,
            KCtr: float = None,
            DeltaK1: float = None,
            DeltaK2: float = None,
            DeltaKCOAPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            WgtType: Optional[WgtTypeType] = None,
            WgtFunct: Union[None, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        UVectECF : XYZType|numpy.ndarray|list|tuple
        SS : float
        ImpRespWid : float
        Sgn : int
        ImpRespBW : float
        KCtr : float
        DeltaK1 : float
        DeltaK2 : float
        DeltaKCOAPoly : Poly2DType|numpy.ndarray|list|tuple
        WgtType : WgtTypeType
        WgtFunct : None|numpy.ndarray|list|tuple
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.UVectECF = UVectECF
        self.SS = SS
        self.ImpRespWid, self.ImpRespBW = ImpRespWid, ImpRespBW
        self.Sgn = Sgn
        self.KCtr, self.DeltaK1, self.DeltaK2 = KCtr, DeltaK1, DeltaK2
        self.DeltaKCOAPoly = DeltaKCOAPoly
        self.WgtType = WgtType
        self.WgtFunct = WgtFunct
        super(DirParamType, self).__init__(**kwargs)

    def define_weight_function(
            self,
            weight_size: int = DEFAULT_WEIGHT_SIZE,
            populate: bool = False) -> Optional[numpy.ndarray]:
        """
        Try to derive WgtFunct from WgtType, if necessary. This should likely be called from the `GridType` parent.

        Parameters
        ----------
        weight_size : int
            the size of the `WgtFunct` to generate.
        populate : bool
            Overwrite any populated WgtFunct value?

        Returns
        -------
        None|numpy.ndarray
        """
        if self.WgtType is None or self.WgtType.WindowName is None:
            return  # nothing to be done

        value = None
        window_name = self.WgtType.WindowName.upper()
        if window_name == 'HAMMING':
            # A Hamming window is defined in many places as a raised cosine of weight .54, so this is the default.
            # Some data use a generalized raised cosine and call it HAMMING, so we allow for both uses.
            try:
                # noinspection PyTypeChecker
                coef = float(self.WgtType.get_parameter_value(None, 0.54))  # just get first parameter - name?
            except ValueError:
                coef = 0.54
            value = general_hamming(weight_size, coef, sym=True)
        elif window_name == 'HANNING':
            value = general_hamming(weight_size, 0.5, sym=True)
        elif window_name == 'KAISER':
            beta = 14.0  # suggested default in literature/documentation
            try:
                # noinspection PyTypeChecker
                beta = float(self.WgtType.get_parameter_value(None, beta))  # just get first parameter - name?
            except ValueError:
                pass
            value = kaiser(weight_size, beta, sym=True)
        elif window_name == 'TAYLOR':
            # noinspection PyTypeChecker
            sidelobes = int(self.WgtType.get_parameter_value('NBAR', 4))  # apparently the matlab argument name
            # noinspection PyTypeChecker
            max_sidelobe_level = float(self.WgtType.get_parameter_value('SLL', -30))  # same
            value = taylor(weight_size, nbar=sidelobes, sll=max_sidelobe_level, norm=True, sym=True)
        elif window_name == 'UNIFORM':
            # NOTE(review): fixed length 32, not weight_size - presumably any
            # length works for a flat window, but confirm before relying on it
            value = numpy.ones((32, ), dtype='float64')

        if self.WgtFunct is None or (populate and value is not None):
            self.WgtFunct = value
        return value

    def get_oversample_rate(self) -> float:
        """
        Gets the oversample rate. *Added in version 1.2.35.*

        Returns
        -------
        float

        Raises
        ------
        AttributeError
            If either `SS` or `ImpRespBW` is unpopulated.
        """
        if self.SS is None or self.ImpRespBW is None:
            raise AttributeError('Both SS and ImpRespBW must be populated.')
        return max(1., 1./(self.SS*self.ImpRespBW))

    def _get_broadening_factor(self) -> Optional[float]:
        """
        Gets the *broadening factor*, assuming that `WgtFunct` has been properly populated.

        Returns
        -------
        None|float
            the broadening factor
        """
        if self.WgtType is not None and self.WgtType.WindowName is not None:
            window_name = self.WgtType.WindowName.upper()
            coef = None
            if window_name == 'UNIFORM':
                coef = 1.0
            elif window_name == 'HAMMING':
                try:
                    # noinspection PyTypeChecker
                    coef = float(self.WgtType.get_parameter_value(None, 0.54))  # just get first parameter - name?
                except ValueError:
                    coef = 0.54
            elif window_name == 'HANNING':
                coef = 0.5

            if coef is not None:
                return get_hamming_broadening_factor(coef)

        return find_half_power(self.WgtFunct, oversample=1024)

    def define_response_widths(self, populate: bool = False) -> Optional[Tuple[float, float]]:
        """
        Assuming that `WgtFunct` has been properly populated, define the response widths.
        This should likely be called by `GridType` parent.

        Note that even with `populate=False`, an unpopulated `ImpRespWid` or
        `ImpRespBW` will be filled in from its populated counterpart.

        Parameters
        ----------
        populate : bool
            Overwrite populated ImpRespWid and/or ImpRespBW?

        Returns
        -------
        None|(float, float)
            None or `(ImpRespBw, ImpRespWid)`
        """
        broadening_factor = self._get_broadening_factor()
        if broadening_factor is None:
            return None

        if self.ImpRespBW is not None:
            resp_width = broadening_factor/self.ImpRespBW
            if populate or self.ImpRespWid is None:
                self.ImpRespWid = resp_width
            return self.ImpRespBW, resp_width
        elif self.ImpRespWid is not None:
            resp_bw = broadening_factor/self.ImpRespWid
            if populate or self.ImpRespBW is None:
                self.ImpRespBW = resp_bw
            return resp_bw, self.ImpRespWid
        return None

    def estimate_deltak(
            self,
            x_coords: Optional[numpy.ndarray],
            y_coords: Optional[numpy.ndarray],
            populate: bool = False) -> Optional[Tuple[float, float]]:
        """
        The `DeltaK1` and `DeltaK2` parameters can be estimated from `DeltaKCOAPoly`, if necessary.
        This should likely be called by the `GridType` parent.

        Parameters
        ----------
        x_coords : None|numpy.ndarray
            The physical vertex coordinates to evaluate DeltaKCOAPoly
        y_coords : None|numpy.ndarray
            The physical vertex coordinates to evaluate DeltaKCOAPoly
        populate : bool
            Overwite any populated DeltaK1 and DeltaK2?

        Returns
        -------
        None|(float, float)
            None when `ImpRespBW` or `SS` is unpopulated, otherwise
            `(min_deltak, max_deltak)`.
        """
        if self.ImpRespBW is None or self.SS is None:
            return None  # nothing can be done

        if self.DeltaKCOAPoly is not None and x_coords is not None:
            deltaks = self.DeltaKCOAPoly(x_coords, y_coords)
            min_deltak = numpy.amin(deltaks) - 0.5*self.ImpRespBW
            max_deltak = numpy.amax(deltaks) + 0.5*self.ImpRespBW
        else:
            min_deltak = -0.5*self.ImpRespBW
            max_deltak = 0.5*self.ImpRespBW
        # wrapped spectrum (or >= +/- 0.5 cycle) - take the whole Nyquist band
        if (min_deltak < -0.5/abs(self.SS)) or (max_deltak > 0.5/abs(self.SS)):
            min_deltak = -0.5/abs(self.SS)
            max_deltak = -min_deltak

        if populate or (self.DeltaK1 is None or self.DeltaK2 is None):
            self.DeltaK1 = min_deltak
            self.DeltaK2 = max_deltak
        return min_deltak, max_deltak

    def check_deltak(
            self,
            x_coords: Optional[numpy.ndarray],
            y_coords: Optional[numpy.ndarray]) -> bool:
        """
        Checks the DeltaK values for validity.

        Parameters
        ----------
        x_coords : None|numpy.ndarray
            The physical vertex coordinates to evaluate DeltaKCOAPoly
        y_coords : None|numpy.ndarray
            The physical vertex coordinates to evaluate DeltaKCOAPoly

        Returns
        -------
        bool
        """
        out = True
        try:
            if self.DeltaK2 <= self.DeltaK1 + 1e-10:
                self.log_validity_error(
                    'DeltaK2 ({}) must be greater than DeltaK1 ({})'.format(self.DeltaK2, self.DeltaK1))
                out = False
        except (AttributeError, TypeError, ValueError):
            pass

        try:
            if self.DeltaK2 > 1./(2*self.SS) + 1e-10:
                self.log_validity_error(
                    'DeltaK2 ({}) must be <= 1/(2*SS) ({})'.format(self.DeltaK2, 1./(2*self.SS)))
                out = False
        except (AttributeError, TypeError, ValueError):
            pass

        try:
            if self.DeltaK1 < -1./(2*self.SS) - 1e-10:
                self.log_validity_error(
                    'DeltaK1 ({}) must be >= -1/(2*SS) ({})'.format(self.DeltaK1, -1./(2*self.SS)))
                out = False
        except (AttributeError, TypeError, ValueError):
            pass

        # FIX: estimate_deltak returns None when ImpRespBW or SS is
        # unpopulated - the unconditional tuple unpack previously raised
        # TypeError instead of returning the validity result
        estimate = self.estimate_deltak(x_coords, y_coords, populate=False)
        if estimate is None:
            return out
        min_deltak, max_deltak = estimate
        try:
            if abs(self.DeltaK1/min_deltak - 1) > 1e-2:
                self.log_validity_error(
                    'The DeltaK1 value is populated as {}, but estimated to be {}'.format(self.DeltaK1, min_deltak))
                out = False
        except (AttributeError, TypeError, ValueError):
            pass

        try:
            if abs(self.DeltaK2/max_deltak - 1) > 1e-2:
                self.log_validity_error(
                    'The DeltaK2 value is populated as {}, but estimated to be {}'.format(self.DeltaK2, max_deltak))
                out = False
        except (AttributeError, TypeError, ValueError):
            pass
        return out

    def _check_bw(self) -> bool:
        # the impulse response bandwidth cannot exceed the populated support
        out = True
        try:
            if self.ImpRespBW > (self.DeltaK2 - self.DeltaK1) + 1e-10:
                self.log_validity_error(
                    'ImpRespBW ({}) must be <= DeltaK2 - DeltaK1 '
                    '({})'.format(self.ImpRespBW, self.DeltaK2 - self.DeltaK1))
                out = False
        except (AttributeError, TypeError, ValueError):
            pass
        return out

    def _check_wgt(self) -> bool:
        # verify that ImpRespBW/ImpRespWid are consistent with the weighting
        cond = True
        if self.WgtType is None:
            return cond

        wgt_size = self.WgtFunct.size if self.WgtFunct is not None else None
        if self.WgtType.WindowName not in ['UNIFORM', 'UNKNOWN'] and (wgt_size is None or wgt_size < 2):
            self.log_validity_error(
                'Non-uniform weighting indicated, but WgtFunct not properly defined')
            return False
        if wgt_size is not None and wgt_size > 1024:
            self.log_validity_warning(
                'WgtFunct with {} elements is provided.\n'
                'The recommended number of elements is 512, '
                'and many more is likely needlessly excessive.'.format(wgt_size))

        result = self.define_response_widths(populate=False)
        if result is None:
            return cond
        resp_bw, resp_wid = result

        if abs(resp_bw/self.ImpRespBW - 1) > 1e-2:
            self.log_validity_error(
                'ImpRespBW expected as {} from weighting,\n'
                'but populated as {}'.format(resp_bw, self.ImpRespBW))
            cond = False
        if abs(resp_wid/self.ImpRespWid - 1) > 1e-2:
            self.log_validity_error(
                'ImpRespWid expected as {} from weighting,\n'
                'but populated as {}'.format(resp_wid, self.ImpRespWid))
            cond = False
        return cond

    def _basic_validity_check(self) -> bool:
        condition = super(DirParamType, self)._basic_validity_check()
        if (self.WgtFunct is not None) and (self.WgtFunct.size < 2):
            self.log_validity_error(
                'The WgtFunct array has been defined in DirParamType, '
                'but there are fewer than 2 entries.')
            condition = False
        for attribute in ['SS', 'ImpRespBW', 'ImpRespWid']:
            value = getattr(self, attribute)
            if value is not None and value <= 0:
                self.log_validity_error(
                    'attribute {} is populated as {}, '
                    'but should be strictly positive.'.format(attribute, value))
                condition = False
        condition &= self._check_bw()
        condition &= self._check_wgt()
        return condition
class GridType(Serializable):
"""
Collection grid details container
"""
_fields = ('ImagePlane', 'Type', 'TimeCOAPoly', 'Row', 'Col')
_required = _fields
_IMAGE_PLANE_VALUES = ('SLANT', 'GROUND', 'OTHER')
_TYPE_VALUES = ('RGAZIM', 'RGZERO', 'XRGYCR', 'XCTYAT', 'PLANE')
# descriptors
ImagePlane = StringEnumDescriptor(
'ImagePlane', _IMAGE_PLANE_VALUES, _required, strict=DEFAULT_STRICT,
docstring="Defines the type of image plane that the best describes the sample grid. Precise plane "
"defined by Row Direction and Column Direction unit vectors.") # type: str
Type = StringEnumDescriptor(
'Type', _TYPE_VALUES, _required, strict=DEFAULT_STRICT,
docstring="""
Defines the type of spatial sampling grid represented by the image sample grid.
Row coordinate first, column coordinate second:
* `RGAZIM` - Grid for a simple range, Doppler image. Also, the natural grid for images formed with the Polar
Format Algorithm.
* `RGZERO` - A grid for images formed with the Range Migration Algorithm. Used only for imaging near closest
approach (i.e. near zero Doppler).
* `XRGYCR` - Orthogonal slant plane grid oriented range and cross range relative to the ARP at a
reference time.
* `XCTYAT` - Orthogonal slant plane grid with X oriented cross track.
* `PLANE` - Arbitrary plane with orientation other than the specific `XRGYCR` or `XCTYAT`.
\n\n
""") # type: str
TimeCOAPoly = SerializableDescriptor(
'TimeCOAPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
docstring="*Time of Center Of Aperture* as a polynomial function of image coordinates. "
"The polynomial is a function of image row coordinate ``(variable 1)`` and column coordinate "
"``(variable 2)``.") # type: Poly2DType
Row = SerializableDescriptor(
'Row', DirParamType, _required, strict=DEFAULT_STRICT,
docstring="Row direction parameters.") # type: DirParamType
Col = SerializableDescriptor(
'Col', DirParamType, _required, strict=DEFAULT_STRICT,
docstring="Column direction parameters.") # type: DirParamType
def __init__(
self,
ImagePlane: str = None,
Type: str = None,
TimeCOAPoly: Union[Poly2DType, numpy.ndarray, list, tuple] = None,
Row: DirParamType = None,
Col: DirParamType = None,
**kwargs):
"""
Parameters
----------
ImagePlane : str
Type : str
TimeCOAPoly : Poly2DType|numpy.ndarray|list|tuple
Row : DirParamType
Col : DirParamType
kwargs
"""
if '_xml_ns' in kwargs:
self._xml_ns = kwargs['_xml_ns']
if '_xml_ns_key' in kwargs:
self._xml_ns_key = kwargs['_xml_ns_key']
self.ImagePlane = ImagePlane
self.Type = Type
self.TimeCOAPoly = TimeCOAPoly
self.Row, self.Col = Row, Col
super(GridType, self).__init__(**kwargs)
def derive_direction_params(
self,
ImageData,
populate: bool = False):
"""
Populate the ``Row/Col`` direction parameters from ImageData, if necessary.
Expected to be called from SICD parent.
Parameters
----------
ImageData : sarpy.io.complex.sicd_elements.ImageData.ImageDataType
populate : bool
Repopulates any present values?
Returns
-------
None
"""
valid_vertices = None
if ImageData is not None:
valid_vertices = ImageData.get_valid_vertex_data()
if valid_vertices is None:
valid_vertices = ImageData.get_full_vertex_data()
x_coords, y_coords = None, None
if valid_vertices is not None:
try:
x_coords = self.Row.SS*(valid_vertices[:, 0] - (ImageData.SCPPixel.Row - ImageData.FirstRow))
y_coords = self.Col.SS*(valid_vertices[:, 1] - (ImageData.SCPPixel.Col - ImageData.FirstCol))
except (AttributeError, ValueError):
pass
for attribute in ['Row', 'Col']:
value = getattr(self, attribute, None)
if value is not None:
value.define_weight_function(populate=populate)
value.define_response_widths(populate=populate)
value.estimate_deltak(x_coords, y_coords, populate=populate)
def _derive_time_coa_poly(
self,
CollectionInfo,
SCPCOA):
"""
Expected to be called from SICD parent.
Parameters
----------
CollectionInfo : sarpy.io.complex.sicd_elements.CollectionInfo.CollectionInfoType
SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
Returns
-------
None
"""
if self.TimeCOAPoly is not None:
return # nothing needs to be done
try:
if CollectionInfo.RadarMode.ModeType == 'SPOTLIGHT':
self.TimeCOAPoly = Poly2DType(Coefs=[[SCPCOA.SCPTime, ], ])
except (AttributeError, ValueError):
return
def _derive_rg_az_comp(
self,
GeoData,
SCPCOA,
RadarCollection,
ImageFormation):
"""
Expected to be called by SICD parent.
Parameters
----------
GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
Returns
-------
None
"""
if self.Row is None:
self.Row = DirParamType()
if self.Col is None:
self.Col = DirParamType()
if self.ImagePlane is None:
self.ImagePlane = 'SLANT'
elif self.ImagePlane != 'SLANT':
logger.warning(
'The Grid.ImagePlane is set to {},\n\t'
'but Image Formation Algorithm is RgAzComp, which requires "SLANT".\n\t'
'Resetting.'.format(self.ImagePlane))
self.ImagePlane = 'SLANT'
if self.Type is None:
self.Type = 'RGAZIM'
elif self.Type != 'RGAZIM':
logger.warning(
'The Grid.Type is set to {},\n\t'
'but Image Formation Algorithm is RgAzComp, which requires "RGAZIM".\n\t'
'Resetting.'.format(self.Type))
self.Type = 'RGAZIM'
if GeoData is not None and GeoData.SCP is not None and GeoData.SCP.ECF is not None and \
SCPCOA.ARPPos is not None and SCPCOA.ARPVel is not None:
scp = GeoData.SCP.ECF.get_array()
arp = SCPCOA.ARPPos.get_array()
los = (scp - arp)
ulos = los/norm(los)
if self.Row.UVectECF is None:
self.Row.UVectECF = XYZType.from_array(ulos)
look = SCPCOA.look
arp_vel = SCPCOA.ARPVel.get_array()
uspz = look*numpy.cross(arp_vel, ulos)
uspz /= norm(uspz)
uaz = numpy.cross(uspz, ulos)
if self.Col.UVectECF is None:
self.Col.UVectECF = XYZType.from_array(uaz)
center_frequency = _get_center_frequency(RadarCollection, ImageFormation)
if center_frequency is not None:
if self.Row.KCtr is None:
kctr = 2*center_frequency/speed_of_light
if self.Row.DeltaKCOAPoly is not None: # assume it's 0 otherwise?
kctr -= self.Row.DeltaKCOAPoly.Coefs[0, 0]
self.Row.KCtr = kctr
elif self.Row.DeltaKCOAPoly is None:
self.Row.DeltaKCOAPoly = Poly2DType(Coefs=[[2*center_frequency/speed_of_light - self.Row.KCtr, ], ])
if self.Col.KCtr is None:
kctr = 0.0
if self.Col.DeltaKCOAPoly is not None:
kctr -= self.Col.DeltaKCOAPoly.Coefs[0, 0]
self.Col.KCtr = kctr
elif self.Col.DeltaKCOAPoly is None:
self.Col.DeltaKCOAPoly = Poly2DType(Coefs=[[-self.Col.KCtr, ], ])
    def _derive_pfa(
            self,
            GeoData,
            RadarCollection,
            ImageFormation,
            Position,
            PFA):
        """
        Populate Grid fields (Type, unit vectors, KCtr) derivable from Polar
        Format Algorithm parameters. Expected to be called by SICD parent.
        Parameters
        ----------
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
        RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
        ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
        Position : sarpy.io.complex.sicd_elements.Position.PositionType
        PFA : sarpy.io.complex.sicd_elements.PFA.PFAType
        Returns
        -------
        None
        """
        if self.Type is None:
            self.Type = 'RGAZIM'  # the natural result for PFA
        if PFA is None:
            return  # nothing to be done
        if GeoData is None or GeoData.SCP is None:
            return  # nothing to be done
        scp = GeoData.SCP.ECF.get_array()
        # aperture position at the polar angle reference time, falling back to the SCP
        if Position is not None and Position.ARPPoly is not None \
                and PFA.PolarAngRefTime is not None:
            polar_ref_pos = Position.ARPPoly(PFA.PolarAngRefTime)
        else:
            polar_ref_pos = scp
        if PFA.IPN is not None and PFA.FPN is not None and \
                self.Row.UVectECF is None and self.Col.UVectECF is None:
            ipn = PFA.IPN.get_array()  # image plane unit normal
            fpn = PFA.FPN.get_array()  # focus plane unit normal
            # project the reference position along the focus plane normal onto the image plane
            dist = numpy.dot((scp - polar_ref_pos), ipn) / numpy.dot(fpn, ipn)
            ref_pos_ipn = polar_ref_pos + (dist * fpn)
            urg = scp - ref_pos_ipn  # range unit vector in the image plane
            urg /= norm(urg)
            uaz = numpy.cross(ipn, urg)  # already unit
            self.Row.UVectECF = XYZType.from_array(urg)
            self.Col.UVectECF = XYZType.from_array(uaz)
        if self.Col is not None and self.Col.KCtr is None:
            self.Col.KCtr = 0  # almost always 0 for PFA
        if self.Row is not None and self.Row.KCtr is None:
            center_frequency = _get_center_frequency(RadarCollection, ImageFormation)
            if PFA.Krg1 is not None and PFA.Krg2 is not None:
                # midpoint of the processed range spatial frequency extent
                self.Row.KCtr = 0.5*(PFA.Krg1 + PFA.Krg2)
            elif center_frequency is not None and PFA.SpatialFreqSFPoly is not None:
                # APPROXIMATION: may not be quite right, due to rectangular inscription loss in PFA.
                self.Row.KCtr = 2*center_frequency/speed_of_light + PFA.SpatialFreqSFPoly.Coefs[0]
def _derive_rma(
self,
RMA,
GeoData,
RadarCollection,
ImageFormation,
Position):
"""
Parameters
----------
RMA : sarpy.io.complex.sicd_elements.RMA.RMAType
GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
Position : sarpy.io.complex.sicd_elements.Position.PositionType
Returns
-------
None
"""
if RMA is None:
return # nothing can be derived
im_type = RMA.ImageType
if im_type is None:
return
if im_type == 'INCA':
self._derive_rma_inca(RMA, GeoData, Position)
else:
if im_type == 'RMAT':
self._derive_rma_rmat(RMA, GeoData, RadarCollection, ImageFormation)
elif im_type == 'RMCR':
self._derive_rma_rmcr(RMA, GeoData, RadarCollection, ImageFormation)
    @staticmethod
    def _derive_unit_vector_params(GeoData, RMAParam):
        """
        Helper method computing the common geometry unit vectors used by the
        RMAT/RMCR grid derivations.
        Parameters
        ----------
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
        RMAParam : sarpy.io.complex.sicd_elements.RMA.RMATType|sarpy.io.complex.sicd_elements.RMA.RMCRType
        Returns
        -------
        Tuple[numpy.ndarray,...]
            The tuple `(scp, upos_ref, uvel_ref, ulos, left, look)`, or ``None``
            when GeoData does not supply an SCP.
        """
        if GeoData is None or GeoData.SCP is None:
            return None
        scp = GeoData.SCP.ECF.get_array()
        pos_ref = RMAParam.PosRef.get_array()
        upos_ref = pos_ref / norm(pos_ref)
        vel_ref = RMAParam.VelRef.get_array()
        uvel_ref = vel_ref / norm(vel_ref)
        los = (scp - pos_ref)  # it absolutely could be that scp = pos_ref
        los_norm = norm(los)
        if los_norm < 1:
            logger.error(
                msg="Row/Col UVectECF cannot be derived from RMA,\n\t"
                    "because the Reference Position is too close (less than 1 meter) to the SCP.")
        # NOTE(review): execution continues after the error above, so ulos may be
        # meaningless (nan when los_norm == 0) - confirm whether an early
        # `return None` was intended here.
        ulos = los/los_norm
        left = numpy.cross(upos_ref, uvel_ref)
        look = numpy.sign(numpy.dot(left, ulos))  # +1 left-looking, -1 right-looking
        return scp, upos_ref, uvel_ref, ulos, left, look
    def _derive_rma_rmat(
            self,
            RMA,
            GeoData,
            RadarCollection,
            ImageFormation):
        """
        Populate Grid fields derivable from RMA/RMAT parameters.
        Parameters
        ----------
        RMA : sarpy.io.complex.sicd_elements.RMA.RMAType
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
        RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
        ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
        Returns
        -------
        None
        """
        if RMA.RMAT is None:
            return
        if self.ImagePlane is None:
            self.ImagePlane = 'SLANT'
        if self.Type is None:
            self.Type = 'XCTYAT'  # cross-track/along-track grid for RMAT
        if self.Row.UVectECF is None and self.Col.UVectECF is None:
            params = self._derive_unit_vector_params(GeoData, RMA.RMAT)
            if params is not None:
                scp, upos_ref, uvel_ref, ulos, left, look = params
                uyat = -look*uvel_ref  # along-track unit vector
                uspz = numpy.cross(ulos, uyat)  # slant plane unit normal
                uspz /= norm(uspz)
                uxct = numpy.cross(uyat, uspz)  # cross-track completes the triad
                self.Row.UVectECF = XYZType.from_array(uxct)
                self.Col.UVectECF = XYZType.from_array(uyat)
        center_frequency = _get_center_frequency(RadarCollection, ImageFormation)
        if center_frequency is not None and RMA.RMAT.DopConeAngRef is not None:
            # center wavenumbers from the reference doppler cone angle
            if self.Row.KCtr is None:
                self.Row.KCtr = (2*center_frequency/speed_of_light)*numpy.sin(numpy.deg2rad(RMA.RMAT.DopConeAngRef))
            if self.Col.KCtr is None:
                self.Col.KCtr = (2*center_frequency/speed_of_light)*numpy.cos(numpy.deg2rad(RMA.RMAT.DopConeAngRef))
    def _derive_rma_rmcr(
            self,
            RMA,
            GeoData,
            RadarCollection,
            ImageFormation):
        """
        Populate Grid fields derivable from RMA/RMCR parameters.
        Parameters
        ----------
        RMA : sarpy.io.complex.sicd_elements.RMA.RMAType
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
        RadarCollection : sarpy.io.complex.sicd_elements.RadarCollection.RadarCollectionType
        ImageFormation : sarpy.io.complex.sicd_elements.ImageFormation.ImageFormationType
        Returns
        -------
        None
        """
        if RMA.RMCR is None:
            return
        if self.ImagePlane is None:
            self.ImagePlane = 'SLANT'
        if self.Type is None:
            self.Type = 'XRGYCR'  # range/cross-range grid for RMCR
        if self.Row.UVectECF is None and self.Col.UVectECF is None:
            params = self._derive_unit_vector_params(GeoData, RMA.RMCR)
            if params is not None:
                scp, upos_ref, uvel_ref, ulos, left, look = params
                uxrg = ulos  # range unit vector is along the line of sight
                uspz = look*numpy.cross(uvel_ref, uxrg)  # slant plane unit normal
                uspz /= norm(uspz)
                uycr = numpy.cross(uspz, uxrg)  # cross-range completes the triad
                self.Row.UVectECF = XYZType.from_array(uxrg)
                self.Col.UVectECF = XYZType.from_array(uycr)
        center_frequency = _get_center_frequency(RadarCollection, ImageFormation)
        if center_frequency is not None:
            # both directions center on the two-way wavenumber at center frequency
            if self.Row.KCtr is None:
                self.Row.KCtr = 2*center_frequency/speed_of_light
            if self.Col.KCtr is None:
                self.Col.KCtr = 2*center_frequency/speed_of_light
    def _derive_rma_inca(
            self,
            RMA,
            GeoData,
            Position):
        """
        Populate Grid fields derivable from RMA/INCA parameters.
        Parameters
        ----------
        RMA : sarpy.io.complex.sicd_elements.RMA.RMAType
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
        Position : sarpy.io.complex.sicd_elements.Position.PositionType
        Returns
        -------
        None
        """
        if RMA.INCA is None:
            return
        if self.Type is None:
            self.Type = 'RGZERO'
        if RMA.INCA.TimeCAPoly is not None and Position is not None and Position.ARPPoly is not None and \
                self.Row.UVectECF is None and self.Col.UVectECF is None and \
                GeoData is not None and GeoData.SCP is not None:
            scp = GeoData.SCP.ECF.get_array()
            t_zero = RMA.INCA.TimeCAPoly.Coefs[0]  # time of closest approach
            ca_pos = Position.ARPPoly(t_zero)  # aperture position at closest approach
            ca_vel = Position.ARPPoly.derivative_eval(t_zero, der_order=1)
            uca_pos = ca_pos/norm(ca_pos)
            uca_vel = ca_vel/norm(ca_vel)
            urg = (scp - ca_pos)
            urg_norm = norm(urg)
            if urg_norm > 0:
                urg /= urg_norm
                left = numpy.cross(uca_pos, uca_vel)
                look = numpy.sign(numpy.dot(left, urg))  # +1 left-looking, -1 right-looking
                uspz = -look*numpy.cross(urg, uca_vel)  # slant plane unit normal
                uspz /= norm(uspz)
                uaz = numpy.cross(uspz, urg)
                self.Row.UVectECF = XYZType.from_array(urg)
                self.Col.UVectECF = XYZType.from_array(uaz)
        if self.Row is not None and self.Row.KCtr is None and RMA.INCA.FreqZero is not None:
            # range direction centers on the two-way wavenumber at FreqZero
            self.Row.KCtr = 2*RMA.INCA.FreqZero/speed_of_light
        if self.Col is not None and self.Col.KCtr is None:
            self.Col.KCtr = 0
def _basic_validity_check(self) -> bool:
condition = super(GridType, self)._basic_validity_check()
if self.Row is not None and self.Row.Sgn is not None and self.Col is not None \
and self.Col.Sgn is not None and self.Row.Sgn != self.Col.Sgn:
self.log_validity_warning(
'Row.Sgn ({}) and Col.Sgn ({}) should almost certainly be the '
'same value'.format(self.Row.Sgn, self.Col.Sgn))
return condition
def check_deltak(
self,
x_coords: Optional[numpy.ndarray],
y_coords: Optional[numpy.ndarray]) -> bool:
"""
Checks the validity of DeltaK values.
Parameters
----------
x_coords : None|numpy.ndarray
y_coords : None|numpy.ndarray
Returns
-------
bool
"""
cond = True
if self.Row is not None:
cond &= self.Row.check_deltak(x_coords, y_coords)
if self.Col is not None:
cond &= self.Col.check_deltak(x_coords, y_coords)
return cond
def get_resolution_abbreviation(self) -> str:
"""
Gets the resolution abbreviation for the suggested name.
Returns
-------
str
"""
if self.Row is None or self.Row.ImpRespWid is None or \
self.Col is None or self.Col.ImpRespWid is None:
return '0000'
else:
value = int(100*(abs(self.Row.ImpRespWid)*abs(self.Col.ImpRespWid))**0.5)
if value > 9999:
return '9999'
else:
return '{0:04d}'.format(value)
def get_slant_plane_area(self) -> float:
"""
Get the weighted slant plane area.
Returns
-------
float
"""
range_weight_f = azimuth_weight_f = 1.0
if self.Row.WgtFunct is not None:
var = numpy.var(self.Row.WgtFunct)
mean = numpy.mean(self.Row.WgtFunct)
range_weight_f += var/(mean*mean)
if self.Col.WgtFunct is not None:
var = numpy.var(self.Col.WgtFunct)
mean = numpy.mean(self.Col.WgtFunct)
azimuth_weight_f += var/(mean*mean)
return (range_weight_f * azimuth_weight_f)/(self.Row.ImpRespBW*self.Col.ImpRespBW)
| 41,928 | 36.807935 | 117 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/__init__.py | """
**This sub-package is a work in progress to encapsulate pythonic object-oriented SICD structure 1.1 (2014-09-30).**
This purpose of doing it this way is to encourage effective documentation and streamlined use of the SICD information.
This provides more robustness than using structures with no built-in validation, and more flexibility than using the
rigidity of C++ based standards validation.
"""
__classification__ = "UNCLASSIFIED"
| 442 | 43.3 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/Radiometric.py | """
The RadiometricType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Optional
import numpy
from sarpy.io.xml.base import Serializable, find_first_child
from sarpy.io.xml.descriptors import StringEnumDescriptor, SerializableDescriptor
from .base import DEFAULT_STRICT
from .blocks import Poly2DType
# noinspection PyPep8Naming
class NoiseLevelType_(Serializable):
"""
Noise level structure.
"""
_fields = ('NoiseLevelType', 'NoisePoly')
_required = _fields
# class variables
_NOISE_LEVEL_TYPE_VALUES = ('ABSOLUTE', 'RELATIVE')
# descriptors
NoiseLevelType = StringEnumDescriptor(
'NoiseLevelType', _NOISE_LEVEL_TYPE_VALUES, _required, strict=DEFAULT_STRICT,
docstring='Indicates that the noise power polynomial yields either absolute power level or power '
'level relative to the *SCP* pixel location.') # type: str
NoisePoly = SerializableDescriptor(
'NoisePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
docstring='Polynomial coefficients that yield thermal noise power *(in dB)* in a pixel as a function of '
'image row coordinate *(variable 1)* and column coordinate *(variable 2)*.') # type: Poly2DType
def __init__(
self,
NoiseLevelType: str = None,
NoisePoly: Union[Poly2DType, numpy.ndarray, list, tuple] = None,
**kwargs):
"""
Parameters
----------
NoiseLevelType : str
NoisePoly : Poly2DType|numpy.ndarray|list|tuple
kwargs
"""
if '_xml_ns' in kwargs:
self._xml_ns = kwargs['_xml_ns']
if '_xml_ns_key' in kwargs:
self._xml_ns_key = kwargs['_xml_ns_key']
self.NoiseLevelType = NoiseLevelType
self.NoisePoly = NoisePoly
super(NoiseLevelType_, self).__init__(**kwargs)
self._derive_noise_level()
def _derive_noise_level(self):
if self.NoiseLevelType is not None:
return
if self.NoisePoly is None:
return # nothing to be done
scp_val = self.NoisePoly.Coefs[0, 0] # the value at SCP
if scp_val == 1:
# the relative noise levels should be 1 at SCP
self.NoiseLevelType = 'RELATIVE'
else:
# it seems safe that it's not absolute, in this case?
self.NoiseLevelType = 'ABSOLUTE'
class RadiometricType(Serializable):
    """The radiometric calibration parameters."""
    _fields = ('NoiseLevel', 'RCSSFPoly', 'SigmaZeroSFPoly', 'BetaZeroSFPoly', 'GammaZeroSFPoly')
    _required = ()
    # descriptors
    NoiseLevel = SerializableDescriptor(
        'NoiseLevel', NoiseLevelType_, _required, strict=DEFAULT_STRICT,
        docstring='Noise level structure.')  # type: NoiseLevelType_
    RCSSFPoly = SerializableDescriptor(
        'RCSSFPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial that yields a scale factor to convert pixel power to RCS *(m^2)* '
                  'as a function of image row coordinate *(variable 1)* and column coordinate *(variable 2)*. '
                  'Scale factor computed for a target at `HAE = SCP_HAE`.')  # type: Poly2DType
    SigmaZeroSFPoly = SerializableDescriptor(
        'SigmaZeroSFPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial that yields a scale factor to convert pixel power to clutter parameter '
                  'Sigma-Zero as a function of image row coordinate *(variable 1)* and column coordinate '
                  '*(variable 2)*. Scale factor computed for a clutter cell at `HAE = SCP_HAE`.')  # type: Poly2DType
    BetaZeroSFPoly = SerializableDescriptor(
        'BetaZeroSFPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial that yields a scale factor to convert pixel power to radar brightness '
                  'or Beta-Zero as a function of image row coordinate *(variable 1)* and column coordinate '
                  '*(variable 2)*. Scale factor computed for a clutter cell at `HAE = SCP_HAE`.')  # type: Poly2DType
    GammaZeroSFPoly = SerializableDescriptor(
        'GammaZeroSFPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='Polynomial that yields a scale factor to convert pixel power to clutter parameter '
                  'Gamma-Zero as a function of image row coordinate *(variable 1)* and column coordinate '
                  '*(variable 2)*. Scale factor computed for a clutter cell at `HAE = SCP_HAE`.')  # type: Poly2DType
    def __init__(
            self,
            NoiseLevel: Optional[NoiseLevelType_] = None,
            RCSSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            SigmaZeroSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            BetaZeroSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            GammaZeroSFPoly: Union[None, Poly2DType, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        NoiseLevel : NoiseLevelType_
        RCSSFPoly : Poly2DType|numpy.ndarray|list|tuple
        SigmaZeroSFPoly : Poly2DType|numpy.ndarray|list|tuple
        BetaZeroSFPoly : Poly2DType|numpy.ndarray|list|tuple
        GammaZeroSFPoly : Poly2DType|numpy.ndarray|list|tuple
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.NoiseLevel = NoiseLevel
        self.RCSSFPoly = RCSSFPoly
        self.SigmaZeroSFPoly = SigmaZeroSFPoly
        self.BetaZeroSFPoly = BetaZeroSFPoly
        self.GammaZeroSFPoly = GammaZeroSFPoly
        super(RadiometricType, self).__init__(**kwargs)
    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        # BUGFIX: this previously read `if kwargs is not None`, which discarded
        # any caller-supplied kwargs and left `kwargs` as None (unsubscriptable
        # below) when it was not supplied.
        if kwargs is None:
            kwargs = {}
        # NoiseLevelType and NoisePoly used to be at this level prior to SICD 1.0.
        nkey = cls._child_xml_ns_key.get('NoiseLevelType', ns_key)
        nlevel = find_first_child(node, 'NoiseLevelType', xml_ns, nkey)
        if nlevel is not None:
            kwargs['NoiseLevel'] = NoiseLevelType_.from_node(nlevel, xml_ns, ns_key=ns_key, kwargs=kwargs)
        return super(RadiometricType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)
    def _derive_parameters(self, Grid, SCPCOA):
        """
        Expected to be called by SICD parent.
        Parameters
        ----------
        Grid : sarpy.io.complex.sicd_elements.Grid.GridType
        SCPCOA : sarpy.io.complex.sicd_elements.SCPCOA.SCPCOAType
        Returns
        -------
        None
        """
        if Grid is None or Grid.Row is None or Grid.Col is None:
            return
        area_sp = Grid.get_slant_plane_area()
        # We can define any SF polynomial from any other SF polynomial by just
        # scaling the coefficient array. If any are defined, use BetaZeroSFPolynomial
        # as the root, and derive them all
        # NOTE(review): the conversions below use SCPCOA.SlopeAng/GrazeAng without a
        # None check - presumably SCPCOA is always populated by the time this runs.
        if self.BetaZeroSFPoly is None:
            if self.RCSSFPoly is not None:
                self.BetaZeroSFPoly = Poly2DType(Coefs=self.RCSSFPoly.Coefs/area_sp)
            elif self.SigmaZeroSFPoly is not None:
                self.BetaZeroSFPoly = Poly2DType(
                    Coefs=self.SigmaZeroSFPoly.Coefs/numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng)))
            elif self.GammaZeroSFPoly is not None:
                self.BetaZeroSFPoly = Poly2DType(
                    Coefs=self.GammaZeroSFPoly.Coefs*(numpy.sin(numpy.deg2rad(SCPCOA.GrazeAng)) /
                                                      numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng))))
        if self.BetaZeroSFPoly is not None:
            # BetaZeroSFPoly is populated (directly or derived above), so fill in
            # any remaining SF polynomials from it.
            if self.RCSSFPoly is None:
                self.RCSSFPoly = Poly2DType(Coefs=self.BetaZeroSFPoly.Coefs*area_sp)
            if self.SigmaZeroSFPoly is None:
                self.SigmaZeroSFPoly = Poly2DType(
                    Coefs=self.BetaZeroSFPoly.Coefs*numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng)))
            if self.GammaZeroSFPoly is None:
                self.GammaZeroSFPoly = Poly2DType(
                    Coefs=self.BetaZeroSFPoly.Coefs*(numpy.cos(numpy.deg2rad(SCPCOA.SlopeAng)) /
                                                     numpy.sin(numpy.deg2rad(SCPCOA.GrazeAng))))
| 8,534 | 43.453125 | 117 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/SICD.py | """
The SICDType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from copy import deepcopy
import re
from collections import OrderedDict
from typing import Optional, Dict, Union, Tuple
import numpy
from sarpy.geometry import point_projection
from sarpy.io.complex.naming.utils import get_sicd_name
from sarpy.io.complex.sicd_schema import get_urn_details, get_default_tuple, \
get_default_version_string, get_specification_identifier, WRITABLE_VERSIONS
from sarpy.io.xml.base import Serializable, parse_xml_from_file, parse_xml_from_string
from sarpy.io.xml.descriptors import SerializableDescriptor
from .base import DEFAULT_STRICT
from .CollectionInfo import CollectionInfoType
from .ImageCreation import ImageCreationType
from .ImageData import ImageDataType
from .GeoData import GeoDataType
from .GeoData import SCPType
from .Grid import GridType
from .Timeline import TimelineType
from .Position import PositionType
from .RadarCollection import RadarCollectionType
from .ImageFormation import ImageFormationType
from .SCPCOA import SCPCOAType
from .Radiometric import RadiometricType
from .Antenna import AntennaType
from .ErrorStatistics import ErrorStatisticsType
from .MatchInfo import MatchInfoType
from .RgAzComp import RgAzCompType
from .PFA import PFAType
from .RMA import RMAType
from .validation_checks import detailed_validation_checks
logger = logging.getLogger(__name__)
#########
# Module variables
# specification identifier and default version information, sourced from the
# bundled sarpy SICD schema package
_SICD_SPECIFICATION_IDENTIFIER = get_specification_identifier()
_SICD_DEFAULT_TUPLE = get_default_tuple()
_SICD_VERSION_DEFAULT = get_default_version_string()
# mapping of each writable SICD version to its xml namespace urn and schema details
_SICD_SPEC_DETAILS = {
    key: {
        'namespace': 'urn:SICD:{}'.format(key),
        'details': get_urn_details(key)}
    for key in WRITABLE_VERSIONS}
class SICDType(Serializable):
"""
Sensor Independent Complex Data object, containing all the relevant data to formulate products.
"""
_fields = (
'CollectionInfo', 'ImageCreation', 'ImageData', 'GeoData', 'Grid', 'Timeline', 'Position',
'RadarCollection', 'ImageFormation', 'SCPCOA', 'Radiometric', 'Antenna', 'ErrorStatistics',
'MatchInfo', 'RgAzComp', 'PFA', 'RMA')
_required = (
'CollectionInfo', 'ImageData', 'GeoData', 'Grid', 'Timeline', 'Position',
'RadarCollection', 'ImageFormation', 'SCPCOA')
_choice = ({'required': False, 'collection': ('RgAzComp', 'PFA', 'RMA')}, )
# descriptors
CollectionInfo = SerializableDescriptor(
'CollectionInfo', CollectionInfoType, _required, strict=False,
docstring='General information about the '
'collection.') # type: CollectionInfoType
ImageCreation = SerializableDescriptor(
'ImageCreation', ImageCreationType, _required, strict=False,
docstring='General information about the image '
'creation.') # type: Optional[ImageCreationType]
ImageData = SerializableDescriptor(
'ImageData', ImageDataType, _required, strict=False, # it is senseless to not have this element
docstring='The image pixel data.') # type: ImageDataType
GeoData = SerializableDescriptor(
'GeoData', GeoDataType, _required, strict=False,
docstring='The geographic coordinates of the image coverage area.') # type: GeoDataType
Grid = SerializableDescriptor(
'Grid', GridType, _required, strict=False,
docstring='The image sample grid.') # type: GridType
Timeline = SerializableDescriptor(
'Timeline', TimelineType, _required, strict=False,
docstring='The imaging collection time line.') # type: TimelineType
Position = SerializableDescriptor(
'Position', PositionType, _required, strict=False,
docstring='The platform and ground reference point coordinates as a function of time.') # type: PositionType
RadarCollection = SerializableDescriptor(
'RadarCollection', RadarCollectionType, _required, strict=False,
docstring='The radar collection information.') # type: RadarCollectionType
ImageFormation = SerializableDescriptor(
'ImageFormation', ImageFormationType, _required, strict=False,
docstring='The image formation process.') # type: ImageFormationType
SCPCOA = SerializableDescriptor(
'SCPCOA', SCPCOAType, _required, strict=False,
docstring='*Center of Aperture (COA)* for the *Scene Center Point (SCP)*.') # type: SCPCOAType
Radiometric = SerializableDescriptor(
'Radiometric', RadiometricType, _required, strict=False,
docstring='The radiometric calibration '
'parameters.') # type: Optional[RadiometricType]
Antenna = SerializableDescriptor(
'Antenna', AntennaType, _required, strict=False,
docstring='Parameters that describe the antenna illumination patterns '
'during the collection.') # type: Optional[AntennaType]
ErrorStatistics = SerializableDescriptor(
'ErrorStatistics', ErrorStatisticsType, _required, strict=False,
docstring='Parameters used to compute error statistics within the '
'*SICD* sensor model.') # type: Optional[ErrorStatisticsType]
MatchInfo = SerializableDescriptor(
'MatchInfo', MatchInfoType, _required, strict=False,
docstring='Information about other collections that are matched to the '
'current collection. The current collection is the collection '
'from which this *SICD* product was '
'generated.') # type: Optional[MatchInfoType]
RgAzComp = SerializableDescriptor(
'RgAzComp', RgAzCompType, _required, strict=False,
docstring='Parameters included for a *Range, Doppler* '
'image.') # type: Optional[RgAzCompType]
PFA = SerializableDescriptor(
'PFA', PFAType, _required, strict=False,
docstring='Parameters included when the image is formed using the '
'*Polar Formation Algorithm (PFA)*.') # type: Optional[PFAType]
RMA = SerializableDescriptor(
'RMA', RMAType, _required, strict=False,
docstring='Parameters included when the image is formed using the '
'*Range Migration Algorithm (RMA)*.') # type: Optional[RMAType]
    def __init__(
            self,
            CollectionInfo: CollectionInfoType = None,
            ImageCreation: Optional[ImageCreationType] = None,
            ImageData: ImageDataType = None,
            GeoData: GeoDataType = None,
            Grid: GridType = None,
            Timeline: TimelineType = None,
            Position: PositionType = None,
            RadarCollection: RadarCollectionType = None,
            ImageFormation: ImageFormationType = None,
            SCPCOA: SCPCOAType = None,
            Radiometric: Optional[RadiometricType] = None,
            Antenna: Optional[AntennaType] = None,
            ErrorStatistics: Optional[ErrorStatisticsType] = None,
            MatchInfo: Optional[MatchInfoType] = None,
            RgAzComp: Optional[RgAzCompType] = None,
            PFA: Optional[PFAType] = None,
            RMA: Optional[RMAType] = None,
            **kwargs):
        """
        Parameters
        ----------
        CollectionInfo : CollectionInfoType
        ImageCreation : ImageCreationType
        ImageData : ImageDataType
        GeoData : GeoDataType
        Grid : GridType
        Timeline : TimelineType
        Position : PositionType
        RadarCollection : RadarCollectionType
        ImageFormation : ImageFormationType
        SCPCOA : SCPCOAType
        Radiometric : RadiometricType
        Antenna : AntennaType
        ErrorStatistics : ErrorStatisticsType
        MatchInfo : MatchInfoType
        RgAzComp : RgAzCompType
        PFA : PFAType
        RMA : RMAType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # optional NITF header options (`_NITF` key), consulted only when writing to file
        nitf = kwargs.get('_NITF', {})
        if not isinstance(nitf, dict):
            raise TypeError('Provided NITF options are required to be in dictionary form.')
        self._NITF = nitf
        # projection helper, populated on demand via define_coa_projection
        self._coa_projection = None
        self.CollectionInfo = CollectionInfo
        self.ImageCreation = ImageCreation
        self.ImageData = ImageData
        self.GeoData = GeoData
        self.Grid = Grid
        self.Timeline = Timeline
        self.Position = Position
        self.RadarCollection = RadarCollection
        self.ImageFormation = ImageFormation
        self.SCPCOA = SCPCOA
        self.Radiometric = Radiometric
        self.Antenna = Antenna
        self.ErrorStatistics = ErrorStatistics
        self.MatchInfo = MatchInfo
        self.RgAzComp = RgAzComp
        self.PFA = PFA
        self.RMA = RMA
        super(SICDType, self).__init__(**kwargs)
@property
def coa_projection(self):
"""
The COA Projection object, if previously defined through using :func:`define_coa_projection`.
Returns
-------
None|sarpy.geometry.point_projection.COAProjection
"""
return self._coa_projection
@property
def NITF(self) -> Optional[Dict]:
"""
Optional dictionary of NITF header information, pertains only to subsequent
SICD file writing.
Returns
-------
Dict
"""
return self._NITF
@NITF.setter
def NITF(self, value: Optional[Dict]):
if value is None:
self._NITF = {}
return
if isinstance(value, dict):
self._NITF = value
if not isinstance(value, dict):
raise TypeError('data must be dictionary instance. Received {}'.format(type(value)))
@property
def ImageFormType(self) -> str:
"""
str: *READ ONLY* Identifies the specific image formation type supplied. This is determined by
returning the (first) attribute among `RgAzComp`, `PFA`, `RMA` which is populated. `OTHER` will be returned if
none of them are populated.
"""
for attribute in self._choice[0]['collection']:
if getattr(self, attribute) is not None:
return attribute
return 'OTHER'
    def update_scp(
            self,
            point: Union[numpy.ndarray, list, tuple],
            coord_system: str = 'ECF'):
        """
        Modify the SCP point, and modify the associated SCPCOA fields.
        Parameters
        ----------
        point : numpy.ndarray|tuple|list
            A one-dimensional, 3 element coordinate.
        coord_system : str
            Either 'ECF' or 'LLH', and 'ECF' will take precedence.
        Returns
        -------
        None
        """
        # normalize to a float numpy array, and validate the shape
        if isinstance(point, (list, tuple)):
            point = numpy.array(point, dtype='float64')
        if not isinstance(point, numpy.ndarray):
            raise TypeError('point must be an numpy.ndarray')
        if point.shape != (3, ):
            raise ValueError('point must be a one-dimensional, 3 element array')
        # assigning either coordinate updates the other through the SCP setters
        if coord_system == 'LLH':
            self.GeoData.SCP.LLH = point
        else:
            self.GeoData.SCP.ECF = point
        # keep the derived SCPCOA geometry consistent with the new SCP
        if self.SCPCOA is not None:
            self.SCPCOA.rederive(self.Grid, self.Position, self.GeoData)
def _basic_validity_check(self) -> bool:
condition = super(SICDType, self)._basic_validity_check()
condition &= detailed_validation_checks(self)
return condition
def is_valid(self, recursive: bool = False, stack: bool = False) -> bool:
all_required = self._basic_validity_check()
if not recursive:
return all_required
valid_children = self._recursive_validity_check(stack=stack)
return all_required & valid_children
    def define_geo_image_corners(self, override: bool = False) -> None:
        """
        Defines the GeoData image corner points (if possible), if they are not already defined.
        Parameters
        ----------
        override : bool
            Recalculate the corner points, even when they are already defined?
        Returns
        -------
        None
        """
        if self.GeoData is None:
            # bootstrap GeoData from the radar collection area reference point
            # NOTE(review): assumes RadarCollection.Area.Plane is populated here - confirm
            self.GeoData = GeoDataType(SCP=SCPType(self.RadarCollection.Area.Plane.RefPt.ECF))
        if self.GeoData.ImageCorners is not None and not override:
            return  # nothing to be done
        try:
            vertex_data = self.ImageData.get_full_vertex_data(dtype=numpy.float64)
            corner_coords = self.project_image_to_ground_geo(vertex_data)
        except (ValueError, AttributeError):
            # projection is not feasible with the populated metadata
            return
        self.GeoData.ImageCorners = corner_coords
    def define_geo_valid_data(self) -> None:
        """
        Defines the GeoData valid data corner points (if possible), if they are not already defined.
        Returns
        -------
        None
        """
        if self.GeoData is None or self.GeoData.ValidData is not None:
            return  # nothing to be done
        # silently skip when the image data or projection prerequisites are absent
        try:
            valid_vertices = self.ImageData.get_valid_vertex_data(dtype=numpy.float64)
            if valid_vertices is not None:
                self.GeoData.ValidData = self.project_image_to_ground_geo(valid_vertices)
        except AttributeError:
            pass
    def derive(self) -> None:
        """
        Populates any potential derived data in the SICD structure. This should get called after reading an XML,
        or as a user desires.
        Returns
        -------
        None
        """
        # Note that there is dependency in calling order between steps - don't naively rearrange the following.
        if self.SCPCOA is None:
            self.SCPCOA = SCPCOAType()
        # noinspection PyProtectedMember
        self.SCPCOA._derive_scp_time(self.Grid)
        if self.Grid is not None:
            # noinspection PyProtectedMember
            self.Grid._derive_time_coa_poly(self.CollectionInfo, self.SCPCOA)
        # noinspection PyProtectedMember
        self.SCPCOA._derive_position(self.Position)
        if self.Position is None and self.SCPCOA.ARPPos is not None and \
                self.SCPCOA.ARPVel is not None and self.SCPCOA.SCPTime is not None:
            self.Position = PositionType()  # important parameter derived in the next step
        if self.Position is not None:
            # noinspection PyProtectedMember
            self.Position._derive_arp_poly(self.SCPCOA)
        if self.GeoData is not None:
            self.GeoData.derive()  # ensures both coordinate systems are defined for SCP
        if self.Grid is not None:
            # noinspection PyProtectedMember
            self.Grid.derive_direction_params(self.ImageData)
        if self.RadarCollection is not None:
            self.RadarCollection.derive()
        if self.ImageFormation is not None:
            # call after RadarCollection.derive(), and only if the entire transmitted bandwidth was used to process.
            # noinspection PyProtectedMember
            self.ImageFormation._derive_tx_frequency_proc(self.RadarCollection)
        # noinspection PyProtectedMember
        self.SCPCOA._derive_geometry_parameters(self.GeoData)
        # verify ImageFormation things make sense
        im_form_algo = None
        if self.ImageFormation is not None and self.ImageFormation.ImageFormAlgo is not None:
            im_form_algo = self.ImageFormation.ImageFormAlgo.upper()
        # image formation algorithm specific derivation
        if im_form_algo == 'RGAZCOMP':
            # Check Grid settings
            if self.Grid is None:
                self.Grid = GridType()
            # noinspection PyProtectedMember
            self.Grid._derive_rg_az_comp(self.GeoData, self.SCPCOA, self.RadarCollection, self.ImageFormation)
            # Check RgAzComp settings
            if self.RgAzComp is None:
                self.RgAzComp = RgAzCompType()
            # noinspection PyProtectedMember
            self.RgAzComp._derive_parameters(self.Grid, self.Timeline, self.SCPCOA)
        elif im_form_algo == 'PFA':
            if self.PFA is None:
                self.PFA = PFAType()
            # noinspection PyProtectedMember
            self.PFA._derive_parameters(self.Grid, self.SCPCOA, self.GeoData, self.Position, self.Timeline)
            if self.Grid is not None:
                # noinspection PyProtectedMember
                self.Grid._derive_pfa(
                    self.GeoData, self.RadarCollection, self.ImageFormation, self.Position, self.PFA)
        elif im_form_algo == 'RMA' or self.RMA is not None:
            if self.RMA is not None:
                # noinspection PyProtectedMember
                self.RMA._derive_parameters(self.SCPCOA, self.Position, self.RadarCollection, self.ImageFormation)
            if self.Grid is not None:
                # noinspection PyProtectedMember
                self.Grid._derive_rma(self.RMA, self.GeoData, self.RadarCollection, self.ImageFormation, self.Position)
        # geographic footprint and radiometric derivations require the geometry above
        self.define_geo_image_corners()
        self.define_geo_valid_data()
        if self.Radiometric is not None:
            # noinspection PyProtectedMember
            self.Radiometric._derive_parameters(self.Grid, self.SCPCOA)
def get_transmit_band_name(self) -> str:
"""
Gets the processed transmit band name.
Returns
-------
str
"""
if self.ImageFormation is None:
return 'UN'
return self.ImageFormation.get_transmit_band_name()
def get_processed_polarization_abbreviation(self) -> str:
"""
Gets the processed polarization abbreviation (two letters).
Returns
-------
str
"""
if self.ImageFormation is None:
return 'UN'
return self.ImageFormation.get_polarization_abbreviation()
def get_processed_polarization(self) -> str:
"""
Gets the processed polarization.
Returns
-------
str
"""
if self.ImageFormation is None:
return 'UN'
return self.ImageFormation.get_polarization()
    def apply_reference_frequency(self, reference_frequency: float) -> None:
        """
        If the reference frequency is used, adjust the necessary fields accordingly.
        Parameters
        ----------
        reference_frequency : float
            The reference frequency.
        Raises
        ------
        ValueError
            When RadarCollection or RadarCollection.RefFreqIndex is unpopulated.
        Returns
        -------
        None
        """
        if self.RadarCollection is None:
            raise ValueError('RadarCollection is not defined. The reference frequency cannot be applied.')
        elif not self.RadarCollection.RefFreqIndex:  # it's None or 0
            raise ValueError(
                'RadarCollection.RefFreqIndex is not defined. The reference frequency should not be applied.')
        # offset all the frequency dependent fields in each populated child
        # noinspection PyProtectedMember
        self.RadarCollection._apply_reference_frequency(reference_frequency)
        if self.ImageFormation is not None:
            # noinspection PyProtectedMember
            self.ImageFormation._apply_reference_frequency(reference_frequency)
        if self.Antenna is not None:
            # noinspection PyProtectedMember
            self.Antenna._apply_reference_frequency(reference_frequency)
        if self.RMA is not None:
            # noinspection PyProtectedMember
            self.RMA._apply_reference_frequency(reference_frequency)
def get_ground_resolution(self) -> Tuple[float, float]:
"""
Gets the ground resolution for the sicd.
Returns
-------
(float, float)
"""
graze = numpy.deg2rad(self.SCPCOA.GrazeAng)
twist = numpy.deg2rad(self.SCPCOA.TwistAng)
row_ss = self.Grid.Row.SS
col_ss = self.Grid.Col.SS
row_ground = abs(float(row_ss / numpy.cos(graze)))
col_ground = float(numpy.sqrt((numpy.tan(graze) * numpy.tan(twist) * row_ss)**2 + (col_ss/numpy.cos(twist))**2))
return row_ground, col_ground
    def can_project_coordinates(self) -> bool:
        """
        Determines whether the necessary elements are populated to permit projection
        between image and physical coordinates. If False, then the (first discovered)
        reason why not will be logged at error level.

        Returns
        -------
        bool
        """
        # a previously constructed projection object is sufficient evidence
        if self._coa_projection is not None:
            return True
        # GeoData elements?
        if self.GeoData is None:
            logger.error('Formulating a projection is not feasible because GeoData is not populated.')
            return False
        if self.GeoData.SCP is None:
            logger.error('Formulating a projection is not feasible because GeoData.SCP is not populated.')
            return False
        if self.GeoData.SCP.ECF is None:
            logger.error('Formulating a projection is not feasible because GeoData.SCP.ECF is not populated.')
            return False
        # ImageData elements?
        if self.ImageData is None:
            logger.error('Formulating a projection is not feasible because ImageData is not populated.')
            return False
        if self.ImageData.FirstRow is None:
            logger.error('Formulating a projection is not feasible because ImageData.FirstRow is not populated.')
            return False
        if self.ImageData.FirstCol is None:
            logger.error('Formulating a projection is not feasible because ImageData.FirstCol is not populated.')
            return False
        if self.ImageData.SCPPixel is None:
            logger.error('Formulating a projection is not feasible because ImageData.SCPPixel is not populated.')
            return False
        if self.ImageData.SCPPixel.Row is None:
            logger.error('Formulating a projection is not feasible because ImageData.SCPPixel.Row is not populated.')
            return False
        if self.ImageData.SCPPixel.Col is None:
            logger.error('Formulating a projection is not feasible because ImageData.SCPPixel.Col is not populated.')
            return False
        # Position elements?
        if self.Position is None:
            logger.error('Formulating a projection is not feasible because Position is not populated.')
            return False
        if self.Position.ARPPoly is None:
            logger.error('Formulating a projection is not feasible because Position.ARPPoly is not populated.')
            return False
        # Grid elements?
        if self.Grid is None:
            logger.error('Formulating a projection is not feasible because Grid is not populated.')
            return False
        # a missing TimeCOAPoly is only a warning - projection remains feasible
        if self.Grid.TimeCOAPoly is None:
            logger.warning(
                'Formulating a projection may be inaccurate, because Grid.TimeCOAPoly is not populated and '
                'a constant approximation will be used.')
        if self.Grid.Row is None:
            logger.error('Formulating a projection is not feasible because Grid.Row is not populated.')
            return False
        if self.Grid.Row.SS is None:
            logger.error('Formulating a projection is not feasible because Grid.Row.SS is not populated.')
            return False
        if self.Grid.Col is None:
            logger.error('Formulating a projection is not feasible because Grid.Col is not populated.')
            return False
        if self.Grid.Col.SS is None:
            logger.error('Formulating a projection is not feasible because Grid.Col.SS is not populated.')
            return False
        if self.Grid.Type is None:
            logger.error('Formulating a projection is not feasible because Grid.Type is not populated.')
            return False
        # specifics for Grid.Type value - each grid type has its own required metadata
        if self.Grid.Type == 'RGAZIM':
            if self.ImageFormation is None:
                logger.error(
                    'Formulating a projection is not feasible because Grid.Type = "RGAZIM",\n\t'
                    'but ImageFormation is not populated.')
                return False
            if self.ImageFormation.ImageFormAlgo is None:
                logger.error(
                    'Formulating a projection is not feasible because Grid.Type = "RGAZIM",\n\t'
                    'but ImageFormation.ImageFormAlgo is not populated.')
                return False
            if self.ImageFormation.ImageFormAlgo == 'PFA':
                if self.PFA is None:
                    logger.error(
                        'ImageFormation.ImageFormAlgo is "PFA",\n\t'
                        'but the PFA parameter is not populated.\n\t'
                        'No projection can be done.')
                    return False
                if self.PFA.PolarAngPoly is None:
                    logger.error(
                        'ImageFormation.ImageFormAlgo is "PFA",\n\t'
                        'but the PFA.PolarAngPoly parameter is not populated.\n\t'
                        'No projection can be done.')
                    return False
                if self.PFA.SpatialFreqSFPoly is None:
                    logger.error(
                        'ImageFormation.ImageFormAlgo is "PFA",\n\t'
                        'but the PFA.SpatialFreqSFPoly parameter is not populated.\n\t'
                        'No projection can be done.')
                    return False
            elif self.ImageFormation.ImageFormAlgo == 'RGAZCOMP':
                if self.RgAzComp is None:
                    logger.error(
                        'ImageFormation.ImageFormAlgo is "RGAZCOMP",\n\t'
                        'but the RgAzComp parameter is not populated.\n\t'
                        'No projection can be done.')
                    return False
                if self.RgAzComp.AzSF is None:
                    logger.error(
                        'ImageFormation.ImageFormAlgo is "RGAZCOMP",\n\t'
                        'but the RgAzComp.AzSF parameter is not populated.\n\t'
                        'No projection can be done.')
                    return False
            else:
                logger.error(
                    'Grid.Type = "RGAZIM", and got unhandled ImageFormation.ImageFormAlgo {}.\n\t'
                    'No projection can be done.'.format(self.ImageFormation.ImageFormAlgo))
                return False
        elif self.Grid.Type == 'RGZERO':
            if self.RMA is None or self.RMA.INCA is None:
                logger.error(
                    'Grid.Type is "RGZERO", but the RMA.INCA parameter is not populated.\n\t'
                    'No projection can be done.')
                return False
            if self.RMA.INCA.R_CA_SCP is None or self.RMA.INCA.TimeCAPoly is None \
                    or self.RMA.INCA.DRateSFPoly is None:
                logger.error(
                    'Grid.Type is "RGZERO", but the parameters\n\t'
                    'R_CA_SCP, TimeCAPoly, or DRateSFPoly of RMA.INCA parameter are not populated.\n\t'
                    'No projection can be done.')
                return False
        elif self.Grid.Type in ['XRGYCR', 'XCTYAT', 'PLANE']:
            if self.Grid.Row.UVectECF is None or self.Grid.Col.UVectECF is None:
                logger.error(
                    'Grid.Type is one of ["XRGYCR", "XCTYAT", "PLANE"], but the UVectECF parameter of '
                    'Grid.Row or Grid.Col is not populated.\n\t'
                    'No projection can be formulated.')
                return False
        else:
            logger.error(
                'Unhandled Grid.Type {},\n\t'
                'unclear how to formulate a projection.'.format(self.Grid.Type))
            return False
        # logger.info('Consider calling sicd.define_coa_projection if the sicd structure is defined.')
        return True
def define_coa_projection(
self,
delta_arp: Union[None, numpy.ndarray, list, tuple] = None,
delta_varp: Union[None, numpy.ndarray, list, tuple] = None,
range_bias: Optional[float] = None,
adj_params_frame: str = 'ECF',
override: bool = True) -> None:
"""
Define the COAProjection object.
Parameters
----------
delta_arp : None|numpy.ndarray|list|tuple
ARP position adjustable parameter (ECF, m). Defaults to 0 in each coordinate.
delta_varp : None|numpy.ndarray|list|tuple
VARP position adjustable parameter (ECF, m/s). Defaults to 0 in each coordinate.
range_bias : float|int
Range bias adjustable parameter (m), defaults to 0.
adj_params_frame : str
One of ['ECF', 'RIC_ECF', 'RIC_ECI'], specifying the coordinate frame used for
expressing `delta_arp` and `delta_varp` parameters.
override : bool
should we redefine, if it is previously defined?
Returns
-------
None
"""
if not self.can_project_coordinates():
logger.error('The COAProjection object cannot be defined.')
return
if self._coa_projection is not None and not override:
return
self._coa_projection = point_projection.COAProjection.from_sicd(
self, delta_arp=delta_arp, delta_varp=delta_varp, range_bias=range_bias,
adj_params_frame=adj_params_frame)
def project_ground_to_image(
self,
coords: Union[numpy.ndarray, list, tuple],
**kwargs) -> Tuple[numpy.ndarray, Union[numpy.ndarray, float], Union[numpy.ndarray, int]]:
"""
Transforms a 3D ECF point to pixel (row/column) coordinates. This is
implemented in accordance with the SICD Image Projections Description Document.
**Really Scene-To-Image projection.**"
Parameters
----------
coords : numpy.ndarray|tuple|list
ECF coordinate to map to scene coordinates, of size `N x 3`.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.ground_to_image` method.
Returns
-------
image_points: numpy.ndarray
The determined image point array, of size `N x 2`. Following the
SICD convention, he upper-left pixel is [0, 0].
delta_gpn: numpy.ndarray|float
Residual ground plane displacement (m).
iterations: numpy.ndarray|int
The number of iterations performed.
See Also
--------
sarpy.geometry.point_projection.ground_to_image
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.ground_to_image(coords, self, **kwargs)
def project_ground_to_image_geo(
self,
coords: Union[numpy.ndarray, list, tuple],
ordering: str = 'latlong',
**kwargs) -> Tuple[numpy.ndarray, Union[numpy.ndarray, float], Union[numpy.ndarray, int]]:
"""
Transforms a 3D Lat/Lon/HAE point to pixel (row/column) coordinates. This is
implemented in accordance with the SICD Image Projections Description Document.
**Really Scene-To-Image projection.**"
Parameters
----------
coords : numpy.ndarray|tuple|list
ECF coordinate to map to scene coordinates, of size `N x 3`.
ordering : str
If 'longlat', then the input is `[longitude, latitude, hae]`.
Otherwise, the input is `[latitude, longitude, hae]`. Passed through
to :func:`sarpy.geometry.geocoords.geodetic_to_ecf`.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.ground_to_image_geo` method.
Returns
-------
image_points: numpy.ndarray
The determined image point array, of size `N x 2`. Following the
SICD convention, he upper-left pixel is [0, 0].
delta_gpn: numpy.ndarray|float
Residual ground plane displacement (m).
iterations: numpy.ndarray|int
The number of iterations performed.
See Also
--------
sarpy.geometry.point_projection.ground_to_image_geo
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.ground_to_image_geo(coords, self, ordering=ordering, **kwargs)
def project_image_to_ground(
self,
im_points: Union[numpy.ndarray, list, tuple],
projection_type: str = 'HAE',
**kwargs) -> numpy.ndarray:
"""
Transforms image coordinates to ground plane ECF coordinate via the algorithm(s)
described in SICD Image Projections document.
Parameters
----------
im_points : numpy.ndarray|list|tuple
the image coordinate array
projection_type : str
One of `['PLANE', 'HAE', 'DEM']`. Type `DEM` is a work in progress.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.image_to_ground` method.
Returns
-------
numpy.ndarray
Ground Plane Point (in ECF coordinates) corresponding to the input image coordinates.
See Also
--------
sarpy.geometry.point_projection.image_to_ground
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.image_to_ground(
im_points, self, projection_type=projection_type, **kwargs)
def project_image_to_ground_geo(
self,
im_points: Union[numpy.ndarray, list, tuple],
ordering: str = 'latlong',
projection_type: str = 'HAE',
**kwargs) -> numpy.ndarray:
"""
Transforms image coordinates to ground plane WGS-84 coordinate via the algorithm(s)
described in SICD Image Projections document.
Parameters
----------
im_points : numpy.ndarray|list|tuple
the image coordinate array
projection_type : str
One of `['PLANE', 'HAE', 'DEM']`. Type `DEM` is a work in progress.
ordering : str
Determines whether return is ordered as `[lat, long, hae]` or `[long, lat, hae]`.
Passed through to :func:`sarpy.geometry.geocoords.ecf_to_geodetic`.
kwargs
The keyword arguments for the :func:`sarpy.geometry.point_projection.image_to_ground_geo` method.
Returns
-------
numpy.ndarray
Ground Plane Point (in ECF coordinates) corresponding to the input image coordinates.
See Also
--------
sarpy.geometry.point_projection.image_to_ground_geo
"""
if 'use_structure_coa' not in kwargs:
kwargs['use_structure_coa'] = True
return point_projection.image_to_ground_geo(
im_points, self, ordering=ordering, projection_type=projection_type, **kwargs)
def populate_rniirs(
self,
signal: Optional[float] = None,
noise: Optional[float] = None,
override: bool = False) -> None:
"""
Given the signal and noise values (in sigma zero power units),
calculate and populate an estimated RNIIRS value.
Parameters
----------
signal : None|float
noise : None|float
override : bool
Override the value, if present.
Returns
-------
None
"""
from sarpy.processing.sicd.rgiqe import populate_rniirs_for_sicd
populate_rniirs_for_sicd(self, signal=signal, noise=noise, override=override)
def get_suggested_name(
self,
product_number: int = 1) -> str:
"""
Get the suggested name stem for the sicd and derived data.
Parameters
----------
product_number : int
Returns
-------
str
"""
sugg_name = get_sicd_name(self, product_number)
if sugg_name is None:
sugg_name = self.CollectionInfo.CoreName if self.CollectionInfo.CoreName is not None else \
'Unknown_Sicd{}'.format(product_number)
return re.sub(':', '_', sugg_name)
def version_required(self) -> Tuple[int, int, int]:
"""
What SICD version is required for valid support?
Returns
-------
tuple
"""
required = (1, 1, 0)
for fld in self._fields:
val = getattr(self, fld)
if val is not None and hasattr(val, 'version_required'):
required = max(required, val.version_required())
return required
    def get_des_details(
            self,
            check_older_version: bool = False) -> Dict:
        """
        Gets the correct current SICD DES subheader details.

        Parameters
        ----------
        check_older_version : bool
            If True and compatible, then version 1.1.0 information will be returned.
            Otherwise, the most recent supported version will be returned.

        Returns
        -------
        dict
        """
        required_version = self.version_required()
        # noinspection PyTypeChecker
        if required_version > _SICD_DEFAULT_TUPLE or check_older_version:
            # use the specification entry matching the minimum required version
            info = _SICD_SPEC_DETAILS['{}.{}.{}'.format(*required_version)]
        else:
            # use the default (most recent supported) specification entry
            info = _SICD_SPEC_DETAILS[_SICD_VERSION_DEFAULT]
        spec_ns = info['namespace']
        details = info['details']
        spec_version = details['version']
        spec_date = details['date']
        # NITF DES subheader user-defined fields for the SICD XML payload
        return OrderedDict([
            ('DESSHSI', _SICD_SPECIFICATION_IDENTIFIER),
            ('DESSHSV', spec_version),
            ('DESSHSD', spec_date),
            ('DESSHTN', spec_ns)])
def copy(self):
"""
Provides a deep copy.
Returns
-------
SICDType
"""
out = super(SICDType, self).copy()
out._NITF = deepcopy(self._NITF)
return out
    def to_xml_bytes(self, urn=None, tag='SICD', check_validity=False, strict=DEFAULT_STRICT):
        """
        Gets the XML serialization, as bytes, defaulting the namespace urn to the
        default SICD specification namespace when not supplied.
        """
        if urn is None:
            # fall back to the default SICD specification namespace
            urn = _SICD_SPEC_DETAILS[_SICD_VERSION_DEFAULT]['namespace']
        return super(SICDType, self).to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict)
    def to_xml_string(self, urn=None, tag='SICD', check_validity=False, strict=DEFAULT_STRICT):
        """
        Gets the utf-8 decoded form of :meth:`to_xml_bytes`, as a string.
        """
        return self.to_xml_bytes(urn=urn, tag=tag, check_validity=check_validity, strict=strict).decode('utf-8')
def create_subset_structure(
self,
row_bounds: Optional[Tuple[int, int]] = None,
column_bounds: Optional[Tuple[int, int]] = None):
"""
Create a version of the SICD structure for a given subset.
Parameters
----------
row_bounds : None|tuple
column_bounds : None|tuple
Returns
-------
sicd : SICDType
The sicd
row_bounds : tuple
Vetted tuple of the form `(min row, max row)`.
column_bounds : tuple
Vetted tuple of the form `(min column, max column)`.
"""
sicd = self.copy()
num_rows = self.ImageData.NumRows
num_cols = self.ImageData.NumCols
if row_bounds is not None:
start_row = int(row_bounds[0])
end_row = int(row_bounds[1])
if not (0 <= start_row < end_row <= num_rows):
raise ValueError(
'row bounds ({}, {}) are not sensible for NumRows {}'.format(
start_row, end_row, num_rows))
sicd.ImageData.FirstRow = sicd.ImageData.FirstRow + start_row
sicd.ImageData.NumRows = (end_row - start_row)
out_row_bounds = (start_row, end_row)
else:
out_row_bounds = (0, num_rows)
if column_bounds is not None:
start_col = int(column_bounds[0])
end_col = int(column_bounds[1])
if not (0 <= start_col < end_col <= num_cols):
raise ValueError(
'column bounds ({}, {}) are not sensible for NumCols {}'.format(
start_col, end_col, num_cols))
sicd.ImageData.FirstCol = sicd.ImageData.FirstCol + start_col
sicd.ImageData.NumCols = (end_col - start_col)
out_col_bounds = (start_col, end_col)
else:
out_col_bounds = (0, num_cols)
sicd.define_geo_image_corners(override=True)
return sicd, out_row_bounds, out_col_bounds
@classmethod
def from_xml_file(cls, file_path):
"""
Construct the sicd object from a stand-alone xml file path.
Parameters
----------
file_path : str
Returns
-------
SICDType
"""
root_node, xml_ns = parse_xml_from_file(file_path)
ns_key = 'default' if 'default' in xml_ns else None
return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)
@classmethod
def from_xml_string(cls, xml_string):
"""
Construct the sicd object from a xml string.
Parameters
----------
xml_string : str|bytes
Returns
-------
SICDType
"""
root_node, xml_ns = parse_xml_from_string(xml_string)
ns_key = 'default' if 'default' in xml_ns else None
return cls.from_node(root_node, xml_ns=xml_ns, ns_key=ns_key)
| 41,397 | 37.725912 | 120 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/SCPCOA.py | """
The SCPCOAType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
from typing import Union, Optional, Tuple
import numpy
from numpy.linalg import norm
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import StringEnumDescriptor, FloatDescriptor, \
SerializableDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import XYZType
from sarpy.geometry import geocoords
logger = logging.getLogger(__name__)
class GeometryCalculator(object):
    """
    Performs the necessary SCPCOA geometry element calculations.
    """

    def __init__(
            self,
            SCP: numpy.ndarray,
            ARPPos: numpy.ndarray,
            ARPVel: numpy.ndarray):
        """
        Parameters
        ----------
        SCP : numpy.ndarray
            The scene center point.
        ARPPos : numpy.ndarray
            The aperture position in ECEF coordinates at the SCP center of aperture time.
        ARPVel : numpy.ndarray
            The aperture velocity in ECEF coordinates at the SCP center of aperture time.
        """
        self.SCP = SCP
        self.ARP = ARPPos
        self.ARP_vel = ARPVel
        self.LOS = (self.SCP - self.ARP)  # aperture-to-scene line of sight
        # unit vector versions
        self.uSCP = self._make_unit(self.SCP)
        self.uARP = self._make_unit(self.ARP)
        self.uARP_vel = self._make_unit(self.ARP_vel)
        self.uLOS = self._make_unit(self.LOS)
        self.left = numpy.cross(self.uARP, self.uARP_vel)
        # look is +1 when the line of sight is to the left of the velocity, -1 to the right
        self.look = numpy.sign(self.left.dot(self.uLOS))
        # Earth Tangent Plane (ETP) at the SCP is the plane tangent to the surface of constant height
        # above the WGS 84 ellipsoid (HAE) that contains the SCP. The ETP is an approximation to the
        # ground plane at the SCP.
        self.ETP = geocoords.wgs_84_norm(SCP)
        # slant plane unit normal
        self.uSPZ = self._make_unit(self.look*numpy.cross(self.ARP_vel, self.uLOS))
        # perpendicular component of range vector wrt the ground plane
        self.uGPX = self._make_unit(-self.uLOS + numpy.dot(self.ETP, self.uLOS)*self.ETP)
        self.uGPY = numpy.cross(self.ETP, self.uGPX)  # already unit vector
        # perpendicular component of north wrt the ground plane
        self.uNORTH = self._make_unit(numpy.array([0, 0, 1]) - self.ETP[2]*self.ETP)
        self.uEAST = numpy.cross(self.uNORTH, self.ETP)  # already unit vector

    @staticmethod
    def _make_unit(vec: numpy.ndarray) -> numpy.ndarray:
        """
        Gets a unit vector in the direction of `vec`. A suspiciously small norm
        is logged at error level (not raised), and the division proceeds anyway.
        """
        vec_norm = norm(vec)
        if vec_norm < 1e-6:
            logger.error(
                'The input vector to be normalized has norm {},\n\t'
                'this is likely a mistake'.format(vec_norm))
        return vec/vec_norm

    @property
    def ROV(self) -> float:
        """
        float: Range over velocity
        """
        return float(norm(self.LOS)/norm(self.ARP_vel))

    @property
    def SideOfTrack(self) -> str:
        """
        str: 'R' for a right-looking collection, otherwise 'L'.
        """
        return 'R' if self.look < 0 else 'L'

    @property
    def SlantRange(self) -> float:
        """
        float: The magnitude of the line of sight vector.
        """
        return float(norm(self.LOS))

    @property
    def GroundRange(self) -> float:
        """
        float: Arc distance from the aperture nadir to the SCP along a spherical
        model of radius `norm(SCP)`.
        """
        # wrapped in float() for consistency with the other properties - previously
        # this was the only property returning a raw numpy scalar despite the
        # declared float return annotation
        return float(norm(self.SCP)*numpy.arccos(self.uSCP.dot(self.uARP)))

    @property
    def DopplerConeAng(self) -> float:
        """
        float: Angle between the velocity direction and the line of sight, in degrees.
        """
        return float(numpy.rad2deg(numpy.arccos(self.uARP_vel.dot(self.uLOS))))

    @property
    def GrazeAng(self) -> float:
        """
        float: Grazing angle between the line of sight and the ETP, in degrees.
        """
        return self.get_graze_and_incidence()[0]

    @property
    def IncidenceAng(self) -> float:
        """
        float: Incidence angle, the complement of the grazing angle, in degrees.
        """
        return self.get_graze_and_incidence()[1]

    def get_graze_and_incidence(self) -> Tuple[float, float]:
        """
        Gets the graze angle and incidence angle, in degrees.

        Returns
        -------
        (float, float)
        """
        graze_ang = -float(numpy.rad2deg(numpy.arcsin(self.ETP.dot(self.uLOS))))
        return graze_ang, 90 - graze_ang

    @property
    def TwistAng(self) -> float:
        """
        float: Angle between cross range in the ETP and cross range in the slant
        plane, in degrees.
        """
        return float(-numpy.rad2deg(numpy.arcsin(self.uGPY.dot(self.uSPZ))))

    @property
    def SquintAngle(self) -> float:
        """
        float: The squint angle, in degrees.
        """
        # project velocity and line of sight into the plane normal to the aperture position
        arp_vel_proj = self._make_unit(self.uARP_vel - self.uARP_vel.dot(self.uARP)*self.uARP)
        los_proj = self._make_unit(self.uLOS - self.uLOS.dot(self.uARP)*self.uARP)
        return float(numpy.rad2deg(
            numpy.arctan2(numpy.cross(los_proj, arp_vel_proj).dot(self.uARP), arp_vel_proj.dot(los_proj))))

    @property
    def SlopeAng(self) -> float:
        """
        float: Angle between the ETP normal and the slant plane normal, in degrees.
        """
        return float(numpy.rad2deg(numpy.arccos(self.ETP.dot(self.uSPZ))))

    @property
    def AzimAng(self) -> float:
        """
        float: Angle from north to the ground plane range direction, measured
        clockwise in the ETP and wrapped to a positive value, in degrees.
        """
        azim_ang = numpy.rad2deg(numpy.arctan2(self.uGPX.dot(self.uEAST), self.uGPX.dot(self.uNORTH)))
        azim_ang = azim_ang if azim_ang > 0 else azim_ang + 360
        return float(azim_ang)

    @property
    def LayoverAng(self) -> float:
        """
        float: Angle from north to the layover direction in the ETP, in degrees.
        """
        return self.get_layover()[0]

    def get_layover(self) -> Tuple[float, float]:
        """
        Gets the layover angle, in degrees, and layover magnitude.

        Returns
        -------
        (float, float)
        """
        layover_ground = self.ETP - self.ETP.dot(self.uSPZ)*self.uSPZ
        layover_ang = numpy.rad2deg(
            numpy.arctan2(layover_ground.dot(self.uEAST), layover_ground.dot(self.uNORTH)))
        layover_ang = layover_ang if layover_ang > 0 else layover_ang + 360
        return float(layover_ang), float(norm(layover_ground))

    def get_shadow(self) -> Tuple[float, float]:
        """
        Gets the shadow angle, in degrees, and shadow magnitude.

        Returns
        -------
        (float, float)
        """
        shadow = self.ETP - self.uLOS/self.uLOS.dot(self.ETP)
        shadow_prime = shadow - self.uSPZ*(shadow.dot(self.ETP)/self.uSPZ.dot(self.ETP))
        shadow_angle = numpy.rad2deg(numpy.arctan2(shadow_prime.dot(self.uGPY), shadow_prime.dot(self.uGPX)))
        return float(shadow_angle), float(norm(shadow_prime))
class SCPCOAType(Serializable):
    """
    Center of Aperture (COA) for the Scene Center Point (SCP).
    """
    _fields = (
        'SCPTime', 'ARPPos', 'ARPVel', 'ARPAcc', 'SideOfTrack', 'SlantRange', 'GroundRange', 'DopplerConeAng',
        'GrazeAng', 'IncidenceAng', 'TwistAng', 'SlopeAng', 'AzimAng', 'LayoverAng')
    _required = _fields
    _numeric_format = {
        'SCPTime': FLOAT_FORMAT, 'SlantRange': '0.17E', 'GroundRange': '0.17E',
        'DopplerConeAng': FLOAT_FORMAT, 'GrazeAng': FLOAT_FORMAT, 'IncidenceAng': FLOAT_FORMAT,
        'TwistAng': FLOAT_FORMAT, 'SlopeAng': FLOAT_FORMAT, 'AzimAng': FLOAT_FORMAT,
        'LayoverAng': FLOAT_FORMAT}
    # class variables
    _SIDE_OF_TRACK_VALUES = ('L', 'R')
    # descriptors
    SCPTime = FloatDescriptor(
        'SCPTime', _required, strict=DEFAULT_STRICT,
        docstring='*Center Of Aperture time for the SCP (t_COA_SCP)*, relative to collection '
                  'start in seconds.')  # type: float
    ARPPos = SerializableDescriptor(
        'ARPPos', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='Aperture position at *t_COA_SCP* in ECF coordinates.')  # type: XYZType
    ARPVel = SerializableDescriptor(
        'ARPVel', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='ARP Velocity at *t_COA_SCP* in ECF coordinates.')  # type: XYZType
    ARPAcc = SerializableDescriptor(
        'ARPAcc', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='ARP Acceleration at *t_COA_SCP* in ECF coordinates.')  # type: XYZType
    SideOfTrack = StringEnumDescriptor(
        'SideOfTrack', _SIDE_OF_TRACK_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Side of track.')  # type: str
    SlantRange = FloatDescriptor(
        'SlantRange', _required, strict=DEFAULT_STRICT,
        docstring='Slant range from the aperture to the *SCP* in meters.')  # type: float
    GroundRange = FloatDescriptor(
        'GroundRange', _required, strict=DEFAULT_STRICT,
        docstring='Ground Range from the aperture nadir to the *SCP*. Distance measured along spherical earth model '
                  'passing through the *SCP* in meters.')  # type: float
    DopplerConeAng = FloatDescriptor(
        'DopplerConeAng', _required, strict=DEFAULT_STRICT,
        docstring='The Doppler Cone Angle to SCP at *t_COA_SCP* in degrees.')  # type: float
    GrazeAng = FloatDescriptor(
        'GrazeAng', _required, strict=DEFAULT_STRICT, bounds=(0., 90.),
        docstring='Grazing Angle between the SCP *Line of Sight (LOS)* and *Earth Tangent Plane (ETP)*.')  # type: float
    IncidenceAng = FloatDescriptor(
        'IncidenceAng', _required, strict=DEFAULT_STRICT, bounds=(0., 90.),
        docstring='Incidence Angle between the *LOS* and *ETP* normal.')  # type: float
    TwistAng = FloatDescriptor(
        'TwistAng', _required, strict=DEFAULT_STRICT, bounds=(-90., 90.),
        docstring='Angle between cross range in the *ETP* and cross range in the slant plane.')  # type: float
    SlopeAng = FloatDescriptor(
        'SlopeAng', _required, strict=DEFAULT_STRICT, bounds=(0., 90.),
        docstring='Slope Angle from the *ETP* to the slant plane at *t_COA_SCP*.')  # type: float
    AzimAng = FloatDescriptor(
        'AzimAng', _required, strict=DEFAULT_STRICT, bounds=(0., 360.),
        docstring='Angle from north to the line from the *SCP* to the aperture nadir at *COA*. Measured '
                  'clockwise in the *ETP*.')  # type: float
    LayoverAng = FloatDescriptor(
        'LayoverAng', _required, strict=DEFAULT_STRICT, bounds=(0., 360.),
        docstring='Angle from north to the layover direction in the *ETP* at *COA*. Measured '
                  'clockwise in the *ETP*.')  # type: float

    def __init__(
            self,
            SCPTime: float = None,
            ARPPos: Union[XYZType, numpy.ndarray, list, tuple] = None,
            ARPVel: Union[XYZType, numpy.ndarray, list, tuple] = None,
            ARPAcc: Union[XYZType, numpy.ndarray, list, tuple] = None,
            SideOfTrack: str = None,
            SlantRange: float = None,
            GroundRange: float = None,
            DopplerConeAng: float = None,
            GrazeAng: float = None,
            IncidenceAng: float = None,
            TwistAng: float = None,
            SlopeAng: float = None,
            AzimAng: float = None,
            LayoverAng: float = None,
            **kwargs):
        """
        Parameters
        ----------
        SCPTime : float
        ARPPos : XYZType|numpy.ndarray|list|tuple
        ARPVel : XYZType|numpy.ndarray|list|tuple
        ARPAcc : XYZType|numpy.ndarray|list|tuple
        SideOfTrack : str
        SlantRange : float
        GroundRange : float
        DopplerConeAng : float
        GrazeAng : float
        IncidenceAng : float
        TwistAng : float
        SlopeAng : float
        AzimAng : float
        LayoverAng : float
        kwargs
        """
        # derived (non-serialized) values, populated by _derive_geometry_parameters
        self._ROV = None
        self._squint = None
        self._shadow = None
        self._shadow_magnitude = None
        self._layover_magnitude = None
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.SCPTime = SCPTime
        self.ARPPos, self.ARPVel, self.ARPAcc = ARPPos, ARPVel, ARPAcc
        self.SideOfTrack = SideOfTrack
        self.SlantRange, self.GroundRange = SlantRange, GroundRange
        self.DopplerConeAng, self.GrazeAng, self.IncidenceAng = DopplerConeAng, GrazeAng, IncidenceAng
        self.TwistAng, self.SlopeAng, self.AzimAng, self.LayoverAng = TwistAng, SlopeAng, AzimAng, LayoverAng
        super(SCPCOAType, self).__init__(**kwargs)

    @property
    def look(self) -> Optional[int]:
        """
        int: An integer version of `SideOfTrack`:

            * None if `SideOfTrack` is not defined

            * -1 if SideOfTrack == 'R'

            * 1 if SideOftrack == 'L'
        """
        if self.SideOfTrack is None:
            return None
        else:
            return -1 if self.SideOfTrack == 'R' else 1

    @property
    def ROV(self) -> Optional[float]:
        """
        float: The Ratio of Range to Velocity at Center of Aperture time.
        """
        return self._ROV

    @property
    def ThetaDot(self) -> Optional[float]:
        """
        float: Derivative of Theta as a function of time at Center of Aperture time.
        """
        if self.DopplerConeAng is None or self.ROV is None:
            return None
        return float(numpy.sin(numpy.deg2rad(self.DopplerConeAng))/self.ROV)

    @property
    def MultipathGround(self) -> Optional[float]:
        """
        float: The anticipated angle of multipath features on the ground in degrees.
        """
        if self.GrazeAng is None or self.TwistAng is None:
            return None
        return numpy.rad2deg(
            -numpy.arctan(numpy.tan(numpy.deg2rad(self.TwistAng))*numpy.sin(numpy.deg2rad(self.GrazeAng))))

    @property
    def Multipath(self) -> Optional[float]:
        """
        float: The anticipated angle of multipath features in degrees.
        """
        if self.AzimAng is None or self.MultipathGround is None:
            return None
        return numpy.mod(self.AzimAng - 180 + self.MultipathGround, 360)

    @property
    def Shadow(self) -> Optional[float]:
        """
        float: The anticipated angle of shadow features in degrees.
        """
        return self._shadow

    @property
    def ShadowMagnitude(self) -> Optional[float]:
        """
        float: The anticipated relative magnitude of shadow features.
        """
        return self._shadow_magnitude

    @property
    def Squint(self) -> Optional[float]:
        """
        None|float: The squint angle, in degrees. None until derived.
        """
        return self._squint

    @property
    def LayoverMagnitude(self) -> Optional[float]:
        """
        None|float: The anticipated relative magnitude of layover features. None until derived.
        """
        return self._layover_magnitude

    def _derive_scp_time(self, Grid, overwrite: bool = False):
        """
        Expected to be called by SICD parent.

        Parameters
        ----------
        Grid : sarpy.io.complex.sicd_elements.Grid.GridType
        overwrite : bool

        Returns
        -------
        None
        """
        if Grid is None or Grid.TimeCOAPoly is None:
            return  # nothing can be done
        if not overwrite and self.SCPTime is not None:
            return  # nothing should be done
        # the constant term of the COA time polynomial is the SCP COA time
        scp_time = Grid.TimeCOAPoly.Coefs[0, 0]
        self.SCPTime = scp_time

    def _derive_position(self, Position, overwrite: bool = False):
        """
        Derive aperture position parameters, if necessary. Expected to be called by SICD parent.

        Parameters
        ----------
        Position : sarpy.io.complex.sicd_elements.Position.PositionType
        overwrite : bool

        Returns
        -------
        None
        """
        if Position is None or Position.ARPPoly is None or self.SCPTime is None:
            return  # nothing can be derived
        # set aperture position, velocity, and acceleration at scptime from position
        # polynomial, if necessary
        poly = Position.ARPPoly
        scptime = self.SCPTime
        if self.ARPPos is None or overwrite:
            self.ARPPos = XYZType.from_array(poly(scptime))
            self.ARPVel = XYZType.from_array(poly.derivative_eval(scptime, 1))
            self.ARPAcc = XYZType.from_array(poly.derivative_eval(scptime, 2))

    def _derive_geometry_parameters(self, GeoData, overwrite: bool = False):
        """
        Expected to be called by SICD parent.

        Parameters
        ----------
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType
        overwrite : bool

        Returns
        -------
        None
        """
        if GeoData is None or GeoData.SCP is None or GeoData.SCP.ECF is None or \
                self.ARPPos is None or self.ARPVel is None:
            return  # nothing can be derived
        # construct our calculator
        calculator = GeometryCalculator(
            GeoData.SCP.ECF.get_array(), self.ARPPos.get_array(), self.ARPVel.get_array())
        # set all the values
        # NB: derived-only values (ROV, squint, shadow, layover magnitude) are always
        # refreshed; serialized fields are only set when absent or overwrite=True
        self._ROV = calculator.ROV
        if self.SideOfTrack is None or overwrite:
            self.SideOfTrack = calculator.SideOfTrack
        if self.SlantRange is None or overwrite:
            self.SlantRange = calculator.SlantRange
        if self.GroundRange is None or overwrite:
            self.GroundRange = calculator.GroundRange
        if self.DopplerConeAng is None or overwrite:
            self.DopplerConeAng = calculator.DopplerConeAng
        graz, inc = calculator.get_graze_and_incidence()
        if self.GrazeAng is None or overwrite:
            self.GrazeAng = graz
        if self.IncidenceAng is None or overwrite:
            self.IncidenceAng = inc
        if self.TwistAng is None or overwrite:
            self.TwistAng = calculator.TwistAng
        self._squint = calculator.SquintAngle
        if self.SlopeAng is None or overwrite:
            self.SlopeAng = calculator.SlopeAng
        if self.AzimAng is None or overwrite:
            self.AzimAng = calculator.AzimAng
        layover, self._layover_magnitude = calculator.get_layover()
        if self.LayoverAng is None or overwrite:
            self.LayoverAng = layover
        self._shadow, self._shadow_magnitude = calculator.get_shadow()

    def rederive(self, Grid, Position, GeoData):
        """
        Rederive all derived quantities.

        Parameters
        ----------
        Grid : sarpy.io.complex.sicd_elements.Grid.GridType
        Position : sarpy.io.complex.sicd_elements.Position.PositionType
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType

        Returns
        -------
        None
        """
        self._derive_scp_time(Grid, overwrite=True)
        self._derive_position(Position, overwrite=True)
        self._derive_geometry_parameters(GeoData, overwrite=True)

    def check_values(self, GeoData) -> bool:
        """
        Check derived values for validity.

        Parameters
        ----------
        GeoData : sarpy.io.complex.sicd_elements.GeoData.GeoDataType

        Returns
        -------
        bool
        """
        if GeoData is None or GeoData.SCP is None or GeoData.SCP.ECF is None or \
                self.ARPPos is None or self.ARPVel is None:
            return True  # nothing can be derived
        # construct our calculator
        calculator = GeometryCalculator(
            GeoData.SCP.ECF.get_array(), self.ARPPos.get_array(), self.ARPVel.get_array())
        cond = True
        if calculator.SideOfTrack != self.SideOfTrack:
            self.log_validity_error(
                'SideOfTrack is expected to be {}, and is populated as {}'.format(
                    calculator.SideOfTrack, self.SideOfTrack))
            cond = False
        # ranges are checked by relative tolerance
        for attribute in ['SlantRange', 'GroundRange']:
            val1 = getattr(self, attribute)
            val2 = getattr(calculator, attribute)
            if abs(val1/val2 - 1) > 1e-6:
                self.log_validity_error(
                    'attribute {} is expected to have value {}, but is populated as {}'.format(attribute, val2, val1))
                cond = False
        # angles are checked by absolute tolerance, in degrees
        for attribute in [
                'DopplerConeAng', 'GrazeAng', 'IncidenceAng', 'TwistAng', 'SlopeAng', 'AzimAng', 'LayoverAng']:
            val1 = getattr(self, attribute)
            val2 = getattr(calculator, attribute)
            if abs(val1 - val2) > 1e-3:
                self.log_validity_error(
                    'attribute {} is expected to have value {}, but is populated as {}'.format(attribute, val2, val1))
                cond = False
        return cond
| 19,349 | 35.998088 | 120 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/ErrorStatistics.py | """
The ErrorStatisticsType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Optional, Dict
from sarpy.io.xml.base import Serializable, ParametersCollection
from sarpy.io.xml.descriptors import StringEnumDescriptor, FloatDescriptor, \
SerializableDescriptor, ParametersDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import ErrorDecorrFuncType
class CompositeSCPErrorType(Serializable):
    """
    Composite error statistics for the Scene Center Point. Slant plane range *(Rg)*
    and azimuth *(Az)* error statistics, with the slant plane defined at *SCP COA*.
    """

    _fields = ('Rg', 'Az', 'RgAz')
    _required = _fields
    _numeric_format = {key: FLOAT_FORMAT for key in _fields}
    # descriptors
    Rg = FloatDescriptor(
        'Rg', _required, strict=DEFAULT_STRICT,
        docstring='Estimated range error standard deviation.')  # type: float
    Az = FloatDescriptor(
        'Az', _required, strict=DEFAULT_STRICT,
        docstring='Estimated azimuth error standard deviation.')  # type: float
    RgAz = FloatDescriptor(
        'RgAz', _required, strict=DEFAULT_STRICT,
        docstring='Estimated range and azimuth error correlation coefficient.')  # type: float

    def __init__(self, Rg: float = None, Az: float = None, RgAz: float = None, **kwargs):
        """
        Parameters
        ----------
        Rg : float
            Range error standard deviation.
        Az : float
            Azimuth error standard deviation.
        RgAz : float
            Range/azimuth error correlation coefficient.
        kwargs
        """
        # stash any xml namespace context supplied by the deserializer
        for ns_field in ('_xml_ns', '_xml_ns_key'):
            if ns_field in kwargs:
                setattr(self, ns_field, kwargs[ns_field])
        self.Rg = Rg
        self.Az = Az
        self.RgAz = RgAz
        super().__init__(**kwargs)
class CorrCoefsType(Serializable):
    """
    Correlation Coefficient parameters - the pairwise correlation coefficients
    among the position (`P1`-`P3`) and velocity (`V1`-`V3`) error components
    defined in :class:`PosVelErrType`.
    """
    _fields = (
        'P1P2', 'P1P3', 'P1V1', 'P1V2', 'P1V3', 'P2P3', 'P2V1', 'P2V2', 'P2V3',
        'P3V1', 'P3V2', 'P3V3', 'V1V2', 'V1V3', 'V2V3')
    _required = _fields
    _numeric_format = {key: FLOAT_FORMAT for key in _fields}
    # descriptors - one per unordered pair of the six error components
    P1P2 = FloatDescriptor(
        'P1P2', _required, strict=DEFAULT_STRICT, docstring='`P1` and `P2` correlation coefficient.')  # type: float
    P1P3 = FloatDescriptor(
        'P1P3', _required, strict=DEFAULT_STRICT, docstring='`P1` and `P3` correlation coefficient.')  # type: float
    P1V1 = FloatDescriptor(
        'P1V1', _required, strict=DEFAULT_STRICT, docstring='`P1` and `V1` correlation coefficient.')  # type: float
    P1V2 = FloatDescriptor(
        'P1V2', _required, strict=DEFAULT_STRICT, docstring='`P1` and `V2` correlation coefficient.')  # type: float
    P1V3 = FloatDescriptor(
        'P1V3', _required, strict=DEFAULT_STRICT, docstring='`P1` and `V3` correlation coefficient.')  # type: float
    P2P3 = FloatDescriptor(
        'P2P3', _required, strict=DEFAULT_STRICT, docstring='`P2` and `P3` correlation coefficient.')  # type: float
    P2V1 = FloatDescriptor(
        'P2V1', _required, strict=DEFAULT_STRICT, docstring='`P2` and `V1` correlation coefficient.')  # type: float
    P2V2 = FloatDescriptor(
        'P2V2', _required, strict=DEFAULT_STRICT, docstring='`P2` and `V2` correlation coefficient.')  # type: float
    P2V3 = FloatDescriptor(
        'P2V3', _required, strict=DEFAULT_STRICT, docstring='`P2` and `V3` correlation coefficient.')  # type: float
    P3V1 = FloatDescriptor(
        'P3V1', _required, strict=DEFAULT_STRICT, docstring='`P3` and `V1` correlation coefficient.')  # type: float
    P3V2 = FloatDescriptor(
        'P3V2', _required, strict=DEFAULT_STRICT, docstring='`P3` and `V2` correlation coefficient.')  # type: float
    P3V3 = FloatDescriptor(
        'P3V3', _required, strict=DEFAULT_STRICT, docstring='`P3` and `V3` correlation coefficient.')  # type: float
    V1V2 = FloatDescriptor(
        'V1V2', _required, strict=DEFAULT_STRICT, docstring='`V1` and `V2` correlation coefficient.')  # type: float
    V1V3 = FloatDescriptor(
        'V1V3', _required, strict=DEFAULT_STRICT, docstring='`V1` and `V3` correlation coefficient.')  # type: float
    V2V3 = FloatDescriptor(
        'V2V3', _required, strict=DEFAULT_STRICT, docstring='`V2` and `V3` correlation coefficient.')  # type: float

    def __init__(
            self,
            P1P2: float = None,
            P1P3: float = None,
            P1V1: float = None,
            P1V2: float = None,
            P1V3: float = None,
            P2P3: float = None,
            P2V1: float = None,
            P2V2: float = None,
            P2V3: float = None,
            P3V1: float = None,
            P3V2: float = None,
            P3V3: float = None,
            V1V2: float = None,
            V1V3: float = None,
            V2V3: float = None,
            **kwargs):
        """
        Parameters
        ----------
        P1P2 : float
        P1P3 : float
        P1V1 : float
        P1V2 : float
        P1V3 : float
        P2P3 : float
        P2V1 : float
        P2V2 : float
        P2V3 : float
        P3V1 : float
        P3V2 : float
        P3V3 : float
        V1V2 : float
        V1V3 : float
        V2V3 : float
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.P1P2, self.P1P3, self.P1V1, self.P1V2, self.P1V3 = P1P2, P1P3, P1V1, P1V2, P1V3
        self.P2P3, self.P2V1, self.P2V2, self.P2V3 = P2P3, P2V1, P2V2, P2V3
        self.P3V1, self.P3V2, self.P3V3 = P3V1, P3V2, P3V3
        self.V1V2, self.V1V3 = V1V2, V1V3
        self.V2V3 = V2V3
        super(CorrCoefsType, self).__init__(**kwargs)
class PosVelErrType(Serializable):
    """
    Position and velocity error statistics for the radar platform,
    expressed in the coordinate frame given by `Frame`.
    """
    _fields = ('Frame', 'P1', 'P2', 'P3', 'V1', 'V2', 'V3', 'CorrCoefs', 'PositionDecorr')
    _required = ('Frame', 'P1', 'P2', 'P3', 'V1', 'V2', 'V3')
    _numeric_format = {
        'P1': FLOAT_FORMAT, 'P2': FLOAT_FORMAT, 'P3': FLOAT_FORMAT,
        'V1': FLOAT_FORMAT, 'V2': FLOAT_FORMAT, 'V3': FLOAT_FORMAT}
    # class variables
    _FRAME_VALUES = ('ECF', 'RIC_ECF', 'RIC_ECI')
    # descriptors - note that Frame uses strict=True, so an invalid frame
    # string is always rejected regardless of DEFAULT_STRICT
    Frame = StringEnumDescriptor(
        'Frame', _FRAME_VALUES, _required, strict=True,
        docstring='Coordinate frame used for expressing P,V errors statistics. Note: '
                  '*RIC = Radial, In-Track, Cross-Track*, where radial is defined to be from earth center through '
                  'the platform position.')  # type: str
    P1 = FloatDescriptor(
        'P1', _required, strict=DEFAULT_STRICT, docstring='Position coordinate 1 standard deviation.')  # type: float
    P2 = FloatDescriptor(
        'P2', _required, strict=DEFAULT_STRICT, docstring='Position coordinate 2 standard deviation.')  # type: float
    P3 = FloatDescriptor(
        'P3', _required, strict=DEFAULT_STRICT, docstring='Position coordinate 3 standard deviation.')  # type: float
    V1 = FloatDescriptor(
        'V1', _required, strict=DEFAULT_STRICT, docstring='Velocity coordinate 1 standard deviation.')  # type: float
    V2 = FloatDescriptor(
        'V2', _required, strict=DEFAULT_STRICT, docstring='Velocity coordinate 2 standard deviation.')  # type: float
    V3 = FloatDescriptor(
        'V3', _required, strict=DEFAULT_STRICT, docstring='Velocity coordinate 3 standard deviation.')  # type: float
    CorrCoefs = SerializableDescriptor(
        'CorrCoefs', CorrCoefsType, _required, strict=DEFAULT_STRICT,
        docstring='Correlation Coefficient parameters.')  # type: Optional[CorrCoefsType]
    PositionDecorr = SerializableDescriptor(
        'PositionDecorr', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT,
        docstring='Platform position error decorrelation function.')  # type: Optional[ErrorDecorrFuncType]

    def __init__(
            self,
            Frame: str = None,
            P1: float = None,
            P2: float = None,
            P3: float = None,
            V1: float = None,
            V2: float = None,
            V3: float = None,
            CorrCoefs: Optional[CorrCoefsType] = None,
            PositionDecorr: Optional[ErrorDecorrFuncType] = None,
            **kwargs):
        """
        Parameters
        ----------
        Frame : str
            One of ``('ECF', 'RIC_ECF', 'RIC_ECI')``.
        P1 : float
        P2 : float
        P3 : float
        V1 : float
        V2 : float
        V3 : float
        CorrCoefs : None|CorrCoefsType
        PositionDecorr : None|ErrorDecorrFuncType
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Frame = Frame
        self.P1, self.P2, self.P3 = P1, P2, P3
        self.V1, self.V2, self.V3 = V1, V2, V3
        self.CorrCoefs, self.PositionDecorr = CorrCoefs, PositionDecorr
        super(PosVelErrType, self).__init__(**kwargs)
class RadarSensorErrorType(Serializable):
    """
    Radar sensor error statistics. Only `RangeBias` is required;
    the remaining fields are optional.
    """
    _fields = ('RangeBias', 'ClockFreqSF', 'TransmitFreqSF', 'RangeBiasDecorr')
    _required = ('RangeBias', )
    _numeric_format = {'RangeBias': FLOAT_FORMAT, 'ClockFreqSF': FLOAT_FORMAT, 'TransmitFreqSF': FLOAT_FORMAT}
    # descriptors
    RangeBias = FloatDescriptor(
        'RangeBias', _required, strict=DEFAULT_STRICT,
        docstring='Range bias error standard deviation.')  # type: float
    ClockFreqSF = FloatDescriptor(
        'ClockFreqSF', _required, strict=DEFAULT_STRICT,
        docstring='Payload clock frequency scale factor standard deviation, '
                  r'where :math:`SF = (\Delta f)/f_0`.')  # type: float
    TransmitFreqSF = FloatDescriptor(
        'TransmitFreqSF', _required, strict=DEFAULT_STRICT,
        docstring='Transmit frequency scale factor standard deviation, '
                  r'where :math:`SF = (\Delta f)/f_0`.')  # type: float
    RangeBiasDecorr = SerializableDescriptor(
        'RangeBiasDecorr', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT,
        docstring='Range bias decorrelation rate.')  # type: ErrorDecorrFuncType

    def __init__(
            self,
            RangeBias: float = None,
            ClockFreqSF: Optional[float] = None,
            TransmitFreqSF: Optional[float] = None,
            RangeBiasDecorr: Optional[ErrorDecorrFuncType] = None,
            **kwargs):
        """
        Parameters
        ----------
        RangeBias : float
        ClockFreqSF : None|float
        TransmitFreqSF : None|float
        RangeBiasDecorr : None|ErrorDecorrFuncType
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.RangeBias, self.ClockFreqSF, self.TransmitFreqSF = RangeBias, ClockFreqSF, TransmitFreqSF
        self.RangeBiasDecorr = RangeBiasDecorr
        super(RadarSensorErrorType, self).__init__(**kwargs)
class TropoErrorType(Serializable):
    """
    Troposphere delay error statistics. All fields are optional
    (`_required` is empty).
    """
    _fields = ('TropoRangeVertical', 'TropoRangeSlant', 'TropoRangeDecorr')
    _required = ()
    _numeric_format = {'TropoRangeVertical': FLOAT_FORMAT, 'TropoRangeSlant': FLOAT_FORMAT}
    # descriptors
    TropoRangeVertical = FloatDescriptor(
        'TropoRangeVertical', _required, strict=DEFAULT_STRICT,
        docstring='Troposphere two-way delay error for normal incidence standard deviation. '
                  r'Expressed as a range error. :math:`(\Delta R) = (\Delta T) \cdot (c/2)`.')  # type: float
    TropoRangeSlant = FloatDescriptor(
        'TropoRangeSlant', _required, strict=DEFAULT_STRICT,
        docstring='Troposphere two-way delay error for the *SCP* line of sight at *COA* standard deviation. '
                  r'Expressed as a range error. :math:`(\Delta R) = (\Delta T) \cdot (c/2)`.')  # type: float
    TropoRangeDecorr = SerializableDescriptor(
        'TropoRangeDecorr', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT,
        docstring='Troposphere range error decorrelation function.')  # type: ErrorDecorrFuncType

    def __init__(
            self,
            TropoRangeVertical: Optional[float] = None,
            TropoRangeSlant: Optional[float] = None,
            TropoRangeDecorr: Optional[ErrorDecorrFuncType] = None,
            **kwargs):
        """
        Parameters
        ----------
        TropoRangeVertical : None|float
        TropoRangeSlant : None|float
        TropoRangeDecorr : None|ErrorDecorrFuncType
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TropoRangeVertical = TropoRangeVertical
        self.TropoRangeSlant = TropoRangeSlant
        self.TropoRangeDecorr = TropoRangeDecorr
        super(TropoErrorType, self).__init__(**kwargs)
class IonoErrorType(Serializable):
    """
    Ionosphere delay error statistics. Only `IonoRgRgRateCC` is required.
    """
    _fields = ('IonoRangeVertical', 'IonoRangeSlant', 'IonoRgRgRateCC', 'IonoRangeDecorr')
    _required = ('IonoRgRgRateCC', )
    _numeric_format = {
        'IonoRangeVertical': FLOAT_FORMAT, 'IonoRangeSlant': FLOAT_FORMAT, 'IonoRgRgRateCC': FLOAT_FORMAT}
    # descriptors
    IonoRangeVertical = FloatDescriptor(
        'IonoRangeVertical', _required, strict=DEFAULT_STRICT,
        docstring='Ionosphere two-way delay error for normal incidence standard deviation. '
                  r'Expressed as a range error. '
                  r':math:`(\Delta R) = (\Delta T) \cdot (c/2)`.')  # type: Optional[float]
    # NOTE(review): the docstring below describes a *rate of change* error for
    # normal incidence, which reads like it belongs to a range-rate field rather
    # than a field named 'IonoRangeSlant' - confirm against the SICD
    # ErrorStatistics definition before relying on it.
    IonoRangeSlant = FloatDescriptor(
        'IonoRangeSlant', _required, strict=DEFAULT_STRICT,
        docstring='Ionosphere two-way delay rate of change error for normal '
                  'incidence standard deviation. Expressed as a range rate error. '
                  r':math:`(\Delta \dot{R}) = (\Delta \dot{T}) \cdot (c/2)`.')  # type: Optional[float]
    IonoRgRgRateCC = FloatDescriptor(
        'IonoRgRgRateCC', _required, strict=DEFAULT_STRICT,
        docstring='Ionosphere range error and range rate error correlation coefficient.')  # type: float
    IonoRangeDecorr = SerializableDescriptor(
        'IonoRangeDecorr', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT,
        docstring='Ionosphere range error decorrelation rate.')  # type: Optional[ErrorDecorrFuncType]

    def __init__(
            self,
            IonoRangeVertical: Optional[float] = None,
            IonoRangeSlant: Optional[float] = None,
            IonoRgRgRateCC: float = None,
            IonoRangeDecorr: Optional[ErrorDecorrFuncType] = None,
            **kwargs):
        """
        Parameters
        ----------
        IonoRangeVertical : None|float
        IonoRangeSlant : None|float
        IonoRgRgRateCC : float
        IonoRangeDecorr : None|ErrorDecorrFuncType
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.IonoRangeVertical = IonoRangeVertical
        self.IonoRangeSlant = IonoRangeSlant
        self.IonoRgRgRateCC = IonoRgRgRateCC
        self.IonoRangeDecorr = IonoRangeDecorr
        super(IonoErrorType, self).__init__(**kwargs)
class ErrorComponentsType(Serializable):
    """
    Error statistics by components - platform position/velocity, radar sensor,
    and (optionally) troposphere and ionosphere delay errors.
    """
    _fields = ('PosVelErr', 'RadarSensor', 'TropoError', 'IonoError')
    _required = ('PosVelErr', 'RadarSensor')
    # descriptors
    PosVelErr = SerializableDescriptor(
        'PosVelErr', PosVelErrType, _required, strict=DEFAULT_STRICT,
        docstring='Position and velocity error statistics for the radar platform.')  # type: PosVelErrType
    RadarSensor = SerializableDescriptor(
        'RadarSensor', RadarSensorErrorType, _required, strict=DEFAULT_STRICT,
        docstring='Radar sensor error statistics.')  # type: RadarSensorErrorType
    TropoError = SerializableDescriptor(
        'TropoError', TropoErrorType, _required, strict=DEFAULT_STRICT,
        docstring='Troposphere delay error statistics.')  # type: TropoErrorType
    IonoError = SerializableDescriptor(
        'IonoError', IonoErrorType, _required, strict=DEFAULT_STRICT,
        docstring='Ionosphere delay error statistics.')  # type: IonoErrorType

    def __init__(
            self,
            PosVelErr: PosVelErrType = None,
            RadarSensor: RadarSensorErrorType = None,
            TropoError: Optional[TropoErrorType] = None,
            IonoError: Optional[IonoErrorType] = None,
            **kwargs):
        """
        Parameters
        ----------
        PosVelErr : PosVelErrType
        RadarSensor : RadarSensorErrorType
        TropoError : None|TropoErrorType
        IonoError : None|IonoErrorType
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.PosVelErr = PosVelErr
        self.RadarSensor = RadarSensor
        self.TropoError = TropoError
        self.IonoError = IonoError
        super(ErrorComponentsType, self).__init__(**kwargs)
class UnmodeledDecorrType(Serializable):
    """
    Unmodeled decorrelation function definition, with one decorrelation
    function per image coordinate direction.
    """

    _fields = ('Xrow', 'Ycol')
    _required = _fields
    # descriptors
    Xrow = SerializableDescriptor(
        'Xrow', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT)  # type: ErrorDecorrFuncType
    Ycol = SerializableDescriptor(
        'Ycol', ErrorDecorrFuncType, _required, strict=DEFAULT_STRICT)  # type: ErrorDecorrFuncType

    def __init__(self, Xrow: ErrorDecorrFuncType = None, Ycol: ErrorDecorrFuncType = None, **kwargs):
        """
        Parameters
        ----------
        Xrow : ErrorDecorrFuncType
            Decorrelation function in the row direction.
        Ycol : ErrorDecorrFuncType
            Decorrelation function in the column direction.
        kwargs
        """
        # stash any xml namespace context supplied by the deserializer
        for ns_field in ('_xml_ns', '_xml_ns_key'):
            if ns_field in kwargs:
                setattr(self, ns_field, kwargs[ns_field])
        self.Xrow, self.Ycol = Xrow, Ycol
        super().__init__(**kwargs)
class UnmodeledType(Serializable):
    """
    Unmodeled error variances (row, column, and cross-term) with an optional
    decorrelation function.
    """
    _fields = ('Xrow', 'Ycol', 'XrowYcol', 'UnmodeledDecorr')
    _required = ('Xrow', 'Ycol', 'XrowYcol')
    # NOTE(review): '0.17G' is spelled out literally here, while sibling classes
    # use the FLOAT_FORMAT constant - presumably the same value; confirm.
    _numeric_format = {fld: '0.17G' for fld in ('Xrow', 'Ycol', 'XrowYcol')}
    Xrow = FloatDescriptor(
        'Xrow', _required, strict=DEFAULT_STRICT)  # type: float
    Ycol = FloatDescriptor(
        'Ycol', _required, strict=DEFAULT_STRICT)  # type: float
    XrowYcol = FloatDescriptor(
        'XrowYcol', _required, strict=DEFAULT_STRICT)  # type: float
    UnmodeledDecorr = SerializableDescriptor(
        'UnmodeledDecorr', UnmodeledDecorrType, _required,
        strict=DEFAULT_STRICT)  # type: Optional[UnmodeledDecorrType]

    def __init__(
            self,
            Xrow: float = None,
            Ycol: float = None,
            XrowYcol: float = None,
            UnmodeledDecorr: Optional[UnmodeledDecorrType] = None,
            **kwargs):
        """
        Parameters
        ----------
        Xrow : float
        Ycol : float
        XrowYcol : float
        UnmodeledDecorr : None|UnmodeledDecorrType
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Xrow = Xrow
        self.Ycol = Ycol
        self.XrowYcol = XrowYcol
        self.UnmodeledDecorr = UnmodeledDecorr
        super(UnmodeledType, self).__init__(**kwargs)
class ErrorStatisticsType(Serializable):
    """
    Parameters used to compute error statistics within the SICD sensor model.
    All children are optional (`_required` is empty).
    """
    _fields = ('CompositeSCP', 'Components', 'Unmodeled', 'AdditionalParms')
    _required = ()
    _collections_tags = {'AdditionalParms': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    CompositeSCP = SerializableDescriptor(
        'CompositeSCP', CompositeSCPErrorType, _required, strict=DEFAULT_STRICT,
        docstring='Composite error statistics for the scene center point. '
                  '*Slant plane range (Rg)* and *azimuth (Az)* error statistics. '
                  'Slant plane defined at '
                  '*Scene Center Point, Center of Azimuth (SCP COA)*.')  # type: Optional[CompositeSCPErrorType]
    Components = SerializableDescriptor(
        'Components', ErrorComponentsType, _required, strict=DEFAULT_STRICT,
        docstring='Error statistics by components.')  # type: Optional[ErrorComponentsType]
    Unmodeled = SerializableDescriptor(
        'Unmodeled', UnmodeledType, _required, strict=DEFAULT_STRICT)  # type: Optional[UnmodeledType]
    AdditionalParms = ParametersDescriptor(
        'AdditionalParms', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Any additional parameters.')  # type: Optional[ParametersCollection]

    def __init__(
            self,
            CompositeSCP: Optional[CompositeSCPErrorType] = None,
            Components: Optional[ErrorComponentsType] = None,
            Unmodeled: Optional[UnmodeledType] = None,
            AdditionalParms: Union[None, ParametersCollection, Dict] = None,
            **kwargs):
        """
        Parameters
        ----------
        CompositeSCP : None|CompositeSCPErrorType
        Components : None|ErrorComponentsType
        Unmodeled : None|UnmodeledType
        AdditionalParms : None|ParametersCollection|dict
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.CompositeSCP = CompositeSCP
        self.Components = Components
        self.Unmodeled = Unmodeled
        self.AdditionalParms = AdditionalParms
        super(ErrorStatisticsType, self).__init__(**kwargs)

    def version_required(self):
        """
        What SICD version is required? A populated `Unmodeled` element requires
        SICD 1.3.0; otherwise 1.1.0 suffices.

        Returns
        -------
        Tuple[int, int, int]
        """
        return (1, 1, 0) if self.Unmodeled is None else (1, 3, 0)
| 22,407 | 39.085868 | 117 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/ImageCreation.py | """
The ImageCreation elements.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Optional, Union
import numpy
from datetime import datetime, date
from sarpy.io.xml.base import Serializable
from sarpy.io.xml.descriptors import StringDescriptor, DateTimeDescriptor
from .base import DEFAULT_STRICT
class ImageCreationType(Serializable):
    """
    General information about the image creation. All fields are optional
    (`_required` is empty).
    """
    _fields = ('Application', 'DateTime', 'Site', 'Profile')
    _required = ()
    # descriptors
    Application = StringDescriptor(
        'Application', _required, strict=DEFAULT_STRICT,
        docstring='Name and version of the application used to create the image.')  # type: str
    DateTime = DateTimeDescriptor(
        'DateTime', _required, strict=DEFAULT_STRICT, numpy_datetime_units='us',
        docstring='Date and time the image creation application processed the image (UTC).')  # type: numpy.datetime64
    Site = StringDescriptor(
        'Site', _required, strict=DEFAULT_STRICT,
        docstring='The creation site of this SICD product.')  # type: str
    Profile = StringDescriptor(
        'Profile', _required, strict=DEFAULT_STRICT,
        docstring='Identifies what profile was used to create this SICD product.')  # type: str

    def __init__(
            self,
            Application: Optional[str] = None,
            DateTime: Union[None, numpy.datetime64, datetime, date, str] = None,
            Site: Optional[str] = None,
            Profile: Optional[str] = None,
            **kwargs):
        """
        Parameters
        ----------
        Application : None|str
        DateTime : None|numpy.datetime64|datetime|date|str
        Site : None|str
        Profile : None|str
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Application = Application
        self.DateTime = DateTime
        self.Site = Site
        self.Profile = Profile
        super(ImageCreationType, self).__init__(**kwargs)
| 2,133 | 29.927536 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/GeoData.py | """
The GeoData definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from collections import OrderedDict
from xml.etree import ElementTree
from typing import List, Union, Dict, Sequence, Optional
import numpy
from sarpy.io.xml.base import Serializable, SerializableArray, ParametersCollection, \
find_children, parse_serializable
from sarpy.io.xml.descriptors import StringDescriptor, StringEnumDescriptor, \
SerializableDescriptor, ParametersDescriptor, SerializableArrayDescriptor
from .base import DEFAULT_STRICT, SerializableCPArrayDescriptor, SerializableCPArray
from .blocks import XYZType, LatLonRestrictionType, LatLonHAERestrictionType, \
LatLonCornerStringType, LatLonArrayElementType
from sarpy.geometry.geocoords import geodetic_to_ecf, ecf_to_geodetic
class GeoInfoType(Serializable):
    """
    A geographic feature - a named point, line, or polygon in WGS-84
    coordinates, which may recursively contain child GeoInfo elements.
    """
    _fields = ('name', 'Descriptions', 'Point', 'Line', 'Polygon')
    _required = ('name', )
    _set_as_attribute = ('name', )
    # at most one of Point/Line/Polygon should be populated - see FeatureType
    _choice = ({'required': False, 'collection': ('Point', 'Line', 'Polygon')}, )
    _collections_tags = {
        'Descriptions': {'array': False, 'child_tag': 'Desc'},
        'Line': {'array': True, 'child_tag': 'Endpoint'},
        'Polygon': {'array': True, 'child_tag': 'Vertex'}, }
    # descriptors
    name = StringDescriptor(
        'name', _required, strict=DEFAULT_STRICT,
        docstring='The name.')  # type: str
    Descriptions = ParametersDescriptor(
        'Descriptions', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='Descriptions of the geographic feature.')  # type: ParametersCollection
    Point = SerializableDescriptor(
        'Point', LatLonRestrictionType, _required, strict=DEFAULT_STRICT,
        docstring='A geographic point with WGS-84 coordinates.')  # type: LatLonRestrictionType
    Line = SerializableArrayDescriptor(
        'Line', LatLonArrayElementType, _collections_tags, _required, strict=DEFAULT_STRICT, minimum_length=2,
        docstring='A geographic line (array) with WGS-84 coordinates.'
    )  # type: Union[SerializableArray, List[LatLonArrayElementType]]
    Polygon = SerializableArrayDescriptor(
        'Polygon', LatLonArrayElementType, _collections_tags, _required, strict=DEFAULT_STRICT, minimum_length=3,
        docstring='A geographic polygon (array) with WGS-84 coordinates.'
    )  # type: Union[SerializableArray, List[LatLonArrayElementType]]

    def __init__(
            self,
            name: str = None,
            Descriptions: Union[None, ParametersCollection, Dict] = None,
            Point=None,
            Line=None,
            Polygon=None,
            GeoInfos=None,
            **kwargs):
        """
        Parameters
        ----------
        name : str
        Descriptions : None|ParametersCollection|dict
        Point : None|LatLonRestrictionType|numpy.ndarray|list|tuple
        Line : None|SerializableArray|List[LatLonArrayElementType]|numpy.ndarray|list|tuple
        Polygon : None|SerializableArray|List[LatLonArrayElementType]|numpy.ndarray|list|tuple
        GeoInfos : None|Sequence[GeoInfoType]
            Nested child features; a single instance or a sequence of them.
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.name = name
        self.Descriptions = Descriptions
        self.Point = Point
        self.Line = Line
        self.Polygon = Polygon
        # nested GeoInfo children are held outside the descriptor machinery
        self._GeoInfos = []
        if GeoInfos is None:
            pass
        elif isinstance(GeoInfos, GeoInfoType):
            self.addGeoInfo(GeoInfos)
        elif isinstance(GeoInfos, (list, tuple)):
            for el in GeoInfos:
                self.addGeoInfo(el)
        else:
            raise ValueError('GeoInfos got unexpected type {}'.format(type(GeoInfos)))
        super(GeoInfoType, self).__init__(**kwargs)

    @property
    def FeatureType(self) -> Optional[str]:
        """
        str: READ ONLY attribute. Identifies the feature type among. This is determined by
        returning the (first) attribute among `Point`, `Line`, `Polygon` which is populated.
        `None` will be returned if none of them are populated.
        """
        for attribute in self._choice[0]['collection']:
            if getattr(self, attribute) is not None:
                return attribute
        return None

    @property
    def GeoInfos(self):
        """
        List[GeoInfoType]: list of GeoInfos.
        """
        return self._GeoInfos

    def getGeoInfo(self, key: str):
        """
        Get GeoInfo(s) with name attribute == `key`.

        Parameters
        ----------
        key : str

        Returns
        -------
        List[GeoInfoType]
        """
        return [entry for entry in self._GeoInfos if entry.name == key]

    def addGeoInfo(self, value):
        """
        Add the given GeoInfo to the GeoInfos list. Accepts a GeoInfoType,
        a dict, or a raw xml node, converting as necessary.

        Parameters
        ----------
        value : GeoInfoType|Dict

        Returns
        -------
        None
        """
        if isinstance(value, ElementTree.Element):
            gi_key = self._child_xml_ns_key.get('GeoInfos', self._xml_ns_key)
            value = GeoInfoType.from_node(value, self._xml_ns, ns_key=gi_key)
        elif isinstance(value, dict):
            value = GeoInfoType.from_dict(value)
        if isinstance(value, GeoInfoType):
            self._GeoInfos.append(value)
        else:
            raise TypeError('Trying to set GeoInfo element with unexpected type {}'.format(type(value)))

    def _validate_features(self) -> bool:
        # enforce the minimum point counts for line (2) and polygon (3) features
        if self.Line is not None and self.Line.size < 2:
            self.log_validity_error('GeoInfo has a Line feature with {} points defined.'.format(self.Line.size))
            return False
        if self.Polygon is not None and self.Polygon.size < 3:
            self.log_validity_error('GeoInfo has a Polygon feature with {} points defined.'.format(self.Polygon.size))
            return False
        return True

    def _basic_validity_check(self) -> bool:
        condition = super(GeoInfoType, self)._basic_validity_check()
        # non-short-circuiting & so that the feature validation always runs
        # (and logs its errors) even if the base check already failed
        return condition & self._validate_features()

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        # gather nested GeoInfo child nodes before standard deserialization
        if kwargs is None:
            kwargs = OrderedDict()
        gi_key = cls._child_xml_ns_key.get('GeoInfos', ns_key)
        kwargs['GeoInfos'] = find_children(node, 'GeoInfo', xml_ns, gi_key)
        return super(GeoInfoType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        node = super(GeoInfoType, self).to_node(
            doc, tag, ns_key=ns_key, parent=parent, check_validity=check_validity, strict=strict, exclude=exclude)
        # slap on the GeoInfo children
        # NOTE(review): children are serialized with the parent's `tag`, which is
        # 'GeoInfo' in the usual nested usage - confirm this is intended if ever
        # serialized under a different tag
        for entry in self._GeoInfos:
            entry.to_node(doc, tag, ns_key=ns_key, parent=node, strict=strict)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = super(GeoInfoType, self).to_dict(check_validity=check_validity, strict=strict, exclude=exclude)
        # slap on the GeoInfo children
        if len(self.GeoInfos) > 0:
            out['GeoInfos'] = [entry.to_dict(check_validity=check_validity, strict=strict) for entry in self._GeoInfos]
        return out
class SCPType(Serializable):
    """
    Scene Center Point (SCP) in full (global) image. This should be the the precise location.
    Note that setting one of ECF or LLH will implicitly set the other to it's corresponding matched value.
    """
    _fields = ('ECF', 'LLH')
    _required = _fields
    # backing members for the mutually-synchronized ECF/LLH properties
    _ECF = None
    _LLH = None

    def __init__(
            self,
            ECF: Union[None, XYZType, numpy.ndarray, tuple, list] = None,
            LLH: Union[None, LatLonHAERestrictionType, numpy.ndarray, tuple, list] = None,
            **kwargs):
        """
        To avoid the potential of inconsistent state, ECF and LLH are not simultaneously
        used. If ECF is provided, it is used to populate LLH. Otherwise, if LLH is provided,
        then it is used the populate ECF.

        Parameters
        ----------
        ECF : XYZType|numpy.ndarray|list|tuple
        LLH : LatLonHAERestrictionType|numpy.ndarray|list|tuple
        kwargs
        """
        # xml namespace context, when constructed by the deserializer
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        # ECF takes precedence - the setter derives the other representation
        if ECF is not None:
            self.ECF = ECF
        elif LLH is not None:
            self.LLH = LLH
        super(SCPType, self).__init__(**kwargs)

    @property
    def ECF(self) -> XYZType:
        """
        XYZType: The ECF coordinates.
        """
        return self._ECF

    @ECF.setter
    def ECF(self, value):
        # setting ECF also recomputes LLH via the geodetic conversion
        if value is not None:
            self._ECF = parse_serializable(value, 'ECF', self, XYZType)
            self._LLH = LatLonHAERestrictionType.from_array(ecf_to_geodetic(self._ECF.get_array()))

    @property
    def LLH(self) -> LatLonHAERestrictionType:
        """
        LatLonHAERestrictionType: The WGS-84 coordinates.
        """
        return self._LLH

    @LLH.setter
    def LLH(self, value):
        # setting LLH also recomputes ECF via the geodetic conversion
        if value is not None:
            self._LLH = parse_serializable(value, 'LLH', self, LatLonHAERestrictionType)
            self._ECF = XYZType.from_array(geodetic_to_ecf(self._LLH.get_array(order='LAT')))

    def get_image_center_abbreviation(self) -> str:
        """
        Gets the center coordinate abbreviation for the suggested name,
        e.g. ``'34N118W'`` - rounded latitude/longitude with hemisphere letters.

        Returns
        -------
        str
        """
        llh = self.LLH
        lat = int(numpy.round(llh.Lat))
        lon = int(numpy.round(llh.Lon))
        return '{0:02d}'.format(abs(lat)) + ('N' if lat >= 0 else 'S') + \
               '{0:03d}'.format(abs(lon)) + ('E' if lon >= 0 else 'W')
class GeoDataType(Serializable):
    """Container specifying the image coverage area in geographic coordinates."""

    _fields = ('EarthModel', 'SCP', 'ImageCorners', 'ValidData')
    _required = ('EarthModel', 'SCP', 'ImageCorners')
    _collections_tags = {
        'ValidData': {'array': True, 'child_tag': 'Vertex'},
        'ImageCorners': {'array': True, 'child_tag': 'ICP'},
    }
    # other class variables
    _EARTH_MODEL_VALUES = ('WGS_84', )
    # descriptors
    EarthModel = StringEnumDescriptor(
        'EarthModel', _EARTH_MODEL_VALUES, _required, strict=True, default_value='WGS_84',
        docstring='The Earth Model.')  # type: str
    SCP = SerializableDescriptor(
        'SCP', SCPType, _required, strict=DEFAULT_STRICT,
        docstring='The Scene Center Point *(SCP)* in full (global) image. This is the '
                  'precise location.')  # type: SCPType
    ImageCorners = SerializableCPArrayDescriptor(
        'ImageCorners', LatLonCornerStringType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The geographic image corner points array. Image corners points projected to the '
                  'ground/surface level. Points may be projected to the same height as the SCP if ground/surface '
                  'height data is not available. The corner positions are approximate geographic locations and '
                  'not intended for analytical use.')  # type: Union[SerializableCPArray, List[LatLonCornerStringType]]
    ValidData = SerializableArrayDescriptor(
        'ValidData', LatLonArrayElementType, _collections_tags, _required,
        strict=DEFAULT_STRICT, minimum_length=3,
        docstring='The full image array includes both valid data and some zero filled pixels.'
    )  # type: Union[SerializableArray, List[LatLonArrayElementType]]

    def __init__(
            self,
            EarthModel: str = 'WGS_84',
            SCP: SCPType = None,
            ImageCorners=None,
            ValidData=None,
            GeoInfos: List[GeoInfoType] = None,
            **kwargs):
        """
        Parameters
        ----------
        EarthModel : str
        SCP : SCPType
        ImageCorners : SerializableCPArray|List[LatLonCornerStringType]|numpy.ndarray|list|tuple
        ValidData : SerializableArray|List[LatLonArrayElementType]|numpy.ndarray|list|tuple
        GeoInfos : List[GeoInfoType]
            May also be a single GeoInfoType instance.
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.EarthModel = EarthModel
        self.SCP = SCP
        self.ImageCorners = ImageCorners
        self.ValidData = ValidData
        # GeoInfos is managed by hand (not via a descriptor) because it is a
        # repeatable child element - see the GeoInfos property and setGeoInfo
        self._GeoInfos = []
        if GeoInfos is None:
            pass
        elif isinstance(GeoInfos, GeoInfoType):
            self.setGeoInfo(GeoInfos)
        elif isinstance(GeoInfos, (list, tuple)):
            for el in GeoInfos:
                self.setGeoInfo(el)
        else:
            raise ValueError('GeoInfos got unexpected type {}'.format(type(GeoInfos)))
        super(GeoDataType, self).__init__(**kwargs)

    def derive(self):
        """
        Populates any potential derived data in GeoData. Is expected to be called by
        the `SICD` parent as part of a more extensive derived data effort.

        Returns
        -------
        None
        """

        # nothing to derive at present - retained for interface uniformity
        pass

    @property
    def GeoInfos(self) -> List[GeoInfoType]:
        """
        List[GeoInfoType]: list of GeoInfos.
        """

        return self._GeoInfos

    def getGeoInfo(self, key: str) -> List[GeoInfoType]:
        """
        Get the GeoInfo(s) with name attribute == `key`

        Parameters
        ----------
        key : str

        Returns
        -------
        List[GeoInfoType]
            Possibly empty - GeoInfo names are not required to be unique.
        """

        return [entry for entry in self._GeoInfos if entry.name == key]

    def setGeoInfo(self, value: Union[GeoInfoType, Dict]):
        """
        Add the given GeoInfo to the GeoInfos list.

        Parameters
        ----------
        value : GeoInfoType|dict|ElementTree.Element
            A GeoInfoType instance, or a dict/XML node from which one can be constructed.

        Returns
        -------
        None

        Raises
        ------
        TypeError
            If `value` cannot be interpreted as a GeoInfoType.
        """

        if isinstance(value, ElementTree.Element):
            # resolve the namespace key for the GeoInfo children, then parse
            gi_key = self._child_xml_ns_key.get('GeoInfos', self._xml_ns_key)
            value = GeoInfoType.from_node(value, self._xml_ns, ns_key=gi_key)
        elif isinstance(value, dict):
            value = GeoInfoType.from_dict(value)

        if isinstance(value, GeoInfoType):
            self._GeoInfos.append(value)
        else:
            raise TypeError('Trying to set GeoInfo element with unexpected type {}'.format(type(value)))

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        if kwargs is None:
            kwargs = OrderedDict()
        # collect the repeated GeoInfo children before deferring to the base implementation
        gi_key = cls._child_xml_ns_key.get('GeoInfos', ns_key)
        kwargs['GeoInfos'] = find_children(node, 'GeoInfo', xml_ns, gi_key)
        return super(GeoDataType, cls).from_node(node, xml_ns, ns_key=ns_key, kwargs=kwargs)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        node = super(GeoDataType, self).to_node(
            doc, tag, ns_key=ns_key, parent=parent, check_validity=check_validity, strict=strict, exclude=exclude)
        # slap on the GeoInfo children
        for entry in self._GeoInfos:
            entry.to_node(doc, 'GeoInfo', ns_key=ns_key, parent=node, strict=strict)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = super(GeoDataType, self).to_dict(check_validity=check_validity, strict=strict, exclude=exclude)
        # slap on the GeoInfo children
        if len(self.GeoInfos) > 0:
            out['GeoInfos'] = [entry.to_dict(check_validity=check_validity, strict=strict) for entry in self._GeoInfos]
        return out

    def _basic_validity_check(self) -> bool:
        # no checks beyond the base class at present
        condition = super(GeoDataType, self)._basic_validity_check()
        return condition
| 16,031 | 35.60274 | 119 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/ImageData.py | """
The ImageData definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List, Union, Optional
import numpy
from sarpy.io.xml.base import Serializable, Arrayable, SerializableArray
from sarpy.io.xml.descriptors import IntegerDescriptor, FloatArrayDescriptor, \
StringEnumDescriptor, SerializableDescriptor, SerializableArrayDescriptor
from sarpy.geometry.geometry_elements import LinearRing
from .base import DEFAULT_STRICT, FLOAT_FORMAT
from .blocks import RowColType, RowColArrayElement
class FullImageType(Serializable, Arrayable):
    """
    The full image product attributes - the (row, column) extent of the
    original full image product.
    """

    _fields = ('NumRows', 'NumCols')
    _required = _fields
    # descriptors
    NumRows = IntegerDescriptor(
        'NumRows', _required, strict=DEFAULT_STRICT,
        docstring='Number of rows in the original full image product. May include zero pixels.')  # type: int
    NumCols = IntegerDescriptor(
        'NumCols', _required, strict=DEFAULT_STRICT,
        docstring='Number of columns in the original full image product. May include zero pixels.')  # type: int

    def __init__(
            self,
            NumRows: int = None,
            NumCols: int = None,
            **kwargs):
        """
        Parameters
        ----------
        NumRows : int
        NumCols : int
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.NumRows = NumRows
        self.NumCols = NumCols
        super(FullImageType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.int64) -> numpy.ndarray:
        """Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form `[NumRows, NumCols]`
        """

        return numpy.array([self.NumRows, self.NumCols], dtype=dtype)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed `[NumRows, NumCols]`

        Returns
        -------
        FullImageType

        Raises
        ------
        ValueError
            On unsupported input type, or fewer than two entries.
        """

        # guard clauses - reject unsupported types, then too-short arrays
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError('Expected array to be numpy.ndarray, list, or tuple, got {}'.format(type(array)))
        if len(array) < 2:
            raise ValueError('Expected array to be of length 2, and received {}'.format(len(array)))
        return cls(NumRows=array[0], NumCols=array[1])
class ImageDataType(Serializable):
    """The image pixel data."""

    _collections_tags = {
        'AmpTable': {'array': True, 'child_tag': 'Amplitude'},
        'ValidData': {'array': True, 'child_tag': 'Vertex'},
    }
    _fields = (
        'PixelType', 'AmpTable', 'NumRows', 'NumCols', 'FirstRow', 'FirstCol', 'FullImage', 'SCPPixel', 'ValidData')
    _required = ('PixelType', 'NumRows', 'NumCols', 'FirstRow', 'FirstCol', 'FullImage', 'SCPPixel')
    _numeric_format = {'AmpTable': FLOAT_FORMAT}
    _PIXEL_TYPE_VALUES = ("RE32F_IM32F", "RE16I_IM16I", "AMP8I_PHS8I")
    # descriptors
    PixelType = StringEnumDescriptor(
        'PixelType', _PIXEL_TYPE_VALUES, _required, strict=True,
        docstring="The PixelType attribute which specifies the interpretation of the file data.")  # type: str
    AmpTable = FloatArrayDescriptor(
        'AmpTable', _collections_tags, _required, strict=DEFAULT_STRICT,
        minimum_length=256, maximum_length=256,
        docstring="The amplitude look-up table. This is required if "
                  "`PixelType == 'AMP8I_PHS8I'`")  # type: numpy.ndarray
    NumRows = IntegerDescriptor(
        'NumRows', _required, strict=True,
        docstring='The number of Rows in the product. May include zero rows.')  # type: int
    NumCols = IntegerDescriptor(
        'NumCols', _required, strict=True,
        docstring='The number of Columns in the product. May include zero rows.')  # type: int
    FirstRow = IntegerDescriptor(
        'FirstRow', _required, strict=DEFAULT_STRICT,
        docstring='Global row index of the first row in the product. '
                  'Equal to 0 in full image product.')  # type: int
    FirstCol = IntegerDescriptor(
        'FirstCol', _required, strict=DEFAULT_STRICT,
        docstring='Global column index of the first column in the product. '
                  'Equal to 0 in full image product.')  # type: int
    FullImage = SerializableDescriptor(
        'FullImage', FullImageType, _required, strict=DEFAULT_STRICT,
        docstring='Original full image product.')  # type: FullImageType
    SCPPixel = SerializableDescriptor(
        'SCPPixel', RowColType, _required, strict=DEFAULT_STRICT,
        docstring='Scene Center Point pixel global row and column index. Should be located near the '
                  'center of the full image.')  # type: RowColType
    ValidData = SerializableArrayDescriptor(
        'ValidData', RowColArrayElement, _collections_tags, _required, strict=DEFAULT_STRICT, minimum_length=3,
        docstring='Indicates the full image includes both valid data and some zero filled pixels. '
                  'Simple polygon encloses the valid data (may include some zero filled pixels for simplification). '
                  'Vertices in clockwise order.')  # type: Union[SerializableArray, List[RowColArrayElement]]

    def __init__(
            self,
            PixelType: str = None,
            AmpTable: Optional[numpy.ndarray] = None,
            NumRows: int = None,
            NumCols: int = None,
            FirstRow: int = None,
            FirstCol: int = None,
            FullImage: Union[FullImageType, numpy.ndarray, list, tuple] = None,
            SCPPixel: Union[RowColType, numpy.ndarray, list, tuple] = None,
            ValidData=None,
            **kwargs):
        """
        Parameters
        ----------
        PixelType : str
        AmpTable : numpy.ndarray|list|tuple
        NumRows : int
        NumCols : int
        FirstRow : int
        FirstCol : int
        FullImage : FullImageType|numpy.ndarray|list|tuple
        SCPPixel : RowColType|numpy.ndarray|list|tuple
        ValidData : SerializableArray|List[RowColArrayElement]|numpy.ndarray|list|tuple
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.PixelType = PixelType
        self.AmpTable = AmpTable
        self.NumRows, self.NumCols = NumRows, NumCols
        self.FirstRow, self.FirstCol = FirstRow, FirstCol
        self.FullImage = FullImage
        self.SCPPixel = SCPPixel
        self.ValidData = ValidData
        super(ImageDataType, self).__init__(**kwargs)

    def _check_valid_data(self) -> bool:
        # Validity check of the ValidData polygon - it must enclose a nonzero
        # area, be traversed clockwise, and stay within the image bounds.
        if self.ValidData is None:
            return True
        if len(self.ValidData) < 2:
            # degenerate arrays (< 3 vertices) are flagged in _basic_validity_check
            return True

        value = True
        valid_data = self.ValidData.get_array(dtype='float64')
        lin_ring = LinearRing(coordinates=valid_data)
        area = lin_ring.get_area()
        # per the signed-area convention used here, area > 0 corresponds to
        # counter-clockwise traversal, which is disallowed
        if area == 0:
            self.log_validity_error('ValidData encloses no area.')
            value = False
        elif area > 0:
            self.log_validity_error(
                "ValidData must be traversed in clockwise direction.")
            value = False

        for i, entry in enumerate(valid_data):
            # each (row, col) vertex must fall within the image extent
            if not ((self.FirstRow <= entry[0] <= self.FirstRow + self.NumRows) and
                    (self.FirstCol <= entry[1] <= self.FirstCol + self.NumCols)):
                self.log_validity_warning(
                    'ValidData entry {} is not contained in the image bounds'.format(i))
                value = False
        return value

    def _basic_validity_check(self) -> bool:
        condition = super(ImageDataType, self)._basic_validity_check()
        # AmpTable is required exactly when the pixel type is amplitude/phase
        if (self.PixelType == 'AMP8I_PHS8I') and (self.AmpTable is None):
            self.log_validity_error(
                "We have `PixelType='AMP8I_PHS8I'` and `AmpTable` is not defined for ImageDataType.")
            condition = False
        if (self.PixelType != 'AMP8I_PHS8I') and (self.AmpTable is not None):
            self.log_validity_error(
                "We have `PixelType != 'AMP8I_PHS8I'` and `AmpTable` is defined for ImageDataType.")
            condition = False
        if (self.ValidData is not None) and (len(self.ValidData) < 3):
            self.log_validity_error(
                "We have `ValidData` defined with fewer than 3 entries.")
            condition = False
        condition &= self._check_valid_data()
        return condition

    def get_valid_vertex_data(self, dtype=numpy.int64) -> Optional[numpy.ndarray]:
        """
        Gets an array of `[row, col]` indices defining the valid data. If this is not viable, then `None`
        will be returned.

        Parameters
        ----------
        dtype : object
            the data type for the array

        Returns
        -------
        numpy.ndarray|None
            Of shape `(len(ValidData), 2)`.
        """

        if self.ValidData is None:
            return None
        out = numpy.zeros((self.ValidData.size, 2), dtype=dtype)
        for i, entry in enumerate(self.ValidData):
            out[i, :] = entry.get_array(dtype=dtype)
        return out

    def get_full_vertex_data(self, dtype=numpy.int64) -> Optional[numpy.ndarray]:
        """
        Gets an array of `[row, col]` indices defining the full vertex data. If this is not viable, then `None`
        will be returned.

        Parameters
        ----------
        dtype : object
            the data type for the array

        Returns
        -------
        numpy.ndarray|None
            The four image corners, in clockwise order from `(0, 0)`.
        """

        if self.NumRows is None or self.NumCols is None:
            return None
        return numpy.array(
            [[0, 0], [0, self.NumCols - 1], [self.NumRows - 1, self.NumCols - 1], [self.NumRows - 1, 0]], dtype=dtype)

    def get_pixel_size(self) -> int:
        """
        Gets the size per pixel, in bytes.

        Returns
        -------
        int

        Raises
        ------
        ValueError
            On unrecognized `PixelType`.
        """

        if self.PixelType == "RE32F_IM32F":
            return 8  # two 4-byte floating point components
        elif self.PixelType == "RE16I_IM16I":
            return 4  # two 2-byte integer components
        elif self.PixelType == "AMP8I_PHS8I":
            return 2  # two 1-byte components (amplitude and phase)
        else:
            raise ValueError('Got unhandled pixel type `{}`'.format(self.PixelType))
| 10,655 | 36.389474 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/RadarCollection.py | """
The RadarCollectionType definition.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import List, Union, Optional, Dict, Tuple
import logging
import numpy
from sarpy.io.xml.base import Serializable, Arrayable, SerializableArray, \
ParametersCollection, parse_float
from sarpy.io.xml.descriptors import StringDescriptor, StringEnumDescriptor, \
FloatDescriptor, IntegerDescriptor, SerializableDescriptor, \
SerializableArrayDescriptor, UnitVectorDescriptor, ParametersDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT, \
SerializableCPArrayDescriptor, SerializableCPArray
from .blocks import XYZType, LatLonHAECornerRestrictionType, \
POLARIZATION1_VALUES, POLARIZATION2_VALUES, DUAL_POLARIZATION_VALUES
from .utils import polstring_version_required
import sarpy.geometry.geocoords as geocoords
logger = logging.getLogger(__name__)
def get_band_name(freq: float) -> str:
    """
    Gets the band name associated with the given frequency (in Hz).

    Parameters
    ----------
    freq : float
        The frequency in Hz. `None` and out-of-range frequencies yield `'UN'`.

    Returns
    -------
    str
    """

    if freq is None:
        return 'UN'
    # half-open bands [low, high) spanning 3 MHz - 300 GHz
    band_table = (
        (3e6, 3e7, 'HF'),
        (3e7, 3e8, 'VHF'),
        (3e8, 1e9, 'UHF'),
        (1e9, 2e9, 'L'),
        (2e9, 4e9, 'S'),
        (4e9, 8e9, 'C'),
        (8e9, 1.2e10, 'X'),
        (1.2e10, 1.8e10, 'KU'),
        (1.8e10, 2.7e10, 'K'),
        (2.7e10, 4e10, 'KA'),
        (4e10, 7.5e10, 'V'),
        (7.5e10, 1.1e11, 'W'),
        (1.1e11, 3e11, 'MM'))
    for low, high, band in band_table:
        if low <= freq < high:
            return band
    return 'UN'
class TxFrequencyType(Serializable, Arrayable):
    """
    The transmit frequency range.
    """

    _fields = ('Min', 'Max')
    _required = _fields
    _numeric_format = {'Min': FLOAT_FORMAT, 'Max': FLOAT_FORMAT}
    # descriptors
    Min = FloatDescriptor(
        'Min', _required, strict=DEFAULT_STRICT,
        docstring='The transmit minimum frequency in Hz.')  # type: float
    Max = FloatDescriptor(
        'Max', _required, strict=DEFAULT_STRICT,
        docstring='The transmit maximum frequency in Hz.')  # type: float

    def __init__(
            self,
            Min: float = None,
            Max: float = None,
            **kwargs):
        """
        Parameters
        ----------
        Min : float
        Max : float
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Min, self.Max = Min, Max
        super(TxFrequencyType, self).__init__(**kwargs)

    @property
    def center_frequency(self) -> Optional[float]:
        """
        None|float: The center frequency, or `None` if either bound is unset.
        """

        if self.Min is None or self.Max is None:
            return None
        return 0.5*(self.Min + self.Max)

    def _apply_reference_frequency(self, reference_frequency: float):
        # shift offset frequencies to absolute values
        if self.Min is not None:
            self.Min += reference_frequency
        if self.Max is not None:
            self.Max += reference_frequency

    def _basic_validity_check(self) -> bool:
        condition = super(TxFrequencyType, self)._basic_validity_check()
        if self.Min is not None and self.Max is not None and self.Max < self.Min:
            self.log_validity_error(
                'Invalid frequency bounds Min ({}) > Max ({})'.format(self.Min, self.Max))
            condition = False
        return condition

    def get_band_abbreviation(self) -> str:
        """
        Gets the band abbreviation for the suggested name.

        Returns
        -------
        str
            The band name, right-padded with underscores to 3 characters.
        """

        band_name = get_band_name(self.center_frequency)

        return band_name + '_'*(3 - len(band_name))

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Gets an array representation of the data.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            data type of the return

        Returns
        -------
        numpy.ndarray
            data array with appropriate entry order
        """

        return numpy.array([self.Min, self.Max], dtype=dtype)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry. Note that `None` input yields `None`.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Min, Max]

        Returns
        -------
        TxFrequencyType
        """

        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError('Expected array to be of length 2, and received {}'.format(len(array)))
            return cls(Min=array[0], Max=array[1])
        raise ValueError('Expected array to be numpy.ndarray, list, or tuple, got {}'.format(type(array)))
class WaveformParametersType(Serializable):
    """
    Transmit and receive demodulation waveform parameters.
    """

    # NB: RcvDemodType and RcvFMRate appear in _fields but are implemented as
    # properties below - RcvDemodType is derived from RcvFMRate.
    _fields = (
        'TxPulseLength', 'TxRFBandwidth', 'TxFreqStart', 'TxFMRate', 'RcvDemodType', 'RcvWindowLength',
        'ADCSampleRate', 'RcvIFBandwidth', 'RcvFreqStart', 'RcvFMRate', 'index')
    _required = ()
    _set_as_attribute = ('index', )
    _numeric_format = {
        'TxPulseLength': FLOAT_FORMAT, 'TxRFBandwidth': '0.17E', 'TxFreqStart': '0.17E',
        'TxFMRate': '0.17E', 'RcvWindowLength': FLOAT_FORMAT, 'ADCSampleRate': '0.17E',
        'RcvIFBandwidth': '0.17E', 'RcvFreqStart': '0.17E', 'RcvFMRate': '0.17E'}
    # descriptors
    TxPulseLength = FloatDescriptor(
        'TxPulseLength', _required, strict=DEFAULT_STRICT,
        docstring='Transmit pulse length in seconds.')  # type: float
    TxRFBandwidth = FloatDescriptor(
        'TxRFBandwidth', _required, strict=DEFAULT_STRICT,
        docstring='Transmit RF bandwidth of the transmit pulse in Hz.')  # type: float
    TxFreqStart = FloatDescriptor(
        'TxFreqStart', _required, strict=DEFAULT_STRICT,
        docstring='Transmit Start frequency for Linear FM waveform in Hz, may be relative '
                  'to reference frequency.')  # type: float
    TxFMRate = FloatDescriptor(
        'TxFMRate', _required, strict=DEFAULT_STRICT,
        docstring='Transmit FM rate for Linear FM waveform in Hz/second.')  # type: float
    RcvWindowLength = FloatDescriptor(
        'RcvWindowLength', _required, strict=DEFAULT_STRICT,
        docstring='Receive window duration in seconds.')  # type: float
    ADCSampleRate = FloatDescriptor(
        'ADCSampleRate', _required, strict=DEFAULT_STRICT,
        docstring='Analog-to-Digital Converter sampling rate in samples/second.')  # type: float
    RcvIFBandwidth = FloatDescriptor(
        'RcvIFBandwidth', _required, strict=DEFAULT_STRICT,
        docstring='Receive IF bandwidth in Hz.')  # type: float
    RcvFreqStart = FloatDescriptor(
        'RcvFreqStart', _required, strict=DEFAULT_STRICT,
        docstring='Receive demodulation start frequency in Hz, may be relative to reference frequency.')  # type: float
    index = IntegerDescriptor(
        'index', _required, strict=False, docstring="The array index.")  # type: int

    def __init__(
            self,
            TxPulseLength: Optional[float] = None,
            TxRFBandwidth: Optional[float] = None,
            TxFreqStart: Optional[float] = None,
            TxFMRate: Optional[float] = None,
            RcvDemodType: Optional[str] = None,
            RcvWindowLength: Optional[float] = None,
            ADCSampleRate: Optional[float] = None,
            RcvIFBandwidth: Optional[float] = None,
            RcvFreqStart: Optional[float] = None,
            RcvFMRate: Optional[float] = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        TxPulseLength : float
        TxRFBandwidth : float
        TxFreqStart : float
        TxFMRate : float
        RcvDemodType : str
            Only used to interpret `RcvFMRate` - the stored value is derived
            from `RcvFMRate` (see the read-only `RcvDemodType` property).
        RcvWindowLength : float
        ADCSampleRate : float
        RcvIFBandwidth : float
        RcvFreqStart : float
        RcvFMRate : float
        index : int
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self._RcvFMRate = None
        self.TxPulseLength, self.TxRFBandwidth = TxPulseLength, TxRFBandwidth
        self.TxFreqStart, self.TxFMRate = TxFreqStart, TxFMRate
        self.RcvWindowLength = RcvWindowLength
        self.ADCSampleRate = ADCSampleRate
        self.RcvIFBandwidth = RcvIFBandwidth
        self.RcvFreqStart = RcvFreqStart
        # NB: self.RcvDemodType is read only.
        # RcvFMRate is retained only when consistent with the supplied RcvDemodType:
        # 'CHIRP' with an omitted rate -> 0.0, 'STRETCH' with a rate -> that rate.
        # NOTE(review): an explicit RcvFMRate supplied without a matching
        # RcvDemodType is silently discarded here - confirm this is intended.
        if RcvDemodType == 'CHIRP' and RcvFMRate is None:
            self.RcvFMRate = 0.0
        elif RcvDemodType == 'STRETCH' and RcvFMRate is not None:
            self.RcvFMRate = RcvFMRate
        else:
            self.RcvFMRate = None
        self.index = index
        super(WaveformParametersType, self).__init__(**kwargs)

    @property
    def RcvDemodType(self) -> Optional[str]:
        """
        str: READ ONLY. Receive demodulation used when Linear FM waveform is
        used on transmit. This value is derived form the value of `RcvFMRate`.

        * `None` - `RcvFMRate` is `None`.

        * `'CHIRP'` - `RcvFMRate=0`.

        * `'STRETCH'` - `RcvFMRate` is non-zero.
        """

        if self._RcvFMRate is None:
            return None
        elif self._RcvFMRate == 0:
            return 'CHIRP'
        else:
            return 'STRETCH'

    @property
    def RcvFMRate(self) -> Optional[float]:
        """
        float: Receive FM rate in Hz/sec. Also, determines the value of `RcvDemodType`. **Optional.**
        """

        return self._RcvFMRate

    @RcvFMRate.setter
    def RcvFMRate(self, value: Optional[float]):
        # failures to parse are logged and coerced to None rather than raised
        if value is None:
            self._RcvFMRate = None
        else:
            try:
                self._RcvFMRate = parse_float(value, 'RcvFMRate', self)
            except Exception as e:
                logger.error(
                    'Failed parsing value {} for field RCVFMRate of type "float",\n\t'
                    'with error {} - {}.\n\t'
                    'The value has been set to None.'.format(value, type(e), e))
                self._RcvFMRate = None

    def _basic_validity_check(self) -> bool:
        # no checks beyond the base class at present
        valid = super(WaveformParametersType, self)._basic_validity_check()
        return valid

    def derive(self):
        """
        Populate derived data in `WaveformParametersType`. Fills in whichever of
        `TxPulseLength`, `TxRFBandwidth`, `TxFMRate` is missing from the other
        two, via `TxRFBandwidth = TxPulseLength*TxFMRate`.

        Returns
        -------
        None
        """

        if self.TxPulseLength is not None and self.TxFMRate is not None and self.TxRFBandwidth is None:
            self.TxRFBandwidth = self.TxPulseLength*self.TxFMRate
        if self.TxPulseLength is not None and self.TxRFBandwidth is not None and self.TxFMRate is None:
            self.TxFMRate = self.TxRFBandwidth/self.TxPulseLength
        if self.TxFMRate is not None and self.TxRFBandwidth is not None and self.TxPulseLength is None:
            self.TxPulseLength = self.TxRFBandwidth/self.TxFMRate

    def _apply_reference_frequency(self, reference_frequency: float):
        # shift offset start frequencies to absolute values
        if self.TxFreqStart is not None:
            self.TxFreqStart += reference_frequency
        if self.RcvFreqStart is not None:
            self.RcvFreqStart += reference_frequency
class TxStepType(Serializable):
    """
    Transmit sequence step details.
    """

    _fields = ('WFIndex', 'TxPolarization', 'index')
    _required = ('index', )
    _set_as_attribute = ('index', )
    # descriptors
    WFIndex = IntegerDescriptor(
        'WFIndex', _required, strict=DEFAULT_STRICT,
        docstring='The waveform number for this step.')  # type: int
    TxPolarization = StringEnumDescriptor(
        'TxPolarization', POLARIZATION2_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Transmit signal polarization for this step.')  # type: str
    index = IntegerDescriptor(
        'index', _required, strict=DEFAULT_STRICT,
        docstring='The step index')  # type: int

    def __init__(
            self,
            WFIndex: Optional[int] = None,
            TxPolarization: Optional[str] = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        WFIndex : int
        TxPolarization : str
        index : int
        kwargs
        """

        # adopt any explicitly provided xml namespace details
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.WFIndex, self.TxPolarization = WFIndex, TxPolarization
        self.index = index
        super(TxStepType, self).__init__(**kwargs)
class ChanParametersType(Serializable):
    """
    Transmit receive sequence step details.
    """

    _fields = ('TxRcvPolarization', 'RcvAPCIndex', 'index')
    _required = ('TxRcvPolarization', 'index', )
    _set_as_attribute = ('index', )
    # descriptors
    TxRcvPolarization = StringEnumDescriptor(
        'TxRcvPolarization', DUAL_POLARIZATION_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Combined Transmit and Receive signal polarization for the channel.')  # type: str
    RcvAPCIndex = IntegerDescriptor(
        'RcvAPCIndex', _required, strict=DEFAULT_STRICT,
        docstring='Index of the Receive Aperture Phase Center (Rcv APC). Only include if Receive APC position '
                  'polynomial(s) are included.')  # type: int
    index = IntegerDescriptor(
        'index', _required, strict=DEFAULT_STRICT, docstring='The parameter index')  # type: int

    def __init__(
            self,
            TxRcvPolarization: Optional[str] = None,
            RcvAPCIndex: Optional[int] = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        TxRcvPolarization : str
        RcvAPCIndex : int
        index : int
        kwargs
        """

        # adopt any explicitly provided xml namespace details
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.TxRcvPolarization, self.RcvAPCIndex = TxRcvPolarization, RcvAPCIndex
        self.index = index
        super(ChanParametersType, self).__init__(**kwargs)

    def get_transmit_polarization(self) -> Optional[str]:
        """
        Gets the transmit portion of the combined `TxRcvPolarization` string,
        with 'OTHER'/'UNKNOWN' mapped to 'OTHER'.
        """

        combined = self.TxRcvPolarization
        if combined is None:
            return None
        if combined in ('OTHER', 'UNKNOWN'):
            return 'OTHER'
        return combined.split(':')[0]

    def version_required(self) -> Tuple[int, int, int]:
        """
        What SICD version is required?

        Returns
        -------
        tuple
        """

        return polstring_version_required(self.TxRcvPolarization)
class ReferencePointType(Serializable):
    """The reference point definition"""

    _fields = ('ECF', 'Line', 'Sample', 'name')
    _required = ('ECF', 'Line', 'Sample')
    _set_as_attribute = ('name', )
    _numeric_format = {'Line': FLOAT_FORMAT, 'Sample': FLOAT_FORMAT}
    # descriptors
    ECF = SerializableDescriptor(
        'ECF', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='The geographical coordinates for the reference point.')  # type: XYZType
    Line = FloatDescriptor(
        'Line', _required, strict=DEFAULT_STRICT,
        docstring='The reference point line index.')  # type: float
    Sample = FloatDescriptor(
        'Sample', _required, strict=DEFAULT_STRICT,
        docstring='The reference point sample index.')  # type: float
    name = StringDescriptor(
        'name', _required, strict=DEFAULT_STRICT,
        docstring='The reference point name.')  # type: str

    def __init__(
            self,
            ECF: Union[XYZType, numpy.ndarray, list, tuple] = None,
            Line: float = None,
            Sample: float = None,
            name: Optional[str] = None,
            **kwargs):
        """
        Parameters
        ----------
        ECF : XYZType|numpy.ndarray|list|tuple
        Line : float
        Sample : float
        name : str
        kwargs
        """

        # adopt any explicitly provided xml namespace details
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.ECF, self.Line, self.Sample = ECF, Line, Sample
        self.name = name
        super(ReferencePointType, self).__init__(**kwargs)
class XDirectionType(Serializable):
    """The X direction of the collect"""

    _fields = ('UVectECF', 'LineSpacing', 'NumLines', 'FirstLine')
    _required = _fields
    _numeric_format = {'LineSpacing': FLOAT_FORMAT, }
    # descriptors
    UVectECF = UnitVectorDescriptor(
        'UVectECF', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='The unit vector in the X direction.')  # type: XYZType
    LineSpacing = FloatDescriptor(
        'LineSpacing', _required, strict=DEFAULT_STRICT,
        docstring='The collection line spacing in the X direction in meters.')  # type: float
    NumLines = IntegerDescriptor(
        'NumLines', _required, strict=DEFAULT_STRICT,
        docstring='The number of lines in the X direction.')  # type: int
    FirstLine = IntegerDescriptor(
        'FirstLine', _required, strict=DEFAULT_STRICT,
        docstring='The first line index.')  # type: int

    def __init__(
            self,
            UVectECF: Union[XYZType, numpy.ndarray, list, tuple] = None,
            LineSpacing: float = None,
            NumLines: int = None,
            FirstLine: int = None,
            **kwargs):
        """
        Parameters
        ----------
        UVectECF : XYZType|numpy.ndarray|list|tuple
        LineSpacing : float
        NumLines : int
        FirstLine : int
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.UVectECF = UVectECF
        self.LineSpacing = LineSpacing
        self.NumLines = NumLines
        self.FirstLine = FirstLine
        super(XDirectionType, self).__init__(**kwargs)
class YDirectionType(Serializable):
    """The Y direction of the collect"""

    _fields = ('UVectECF', 'SampleSpacing', 'NumSamples', 'FirstSample')
    _required = _fields
    _numeric_format = {'SampleSpacing': FLOAT_FORMAT, }
    # descriptors
    UVectECF = UnitVectorDescriptor(
        'UVectECF', XYZType, _required, strict=DEFAULT_STRICT,
        docstring='The unit vector in the Y direction.')  # type: XYZType
    SampleSpacing = FloatDescriptor(
        'SampleSpacing', _required, strict=DEFAULT_STRICT,
        docstring='The collection sample spacing in the Y direction in meters.')  # type: float
    NumSamples = IntegerDescriptor(
        'NumSamples', _required, strict=DEFAULT_STRICT,
        docstring='The number of samples in the Y direction.')  # type: int
    FirstSample = IntegerDescriptor(
        'FirstSample', _required, strict=DEFAULT_STRICT,
        docstring='The first sample index.')  # type: int

    def __init__(
            self,
            UVectECF: Union[XYZType, numpy.ndarray, list, tuple] = None,
            SampleSpacing: float = None,
            NumSamples: int = None,
            FirstSample: int = None,
            **kwargs):
        """
        Parameters
        ----------
        UVectECF : XYZType|numpy.ndarray|list|tuple
        SampleSpacing : float
        NumSamples : int
        FirstSample : int
        kwargs
        """

        # adopt any explicitly provided xml namespace details
        for ns_attr in ('_xml_ns', '_xml_ns_key'):
            if ns_attr in kwargs:
                setattr(self, ns_attr, kwargs[ns_attr])
        self.UVectECF, self.SampleSpacing = UVectECF, SampleSpacing
        self.NumSamples, self.FirstSample = NumSamples, FirstSample
        super(YDirectionType, self).__init__(**kwargs)
class SegmentArrayElement(Serializable):
    """The collection segment definition - one rectangular sub-region of the collection area."""

    _fields = ('StartLine', 'StartSample', 'EndLine', 'EndSample', 'Identifier', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # descriptors
    StartLine = IntegerDescriptor(
        'StartLine', _required, strict=DEFAULT_STRICT,
        docstring='The starting line number of the segment.')  # type: int
    StartSample = IntegerDescriptor(
        'StartSample', _required, strict=DEFAULT_STRICT,
        docstring='The starting sample number of the segment.')  # type: int
    EndLine = IntegerDescriptor(
        'EndLine', _required, strict=DEFAULT_STRICT,
        docstring='The ending line number of the segment.')  # type: int
    EndSample = IntegerDescriptor(
        'EndSample', _required, strict=DEFAULT_STRICT,
        docstring='The ending sample number of the segment.')  # type: int
    Identifier = StringDescriptor(
        'Identifier', _required, strict=DEFAULT_STRICT,
        docstring='Identifier for the segment data boundary.')  # type: str
    index = IntegerDescriptor(
        'index', _required, strict=DEFAULT_STRICT,
        docstring='The array index.')  # type: int

    def __init__(
            self,
            StartLine: int = None,
            StartSample: int = None,
            EndLine: int = None,
            EndSample: int = None,
            Identifier: str = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        StartLine : int
        StartSample : int
        EndLine : int
        EndSample : int
        Identifier : str
        index : int
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.StartLine, self.EndLine = StartLine, EndLine
        self.StartSample, self.EndSample = StartSample, EndSample
        self.Identifier = Identifier
        self.index = index
        super(SegmentArrayElement, self).__init__(**kwargs)
class ReferencePlaneType(Serializable):
    """
    The reference plane.
    """

    _fields = ('RefPt', 'XDir', 'YDir', 'SegmentList', 'Orientation')
    _required = ('RefPt', 'XDir', 'YDir')
    _collections_tags = {'SegmentList': {'array': True, 'child_tag': 'Segment'}}
    # other class variable
    _ORIENTATION_VALUES = ('UP', 'DOWN', 'LEFT', 'RIGHT', 'ARBITRARY')
    # descriptors
    RefPt = SerializableDescriptor(
        'RefPt', ReferencePointType, _required, strict=DEFAULT_STRICT,
        docstring='The reference point.')  # type: ReferencePointType
    XDir = SerializableDescriptor(
        'XDir', XDirectionType, _required, strict=DEFAULT_STRICT,
        docstring='The X direction collection plane parameters.')  # type: XDirectionType
    YDir = SerializableDescriptor(
        'YDir', YDirectionType, _required, strict=DEFAULT_STRICT,
        docstring='The Y direction collection plane parameters.')  # type: YDirectionType
    SegmentList = SerializableArrayDescriptor(
        'SegmentList', SegmentArrayElement, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The segment array.')  # type: Union[SerializableArray, List[SegmentArrayElement]]
    Orientation = StringEnumDescriptor(
        'Orientation', _ORIENTATION_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='Describes the shadow intent of the display plane.')  # type: str

    def __init__(
            self,
            RefPt: ReferencePointType = None,
            XDir: XDirectionType = None,
            YDir: YDirectionType = None,
            SegmentList: Union[SerializableArray, List[SegmentArrayElement]] = None,
            Orientation: Optional[str] = None,
            **kwargs):
        """
        Parameters
        ----------
        RefPt : ReferencePointType
        XDir : XDirectionType
        YDir : YDirectionType
        SegmentList : SerializableArray|List[SegmentArrayElement]
        Orientation : str
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.RefPt = RefPt
        self.XDir, self.YDir = XDir, YDir
        self.SegmentList = SegmentList
        self.Orientation = Orientation
        super(ReferencePlaneType, self).__init__(**kwargs)

    def get_ecf_corner_array(self) -> numpy.ndarray:
        """
        Use the XDir and YDir definitions to return the corner points in ECF coordinates as a `4x3` array.

        Returns
        -------
        numpy.ndarray
            The corner points of the collection area, with order following the AreaType order convention.
        """

        ecf_ref = self.RefPt.ECF.get_array()
        # per-line/per-sample ECF displacement vectors
        x_shift = self.XDir.UVectECF.get_array() * self.XDir.LineSpacing
        y_shift = self.YDir.UVectECF.get_array() * self.YDir.SampleSpacing
        # order convention - (first line, first sample), (first line, last sample),
        # (last line, last sample), (last line, first sample)
        # NOTE(review): the far edges use NumLines/NumSamples rather than
        # NumLines-1/NumSamples-1 - confirm this matches the intended convention.
        x_offset = numpy.array(
            [self.XDir.FirstLine, self.XDir.FirstLine, self.XDir.NumLines, self.XDir.NumLines])
        y_offset = numpy.array(
            [self.YDir.FirstSample, self.YDir.NumSamples, self.YDir.NumSamples, self.YDir.FirstSample])
        corners = numpy.zeros((4, 3), dtype=numpy.float64)
        for i in range(4):
            # each corner is the reference point plus the (line, sample) offsets
            # relative to the reference point's (Line, Sample) position
            corners[i, :] = \
                ecf_ref + x_shift*(x_offset[i] - self.RefPt.Line) + y_shift*(y_offset[i] - self.RefPt.Sample)
        return corners
class AreaType(Serializable):
    """
    The collection area, described by corner points and, optionally, a
    geolocated reference plane from which the corners can be derived.
    """
    _fields = ('Corner', 'Plane')
    _required = ('Corner', )
    _collections_tags = {
        'Corner': {'array': True, 'child_tag': 'ACP'}, }
    # descriptors
    Corner = SerializableCPArrayDescriptor(
        'Corner', LatLonHAECornerRestrictionType, _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='The collection area corner point definition array.'
    )  # type: Union[SerializableCPArray, List[LatLonHAECornerRestrictionType]]
    Plane = SerializableDescriptor(
        'Plane', ReferencePlaneType, _required, strict=DEFAULT_STRICT,
        docstring='A rectangular area in a geo-located display plane.')  # type: ReferencePlaneType

    def __init__(
            self,
            Corner: Union[SerializableCPArray, List[LatLonHAECornerRestrictionType], numpy.ndarray, list, tuple] = None,
            Plane: ReferencePlaneType = None,
            **kwargs):
        """
        Parameters
        ----------
        Corner : SerializableCPArray|List[LatLonHAECornerRestrictionType]|numpy.ndarray|list|tuple
        Plane : ReferencePlaneType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Corner = Corner
        self.Plane = Plane
        super(AreaType, self).__init__(**kwargs)
        # derivation performed eagerly on construction, so Corner is populated when possible
        self.derive()

    def _derive_corner_from_plane(self):
        # try to define the corner points - for SICD 0.5.
        if self.Corner is not None:
            return  # nothing to be done
        if self.Plane is None:
            return  # nothing to derive from
        # project the plane corners to geodetic coordinates (1-based corner indices)
        corners = self.Plane.get_ecf_corner_array()
        self.Corner = [
            LatLonHAECornerRestrictionType(**{'Lat': entry[0], 'Lon': entry[1], 'HAE': entry[2], 'index': i+1})
            for i, entry in enumerate(geocoords.ecf_to_geodetic(corners))]

    def derive(self):
        """
        Derive the corner points from the plane, if necessary.

        Returns
        -------
        None
        """
        self._derive_corner_from_plane()
class RadarCollectionType(Serializable):
    """
    The Radar Collection Type: transmit frequency/waveform/polarization
    parameters, receive channel definitions, and the imaged area, together
    with derivation logic for older/partially-populated SICDs and
    consistency validation.
    """
    _fields = (
        'TxFrequency', 'RefFreqIndex', 'Waveform', 'TxPolarization', 'TxSequence', 'RcvChannels', 'Area', 'Parameters')
    _required = ('TxFrequency', 'TxPolarization', 'RcvChannels')
    _collections_tags = {
        'Waveform': {'array': True, 'child_tag': 'WFParameters'},
        'TxSequence': {'array': True, 'child_tag': 'TxStep'},
        'RcvChannels': {'array': True, 'child_tag': 'ChanParameters'},
        'Parameters': {'array': False, 'child_tag': 'Parameter'}}
    # descriptors
    TxFrequency = SerializableDescriptor(
        'TxFrequency', TxFrequencyType, _required, strict=DEFAULT_STRICT,
        docstring='The transmit frequency range.')  # type: TxFrequencyType
    RefFreqIndex = IntegerDescriptor(
        'RefFreqIndex', _required, strict=DEFAULT_STRICT,
        docstring='The reference frequency index, if applicable. If present and non-zero, '
                  'all (most) RF frequency values are expressed as offsets from a reference '
                  'frequency.')  # type: int
    Waveform = SerializableArrayDescriptor(
        'Waveform', WaveformParametersType, _collections_tags, _required,
        strict=DEFAULT_STRICT, minimum_length=1,
        docstring='Transmit and receive demodulation waveform parameters.'
    )  # type: Union[SerializableArray, List[WaveformParametersType]]
    TxPolarization = StringEnumDescriptor(
        'TxPolarization', POLARIZATION1_VALUES, _required, strict=DEFAULT_STRICT,
        docstring='The transmit polarization.')  # type: str
    TxSequence = SerializableArrayDescriptor(
        'TxSequence', TxStepType, _collections_tags, _required, strict=DEFAULT_STRICT, minimum_length=1,
        docstring='The transmit sequence parameters array. If present, indicates the transmit signal steps through '
                  'a repeating sequence of waveforms and/or polarizations. '
                  'One step per Inter-Pulse Period.')  # type: Union[None, SerializableArray, List[TxStepType]]
    RcvChannels = SerializableArrayDescriptor(
        'RcvChannels', ChanParametersType, _collections_tags,
        _required, strict=DEFAULT_STRICT, minimum_length=1,
        docstring='Receive data channel parameters.')  # type: Union[SerializableArray, List[ChanParametersType]]
    Area = SerializableDescriptor(
        'Area', AreaType, _required, strict=DEFAULT_STRICT,
        docstring='The imaged area covered by the collection.')  # type: AreaType
    Parameters = ParametersDescriptor(
        'Parameters', _collections_tags, _required, strict=DEFAULT_STRICT,
        docstring='A parameters collections.')  # type: ParametersCollection

    def __init__(
            self,
            TxFrequency: TxFrequencyType = None,
            RefFreqIndex: Optional[int] = None,
            Waveform: Union[None, SerializableArray, List[WaveformParametersType]] = None,
            TxPolarization: str = None,
            TxSequence: Union[None, SerializableArray, List[TxStepType]] = None,
            RcvChannels: Union[SerializableArray, List[ChanParametersType]] = None,
            Area: Optional[AreaType] = None,
            Parameters: Union[None, ParametersCollection, Dict] = None,
            **kwargs):
        """
        Parameters
        ----------
        TxFrequency : TxFrequencyType|numpy.ndarray|list|tuple
        RefFreqIndex : int
        Waveform : SerializableArray|List[WaveformParametersType]
        TxPolarization : str
        TxSequence : SerializableArray|List[TxStepType]
        RcvChannels : SerializableArray|List[ChanParametersType]
        Area : AreaType
        Parameters : ParametersCollection|dict
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TxFrequency = TxFrequency
        self.RefFreqIndex = RefFreqIndex
        self.Waveform = Waveform
        self.TxPolarization = TxPolarization
        self.TxSequence = TxSequence
        self.RcvChannels = RcvChannels
        self.Area = Area
        self.Parameters = Parameters
        super(RadarCollectionType, self).__init__(**kwargs)

    def derive(self):
        """
        Populates derived data in RadarCollection. Expected to be called by `SICD` parent.

        Returns
        -------
        None
        """
        self._derive_tx_polarization()
        if self.Area is not None:
            self.Area.derive()
        if self.Waveform is not None:
            for entry in self.Waveform:
                entry.derive()
        self._derive_tx_frequency()  # call after waveform entry derive call
        self._derive_wf_params()

    def _derive_tx_polarization(self):
        # Derive TxPolarization (and possibly TxSequence) from TxSequence or
        # RcvChannels, for SICDs predating 1.0 or poorly formed instances.
        def check_sequence():
            # collapse the sequence to a single polarization when all steps agree
            unique_entries = set(entry.TxPolarization for entry in self.TxSequence)
            if len(unique_entries) == 1:
                self.TxPolarization = self.TxSequence[0].TxPolarization
            else:
                self.TxPolarization = 'SEQUENCE'
        # TxPolarization was optional prior to SICD 1.0. It may need to be derived.
        if self.TxSequence is not None:
            check_sequence()
            return
        if self.TxPolarization is not None:
            return  # nothing to be done
        if self.RcvChannels is None:
            return  # nothing to derive from
        if len(self.RcvChannels) > 1:
            # TxSequence may need to be derived from RCvChannels, for SICD before 1.0 or poorly formed
            # NOTE(review): this guard is always False here - TxSequence is None and
            # len(self.RcvChannels) > 1 are both established above. Candidate for removal.
            if self.TxSequence is not None or self.RcvChannels is None or len(self.RcvChannels) < 2:
                return
            tx_pols = list(chan_param.get_transmit_polarization() for chan_param in self.RcvChannels)
            # NOTE(review): tx_pols has one entry per channel (not unique values), so
            # len(tx_pols) > 1 always holds in this branch and the else is unreachable.
            if len(tx_pols) > 1:
                self.TxSequence = [TxStepType(index=i+1, TxPolarization=tx_pol) for i, tx_pol in enumerate(tx_pols)]
                check_sequence()
            else:
                self.TxPolarization = tx_pols[0]
        else:
            self.TxPolarization = self.RcvChannels[0].get_transmit_polarization()

    def _derive_tx_frequency(self):
        # Fill missing TxFrequency Min/Max from the populated Waveform entries.
        if self.Waveform is None or self.Waveform.size == 0:
            return  # nothing to be done
        if not(self.TxFrequency is None or self.TxFrequency.Min is None or self.TxFrequency.Max is None):
            return  # no need to do anything
        if self.TxFrequency is None:
            self.TxFrequency = TxFrequencyType()
        if self.TxFrequency.Min is None:
            self.TxFrequency.Min = min(
                entry.TxFreqStart for entry in self.Waveform if entry.TxFreqStart is not None)
        if self.TxFrequency.Max is None:
            self.TxFrequency.Max = max(
                (entry.TxFreqStart + entry.TxRFBandwidth) for entry in self.Waveform if
                entry.TxFreqStart is not None and entry.TxRFBandwidth is not None)

    def _derive_wf_params(self):
        # For a single-waveform collection, back-fill the waveform start/bandwidth
        # from the overall TxFrequency range.
        if self.TxFrequency is None or self.TxFrequency.Min is None or self.TxFrequency.Max is None:
            return  # nothing that we can do
        if self.Waveform is None or self.Waveform.size != 1:
            return  # nothing to be done
        entry = self.Waveform[0]  # only true for single waveform definition
        if entry.TxFreqStart is None:
            entry.TxFreqStart = self.TxFrequency.Min
        if entry.TxRFBandwidth is None:
            entry.TxRFBandwidth = self.TxFrequency.Max - self.TxFrequency.Min

    def _apply_reference_frequency(self, reference_frequency):
        """
        If the reference frequency is used, adjust the necessary fields accordingly.
        Expected to be called by `SICD` parent.

        Parameters
        ----------
        reference_frequency : float
            The reference frequency.

        Returns
        -------
        None
        """
        if self.TxFrequency is not None:
            # noinspection PyProtectedMember
            self.TxFrequency._apply_reference_frequency(reference_frequency)
        if self.Waveform is not None:
            for entry in self.Waveform:
                # noinspection PyProtectedMember
                entry._apply_reference_frequency(reference_frequency)
        # frequencies are now absolute, so the offset index is cleared
        self.RefFreqIndex = 0

    def get_polarization_abbreviation(self):
        """
        Gets the polarization collection abbreviation for the suggested name.

        Returns
        -------
        str
        """
        if self.RcvChannels is None:
            pol_count = 0
        else:
            pol_count = len(self.RcvChannels)
        # S/D/T/Q = single/dual/triple/quad polarization, U = unknown
        if pol_count == 1:
            return 'S'
        elif pol_count == 2:
            return 'D'
        elif pol_count == 3:
            return 'T'
        elif pol_count > 3:
            return 'Q'
        else:
            return 'U'

    def _check_frequency(self):
        # type: () -> bool
        # Non-positive frequencies are only valid when expressed as offsets (RefFreqIndex set).
        if self.RefFreqIndex is not None:
            return True
        if self.TxFrequency is not None and self.TxFrequency.Min is not None \
                and self.TxFrequency.Min <= 0:
            self.log_validity_error(
                'TxFrequency.Min is negative, but RefFreqIndex is not populated.')
            return False
        return True

    def _check_tx_sequence(self):
        # type: () -> bool
        # Validate mutual consistency of TxPolarization == 'SEQUENCE' and TxSequence.
        cond = True
        if self.TxPolarization == 'SEQUENCE' and self.TxSequence is None:
            self.log_validity_error(
                'TxPolarization is populated as "SEQUENCE", but TxSequence is not populated.')
            cond = False
        if self.TxSequence is not None:
            if self.TxPolarization != 'SEQUENCE':
                self.log_validity_error(
                    'TxSequence is populated, but TxPolarization is populated as {}'.format(self.TxPolarization))
                cond = False
            tx_pols = list(set([entry.TxPolarization for entry in self.TxSequence]))
            if len(tx_pols) == 1:
                # a sequence of identical polarizations should be a plain TxPolarization
                self.log_validity_error(
                    'TxSequence is populated, but the only unique TxPolarization '
                    'among the entries is {}'.format(tx_pols[0]))
                cond = False
        return cond

    def _check_waveform_parameters(self):
        """
        Validate the waveform parameters for consistency.

        Returns
        -------
        bool
        """
        def validate_entry(index, waveform):
            # type: (int, WaveformParametersType) -> bool
            # Cross-check a single Waveform entry's internal consistency; log errors
            # with the 1-based entry index.
            this_cond = True
            try:
                # TxRFBandwidth should equal TxPulseLength * TxFMRate (within 0.1%)
                if abs(waveform.TxRFBandwidth/(waveform.TxPulseLength*waveform.TxFMRate) - 1) > 1e-3:
                    self.log_validity_error(
                        'The TxRFBandwidth, TxPulseLength, and TxFMRate parameters of Waveform '
                        'entry {} are inconsistent'.format(index+1))
                    this_cond = False
            except (AttributeError, ValueError, TypeError):
                pass  # some of the three fields are unpopulated - nothing to check
            if waveform.RcvDemodType == 'CHIRP' and waveform.RcvFMRate != 0:
                self.log_validity_error(
                    'RcvDemodType == "CHIRP" and RcvFMRate != 0 in Waveform entry {}'.format(index+1))
                this_cond = False
            if waveform.RcvDemodType == 'STRETCH' and \
                    waveform.RcvFMRate is not None and waveform.TxFMRate is not None and \
                    abs(waveform.RcvFMRate/waveform.TxFMRate - 1) > 1e-3:
                self.log_validity_warning(
                    'RcvDemodType = "STRETCH", RcvFMRate = {}, and TxFMRate = {} in '
                    'Waveform entry {}. The RcvFMRate and TxFMRate should very likely '
                    'be the same.'.format(waveform.RcvFMRate, waveform.TxFMRate, index+1))
            if self.RefFreqIndex is None:
                # without a reference frequency offset, frequencies must be positive
                if waveform.TxFreqStart <= 0:
                    self.log_validity_error(
                        'TxFreqStart is negative in Waveform entry {}, but RefFreqIndex '
                        'is not populated.'.format(index+1))
                    this_cond = False
                if waveform.RcvFreqStart is not None and waveform.RcvFreqStart <= 0:
                    self.log_validity_error(
                        'RcvFreqStart is negative in Waveform entry {}, but RefFreqIndex '
                        'is not populated.'.format(index + 1))
                    this_cond = False
            if waveform.TxPulseLength is not None and waveform.RcvWindowLength is not None and \
                    waveform.TxPulseLength > waveform.RcvWindowLength:
                self.log_validity_error(
                    'TxPulseLength ({}) is longer than RcvWindowLength ({}) in '
                    'Waveform entry {}'.format(waveform.TxPulseLength, waveform.RcvWindowLength, index+1))
                this_cond = False
            if waveform.RcvIFBandwidth is not None and waveform.ADCSampleRate is not None and \
                    waveform.RcvIFBandwidth > waveform.ADCSampleRate:
                self.log_validity_error(
                    'RcvIFBandwidth ({}) is longer than ADCSampleRate ({}) in '
                    'Waveform entry {}'.format(waveform.RcvIFBandwidth, waveform.ADCSampleRate, index+1))
                this_cond = False
            if waveform.RcvDemodType is not None and waveform.RcvDemodType == 'CHIRP' \
                    and waveform.TxRFBandwidth is not None and waveform.ADCSampleRate is not None \
                    and (waveform.TxRFBandwidth > waveform.ADCSampleRate):
                self.log_validity_error(
                    'RcvDemodType is "CHIRP" and TxRFBandwidth ({}) is larger than ADCSampleRate ({}) '
                    'in Waveform entry {}'.format(waveform.TxRFBandwidth, waveform.ADCSampleRate, index+1))
                this_cond = False
            if waveform.RcvWindowLength is not None and waveform.TxPulseLength is not None and \
                    waveform.TxFMRate is not None and waveform.RcvFreqStart is not None and \
                    waveform.TxFreqStart is not None and waveform.TxRFBandwidth is not None:
                # allowable frequency slop from the receive window exceeding the pulse
                freq_tol = (waveform.RcvWindowLength - waveform.TxPulseLength)*waveform.TxFMRate
                if waveform.RcvFreqStart >= waveform.TxFreqStart + waveform.TxRFBandwidth + freq_tol:
                    self.log_validity_error(
                        'RcvFreqStart ({}), TxFreqStart ({}), and TxRfBandwidth ({}) parameters are inconsistent '
                        'in Waveform entry {}'.format(
                            waveform.RcvFreqStart, waveform.TxFreqStart, waveform.TxRFBandwidth, index + 1))
                    this_cond = False
                if waveform.RcvFreqStart <= waveform.TxFreqStart - freq_tol:
                    self.log_validity_error(
                        'RcvFreqStart ({}) and TxFreqStart ({}) parameters are inconsistent '
                        'in Waveform entry {}'.format(waveform.RcvFreqStart, waveform.TxFreqStart, index + 1))
                    this_cond = False
            return this_cond

        if self.Waveform is None or len(self.Waveform) < 1:
            return True
        cond = True
        # fetch min/max TxFreq observed
        wf_min_freq = None
        wf_max_freq = None
        for entry in self.Waveform:
            freq_start = entry.TxFreqStart
            freq_bw = entry.TxRFBandwidth
            if freq_start is not None:
                wf_min_freq = freq_start if wf_min_freq is None else \
                    min(wf_min_freq, freq_start)
            # NOTE(review): this guards only TxRFBandwidth; if freq_start is None while
            # TxRFBandwidth is populated, `freq_start + freq_bw` raises TypeError - confirm
            # whether `freq_start is not None` should also be required here.
            if entry.TxRFBandwidth is not None:
                wf_max_freq = freq_start + freq_bw if wf_max_freq is None else \
                    max(wf_max_freq, freq_start + freq_bw)
        if wf_min_freq is not None and self.TxFrequency is not None and self.TxFrequency.Min is not None:
            if abs(self.TxFrequency.Min/wf_min_freq - 1) > 1e-3:
                self.log_validity_error(
                    'The stated TxFrequency.Min is {}, but the minimum populated in a '
                    'Waveform entry is {}'.format(self.TxFrequency.Min, wf_min_freq))
                cond = False
        if wf_max_freq is not None and self.TxFrequency is not None and self.TxFrequency.Max is not None:
            if abs(self.TxFrequency.Max/wf_max_freq - 1) > 1e-3:
                self.log_validity_error(
                    'The stated TxFrequency.Max is {}, but the maximum populated in a '
                    'Waveform entry is {}'.format(self.TxFrequency.Max, wf_max_freq))
                cond = False
        for t_index, t_waveform in enumerate(self.Waveform):
            cond &= validate_entry(t_index, t_waveform)
        return cond

    def _basic_validity_check(self):
        # Extend the base validity check with frequency/sequence/waveform consistency.
        valid = super(RadarCollectionType, self)._basic_validity_check()
        valid &= self._check_frequency()
        valid &= self._check_tx_sequence()
        valid &= self._check_waveform_parameters()
        return valid

    def version_required(self):
        """
        What SICD version is required?

        Returns
        -------
        tuple
        """
        requires = (1, 1, 0)
        if self.RcvChannels is None:
            return requires
        # the most demanding channel entry determines the version
        for entry in self.RcvChannels:
            requires = max(requires, entry.version_required())
        return requires
| 45,426 | 36.982441 | 120 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_elements/blocks.py | """
Basic building blocks for SICD standard.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
from typing import Union, Optional, Tuple
from collections import OrderedDict
import numpy
from sarpy.io.xml.base import Serializable, Arrayable, \
get_node_value, create_text_node, create_new_node, find_children
from sarpy.io.xml.descriptors import IntegerDescriptor, StringEnumDescriptor, \
FloatDescriptor, FloatModularDescriptor, SerializableDescriptor
from .base import DEFAULT_STRICT, FLOAT_FORMAT
_len2_array_text = 'Expected array to be of length 2, ' \
'and received `{}`'
_len3_array_text = 'Expected array to be of length 3, ' \
'and received `{}`'
_array_type_text = 'Expected array to be numpy.ndarray, list, or tuple, got `{}`'
#########
# Polarization constants
POLARIZATION1_VALUES = ('V', 'H', 'X', 'Y', 'S', 'E', 'RHC', 'LHC', 'OTHER', 'UNKNOWN', 'SEQUENCE')
POLARIZATION2_VALUES = ('V', 'H', 'X', 'Y', 'S', 'E', 'RHC', 'LHC', 'OTHER')
DUAL_POLARIZATION_VALUES = tuple('{}:{}'.format(pol1, pol2) for pol1 in POLARIZATION2_VALUES
for pol2 in POLARIZATION2_VALUES) + \
('OTHER', 'UNKNOWN')
##########
# Geographical coordinates
class XYZType(Serializable, Arrayable):
    """A spatial point in ECF coordinates."""
    _fields = ('X', 'Y', 'Z')
    _required = _fields
    _numeric_format = {'X': FLOAT_FORMAT, 'Y': FLOAT_FORMAT, 'Z': FLOAT_FORMAT}
    # descriptors
    X = FloatDescriptor(
        'X', _required, strict=True,
        docstring='The X attribute. Assumed to ECF or other, similar coordinates.')  # type: float
    Y = FloatDescriptor(
        'Y', _required, strict=True,
        docstring='The Y attribute. Assumed to ECF or other, similar coordinates.')  # type: float
    Z = FloatDescriptor(
        'Z', _required, strict=True,
        docstring='The Z attribute. Assumed to ECF or other, similar coordinates.')  # type: float

    def __init__(
            self,
            X: float = None,
            Y: float = None,
            Z: float = None,
            **kwargs):
        """
        Parameters
        ----------
        X : float
        Y : float
        Z : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.X = X
        self.Y = Y
        self.Z = Z
        super(XYZType, self).__init__(**kwargs)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Construct from an array-like entry of the form `[X, Y, Z]`.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [X, Y, Z]

        Returns
        -------
        XYZType
        """
        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(_array_type_text.format(type(array)))
        if len(array) < 3:
            raise ValueError(_len3_array_text.format(array))
        return cls(X=array[0], Y=array[1], Z=array[2])

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Return the coordinates as a numpy array of the form `[X, Y, Z]`.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
        """
        components = [self.X, self.Y, self.Z]
        return numpy.array(components, dtype=dtype)
class LatLonType(Serializable, Arrayable):
    """A two-dimensional geographic point in WGS-84 coordinates."""
    _fields = ('Lat', 'Lon')
    _required = _fields
    _numeric_format = {'Lat': FLOAT_FORMAT, 'Lon': FLOAT_FORMAT}
    # descriptors
    Lat = FloatDescriptor(
        'Lat', _required, strict=True,
        docstring='The latitude attribute. Assumed to be WGS-84 coordinates.')  # type: float
    Lon = FloatDescriptor(
        'Lon', _required, strict=True,
        docstring='The longitude attribute. Assumed to be WGS-84 coordinates.')  # type: float

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Lat, self.Lon = Lat, Lon
        super(LatLonType, self).__init__(**kwargs)

    def get_array(
            self,
            dtype=numpy.float64,
            order: str = 'LAT') -> numpy.ndarray:
        """
        Gets an array representation of the data.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            data type of the return
        order : str
            Determines array order. 'LAT' yields [Lat, Lon], and anything else yields [Lon, Lat].

        Returns
        -------
        numpy.ndarray
            data array with appropriate entry order
        """
        if order.upper() == 'LAT':
            return numpy.array([self.Lat, self.Lon], dtype=dtype)
        else:
            return numpy.array([self.Lon, self.Lat], dtype=dtype)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Lat, Lon]

        Returns
        -------
        LatLonType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError(_len2_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1])
        raise ValueError(_array_type_text.format(type(array)))

    def dms_format(
            self,
            frac_secs: bool = False) -> Tuple[Tuple[int, int, int, str], Tuple[int, int, int, str]]:
        """
        Get degree-minutes-seconds representation.

        Parameters
        ----------
        frac_secs : bool
            Should a fractional seconds (i.e. a float), otherwise integer

        Returns
        -------
        tuple
            of the form ((deg lat, mins lat, secs lat, N/S), (deg lon, mins lon, secs lon, E/W))
            Here degrees and minutes will be int, secs will be a float.
        """
        def reduce(value):
            # convert a signed decimal degree value to (deg, min, sec) magnitudes
            val = abs(value)
            deg = int(val)
            val = 60*(val - deg)
            mins = int(val)
            secs = 60*(val - mins)
            if not frac_secs:
                secs = int(secs)
            return deg, mins, secs

        # hemisphere indicators from the signs
        x = 'S' if self.Lat < 0 else 'N'
        y = 'W' if self.Lon < 0 else 'E'
        return reduce(self.Lat) + (x, ), reduce(self.Lon) + (y, )
class LatLonArrayElementType(LatLonType):
    """A geographic point in an array, carrying its (1 based) array index."""
    _fields = ('Lat', 'Lon', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # descriptors
    index = IntegerDescriptor(
        'index', _required, strict=False, docstring="The array index")  # type: int

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        index : int
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.index = index
        super(LatLonArrayElementType, self).__init__(Lat=Lat, Lon=Lon, **kwargs)

    @classmethod
    def from_array(
            cls,
            array: Union[numpy.ndarray, list, tuple],
            index: int = 1):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Lat, Lon]
        index : int
            (1 based) array index

        Returns
        -------
        LatLonArrayElementType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError(_len2_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1], index=index)
        raise ValueError(_array_type_text.format(type(array)))
class LatLonRestrictionType(LatLonType):
    """
    A two-dimensional geographic point in WGS-84 coordinates, with latitude
    and longitude wrapped into their principal ranges by the modular descriptors.
    """
    _fields = ('Lat', 'Lon')
    _required = _fields
    # descriptors - FloatModularDescriptor wraps values into (-90, 90] / (-180, 180]
    Lat = FloatModularDescriptor(
        'Lat', 90.0, _required, strict=True,
        docstring='The latitude attribute. Assumed to be WGS-84 coordinates.')  # type: float
    Lon = FloatModularDescriptor(
        'Lon', 180.0, _required, strict=True,
        docstring='The longitude attribute. Assumed to be WGS-84 coordinates.')  # type: float

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        super(LatLonRestrictionType, self).__init__(Lat=Lat, Lon=Lon, **kwargs)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Lat, Lon]

        Returns
        -------
        LatLonRestrictionType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError(_len2_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1])
        raise ValueError(_array_type_text.format(type(array)))
class LatLonHAEType(LatLonType):
    """A three-dimensional geographic point in WGS-84 coordinates."""
    _fields = ('Lat', 'Lon', 'HAE')
    _required = _fields
    _numeric_format = {'Lat': FLOAT_FORMAT, 'Lon': FLOAT_FORMAT, 'HAE': FLOAT_FORMAT}
    # descriptors
    HAE = FloatDescriptor(
        'HAE', _required, strict=True,
        docstring='The Height Above Ellipsoid (in meters) attribute. Assumed to be '
                  'WGS-84 coordinates.')  # type: float

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            HAE: float = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        HAE : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.HAE = HAE
        super(LatLonHAEType, self).__init__(Lat=Lat, Lon=Lon, **kwargs)

    def get_array(self, dtype=numpy.float64, order='LAT') -> numpy.ndarray:
        """
        Gets an array representation of the data.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            data type of the return
        order : str
            Determines array order. 'LAT' yields [Lat, Lon, HAE], and anything else yields [Lon, Lat, HAE].

        Returns
        -------
        numpy.ndarray
            data array with appropriate entry order
        """
        if order.upper() == 'LAT':
            return numpy.array([self.Lat, self.Lon, self.HAE], dtype=dtype)
        else:
            return numpy.array([self.Lon, self.Lat, self.HAE], dtype=dtype)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Lat, Lon, HAE]

        Returns
        -------
        LatLonHAEType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 3:
                raise ValueError(_len3_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1], HAE=array[2])
        raise ValueError(_array_type_text.format(type(array)))
class LatLonHAERestrictionType(LatLonHAEType):
    """
    A three-dimensional geographic point in WGS-84 coordinates, with latitude
    and longitude wrapped into their principal ranges by the modular descriptors.
    """
    # NB: previously this docstring was a bare string placed after the `_required`
    # assignment, which made it a no-op statement rather than the class docstring.
    _fields = ('Lat', 'Lon', 'HAE')
    _required = _fields
    # descriptors - FloatModularDescriptor wraps values into (-90, 90] / (-180, 180]
    Lat = FloatModularDescriptor(
        'Lat', 90.0, _required, strict=True,
        docstring='The latitude attribute. Assumed to be WGS-84 coordinates.')  # type: float
    Lon = FloatModularDescriptor(
        'Lon', 180.0, _required, strict=True,
        docstring='The longitude attribute. Assumed to be WGS-84 coordinates.')  # type: float

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            HAE: float = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        HAE : float
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        super(LatLonHAERestrictionType, self).__init__(Lat=Lat, Lon=Lon, HAE=HAE, **kwargs)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Lat, Lon, HAE]

        Returns
        -------
        LatLonHAERestrictionType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 3:
                raise ValueError(_len3_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1], HAE=array[2])
        raise ValueError(_array_type_text.format(type(array)))
class LatLonCornerType(LatLonType):
    """A two-dimensional geographic point in WGS-84 coordinates representing a collection area box corner point."""
    _fields = ('Lat', 'Lon', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # descriptors
    index = IntegerDescriptor(
        'index', _required, strict=False, bounds=(1, 4),
        docstring='The integer index. This represents a clockwise enumeration of '
                  'the rectangle vertices wrt the frame of reference of the collector. '
                  'Should be 1-4, but 0-3 may be permissible.')  # type: int

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        index : int
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.index = index
        super(LatLonCornerType, self).__init__(Lat=Lat, Lon=Lon, **kwargs)

    @classmethod
    def from_array(
            cls,
            array: Union[numpy.ndarray, list, tuple],
            index: int = 1):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [Lat, Lon]
        index : int
            (1 based) array index

        Returns
        -------
        LatLonCornerType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError(_len2_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1], index=index)
        raise ValueError(_array_type_text.format(type(array)))
class LatLonCornerStringType(LatLonType):
    """A two-dimensional geographic point in WGS-84 coordinates representing a collection area box corner point."""
    _fields = ('Lat', 'Lon', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # other specific class variable - the permitted string corner labels
    _CORNER_VALUES = ('1:FRFC', '2:FRLC', '3:LRLC', '4:LRFC')
    # descriptors
    index = StringEnumDescriptor(
        'index', _CORNER_VALUES, _required, strict=False,
        docstring="The string index.")  # type: str

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            index: str = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        index : str
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.index = index
        super(LatLonCornerStringType, self).__init__(Lat=Lat, Lon=Lon, **kwargs)

    @classmethod
    def from_array(
            cls,
            array: Union[numpy.ndarray, list, tuple],
            index: str = '1:FRFC'):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed `[Lat, Lon]`
        index : str
            array index in `('1:FRFC', '2:FRLC', '3:LRLC', '4:LRFC')`

        Returns
        -------
        LatLonCornerStringType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 2:
                raise ValueError(_len2_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1], index=index)
        raise ValueError(_array_type_text.format(type(array)))
class LatLonHAECornerRestrictionType(LatLonHAERestrictionType):
    """A three-dimensional geographic point in WGS-84 coordinates. Represents a collection area box corner point."""
    _fields = ('Lat', 'Lon', 'HAE', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # descriptors
    index = IntegerDescriptor(
        'index', _required, strict=False, bounds=(1, 4),
        docstring='The integer index. This represents a clockwise enumeration of the '
                  'rectangle vertices wrt the frame of reference of the collector. '
                  'Should be 1-4, but 0-3 may be permissible.')  # type: int

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            HAE: float = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        HAE : float
        index : int
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.index = index
        super(LatLonHAECornerRestrictionType, self).__init__(Lat=Lat, Lon=Lon, HAE=HAE, **kwargs)

    @classmethod
    def from_array(
            cls,
            array: Union[numpy.ndarray, list, tuple],
            index: int = 1):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed `[Lat, Lon, HAE]`
        index : int
            (1 based) array index

        Returns
        -------
        LatLonHAECornerRestrictionType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 3:
                raise ValueError(_len3_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1], HAE=array[2], index=index)
        raise ValueError(_array_type_text.format(type(array)))
class LatLonHAECornerStringType(LatLonHAEType):
    """A three-dimensional geographic point in WGS-84 coordinates. Represents a collection area box corner point."""
    _fields = ('Lat', 'Lon', 'HAE', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # the permitted string corner labels
    _CORNER_VALUES = ('1:FRFC', '2:FRLC', '3:LRLC', '4:LRFC')
    # descriptors
    index = StringEnumDescriptor(
        'index', _CORNER_VALUES, _required, strict=False, docstring="The string index.")  # type: str

    def __init__(
            self,
            Lat: float = None,
            Lon: float = None,
            HAE: float = None,
            index: str = None,
            **kwargs):
        """
        Parameters
        ----------
        Lat : float
        Lon : float
        HAE : float
        index : str
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.index = index
        super(LatLonHAECornerStringType, self).__init__(Lat=Lat, Lon=Lon, HAE=HAE, **kwargs)

    @classmethod
    def from_array(
            cls,
            array: Union[numpy.ndarray, list, tuple],
            index: str = '1:FRFC'):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed `[Lat, Lon, HAE]`
        index : str
            array index in `('1:FRFC', '2:FRLC', '3:LRLC', '4:LRFC')`

        Returns
        -------
        LatLonHAECornerStringType
        """
        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 3:
                raise ValueError(_len3_array_text.format(array))
            return cls(Lat=array[0], Lon=array[1], HAE=array[2], index=index)
        raise ValueError(_array_type_text.format(type(array)))
#######
# Image space coordinates
class RowColType(Serializable, Arrayable):
    """A row and column attribute container - used as indices into array(s)."""

    _fields = ('Row', 'Col')
    _required = _fields
    Row = IntegerDescriptor(
        'Row', _required, strict=True, docstring='The Row attribute.')  # type: int
    Col = IntegerDescriptor(
        'Col', _required, strict=True, docstring='The Column attribute.')  # type: int

    def __init__(
            self,
            Row: int = None,
            Col: int = None,
            **kwargs):
        """
        Parameters
        ----------
        Row : int
        Col : int
        kwargs
        """

        # stash optional xml namespace details, when provided
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.Row = Row
        self.Col = Col
        super(RowColType, self).__init__(**kwargs)

    def get_array(self, dtype=numpy.int64) -> numpy.ndarray:
        """
        Get a two-element array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            array of the form [Row, Col]
        """

        return numpy.array([self.Row, self.Col], dtype=dtype)

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Construct from an array-like `[Row, Col]` entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed `[Row, Col]`

        Returns
        -------
        RowColType
        """

        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(_array_type_text.format(type(array)))
        if len(array) < 2:
            raise ValueError(_len2_array_text.format(array))
        return cls(Row=array[0], Col=array[1])
class RowColArrayElement(RowColType):
    """An array element row and column attribute container - used as indices into other array(s)."""

    # Note - in the SICD standard this type is listed as RowColvertexType. This is not a descriptive name
    # and has an inconsistency in camel case
    _fields = ('Row', 'Col', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # descriptors
    index = IntegerDescriptor(
        'index', _required, strict=False, docstring='The array index attribute.')  # type: int

    def __init__(
            self,
            Row: int = None,
            Col: int = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        Row : int
        Col : int
        index : int
        kwargs
        """

        # stash optional xml namespace details, when provided
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.index = index
        super(RowColArrayElement, self).__init__(Row=Row, Col=Col, **kwargs)

    @classmethod
    def from_array(
            cls,
            array: Union[numpy.ndarray, list, tuple],
            index: int = 1):
        """
        Construct from an array-like `[Row, Col]` entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed `[Row, Col]`
        index : int
            (1 based) the array index

        Returns
        -------
        RowColArrayElement
        """

        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(_array_type_text.format(type(array)))
        if len(array) < 2:
            raise ValueError(_len2_array_text.format(array))
        return cls(Row=array[0], Col=array[1], index=index)
###############
# Polynomial Types
class Poly1DType(Serializable, Arrayable):
    """
    Represents a one-variable polynomial, defined by one-dimensional coefficient array.
    """

    __slots__ = ('_coefs', )
    _fields = ('Coefs', 'order1')
    _required = ('Coefs', )
    _numeric_format = {'Coefs': FLOAT_FORMAT}

    def __init__(
            self,
            Coefs: Union[None, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        Coefs : numpy.ndarray|tuple|list
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self._coefs = None
        self.Coefs = Coefs
        super(Poly1DType, self).__init__(**kwargs)

    @property
    def order1(self) -> int:
        """
        int: The order1 attribute [READ ONLY] - that is, the largest exponent
        presented in the monomial terms of coefs.
        """

        return self.Coefs.size - 1

    @property
    def Coefs(self) -> numpy.ndarray:
        """
        numpy.ndarray: The one-dimensional polynomial coefficient array of
        dtype=float64. Assignment object must be a
        one-dimensional numpy.ndarray, or naively convertible to one.

        .. Note::
            This returns the direct coefficient array. Use the `get_array()`
            method to get a copy of the coefficient array of specified data
            type.
        """

        return self._coefs

    @Coefs.setter
    def Coefs(self, value: Union[numpy.ndarray, list, tuple]):
        if value is None:
            raise ValueError('The coefficient array for a Poly1DType instance must be defined.')

        if isinstance(value, (list, tuple)):
            value = numpy.array(value, dtype=numpy.float64)

        if not isinstance(value, numpy.ndarray):
            raise ValueError(
                'Coefs for class Poly1D must be a list or numpy.ndarray. Received type {}.'.format(type(value)))
        elif len(value.shape) != 1:
            raise ValueError(
                'Coefs for class Poly1D must be one-dimensional. Received numpy.ndarray '
                'of shape {}.'.format(value.shape))
        elif not value.dtype.name == 'float64':
            # NB: numpy.cast was deprecated in numpy 1.25 and removed in numpy 2.0,
            # so use the equivalent astype conversion
            value = value.astype(numpy.float64)
        self._coefs = value

    def __call__(self, x: Union[float, int, numpy.ndarray]) -> numpy.ndarray:
        """
        Evaluate the polynomial at points `x`. This passes `x` straight through to :func:`polyval` of
        `numpy.polynomial.polynomial`.

        Parameters
        ----------
        x : float|int|numpy.ndarray
            The point(s) at which to evaluate.

        Returns
        -------
        numpy.ndarray
        """

        return numpy.polynomial.polynomial.polyval(x, self._coefs)

    def __getitem__(self, item):
        return self._coefs[item]

    def __setitem__(self, item, value):
        self._coefs[item] = value

    def derivative(
            self,
            der_order: int = 1,
            return_poly: bool = False):
        """
        Calculate the `der_order` derivative of the polynomial.

        Parameters
        ----------
        der_order : int
            the order of the derivative
        return_poly : bool
            return a Poly1DType if True, otherwise return the coefficient array.

        Returns
        -------
        Poly1DType|numpy.ndarray
        """

        coefs = numpy.polynomial.polynomial.polyder(self._coefs, der_order)
        if return_poly:
            return Poly1DType(Coefs=coefs)
        return coefs

    def derivative_eval(
            self,
            x: Union[float, int, numpy.ndarray],
            der_order: int = 1) -> numpy.ndarray:
        """
        Evaluate the `der_order` derivative of the polynomial at points `x`. This uses the
        functionality presented in `numpy.polynomial.polynomial`.

        Parameters
        ----------
        x : float|int|numpy.ndarray
            The point(s) at which to evaluate.
        der_order : int
            The derivative.

        Returns
        -------
        numpy.ndarray
        """

        coefs = self.derivative(der_order=der_order, return_poly=False)
        return numpy.polynomial.polynomial.polyval(x, coefs)

    def shift(self, t_0: float, alpha: float = 1, return_poly: bool = False):
        r"""
        Transform a polynomial with respect to an affine shift in the coordinate system.
        That is, :math:`P(x) = Q(\alpha\cdot(t-t_0))`.

        Be careful to follow the convention that the transformation parameters express the *current coordinate system*
        as a shifted, **and then** scaled version of the *new coordinate system*. If the new coordinate is
        :math:`t = \beta\cdot x - t_0`, then :math:`x = (t - t_0)/\beta`, and :math:`\alpha = 1/\beta`.

        Parameters
        ----------
        t_0 : float
            the **current center coordinate** in the **new coordinate system.**
            That is, `x=0` when `t=t_0`.
        alpha : float
            the scale. That is, when `t = t0 + 1`, then `x = alpha`. **NOTE:** it is assumed that the
            coordinate system is re-centered, and **then** scaled.
        return_poly : bool
            if `True`, a Poly1DType object be returned, otherwise the coefficients array is returned.

        Returns
        -------
        Poly1DType|numpy.ndarray
        """

        # prepare array workspace
        out = numpy.copy(self._coefs)

        if t_0 != 0 and out.size > 1:
            siz = out.size
            # apply the translation via Horner's method, iterating from the top down
            for i in range(siz):
                index = siz-i-1
                if i > 0:
                    out[index:siz-1] -= t_0*out[index+1:siz]

        if alpha != 1 and out.size > 1:
            # scale term k picks up a factor of alpha**k
            out *= numpy.power(alpha, numpy.arange(out.size))

        if return_poly:
            return Poly1DType(Coefs=out)
        else:
            return out

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from the coefficients array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            must be one-dimensional

        Returns
        -------
        Poly1DType
        """

        if array is None:
            return None
        return cls(Coefs=array)

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Gets *a copy* of the coefficient array of specified data type.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            one-dimensional coefficient array
        """

        return numpy.array(self._coefs, dtype=dtype)

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        # the Coef children are sparsely indexed by the `exponent1` attribute
        order1 = int(node.attrib['order1'])
        coefs = numpy.zeros((order1+1, ), dtype=numpy.float64)

        coef_key = cls._child_xml_ns_key.get('Coefs', ns_key)
        coef_nodes = find_children(node, 'Coef', xml_ns, coef_key)
        for cnode in coef_nodes:
            ind = int(cnode.attrib['exponent1'])
            val = float(get_node_value(cnode))
            coefs[ind] = val
        return cls(Coefs=coefs)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        if parent is None:
            parent = doc.getroot()
        if ns_key is None:
            node = create_new_node(doc, tag, parent=parent)
        else:
            node = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)

        if 'Coefs' in self._child_xml_ns_key:
            ctag = '{}:Coef'.format(self._child_xml_ns_key['Coefs'])
        elif ns_key is not None:
            ctag = '{}:Coef'.format(ns_key)
        else:
            ctag = 'Coef'

        node.attrib['order1'] = str(self.order1)
        # look the formatter up under the 'Coefs' field name - that is the key
        # registered in _numeric_format (and what Poly2DType.to_node uses)
        fmt_func = self._get_formatter('Coefs')
        for i, val in enumerate(self.Coefs):
            # if val != 0.0:  # should we serialize it sparsely?
            cnode = create_text_node(doc, ctag, fmt_func(val), parent=node)
            cnode.attrib['exponent1'] = str(i)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = OrderedDict()
        out['Coefs'] = self.Coefs.tolist()
        return out

    def minimize_order(self):
        """
        Trim the trailing zeros for the coefficient array. This modifies the object in place.

        Returns
        -------
        None
        """

        boolc = (self.Coefs != 0)
        if not numpy.any(boolc):
            # all coefficients are zero - collapse to the zero polynomial
            self.Coefs = numpy.zeros((1, ), dtype='float64')
            return

        last_ind = numpy.amax(numpy.arange(self.Coefs.size)[boolc])
        if last_ind == self.Coefs.size-1:
            return
        if last_ind == 0:
            self.Coefs = numpy.array([self.Coefs[0], ], dtype='float64')
        else:
            self.Coefs = self.Coefs[:last_ind+1]
class Poly2DType(Serializable, Arrayable):
    """
    Represents a one-variable polynomial, defined by two-dimensional coefficient array.
    """

    __slots__ = ('_coefs', )
    _fields = ('Coefs', 'order1', 'order2')
    _required = ('Coefs', )
    _numeric_format = {'Coefs': FLOAT_FORMAT}

    def __init__(
            self,
            Coefs: Union[numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        Coefs : numpy.ndarray|list|tuple
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self._coefs = None
        self.Coefs = Coefs
        super(Poly2DType, self).__init__(**kwargs)

    def __call__(
            self,
            x: Union[float, int, numpy.ndarray],
            y: Union[float, int, numpy.ndarray]) -> numpy.ndarray:
        """
        Evaluate a polynomial at points [`x`, `y`]. This passes `x`,`y`
        straight through to :func:`polyval2d` of `numpy.polynomial.polynomial`.

        Parameters
        ----------
        x : float|int|numpy.ndarray
            The first dependent variable of point(s) at which to evaluate.
        y : float|int|numpy.ndarray
            The second dependent variable of point(s) at which to evaluate.

        Returns
        -------
        numpy.ndarray
        """

        return numpy.polynomial.polynomial.polyval2d(x, y, self._coefs)

    @property
    def order1(self) -> int:
        """
        int: The order1 attribute [READ ONLY] - that is, largest exponent1 presented in the monomial terms of coefs.
        """

        return self._coefs.shape[0] - 1

    @property
    def order2(self) -> int:
        """
        int: The order2 attribute [READ ONLY] - that is, largest exponent2 presented in the monomial terms of coefs.
        """

        return self._coefs.shape[1] - 1

    @property
    def Coefs(self) -> numpy.ndarray:
        """
        numpy.ndarray: The two-dimensional polynomial coefficient array of dtype=float64. Assignment object must be a
        two-dimensional numpy.ndarray, or naively convertible to one.

        .. Note:: this returns the direct coefficient array. Use the `get_array()` method to get a copy of the
            coefficient array of specified data type.
        """

        return self._coefs

    @Coefs.setter
    def Coefs(self, value: Union[numpy.ndarray, list, tuple]):
        if value is None:
            raise ValueError('The coefficient array for a Poly2DType instance must be defined.')

        if isinstance(value, (list, tuple)):
            value = numpy.array(value, dtype=numpy.float64)

        if not isinstance(value, numpy.ndarray):
            raise ValueError(
                'Coefs for class Poly2D must be a list or numpy.ndarray. Received type {}.'.format(type(value)))
        elif len(value.shape) != 2:
            raise ValueError(
                'Coefs for class Poly2D must be two-dimensional. Received numpy.ndarray '
                'of shape {}.'.format(value.shape))
        elif not value.dtype.name == 'float64':
            # NB: numpy.cast was deprecated in numpy 1.25 and removed in numpy 2.0,
            # so use the equivalent astype conversion
            value = value.astype(numpy.float64)
        self._coefs = value

    def __getitem__(self, item):
        return self._coefs[item]

    def __setitem__(self, item, value):
        self._coefs[item] = value

    def shift(
            self,
            t1_shift: float = 0,
            t1_scale: float = 1,
            t2_shift: float = 0,
            t2_scale: float = 1,
            return_poly: bool = False):
        r"""
        Transform a polynomial with respect to an affine shift in the coordinate system.
        That is, :math:`P(x1, x2) = Q(t1_scale\cdot(t1 - t1_shift), t2_scale\cdot(t2 - t2_shift))`.

        Be careful to follow the convention that the transformation parameters express the
        *current coordinate system* as a shifted, **and then** scaled version of the
        *new coordinate system*.

        Parameters
        ----------
        t1_shift : float
            the **current center coordinate** in the **new coordinate system.**
            That is, `x1=0` when `t1=t1_shift`.
        t1_scale : float
            the scale. That is, when `t1 = t1_shift + 1`, then `x1 = t1_scale`.
            **NOTE:** it is assumed that the coordinate system is re-centered, and **then** scaled.
        t2_shift : float
            the **current center coordinate** in the **new coordinate system.**
            That is, `x2=0` when `t2=t2_shift`.
        t2_scale : float
            the scale. That is, when `t2 = t2_shift + 1`, then `x2 = t2_scale`.
            **NOTE:** it is assumed that the coordinate system is re-centered, and **then** scaled.
        return_poly : bool
            if `True`, a Poly2DType object be returned, otherwise the coefficients array is returned.

        Returns
        -------
        Poly2DType|numpy.ndarray
        """

        # prepare our array workspace
        out = numpy.copy(self._coefs)

        # handle first axis - everything is commutative, so order doesn't matter
        if t1_shift != 0 and self._coefs.shape[0] > 1:
            siz = out.shape[0]
            # apply the translation via Horner's method, iterating from the top down
            for i in range(siz):
                index = siz-i-1
                if i > 0:
                    out[index:siz-1, :] -= t1_shift*out[index+1:siz, :]
        if t1_scale != 1 and out.shape[0] > 1:
            # row k picks up a factor of t1_scale**k
            out = numpy.power(t1_scale, numpy.arange(out.shape[0]))[:, numpy.newaxis]*out

        # handle second axis
        if t2_shift != 0 and out.shape[1] > 1:
            siz = out.shape[1]
            # apply the translation via Horner's method, iterating from the top down
            for i in range(siz):
                index = siz-i-1
                if i > 0:
                    out[:, index:siz-1] -= t2_shift*out[:, index+1:siz]
        if t2_scale != 1 and out.shape[1] > 1:
            # column k picks up a factor of t2_scale**k
            out *= numpy.power(t2_scale, numpy.arange(out.shape[1]))

        if return_poly:
            return Poly2DType(Coefs=out)
        else:
            return out

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from the coefficients array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            must be two-dimensional.

        Returns
        -------
        Poly2DType
        """

        if array is None:
            return None
        return cls(Coefs=array)

    def get_array(self, dtype=numpy.float64) -> numpy.ndarray:
        """
        Gets **a copy** of the coefficient array of specified data type.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return

        Returns
        -------
        numpy.ndarray
            two-dimensional coefficient array
        """

        return numpy.array(self._coefs, dtype=dtype)

    @classmethod
    def from_node(cls, node, xml_ns, ns_key=None, kwargs=None):
        # the Coef children are sparsely indexed by the `exponent1`/`exponent2` attributes
        order1 = int(node.attrib['order1'])
        order2 = int(node.attrib['order2'])
        coefs = numpy.zeros((order1+1, order2+1), dtype=numpy.float64)

        coef_key = cls._child_xml_ns_key.get('Coefs', ns_key)
        coef_nodes = find_children(node, 'Coef', xml_ns, coef_key)
        for cnode in coef_nodes:
            ind1 = int(cnode.attrib['exponent1'])
            ind2 = int(cnode.attrib['exponent2'])
            val = float(get_node_value(cnode))
            coefs[ind1, ind2] = val
        return cls(Coefs=coefs)

    def to_node(self, doc, tag, ns_key=None, parent=None, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        if parent is None:
            parent = doc.getroot()
        if ns_key is None:
            node = create_new_node(doc, tag, parent=parent)
        else:
            node = create_new_node(doc, '{}:{}'.format(ns_key, tag), parent=parent)

        if 'Coefs' in self._child_xml_ns_key:
            ctag = '{}:Coef'.format(self._child_xml_ns_key['Coefs'])
        elif ns_key is not None:
            ctag = '{}:Coef'.format(ns_key)
        else:
            ctag = 'Coef'

        node.attrib['order1'] = str(self.order1)
        node.attrib['order2'] = str(self.order2)
        fmt_func = self._get_formatter('Coefs')
        for i, val1 in enumerate(self._coefs):
            for j, val in enumerate(val1):
                # if val != 0.0:  # should we serialize it sparsely?
                cnode = create_text_node(doc, ctag, fmt_func(val), parent=node)
                cnode.attrib['exponent1'] = str(i)
                cnode.attrib['exponent2'] = str(j)
        return node

    def to_dict(self, check_validity=False, strict=DEFAULT_STRICT, exclude=()):
        out = OrderedDict()
        out['Coefs'] = self.Coefs.tolist()
        return out

    def minimize_order(self):
        """
        Trim the trailing zeros for the coefficient array. This modifies the object in place.

        Returns
        -------
        None
        """

        boolc = (self.Coefs != 0)
        if not numpy.any(boolc):
            # all coefficients are zero - collapse to the zero polynomial
            self.Coefs = numpy.zeros((1, 1), dtype='float64')
            return

        # find the last row/column containing a non-zero entry
        col_inds, row_inds = numpy.meshgrid(
            numpy.arange(self.Coefs.shape[1]), numpy.arange(self.Coefs.shape[0]))
        last_row_ind = numpy.amax(row_inds[boolc])
        last_col_ind = numpy.amax(col_inds[boolc])
        if last_row_ind == self.Coefs.shape[0]-1 and last_col_ind == self.Coefs.shape[1]-1:
            return
        if last_row_ind == 0 and last_col_ind == 0:
            self.Coefs = numpy.array([[self.Coefs[0, 0], ], ], dtype='float64')
        elif last_row_ind == 0:
            self.Coefs = numpy.reshape(self.Coefs[0, :last_col_ind+1], (1, -1))
        elif last_col_ind == 0:
            self.Coefs = numpy.reshape(self.Coefs[:last_row_ind+1, 0], (-1, 1))
        else:
            self.Coefs = self.Coefs[:last_row_ind+1, :last_col_ind+1]
class XYZPolyType(Serializable, Arrayable):
    """
    Represents a single variable polynomial for each of `X`, `Y`, and `Z`. This gives position in ECF coordinates
    as a function of a single dependent variable.
    """

    _fields = ('X', 'Y', 'Z')
    _required = _fields
    # descriptors
    X = SerializableDescriptor(
        'X', Poly1DType, _required, strict=False,
        docstring='The polynomial for the X coordinate.')  # type: Poly1DType
    Y = SerializableDescriptor(
        'Y', Poly1DType, _required, strict=False,
        docstring='The polynomial for the Y coordinate.')  # type: Poly1DType
    Z = SerializableDescriptor(
        'Z', Poly1DType, _required, strict=False,
        docstring='The polynomial for the Z coordinate.')  # type: Poly1DType

    def __init__(
            self,
            X: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            Y: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            Z: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        X : Poly1DType|numpy.ndarray|list|tuple
        Y : Poly1DType|numpy.ndarray|list|tuple
        Z : Poly1DType|numpy.ndarray|list|tuple
        kwargs
        """

        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.X, self.Y, self.Z = X, Y, Z
        super(XYZPolyType, self).__init__(**kwargs)

    def __call__(self, t: Union[float, int, numpy.ndarray]) -> numpy.ndarray:
        """
        Evaluate the polynomial at points `t`. This passes `t` straight through
        to :func:`polyval` of `numpy.polynomial.polynomial` for each of
        `X,Y,Z` components.

        Parameters
        ----------
        t : float|int|numpy.ndarray
            The point(s) at which to evaluate.

        Returns
        -------
        numpy.ndarray
        """

        x = self.X(t)
        y = self.Y(t)
        z = self.Z(t)
        if numpy.ndim(x) == 0:
            # scalar input - return a single (3, ) point
            return numpy.array([x, y, z], dtype=x.dtype)
        else:
            # array input - stack per-component results and restore the input
            # shape with a trailing dimension of size 3
            o_shape = x.shape
            x = numpy.reshape(x, (-1, 1))
            y = numpy.reshape(y, (-1, 1))
            z = numpy.reshape(z, (-1, 1))
            out = numpy.hstack((x, y, z))
            return numpy.reshape(out, o_shape + (3, ))

    def get_array(self, dtype='object') -> numpy.ndarray:
        """Gets an array representation of the class instance.

        Parameters
        ----------
        dtype : str|numpy.dtype|numpy.number
            numpy data type of the return.
            If `object`, an array of Poly1DType objects is returned.
            Otherwise, a ndarray of shape (3, N) of coefficient vectors is returned.

        Returns
        -------
        numpy.ndarray
            array of the form `[X,Y,Z]`.
        """

        if dtype in ['object', numpy.dtype('object')]:
            return numpy.array([self.X, self.Y, self.Z], dtype='object')
        else:
            # return a 3 x N array of coefficients, zero-padding each component
            # out to the largest component order
            xv = self.X.Coefs
            yv = self.Y.Coefs
            zv = self.Z.Coefs
            length = max(xv.size, yv.size, zv.size)
            out = numpy.zeros((3, length), dtype=dtype)
            out[0, :xv.size] = xv
            out[1, :yv.size] = yv
            out[2, :zv.size] = zv
            return out

    @classmethod
    def from_array(cls, array: Union[numpy.ndarray, list, tuple]):
        """
        Create from an array type entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed `[X, Y, Z]`

        Returns
        -------
        XYZPolyType
        """

        if array is None:
            return None
        if isinstance(array, (numpy.ndarray, list, tuple)):
            if len(array) < 3:
                raise ValueError(_len3_array_text.format(array))
            return cls(X=array[0], Y=array[1], Z=array[2])
        raise ValueError(_array_type_text.format(type(array)))

    def derivative(
            self,
            der_order: int = 1,
            return_poly: bool = False):
        """
        Calculate the `der_order` derivative of each component polynomial.

        Parameters
        ----------
        der_order : int
            the order of the derivative
        return_poly : bool
            if `True`, a XYZPolyType if returned, otherwise a list of the coefficient arrays is returned.

        Returns
        -------
        XYZPolyType|list
        """

        # differentiate component-wise, preserving the (X, Y, Z) order
        coefs = [
            getattr(self, attrib).derivative(der_order=der_order, return_poly=False) for attrib in ['X', 'Y', 'Z']]
        if return_poly:
            return XYZPolyType(X=coefs[0], Y=coefs[1], Z=coefs[2])
        return coefs

    def derivative_eval(
            self,
            t: Union[float, int, numpy.ndarray],
            der_order: int = 1) -> numpy.ndarray:
        """
        Evaluate the `der_order` derivative of the polynomial collection at points `x`.
        This uses the functionality presented in `numpy.polynomial.polynomial`.

        Parameters
        ----------
        t : float|int|numpy.ndarray
            The point(s) at which to evaluate.
        der_order : int
            The derivative.

        Returns
        -------
        numpy.ndarray
        """

        der_poly = self.derivative(der_order=der_order, return_poly=True)
        return der_poly(t)

    def shift(
            self,
            t_0: float,
            alpha: float = 1,
            return_poly: bool = False):
        r"""
        Transform a polynomial with respect to an affine shift in the coordinate system.
        That is, :math:`P(u) = Q(\alpha\cdot(t-t_0))`.

        Be careful to follow the convention that the transformation parameters express the *current coordinate system*
        as a shifted, **and then** scaled version of the *new coordinate system*. If the new coordinate is
        :math:`t = \beta\cdot u - t_0`, then :math:`u = (t - t_0)/\beta`, and :math:`\alpha = 1/\beta`.

        Parameters
        ----------
        t_0 : float
            the **current center coordinate** in the **new coordinate system.**
            That is, `u=0` when `t=t_0`.
        alpha : float
            the scale. That is, when `t = t0 + 1`, then :math:`u = \alpha`.
        return_poly : bool
            if `True`, an XYZPolyType instance is returned, otherwise a list of the coefficient arrays is returned.

        Returns
        -------
        XYZPolyType|list
        """

        # shift each component polynomial - see Poly1DType.shift for the convention
        coefs = [
            getattr(self, attrib).shift(t_0, alpha=alpha, return_poly=False) for attrib in ['X', 'Y', 'Z']]
        if return_poly:
            return XYZPolyType(X=coefs[0], Y=coefs[1], Z=coefs[2])
        return coefs

    def minimize_order(self):
        """
        Trim the trailing zeros for each component coefficient array. This
        modifies the object in place.

        Returns
        -------
        None
        """

        self.X.minimize_order()
        self.Y.minimize_order()
        self.Z.minimize_order()
class XYZPolyAttributeType(XYZPolyType):
    """
    An array element of X, Y, Z polynomials. The output of these polynomials are expected
    to be spatial variables in the ECF coordinate system.
    """

    _fields = ('X', 'Y', 'Z', 'index')
    _required = _fields
    _set_as_attribute = ('index', )
    # descriptors
    index = IntegerDescriptor(
        'index', _required, strict=False, docstring='The array index value.')  # type: int

    def __init__(
            self,
            X: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            Y: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            Z: Union[Poly1DType, numpy.ndarray, list, tuple] = None,
            index: int = None,
            **kwargs):
        """
        Parameters
        ----------
        X : Poly1DType|numpy.ndarray|list|tuple
        Y : Poly1DType|numpy.ndarray|list|tuple
        Z : Poly1DType|numpy.ndarray|list|tuple
        index : int
        kwargs
        """

        # stash optional xml namespace details, when provided
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.index = index
        super(XYZPolyAttributeType, self).__init__(X=X, Y=Y, Z=Z, **kwargs)

    @classmethod
    def from_array(
            cls,
            array: Union[numpy.ndarray, list, tuple],
            index: int = 1):
        """
        Construct from an array-like `[X, Y, Z]` entry.

        Parameters
        ----------
        array: numpy.ndarray|list|tuple
            assumed [X, Y, Z]
        index : int
            the array index

        Returns
        -------
        XYZPolyAttributeType
        """

        if array is None:
            return None
        if not isinstance(array, (numpy.ndarray, list, tuple)):
            raise ValueError(_array_type_text.format(type(array)))
        if len(array) < 3:
            raise ValueError(_len3_array_text.format(array))
        return cls(X=array[0], Y=array[1], Z=array[2], index=index)
class GainPhasePolyType(Serializable):
    """A container for the Gain and Phase Polygon definitions."""

    _fields = ('GainPoly', 'PhasePoly')
    _required = _fields
    # descriptors
    GainPoly = SerializableDescriptor(
        'GainPoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='One-way signal gain (in dB) as a function of X-axis direction cosine (DCX) (variable 1) '
                  'and Y-axis direction cosine (DCY) (variable 2). Gain relative to gain at DCX = 0 '
                  'and DCY = 0, so constant coefficient is always 0.0.')  # type: Poly2DType
    PhasePoly = SerializableDescriptor(
        'PhasePoly', Poly2DType, _required, strict=DEFAULT_STRICT,
        docstring='One-way signal phase (in cycles) as a function of DCX (variable 1) and '
                  'DCY (variable 2). Phase relative to phase at DCX = 0 and DCY = 0, '
                  'so constant coefficient is always 0.0.')  # type: Poly2DType

    def __init__(
            self,
            GainPoly: Union[Poly2DType, numpy.ndarray, list, tuple] = None,
            PhasePoly: Union[Poly2DType, numpy.ndarray, list, tuple] = None,
            **kwargs):
        """
        Parameters
        ----------
        GainPoly : Poly2DType|numpy.ndarray|list|tuple
        PhasePoly : Poly2DType|numpy.ndarray|list|tuple
        kwargs
        """

        # stash optional xml namespace details, when provided
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.GainPoly = GainPoly
        self.PhasePoly = PhasePoly
        super(GainPhasePolyType, self).__init__(**kwargs)

    def __call__(
            self,
            x: Union[float, int, numpy.ndarray],
            y: Union[float, int, numpy.ndarray]) -> Optional[numpy.ndarray]:
        """
        Evaluate both component polynomials at points [`x`, `y`], passing `x`,`y`
        straight through to the call method for each component.

        Parameters
        ----------
        x : float|int|numpy.ndarray
            The first dependent variable of point(s) at which to evaluate.
        y : float|int|numpy.ndarray
            The second dependent variable of point(s) at which to evaluate.

        Returns
        -------
        None|numpy.ndarray
            `None` when either component polynomial is unpopulated.
        """

        gain_poly = self.GainPoly
        phase_poly = self.PhasePoly
        if gain_poly is None or phase_poly is None:
            return None
        return numpy.array([gain_poly(x, y), phase_poly(x, y)], dtype=numpy.float64)

    def minimize_order(self):
        """
        Trim the trailing zeros for each component coefficient array. This
        modifies the object in place.

        Returns
        -------
        None
        """

        for component in (self.GainPoly, self.PhasePoly):
            component.minimize_order()
#############
# Error Decorrelation type
class ErrorDecorrFuncType(Serializable):
    r"""
    This container allows parameterization of linear error decorrelation rate model.
    If :math:`(\Delta t) = |t2 - t1|`, then

    .. math::

        CC(\Delta t) = \min(1.0, \max(0.0, CC0 - DCR\cdot(\Delta t)))
    """

    _fields = ('CorrCoefZero', 'DecorrRate')
    _required = _fields
    _numeric_format = {'CorrCoefZero': FLOAT_FORMAT, 'DecorrRate': FLOAT_FORMAT}
    # descriptors
    CorrCoefZero = FloatDescriptor(
        'CorrCoefZero', _required, strict=True, bounds=(-1, 1),
        docstring='Error correlation coefficient for zero time difference (CC0).')  # type: float
    DecorrRate = FloatDescriptor(
        'DecorrRate', _required, strict=True, bounds=(0, None),
        docstring='Error decorrelation rate. Simple linear decorrelation rate (DCR).')  # type: float

    def __init__(
            self,
            CorrCoefZero: float = None,
            DecorrRate: float = None,
            **kwargs):
        """
        Parameters
        ----------
        CorrCoefZero : float
        DecorrRate : float
        kwargs
        """

        # stash optional xml namespace details, when provided
        for ns_attribute in ('_xml_ns', '_xml_ns_key'):
            if ns_attribute in kwargs:
                setattr(self, ns_attribute, kwargs[ns_attribute])
        self.CorrCoefZero = CorrCoefZero
        self.DecorrRate = DecorrRate
        super(ErrorDecorrFuncType, self).__init__(**kwargs)
| 58,784 | 30.792861 | 118 | py |
sarpy | sarpy-master/sarpy/io/complex/sicd_schema/__init__.py | """
Tools for inspecting a SICD urn url and providing basic details.
"""
__classification__ = 'UNCLASSIFIED'
__author__ = "Thomas McCullough"
import os
import re
from typing import List, Tuple, Dict, Optional
# the default SICD version produced by this package
_SICD_DEFAULT_TUPLE = (1, 3, 0)

_SICD_SPECIFICATION_IDENTIFIER = 'SICD Volume 1 Design & Implementation Description Document'

# directory containing the bundled schema files (alongside this module)
_the_directory = os.path.split(__file__)[0]

# mapping from recognized SICD urn to version metadata and the bundled schema
# file location (where one is packaged)
urn_mapping = {
    'urn:SICD:0.3.1': {
        'tuple': (0, 3, 1),
        'version': '0.3.1',
        'release': '0.3.1',
        'date': '2009-03-17T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V0.3.1_2009_03_17.xsd')},
    'urn:SICD:0.4.0': {
        'tuple': (0, 4, 0),
        'version': '0.4.0',
        'release': '0.4.0',
        'date': '2010-02-12T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V0.4.0_2010_02_12.xsd')},
    'urn:SICD:0.4.1': {
        'tuple': (0, 4, 1),
        'version': '0.4.1',
        'release': '0.4.1',
        'date': '2010-07-15T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V0.4.1_2010_07_15.xsd')},
    'urn:SICD:0.5.0': {
        'tuple': (0, 5, 0),
        'version': '0.5.0',
        'release': '0.5.0',
        'date': '2011-01-12T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V0.5.0_2011_01_12.xsd')},
    'urn:SICD:1.0.0': {
        'tuple': (1, 0, 0),
        'version': '1.0.0',
        'release': '1.0.0',
        'date': '2011-08-31T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V1.0.0_2011_08_31.xsd')},
    'urn:SICD:1.0.1': {
        'tuple': (1, 0, 1),
        'version': '1.0.1',
        'release': '1.0.1',
        'date': '2013-02-25T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V1.0.1_2013_02_25.xsd')},
    'urn:SICD:1.1.0': {
        'tuple': (1, 1, 0),
        'version': '1.1.0',
        'release': '1.1.0',
        'date': '2014-09-30T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V1.1.0_2014_09_30.xsd')},
    'urn:SICD:1.2.0': {
        'tuple': (1, 2, 0),
        'version': '1.2.0',
        'release': '1.2.0',
        'date': '2016-06-30T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V1.2.0_2016_06_30.xsd')},
    'urn:SICD:1.2.1': {
        'tuple': (1, 2, 1),
        'version': '1.2.1',
        'release': '1.2.1',
        'date': '2018-12-13T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V1.2.1_2018_12_13.xsd')},
    'urn:SICD:1.3.0': {
        'tuple': (1, 3, 0),
        'version': '1.3.0',
        'release': '1.3.0',
        # NOTE(review): the date string (2022-11-30) and the schema filename
        # (2021_11_30) disagree on the year - confirm which is correct
        'date': '2022-11-30T00:00:00Z',
        'schema': os.path.join(_the_directory, 'SICD_schema_V1.3.0_2021_11_30.xsd')}
}

# releases for which writing is supported (all 1.x versions)
WRITABLE_VERSIONS = tuple(entry['release'] for key, entry in urn_mapping.items() if entry['tuple'] >= (1, 0, 0))

# validate the defined paths
# NOTE(review): this loop leaks `key`, `entry` and `schema_path` into the
# module namespace - harmless, but worth confirming nothing relies on them
for key, entry in urn_mapping.items():
    schema_path = entry.get('schema', None)
    if schema_path is not None and not os.path.exists(schema_path):
        raise ValueError('`{}` has nonexistent schema path {}'.format(key, schema_path))
def get_default_tuple() -> Tuple[int, int, int]:
    """
    Fetch the default SICD version as a `(major, minor, release)` tuple.

    Returns
    -------
    Tuple[int, int, int]
        The default SICD version tuple.
    """

    return _SICD_DEFAULT_TUPLE
def get_default_version_string() -> str:
    """
    Fetch the default SICD version rendered as a `<major>.<minor>.<release>` string.

    Returns
    -------
    str
        The default SICD version string.
    """

    return '.'.join(str(part) for part in _SICD_DEFAULT_TUPLE)
def get_specification_identifier() -> str:
    """
    Fetch the SICD specification identifier string.

    Returns
    -------
    str
        The specification identifier.
    """

    return _SICD_SPECIFICATION_IDENTIFIER
def check_urn(urn_string: str) -> str:
    """
    Checks that the urn string follows the correct pattern, normalizing a bare
    `<major>.<minor>.<release>` version to `urn:SICD:<major>.<minor>.<release>`.

    Parameters
    ----------
    urn_string : str
        Either a bare version (e.g. `'1.2.1'`) or a full urn (e.g. `'urn:SICD:1.2.1'`).

    Returns
    -------
    str
        The normalized urn string.

    Raises
    ------
    TypeError
        If the input is not a string.
    ValueError
        This raises an exception for a poorly formed SICD urn.
    """

    if not isinstance(urn_string, str):
        raise TypeError(
            'Expected a urn input of string type, got type {}'.format(type(urn_string)))

    # NB: the dots must be escaped - the previous pattern (r'^\d.\d.\d$') treated
    # '.' as "any character" and so accepted strings like '1a2b3'. Multi-digit
    # version components are also permitted now (backwards compatible).
    if re.match(r'^\d+\.\d+\.\d+$', urn_string) is not None:
        urn_string = 'urn:SICD:{}'.format(urn_string)

    if re.match(r'^urn:SICD:\d+\.\d+\.\d+$', urn_string) is None:
        raise ValueError(
            'Input provided as `{}`,\nbut should be of the form '
            '`urn:SICD:<major>.<minor>.<release>`'.format(urn_string))
    return urn_string
def get_urn_details(urn_string: str) -> Dict[str, str]:
    """
    Look up the details dictionary for the given SICD urn, raising an exception
    for a poorly formatted or unrecognized urn.

    Parameters
    ----------
    urn_string : str

    Returns
    -------
    Dict[str, str]

    Raises
    ------
    KeyError
        For a correctly formatted but unrecognized urn.
    """

    normalized = check_urn(urn_string)
    if normalized not in urn_mapping:
        raise KeyError(
            'Got correctly formatted, but unmapped SICD urn {}.'.format(normalized))
    return urn_mapping[normalized]
def get_schema_path(the_urn: str) -> Optional[str]:
    """
    Fetch the path to the schema file for the given urn, if one is defined.

    Parameters
    ----------
    the_urn : str

    Returns
    -------
    None|str
        The schema path, or `None` when the urn declares no schema.
    """

    return get_urn_details(the_urn).get('schema', None)
def get_versions() -> List[str]:
    """
    Gets a list of recognized SICD urn, sorted by version tuple.

    Returns
    -------
    List[str]
    """

    # `sorted` already returns a list - the former `list(sorted(...))` wrapper
    # was redundant
    return sorted(urn_mapping.keys(), key=lambda x: urn_mapping[x]['tuple'])
| 5,598 | 25.918269 | 112 | py |
sarpy | sarpy-master/sarpy/io/DEM/DEM.py | """
Establish base expected functionality for digital elevation model handling.
"""
import numpy
from typing import List
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
class DEMInterpolator(object):
    """
    Abstract DEM class presenting base required functionality.
    """

    def get_elevation_hae(self, lat, lon, block_size=50000):
        """
        Get the elevation value relative to the WGS-84 ellipsoid.

        Parameters
        ----------
        lat : numpy.ndarray|list|tuple|int|float
        lon : numpy.ndarray|list|tuple|int|float
        block_size : int|None
            If `None`, then the entire calculation will proceed as a single block.
            Otherwise, block processing using blocks of the given size will be used.
            The minimum value used for this is 50,000, and any smaller value will be
            replaced with 50,000. Default is 50,000.

        Returns
        -------
        numpy.ndarray
            the elevation relative to the WGS-84 ellipsoid.
        """

        # abstract - concrete subclasses must implement
        raise NotImplementedError

    def get_elevation_geoid(self, lat, lon, block_size=50000):
        """
        Get the elevation value relative to the geoid.

        Parameters
        ----------
        lat : numpy.ndarray|list|tuple|int|float
        lon : numpy.ndarray|list|tuple|int|float
        block_size : int|None
            If `None`, then the entire calculation will proceed as a single block.
            Otherwise, block processing using blocks of the given size will be used.
            The minimum value used for this is 50,000, and any smaller value will be
            replaced with 50,000. Default is 50,000.

        Returns
        -------
        numpy.ndarray
            the elevation relative to the geoid
        """

        # abstract - concrete subclasses must implement
        raise NotImplementedError

    def get_max_hae(self, lat_lon_box=None):
        """
        Get the maximum dem value with respect to HAE, which should be assumed
        **approximately** correct. This may possibly be with respect to some
        Area of Interest.

        Parameters
        ----------
        lat_lon_box : None|numpy.ndarray
            None or any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """

        # abstract - concrete subclasses must implement
        raise NotImplementedError

    def get_min_hae(self, lat_lon_box=None):
        """
        Get the minimum dem value with respect to HAE, which should be assumed
        **approximately** correct. This may possibly be with respect to some
        Area of Interest.

        Parameters
        ----------
        lat_lon_box : None|numpy.ndarray
            None or any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """

        # abstract - concrete subclasses must implement
        raise NotImplementedError

    def get_max_geoid(self, lat_lon_box=None):
        """
        Get the maximum dem value with respect to the geoid, which should be assumed
        **approximately** correct. This may possibly be with respect to some
        Area of Interest.

        Parameters
        ----------
        lat_lon_box : None|numpy.ndarray
            None or any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """

        # abstract - concrete subclasses must implement
        raise NotImplementedError

    def get_min_geoid(self, lat_lon_box=None):
        """
        Get the minimum dem value with respect to geoid, which should be assumed
        **approximately** correct. This may possibly be with respect to some
        Area of Interest.

        Parameters
        ----------
        lat_lon_box : None|numpy.ndarray
            None or any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """

        # abstract - concrete subclasses must implement
        raise NotImplementedError
class DEMList(object):
    """
    Abstract class for creating a searchable list of applicable DEM files of a
    given type.
    """

    def get_file_list(self, lat_lon_box):
        """
        This will return the list of files associated with covering the
        `lat_lon_box` using a DEM. Extraneous files (i.e. with region not overlapping
        the provided box) should NOT be returned, and files should be returned in
        order of preference.

        .. Note: It should be considered the user's responsibility to ensure
            that necessary DEM files will be found by this methodology, and
            regions lacking DEM file(s) should be assumed to have elevation
            at WGS-84 mean sea level.

        Parameters
        ----------
        lat_lon_box : numpy.ndarray|list|tuple
            The bounding box of the form `[lat min, lat max, lon min, lon max]`.

        Returns
        -------
        List[str]
        """

        # abstract - concrete subclasses must implement
        raise NotImplementedError
| 4,862 | 28.834356 | 91 | py |
sarpy | sarpy-master/sarpy/io/DEM/DTED.py | """
Classes and methods for parsing and using digital elevation models in DTED format.
"""
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
import logging
import os
import struct
import numpy
from sarpy.io.DEM.DEM import DEMList, DEMInterpolator
from sarpy.io.DEM.utils import argument_validation
from sarpy.io.DEM.geoid import GeoidHeight
from sarpy.io.general.base import SarpyIOError
logger = logging.getLogger(__name__)
#######
# module variables

# mapping from supported DEM type name to per-type file properties - currently
# just the expected file extension (level 1 products use .dt1, level 2 use .dt2)
_SUPPORTED_DTED_FILE_TYPES = {
    'DTED1': {'fext': '.dt1'},
    'DTED2': {'fext': '.dt2'},
    'SRTM1': {'fext': '.dt1'},
    'SRTM2': {'fext': '.dt2'},
    'SRTM2F': {'fext': '.dt2'}}
def get_default_prioritization():
    """
    Gets the default prioritization of the DEM types, highest priority first.

    Returns
    -------
    Tuple[str, ...]
        The DEM type names, in order of decreasing preference.
    """

    # TODO: what should this actually be?
    return ('DTED2', 'DTED1', 'SRTM2F', 'SRTM2', 'SRTM1')
def get_lat_lon_box(lats, lons):
    """
    Gets the lat/lon bounding box, as appropriate.

    For longitude input, a potential +/-180 boundary crossing is detected
    (simultaneous values below -90 and above 90); in that event the returned
    (min, max) pair is (smallest non-negative value, largest non-positive value),
    so the pair brackets the crossing.

    Parameters
    ----------
    lats : numpy.ndarray|list|tuple|float|int
    lons : numpy.ndarray|list|tuple|float|int

    Returns
    -------
    numpy.ndarray
        Of shape `(4, )` - `[lat min, lat max, lon min, lon max]`.
    """

    def get_min_max(inp, lon=False):
        if isinstance(inp, (int, float, numpy.number)):
            return inp, inp

        min_val, max_val = numpy.min(inp), numpy.max(inp)
        # NB: the previous implementation recomputed the min/max for the
        # non-longitude branch - the values are identical, so reuse them
        if not lon:
            return min_val, max_val
        # check for 180/-180 crossing
        if not (min_val < -90 and max_val > 90):
            return min_val, max_val

        inp = numpy.array(inp).flatten()
        min_val = numpy.min(inp[inp >= 0])
        max_val = numpy.max(inp[inp <= 0])
        return min_val, max_val

    out = numpy.zeros((4, ), dtype='float64')
    out[:2] = get_min_max(lats)
    out[2:] = get_min_max(lons, lon=True)
    return out
class DTEDList(DEMList):
    """
    The DEM directory structure is assumed to look like the following:

        * For DTED<1,2>: `<root_dir>/dted/<1, 2>/<lon_string>/<lat_string>.dtd`

        * For SRTM<1,2,2F>: `<root_dir>/srtm/<1, 2, 2f>/<lon_string>/<lat_string>.dt<1,2,2>`

    Here `<lat_string>` corresponds to a string of the form `X##`, where
    `X` is one of 'N' or 'S', and `##` is the zero-padded formatted string for
    the integer value `floor(lat)`.

    Similarly, `<lon_string>` corresponds to a string of the form `Y##`, where
    `Y` is one of 'E' or 'W', and `###` is the zero-padded formatted string for
    the integer value `floor(lon)`.

    <lon_string>, <lat_string> corresponds to the origin in the lower left corner
    of the DEM tile.
    """

    __slots__ = ('_root_dir', '_missing_error')

    def __init__(self, root_directory, missing_error=False):
        """
        Parameters
        ----------
        root_directory : str
            The root directory of the DEM directory structure.
        missing_error : bool
            Raise an exception when DTED files are missing?
        """

        self._root_dir = root_directory
        self._missing_error = missing_error

    @property
    def root_dir(self):
        """
        str: the root directory
        """

        return self._root_dir

    def _get_directory_stem(self, dem_type):
        # map a DEM type name to its directory, e.g. 'DTED1' -> <root>/dted/1
        # and 'SRTM2F' -> <root>/srtm/2f
        if dem_type.startswith('DTED'):
            return os.path.join(self._root_dir, dem_type[:4].lower(), dem_type[-1])
        elif dem_type.startswith('SRTM'):
            return os.path.join(self._root_dir, dem_type[:4].lower(), dem_type[4:].lower())
        else:
            raise ValueError('Unhandled dem_type {}'.format(dem_type))

    def _get_file_list(self, lat_lon_list, dem_type):
        """
        Helper method for getting the file list for a specified type.

        Each entry of `lat_lon_list` is of the form `[lat, lon, file or None]`;
        entries whose file has not yet been found are filled in place when a
        file of this type exists on disk.

        Parameters
        ----------
        lat_lon_list : list
        dem_type : str

        Returns
        -------
        List[str]
        """

        def get_box(la, lo):
            # construct (lon_string, lat_string) for the integer tile origin,
            # e.g. (-1, 10) -> ('e010', 's01')
            x = 'n' if la >= 0 else 's'
            y = 'e' if lo >= 0 else 'w'
            return '{0:s}{1:03d}'.format(y, abs(lo)), '{0:s}{1:02d}'.format(x, abs(la))

        # get the directory search stem
        dstem = self._get_directory_stem(dem_type)
        if not os.path.isdir(dstem):
            return  # nothing to be done

        # get file extension
        fext = _SUPPORTED_DTED_FILE_TYPES[dem_type]['fext']

        for entry in lat_lon_list:
            if entry[2] is not None:
                # we already found the file
                continue
            lonstr, latstr = get_box(entry[0], entry[1])
            fil = os.path.join(dstem, lonstr, latstr + fext)
            if os.path.isfile(fil):
                entry[2] = fil

    def get_file_list(self, lat_lon_box, dem_type=None):
        """
        Get the file list required for the given coordinates.

        Parameters
        ----------
        lat_lon_box : numpy.ndarray|list|tuple
            The bounding box of the form `[lat min, lat max, lon min, lon max]`.
        dem_type : None|str|List[str]
            The prioritized list of dem types to check. If `None`, then
            :func:`get_default_prioritization` is used. Each entry must be one
            of ("DTED1", "DTED2", "SRTM1", "SRTM2", "SRTM2F")

        Returns
        -------
        List[str]
        """

        # let's construct the list of lats that we must match
        lat_start = int(numpy.floor(lat_lon_box[0]))
        lat_end = int(numpy.ceil(lat_lon_box[1]))
        # NOTE(review): only lat_start is range-checked here - a lat_end
        # beyond 90 would slip through; confirm whether that is intended
        if (lat_start > lat_end) or (lat_start < -90) or (lat_start > 90):
            raise ValueError('Got malformed latitude in bounding box {}'.format(lat_lon_box))
        if lat_start == lat_end:
            lat_list = [lat_start, ]
        else:
            lat_list = list(range(lat_start, lat_end, 1))

        # let's construct the list of lons that we must match.
        lon_start = int(numpy.floor(lat_lon_box[2]))
        lon_end = int(numpy.ceil(lat_lon_box[3]))
        if (lon_start < -180) or (lon_end < -180) or (lon_start > 180) or (lon_end > 180):
            raise ValueError('Got malformed longitude in bounding box {}'.format(lat_lon_box))
        if lon_start > lon_end:
            if not (lon_end < 0 < lon_start):
                # this is assumed to NOT be a 180/-180 boundary crossing
                raise ValueError(
                    'We have minimum longitude greater than maximum longitude {}'.format(lat_lon_box))
            else:
                # we have a 180/-180 boundary crossing
                lon_list = list(range(lon_start, 180, 1)) + list(range(-180, lon_end, 1))
        else:
            if lon_start == lon_end:
                lon_list = [lon_start, ]
            else:
                lon_list = list(range(lon_start, lon_end, 1))

        # construct our workspace - entries of the form [lat, lon, found file]
        lat_lon_list = []
        for corner_lat in lat_list:
            for corner_lon in lon_list:
                lat_lon_list.append([corner_lat, corner_lon, None])

        # validate the dem types list
        if dem_type is None:
            dem_type = get_default_prioritization()
        elif isinstance(dem_type, str):
            dem_type = [dem_type, ]

        # loop over the prioritized list of types and check
        for entry in dem_type:
            if not isinstance(entry, str):
                raise TypeError(
                    'Got entry {} of dem_type, this is required to be of string type'.format(entry))

            # validate dem_type options
            this_entry = entry.upper()
            if this_entry not in _SUPPORTED_DTED_FILE_TYPES:
                raise ValueError(
                    'Got dem_type {}, but it must be one of the supported '
                    'types {}'.format(entry, list(_SUPPORTED_DTED_FILE_TYPES.keys())))

            self._get_file_list(lat_lon_list, this_entry)  # NB: this modifies lat_lon_list in place

        # extract files and warn about missing entries
        files = []
        missing_boxes = []
        for entry in lat_lon_list:
            if entry[2] is not None:
                files.append(entry[2])
            else:
                missing_boxes.append('({}, {})'.format(entry[0], entry[1]))
        if len(missing_boxes) > 0:
            msg = 'Missing expected DEM files for squares with lower left lat/lon corner {}'.format(missing_boxes)
            if self._missing_error:
                raise ValueError(msg)
            else:
                logger.warning(
                    msg + '\n\tThis should result in the assumption that the altitude in\n\t'
                          'that section is given by Mean Sea Level.')
        return files
class DTEDReader(object):
    """
    Reader/interpreter for DTED files, generally expected to be a helper class.
    As such, some implementation choices have been made for computational efficiency,
    and not user convenience.
    """

    __slots__ = ('_file_name', '_origin', '_spacing', '_bounding_box', '_shape', '_mem_map')

    def __init__(self, file_name):
        """
        Parameters
        ----------
        file_name : str
            Path to the DTED file.
        """

        self._file_name = file_name

        with open(self._file_name, 'rb') as fi:
            # NB: DTED is always big-endian
            # the first 80 characters are header
            # characters 80:728 are data set identification record
            # characters 728:3428 are accuracy record
            # the remainder is data records, but DTED is not quite a raster
            header = struct.unpack('>80s', fi.read(80))[0].decode('utf-8')

        if header[:3] != 'UHL':
            raise SarpyIOError('File {} does not appear to be a DTED file.'.format(self._file_name))

        # origin longitude/latitude encoded as DDDMMSS followed by hemisphere
        lon = float(header[4:7]) + float(header[7:9])/60. + float(header[9:11])/3600.
        lon = -lon if header[11] == 'W' else lon
        lat = float(header[12:15]) + float(header[15:17])/60. + float(header[17:19])/3600.
        lat = -lat if header[19] == 'S' else lat

        self._origin = numpy.array([lon, lat], dtype=numpy.float64)
        # spacing fields are in tenths of arc-seconds - convert to degrees
        self._spacing = numpy.array([float(header[20:24]), float(header[24:28])], dtype=numpy.float64)/36000.
        # shape = (number of longitude lines, number of latitude points per line)
        self._shape = numpy.array([int(header[47:51]), int(header[51:55])], dtype=numpy.int64)
        self._bounding_box = numpy.zeros((4, ), dtype=numpy.float64)
        self._bounding_box[0::2] = self._origin
        self._bounding_box[1::2] = self._origin + self._spacing*(self._shape - 1)

        # starting at 3428, the rest of the file is data records, but not quite a raster
        # each "row" is a data record with 8 extra bytes at the beginning,
        # and 4 extra (checksum) at the end - look to MIL-PRF-89020B for an explanation
        # To enable memory map usage, we will spoof it as a raster and adjust column indices
        shp = (int(self._shape[0]), int(self._shape[1]) + 6)
        self._mem_map = numpy.memmap(self._file_name,
                                     dtype=numpy.dtype('>i2'),
                                     mode='r',
                                     offset=3428,
                                     shape=shp)

    @property
    def origin(self):
        """
        numpy.ndarray: The origin of this DTED, of the form `[longitude, latitude]`.
        """

        return numpy.copy(self._origin)

    @property
    def bounding_box(self):
        """
        numpy.ndarray: The bounding box of the form
        `[longitude min, longitude max, latitude min, latitude max]`.
        """

        return numpy.copy(self._bounding_box)

    def __getitem__(self, item):
        def new_col_int(val, begin):
            # shift a user-facing column index to skip the 4 leading int16
            # entries (8 record header bytes) and 2 trailing int16 entries
            # (4 checksum bytes) of each data record
            if val is None:
                if begin:
                    return 4
                else:
                    return -2
            return val + 4 if val >= 0 else val - 2

        # we need to manipulate in the second dimension
        if isinstance(item, tuple):
            if len(item) > 2:
                raise ValueError('Cannot slice on more than 2 dimensions')
            it = item[1]
            if isinstance(it, int):
                it1 = new_col_int(it, True)
            elif isinstance(it, slice):
                start = new_col_int(it.start, True)
                stop = new_col_int(it.stop, False)
                it1 = slice(start, stop, it.step)
            elif isinstance(item[1], numpy.ndarray):
                it1 = numpy.copy(item[1])
                it1[it1 >= 0] += 4
                it1[it1 < 0] -= 2
            else:
                raise ValueError('Cannot slice using {}'.format(type(item[1])))
            data = self._mem_map.__getitem__((item[0], it1))
        else:
            data = self._mem_map[item, 4:-2]

        return self._repair_values(data)

    @staticmethod
    def _repair_values(elevations):
        """
        This is a helper method for repairing the weird entries in a DTED.

        Parameters
        ----------
        elevations : numpy.ndarray

        Returns
        -------
        numpy.ndarray
            A repaired copy of the input (the input itself is not modified).
        """

        elevations = numpy.copy(elevations)
        # BASED ON MIL-PRF-89020B SECTION 3.11.1, 3.11.2
        # There are some byte-swapping details that are poorly explained.
        # The following steps appear to correct for the "complemented" values.
        # Find negative voids and repair them
        neg_voids = (elevations < -15000)
        elevations[neg_voids] = numpy.abs(elevations[neg_voids]) - 32768
        # Find positive voids and repair them
        pos_voids = (elevations > 15000)
        elevations[pos_voids] = 32768 - elevations[pos_voids]
        return elevations

    def _linear(self, ix, dx, iy, dy):
        # type: (numpy.ndarray, numpy.ndarray, numpy.ndarray, numpy.ndarray) -> numpy.ndarray
        # bilinear interpolation from the four surrounding grid points
        a = (1 - dx) * self._lookup_elevation(ix, iy) + dx * self._lookup_elevation(ix + 1, iy)
        b = (1 - dx) * self._lookup_elevation(ix, iy+1) + dx * self._lookup_elevation(ix+1, iy+1)
        return (1 - dy) * a + dy * b

    def _lookup_elevation(self, ix, iy):
        # type: (numpy.ndarray, numpy.ndarray) -> numpy.ndarray
        # clamp indices to the valid grid and fetch repaired elevation values
        t_ix = numpy.copy(ix)
        t_ix[t_ix >= self._shape[0]] = self._shape[0] - 1
        t_ix[t_ix < 0] = 0

        # adjust iy to account for 8 extra bytes at the beginning of each column
        t_iy = iy + 4
        t_iy[t_iy >= self._shape[1]+4] = self._shape[1] + 3
        t_iy[t_iy < 4] = 4

        return self._repair_values(self._mem_map[t_ix, t_iy])

    def in_bounds(self, lat, lon):
        """
        Determine which of the given points are inside the extent of this DTED.

        Parameters
        ----------
        lat : numpy.ndarray
        lon : numpy.ndarray

        Returns
        -------
        numpy.ndarray
            boolean array of the same shape as lat/lon
        """

        return (lon >= self._bounding_box[0]) & (lon <= self._bounding_box[1]) & \
               (lat >= self._bounding_box[2]) & (lat <= self._bounding_box[3])

    def _get_elevation(self, lat, lon):
        # type: (numpy.ndarray, numpy.ndarray) -> numpy.ndarray
        # we implicitly require that lat/lon make sense and are contained in this DTED

        # get fractional grid indices
        fx = (lon - self._origin[0])/self._spacing[0]
        fy = (lat - self._origin[1])/self._spacing[1]
        # get integer indices via floor
        # NB: `numpy.cast[...]` was removed in numpy 2.0 - use astype instead
        ix = numpy.floor(fx).astype(numpy.int32)
        iy = numpy.floor(fy).astype(numpy.int32)
        return self._linear(ix, fx-ix, iy, fy-iy)

    def get_elevation(self, lat, lon, block_size=50000):
        """
        Interpolate the elevation values for lat/lon. This is relative to the EGM96
        geoid by DTED specification.

        Parameters
        ----------
        lat : numpy.ndarray
        lon : numpy.ndarray
        block_size : None|int
            If `None`, then the entire calculation will proceed as a single block.
            Otherwise, block processing using blocks of at most 50,000 points
            will be used. Default is 50,000.

        Returns
        -------
        numpy.ndarray
            Elevation values of the same shape as lat/lon; points outside this
            DTED's extent are returned as NaN.
        """

        o_shape, lat, lon = argument_validation(lat, lon)

        out = numpy.full(lat.shape, numpy.nan, dtype=numpy.float64)
        if block_size is None:
            boolc = self.in_bounds(lat, lon)
            if numpy.any(boolc):
                out[boolc] = self._get_elevation(lat[boolc], lon[boolc])
        else:
            block_size = min(50000, int(block_size))
            start_block = 0
            while start_block < lat.size:
                end_block = min(lat.size, start_block + block_size)
                lat1 = lat[start_block:end_block]
                lon1 = lon[start_block:end_block]
                boolc = self.in_bounds(lat1, lon1)
                out1 = numpy.full(lat1.shape, numpy.nan, dtype=numpy.float64)
                # NB: fixed - this previously indexed the full `lon` array with
                # the block-sized mask `boolc`, which breaks for any block
                # other than the first
                out1[boolc] = self._get_elevation(lat1[boolc], lon1[boolc])
                out[start_block:end_block] = out1
                start_block = end_block

        if o_shape == ():
            return float(out[0])
        else:
            return numpy.reshape(out, o_shape)

    def _find_overlap(self, lat_lon_box):
        """
        Gets the overlap slice argument for the lat/lon bounding box.

        Parameters
        ----------
        lat_lon_box : None|numpy.ndarray
            None (whole extent) or of the form `[lat min, lat max, lon min, lon max]`.

        Returns
        -------
        None|(slice, slice)
            `None` when the box does not overlap this DTED at all.
        """

        if lat_lon_box is None:
            first_row = 0
            last_row = self._shape[0]
            first_col = 0
            last_col = self._shape[1]
        else:
            # NB: memmap rows correspond to longitude lines, columns to latitude
            first_row = (lat_lon_box[2] - self._origin[0])/self._spacing[0]
            last_row = (lat_lon_box[3] - self._origin[0])/self._spacing[0]
            first_col = (lat_lon_box[0] - self._origin[1])/self._spacing[1]
            last_col = (lat_lon_box[1] - self._origin[1])/self._spacing[1]
            if first_row > self._shape[0] or last_row < 0 or first_col > self._shape[1] or last_col < 0:
                return None

            first_row = int(max(0, numpy.floor(first_row)))
            last_row = int(min(self._shape[0], numpy.ceil(last_row)))
            first_col = int(max(0, numpy.floor(first_col)))
            last_col = int(min(self._shape[1], numpy.ceil(last_col)))
        return slice(first_row, last_row, 1), slice(first_col, last_col, 1)

    def get_max(self, lat_lon_box=None):
        """
        Gets the maximum observed DEM value, possibly contained in the given
        rectangular area of interest.

        Parameters
        ----------
        lat_lon_box : None|numpy.ndarray
            None or of the form `[lat min, lat max, lon min, lon max]`.

        Returns
        -------
        float|None
        """

        arg = self._find_overlap(lat_lon_box)
        if arg is None:
            return None
        return numpy.max(self.__getitem__(arg))

    def get_min(self, lat_lon_box=None):
        """
        Gets the minimum observed DEM value, possibly contained in the given
        rectangular area of interest.

        Parameters
        ----------
        lat_lon_box : None|numpy.ndarray
            None or of the form `[lat min, lat max, lon min, lon max]`.

        Returns
        -------
        float|None
        """

        arg = self._find_overlap(lat_lon_box)
        if arg is None:
            return None
        return numpy.min(self.__getitem__(arg))
class DTEDInterpolator(DEMInterpolator):
    """
    DEM Interpolator using DTED/SRTM files for the DEM information.
    """

    __slots__ = ('_readers', '_geoid', '_ref_geoid')
    # NOTE(review): __init__ also assigns _lat_lon_box, _max_geoid and _min_geoid,
    # which are absent from __slots__. This only works because the base class
    # defines no __slots__, so instances still carry a __dict__ - confirm intended.

    def __init__(self, files, geoid_file, lat_lon_box=None):
        if isinstance(files, str):
            files = [files, ]
        # get a reader object for each file
        self._readers = [DTEDReader(fil) for fil in files]

        # get the geoid object - we should prefer egm96 .pgm files, since that's the DTED spec
        # in reality, it makes very little difference, though
        if isinstance(geoid_file, str):
            if os.path.isdir(geoid_file):
                geoid_file = GeoidHeight.from_directory(geoid_file, search_files=('egm96-5.pgm', 'egm96-15.pgm'))
            else:
                geoid_file = GeoidHeight(geoid_file)
        if not isinstance(geoid_file, GeoidHeight):
            raise TypeError(
                'geoid_file is expected to be the path where one of the standard '
                'egm .pgm files can be found, or an instance of GeoidHeight reader. '
                'Got {}'.format(type(geoid_file)))
        self._geoid = geoid_file

        self._lat_lon_box = lat_lon_box
        if len(self._readers) == 0:
            self._ref_geoid = 0
        else:
            # use the origin of the first reader
            # NB: reader origin is of the form [lon, lat], GeoidHeight.get takes (lat, lon)
            ref_point = self._readers[0].origin
            self._ref_geoid = float(self._geoid.get(ref_point[1], ref_point[0]))
        self._max_geoid = None
        self._min_geoid = None

    @classmethod
    def from_coords_and_list(cls, lat_lon_box, dted_list, dem_type=None, geoid_file=None):
        """
        Construct a `DTEDInterpolator` from a coordinate collection and `DTEDList` object.

        .. Note:: This depends on using :func:`DTEDList.get_file_list`
            to get the relevant file list.

        Parameters
        ----------
        lat_lon_box : numpy.ndarray|list|tuple
            Of the form `[lat min, lat max, lon min, lon max]`.
        dted_list : DTEDList|str
            The dted list object or root directory
        dem_type : None|str|List[str]
            The DEM type or list of DEM types in order of priority.
        geoid_file : None|str|GeoidHeight
            The `GeoidHeight` object, an egm file name, or root directory containing
            one of the egm files in the sub-directory "geoid". If `None`, then default
            to the root directory of `dted_list`.

        Returns
        -------
        DTEDInterpolator
        """

        if isinstance(dted_list, str):
            dted_list = DTEDList(dted_list)
        if not isinstance(dted_list, DTEDList):
            raise ValueError(
                'dted_list os required to be a path (directory) or DTEDList instance.')

        # default the geoid argument to the root directory of the dted_list
        if geoid_file is None:
            geoid_file = dted_list.root_dir

        return cls(dted_list.get_file_list(lat_lon_box, dem_type=dem_type), geoid_file, lat_lon_box=lat_lon_box)

    @classmethod
    def from_reference_point(cls, ref_point, dted_list, dem_type=None, geoid_file=None, pad_value=0.1):
        """
        Construct a DTEDInterpolator object by padding around the reference point by
        `pad_value` latitude degrees (1 degree ~ 111 km or 69 miles).

        .. Note:: The degeneracy at the poles is not handled, because DTED are not
            defined there anyways.

        Parameters
        ----------
        ref_point : numpy.ndarray|list|tuple
            This is assumed to be of the form `[lat, lon, ...]`, and entries
            beyond the first two are ignored.
        dted_list : DTEDList|str
            The dted list object or root directory
        dem_type : None|str|List[str]
            The DEM type or list of DEM types in order of priority.
        geoid_file : None|str|GeoidHeight
            The `GeoidHeight` object, an egm file name, or root directory containing
            one of the egm files in the sub-directory "geoid". If `None`, then default
            to the root directory of `dted_list`.
        pad_value : float
            The degree value to pad by, clipped to the interval [0.05, 0.5].

        Returns
        -------
        DTEDInterpolator
        """

        pad_value = float(pad_value)
        if pad_value > 0.5:
            pad_value = 0.5
        if pad_value < 0.05:
            pad_value = 0.05
        lat_diff = pad_value
        lat_max = min(ref_point[0] + lat_diff, 90)
        lat_min = max(ref_point[0] - lat_diff, -90)

        # NOTE(review): the lat->lon padding conversion here uses sin(lat); the
        # usual ground-distance scaling would be lat_diff/cos(lat). As written
        # this yields a huge pad near the equator (capped at 15 degrees) and a
        # negative pad for southern latitudes - confirm intent.
        lon_diff = min(15, lat_diff/(numpy.sin(numpy.deg2rad(ref_point[0]))))
        lon_max = ref_point[1] + lon_diff
        if lon_max > 180:
            lon_max -= 360
        lon_min = ref_point[1] - lon_diff
        if lon_min < -180:
            lon_min += 360

        return cls.from_coords_and_list(
            [lat_min, lat_max, lon_min, lon_max], dted_list, dem_type=dem_type, geoid_file=geoid_file)

    @property
    def geoid(self):  # type: () -> GeoidHeight
        """
        GeoidHeight: Get the geoid height calculator
        """

        return self._geoid

    def _get_elevation_geoid_from_reader(self, reader, lat, lon):
        # query a single reader; returns (in-bounds mask, values with NaN outside)
        mask = reader.in_bounds(lat, lon)
        values = numpy.full(lat.shape, numpy.nan, dtype=numpy.float64)
        if numpy.any(mask):
            # noinspection PyProtectedMember
            values[mask] = reader._get_elevation(lat[mask], lon[mask])
        return mask, values

    def _get_elevation_geoid(self, lat, lon):
        # query the readers in order, only filling points not yet resolved
        out = numpy.full(lat.shape, numpy.nan, dtype=numpy.float64)
        remaining = numpy.ones(lat.shape, dtype=numpy.bool_)
        for reader in self._readers:
            if not numpy.any(remaining):
                break
            mask, values = self._get_elevation_geoid_from_reader(reader, lat[remaining], lon[remaining])
            if numpy.any(mask):
                # expand the subset mask back into full-array coordinates
                work_mask = numpy.copy(remaining)
                work_mask[remaining] = mask  # mask as a subset of remaining
                out[work_mask] = values[mask]
                remaining[work_mask] = False
        return out

    def get_elevation_hae(self, lat, lon, block_size=50000):
        """
        Get the elevation value relative to the WGS-84 ellipsoid.

        .. Note:: DTED elevation is relative to the egm96 geoid, and we are simply adding
            values determined by a geoid calculator. Using the egm2008 model will result
            in only minor differences.

        Parameters
        ----------
        lat : numpy.ndarray|list|tuple|int|float
        lon : numpy.ndarray|list|tuple|int|float
        block_size : None|int
            If `None`, then the entire calculation will proceed as a single block.
            Otherwise, block processing using blocks of at most 50,000 points
            will be used.

        Returns
        -------
        numpy.ndarray
            The elevation relative to the WGS-84 ellipsoid.
        """

        # DEM (relative to geoid) plus geoid undulation (relative to ellipsoid)
        return self.get_elevation_geoid(lat, lon, block_size=block_size) + \
            self._geoid.get(lat, lon, block_size=block_size)

    def get_elevation_geoid(self, lat, lon, block_size=50000):
        """
        Get the elevation value relative to the geoid.

        .. Note:: DTED elevation is relative to the egm96 geoid, though using the egm2008
            model will result in only minor differences.

        Parameters
        ----------
        lat : numpy.ndarray|list|tuple|int|float
        lon : numpy.ndarray|list|tuple|int|float
        block_size : None|int
            If `None`, then the entire calculation will proceed as a single block.
            Otherwise, block processing using blocks of at most 50,000 points
            will be used. Default is 50,000.

        Returns
        -------
        numpy.ndarray
            the elevation relative to the geoid; points covered by no reader
            are returned as 0 (Mean Sea Level)
        """

        o_shape, lat, lon = argument_validation(lat, lon)

        if block_size is None:
            out = self._get_elevation_geoid(lat, lon)
        else:
            block_size = min(50000, int(block_size))
            out = numpy.full(lat.shape, numpy.nan, dtype=numpy.float64)
            start_block = 0
            while start_block < lat.size:
                end_block = min(lat.size, start_block+block_size)
                out[start_block:end_block] = self._get_elevation_geoid(
                    lat[start_block:end_block], lon[start_block:end_block])
                start_block = end_block

        out[numpy.isnan(out)] = 0.0  # set missing values to geoid=0 (MSL)

        if o_shape == ():
            return float(out[0])
        else:
            return numpy.reshape(out, o_shape)

    def _get_ref_geoid(self, lat_lon_box):
        # NOTE(review): when lat_lon_box is None this assumes at least one
        # reader exists - with no readers self._readers[0] raises IndexError
        if lat_lon_box is None:
            # use the origin of the first reader
            ref_point = self._readers[0].origin
            return float(self._geoid.get(ref_point[1], ref_point[0]))
        else:
            return float(self._geoid.get(lat_lon_box[0], lat_lon_box[2]))

    def get_max_hae(self, lat_lon_box=None):
        # approximate: geoid max plus a single reference geoid undulation
        return self.get_max_geoid(lat_lon_box=lat_lon_box) + self._get_ref_geoid(lat_lon_box)

    def get_min_hae(self, lat_lon_box=None):
        # approximate: geoid min plus a single reference geoid undulation
        return self.get_min_geoid(lat_lon_box=lat_lon_box) + self._get_ref_geoid(lat_lon_box)

    def get_max_geoid(self, lat_lon_box=None):
        if len(self._readers) < 1:
            return self._get_ref_geoid(lat_lon_box)
        obs_maxes = [reader.get_max(lat_lon_box=lat_lon_box) for reader in self._readers]
        return float(max(value for value in obs_maxes if value is not None))

    def get_min_geoid(self, lat_lon_box=None):
        if len(self._readers) < 1:
            return self._get_ref_geoid(lat_lon_box)
        obs_mins = [reader.get_min(lat_lon_box=lat_lon_box) for reader in self._readers]
        return float(min(value for value in obs_mins if value is not None))
| 29,459 | 35.733167 | 114 | py |
sarpy | sarpy-master/sarpy/io/DEM/geoid.py | """
Provides a class for calculating the height of the WGS84 geoid above the ellipsoid
at any given latitude and longitude.
**Some Accuracy Details:**
Using the 5 minute pgm and linear interpolation, the average error `|calculated - real|`
is around 5 millimeters, and the worst case error is around 30 centimeters. Using cubic
interpolation, the average error drops to about 3 mm, and the worst case is about 17 cm.
Using the 1 minute pgm and linear interpolation, the average error is around 0.5 mm and
worst case error around 1 cm. Using cubic interpolation, the average error is still around
0.5 mm, and worst case error around 2 mm.
The accuracies obtained using the 5, 2.5, or 1 minute pgm are likely all more than suitable
for any SAR application. The accuracy clearly increases with finer grid.
**Some Processing Speed and Resource Details:**
A memory map into the pgm file is established, which requires relatively little "real" RAM,
but an amount of virtual memory on par with the file size. The speed seems to generally scale
close to inverse linearly with pixel size, so using the 5 minute pgm is generally ~4-5 times
faster than using the 1 minute pgm.
Using the 5 minute pgm, processing rate of around 4-8 million points per second for linear
interpolation and 1-2 million points per second for cubic interpolation. Using the 1 minute pgm,
this processing rate drops to 1-2 million points per second for linear interpolation, and
~million points per second using the cubic interpolation. These rates depend of a variety
of factors including processor speed, hard drive speed, and how your operating system handles
memory maps.
The 5 minute pgm is about 25 times smaller at around 18 MB, while the 1 minute pgm file is
around 450 MB.
**File Locations:**
As of January 2020, the egm2008 pgm files are available for download at
https://geographiclib.sourceforge.io/html/geoid.html
Specifically 1 minute data is available at
https://sourceforge.net/projects/geographiclib/files/geoids-distrib/egm2008-1.tar.bz2
or
https://sourceforge.net/projects/geographiclib/files/geoids-distrib/egm2008-1.zip
Specifically the 5 minute data is available at
https://sourceforge.net/projects/geographiclib/files/geoids-distrib/egm2008-5.tar.bz2
or
https://sourceforge.net/projects/geographiclib/files/geoids-distrib/egm2008-5.zip
"""
import os
import numpy
from sarpy.io.DEM.utils import argument_validation
from sarpy.io.general.base import SarpyIOError
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
############
# module variables
# Coefficient tables for the 12-point bivariate cubic interpolation stencil
# used by GeoidHeight._cubic. Each 12x10 matrix maps the 12 raster samples
# surrounding a point to the 10 coefficients of a bivariate cubic polynomial
# in (dx, dy), after dividing by the matching normalization constant _C0*.
# NOTE(review): these values appear to come from GeographicLib's Geoid
# implementation - confirm against that source before modifying.
# Interior-cell coefficients (used when the stencil does not touch a
# latitude edge of the raster; see the b3 branch in GeoidHeight._cubic).
_C0 = 240
_C3 = numpy.array((
(9, -18, -88, 0, 96, 90, 0, 0, -60, -20),
(-9, 18, 8, 0, -96, 30, 0, 0, 60, -20),
(9, -88, -18, 90, 96, 0, -20, -60, 0, 0),
(186, -42, -42, -150, -96, -150, 60, 60, 60, 60),
(54, 162, -78, 30, -24, -90, -60, 60, -60, 60),
(-9, -32, 18, 30, 24, 0, 20, -60, 0, 0),
(-9, 8, 18, 30, -96, 0, -20, 60, 0, 0),
(54, -78, 162, -90, -24, 30, 60, -60, 60, -60),
(-54, 78, 78, 90, 144, 90, -60, -60, -60, -60),
(9, -8, -18, -30, -24, 0, 20, 60, 0, 0),
(-9, 18, -32, 0, 24, 30, 0, 0, -60, 20),
(9, -18, -8, 0, -24, -30, 0, 0, 60, 20)), dtype=numpy.float64)
# Coefficients used for cells at the northern raster edge (iy == 0 in
# GeoidHeight._cubic).
_C0N = 372
_C3N = numpy.array((
(0, 0, -131, 0, 138, 144, 0, 0, -102, -31),
(0, 0, 7, 0, -138, 42, 0, 0, 102, -31),
(62, 0, -31, 0, 0, -62, 0, 0, 0, 31),
(124, 0, -62, 0, 0, -124, 0, 0, 0, 62),
(124, 0, -62, 0, 0, -124, 0, 0, 0, 62),
(62, 0, -31, 0, 0, -62, 0, 0, 0, 31),
(0, 0, 45, 0, -183, -9, 0, 93, 18, 0),
(0, 0, 216, 0, 33, 87, 0, -93, 12, -93),
(0, 0, 156, 0, 153, 99, 0, -93, -12, -93),
(0, 0, -45, 0, -3, 9, 0, 93, -18, 0),
(0, 0, -55, 0, 48, 42, 0, 0, -84, 31),
(0, 0, -7, 0, -48, -42, 0, 0, 84, 31)), dtype=numpy.float64)
# Coefficients used for cells at the southern raster edge
# (iy == height - 2 in GeoidHeight._cubic).
_C0S = 372
_C3S = numpy.array((
(18, -36, -122, 0, 120, 135, 0, 0, -84, -31),
(-18, 36, -2, 0, -120, 51, 0, 0, 84, -31),
(36, -165, -27, 93, 147, -9, 0, -93, 18, 0),
(210, 45, -111, -93, -57, -192, 0, 93, 12, 93),
(162, 141, -75, -93, -129, -180, 0, 93, -12, 93),
(-36, -21, 27, 93, 39, 9, 0, -93, -18, 0),
(0, 0, 62, 0, 0, 31, 0, 0, 0, -31),
(0, 0, 124, 0, 0, 62, 0, 0, 0, -62),
(0, 0, 124, 0, 0, 62, 0, 0, 0, -62),
(0, 0, 62, 0, 0, 31, 0, 0, 0, -31),
(-18, 36, -64, 0, 66, 51, 0, 0, -102, 31),
(18, -36, 2, 0, -66, -51, 0, 0, 102, 31)), dtype=numpy.float64)
_SEARCH_FILES = ('egm2008-5.pgm', 'egm2008-2_5.pgm', 'egm2008-1.pgm', 'egm96-5.pgm', 'egm96-15.pgm')


def find_geoid_file_from_dir(dir_name, search_files=None):
    """
    Locate a geoid .pgm file beneath ``<dir_name>/geoid``.

    Parameters
    ----------
    dir_name : str
        Root directory, expected to contain a "geoid" subdirectory.
    search_files : str|List[str]
        Optional preferred file name(s); the module defaults are appended
        after any caller-supplied names.

    Returns
    -------
    str
        Path of the first candidate file that exists.
    """

    geoid_dir = os.path.join(dir_name, 'geoid')
    if not os.path.exists(geoid_dir):
        raise SarpyIOError(
            'Input is a directory, and beneath it we expect to find '
            'files in directory "geoid"')

    # normalize caller preferences into a list, then append the module
    # defaults that are not already present (caller entries take precedence)
    if search_files is None:
        candidates = []
    elif isinstance(search_files, str):
        candidates = [search_files, ]
    else:
        candidates = list(search_files)
    for entry in _SEARCH_FILES:
        if entry not in candidates:
            candidates.append(entry)

    # return the first candidate which actually exists on disk
    for candidate in candidates:
        candidate_path = os.path.join(geoid_dir, candidate)
        if os.path.exists(candidate_path):
            return candidate_path

    raise SarpyIOError(
        'input is a directory and we expect to find one of the files {} '
        'in the directory "geoid" beneath it'.format(candidates))
class GeoidHeight(object):
    """
    Calculator for the height of the WGS84 geoid above the ellipsoid at any
    given latitude and longitude, based on one of the egm .pgm files.

    We are set up to use a dem/geoid parent directory. In this case, we expect
    our egm .pgm to be in the `<root_dir>/geoid` directory, and we will search
    in order of preference
    ('egm2008-5.pgm', 'egm2008-2_5.pgm', 'egm2008-1.pgm', 'egm96-5.pgm', 'egm96-15.pgm')
    """

    __slots__ = (
        '_offset', '_scale', '_width', '_height', '_header_length', '_memory_map',
        '_lon_res', '_lat_res')

    def __init__(self, file_name):
        """
        Parameters
        ----------
        file_name : str
            path to a egm2008 pgm file, or a directory beneath which a
            `geoid` directory containing such a file can be found
        """

        self._offset = None
        self._scale = None

        if os.path.isdir(file_name):
            file_name = find_geoid_file_from_dir(file_name)

        # parse the PGM header by hand - we need the exact header byte length
        # in order to memory map the raster body that follows it
        with open(file_name, "rb") as f:
            line = f.readline()
            if line != b"P5\012" and line != b"P5\015\012":
                raise SarpyIOError("No PGM header")
            headerlen = len(line)
            while True:
                line = f.readline().decode('utf-8')
                if len(line) == 0:
                    raise SarpyIOError("EOF before end of file header")
                headerlen += len(line)
                if line.startswith('# Offset '):
                    try:
                        self._offset = int(line[9:])
                    except ValueError as e:
                        raise SarpyIOError("Error reading offset", e)
                elif line.startswith('# Scale '):
                    try:
                        self._scale = float(line[8:])
                    except ValueError as e:
                        raise SarpyIOError("Error reading scale", e)
                elif not line.startswith('#'):
                    try:
                        slin = line.split()
                        self._width, self._height = int(slin[0]), int(slin[1])
                    except ValueError as e:
                        raise SarpyIOError("Bad PGM width&height line", e)
                    break
            line = f.readline().decode('utf-8')
            headerlen += len(line)
            levels = int(line)
            if levels != 65535:
                raise SarpyIOError("PGM file must have 65535 gray levels")
            if self._offset is None:
                raise SarpyIOError("PGM file does not contain offset")
            if self._scale is None:
                raise SarpyIOError("PGM file does not contain scale")
            if self._width < 2 or self._height < 2:
                raise SarpyIOError("Raster size too small")

        self._header_length = headerlen
        # memory map the raster body - big-endian uint16 samples; real heights
        # are recovered as offset + scale*sample
        self._memory_map = numpy.memmap(file_name,
                                        dtype=numpy.dtype('>u2'),
                                        mode='r',
                                        offset=self._header_length,
                                        shape=(self._height, self._width))
        self._lon_res = self._width/360.0
        self._lat_res = (self._height - 1)/180.0

    def _get_raw(self, ix, iy):
        # these manipulations are required for edge effects
        # NOTE: ix and iy are deliberately modified in place, so subsequent
        # uses of the same arrays see the wrapped/reflected indices
        boolc = (iy < 0)
        iy[boolc] *= -1
        # ix[boolc] += int(self._width/2) # why is this here?
        boolc = (iy >= self._height)
        iy[boolc] = 2*(self._height - 1) - iy[boolc]
        # ix[boolc] += int(self._width/2) # why is this here?
        boolc = (ix < 0)
        ix[boolc] += self._width
        boolc = (ix >= self._width)
        ix[boolc] -= self._width
        return self._memory_map[iy, ix]

    def _linear(self, ix, dx, iy, dy):
        # bilinear interpolation over the four surrounding raw samples
        a = (1 - dx) * self._get_raw(ix, iy) + dx * self._get_raw(ix + 1, iy)
        b = (1 - dx) * self._get_raw(ix, iy+1) + dx * self._get_raw(ix+1, iy+1)
        return (1 - dy) * a + dy * b

    def _cubic(self, ix, dx, iy, dy):
        # gather the 12-point interpolation stencil surrounding each pixel
        v = numpy.vstack((
            self._get_raw(ix, iy - 1),
            self._get_raw(ix + 1, iy - 1),
            self._get_raw(ix - 1, iy),
            self._get_raw(ix, iy),
            self._get_raw(ix + 1, iy),
            self._get_raw(ix + 2, iy),
            self._get_raw(ix - 1, iy + 1),
            self._get_raw(ix, iy + 1),
            self._get_raw(ix + 1, iy + 1),
            self._get_raw(ix + 2, iy + 1),
            self._get_raw(ix, iy + 2),
            self._get_raw(ix + 1, iy + 2)))
        t = numpy.zeros((10, ix.size), dtype=numpy.float64)
        # edge cells use the dedicated north/south coefficient tables
        b1 = (iy == 0)
        b2 = (iy == self._height - 2)
        b3 = ~(b1 | b2)
        if numpy.any(b1):
            t[:, b1] = (_C3N.T/_C0N).dot(v[:, b1])
        if numpy.any(b2):
            t[:, b2] = (_C3S.T/_C0S).dot(v[:, b2])
        if numpy.any(b3):
            t[:, b3] = (_C3.T/_C0).dot(v[:, b3])
        # evaluate the bivariate cubic polynomial at the fractional offsets
        return t[0] + \
            dx*(t[1] + dx*(t[3] + dx*t[6])) + \
            dy*(t[2] + dx*(t[4] + dx*t[7]) + dy*(t[5] + dx*t[8] + dy*t[9]))

    def _do_block(self, lat, lon, cubic):
        fx = lon*self._lon_res
        fx[fx < 0] += 360*self._lon_res
        fy = (90 - lat)*self._lat_res
        # numpy.cast was deprecated and removed in numpy 2.0; astype is the
        # supported equivalent for extracting the integer pixel indices
        ix = numpy.floor(fx).astype(numpy.int32)
        iy = numpy.floor(fy).astype(numpy.int32)
        dx = fx - ix
        dy = fy - iy
        # clamp the last row so that the iy+1 lookups stay meaningful at the
        # southern raster edge
        iy[iy == self._height - 1] -= 1  # edge effects?
        if cubic:
            return self._offset + self._scale*self._cubic(ix, dx, iy, dy)
        else:
            return self._offset + self._scale*self._linear(ix, dx, iy, dy)

    def get(self, lat, lon, cubic=True, block_size=50000):
        """
        Calculate the height of the geoid above the ellipsoid in meters at the given points.

        Parameters
        ----------
        lat : numpy.ndarray
        lon : numpy.ndarray
        cubic : bool
            Use a simple cubic spline interpolation, otherwise use simple linear.
            Default is `True`.
        block_size : None|int
            If `None`, then the entire calculation will proceed as a single block.
            Otherwise, block processing using blocks of the given size will be used.
            The minimum value used for this is 50,000, and any smaller value will be
            replaced with 50,000. Default is 50,000.

        Returns
        -------
        numpy.ndarray
        """

        o_shape, lat, lon = argument_validation(lat, lon)
        if block_size is None:
            out = self._do_block(lat, lon, cubic)
        else:
            block_size = max(50000, int(block_size))
            out = numpy.empty(lat.shape, dtype=numpy.float64)
            start_block = 0
            while start_block < lat.size:
                end_block = min(start_block+block_size, lat.size)
                out[start_block:end_block] = self._do_block(
                    lat[start_block:end_block], lon[start_block:end_block], cubic)
                start_block = end_block
        if o_shape == ():
            # scalar input yields a scalar output
            return float(out[0])
        else:
            return numpy.reshape(out, o_shape)

    def __call__(self, lat, lon):
        return self.get(lat, lon)

    @classmethod
    def from_directory(cls, dir_name, search_files=None):
        """
        Create the GeoidHeight object from a search directory.

        Parameters
        ----------
        dir_name : str
        search_files : str|List[str]

        Returns
        -------
        GeoidHeight
        """

        our_file = find_geoid_file_from_dir(dir_name, search_files=search_files)
        return cls(our_file)
| 13,323 | 35.50411 | 100 | py |
sarpy | sarpy-master/sarpy/io/DEM/geotiff1deg.py | """
Classes and methods for parsing and using digital elevation models (DEM) in GeoTIFF format.
This code makes the following assumptions.
1. The GeoTIFF files tile the earth with one degree offsets in both latitude and longitude.
2. There is one pixel of overlap between adjacent tiles.
3. The south-west corner of each tile is at an integer (degrees) latitude and longitude.
4. The latitude and longitude of south-west corner points is encoded in the GeoTIFF filename.
5. The anti-meridian is at W180 rather than at E180 so that valid longitude values are (-180 <= lon < 180) degrees.
"""
import glob
import logging
import pathlib
import warnings
import numpy as np
from PIL import Image
from PIL import TiffTags
from scipy.interpolate import RegularGridInterpolator
from sarpy.io.DEM.DEM import DEMList
from sarpy.io.DEM.DEM import DEMInterpolator
from sarpy.io.DEM.geoid import GeoidHeight
# module-level logger used for missing-file and reference-surface warnings below
logger = logging.getLogger(__name__)
__classification__ = "UNCLASSIFIED"
__author__ = "Valkyrie Systems Corporation"
# DEM tiles can legitimately be very large images, so disable PIL's
# "decompression bomb" size limit before opening them.
Image.MAX_IMAGE_PIXELS = None  # get rid of decompression bomb checking
class GeoTIFF1DegReader:
    """Lazily read a GeoTIFF file and cache its DEM samples and TIFF tags."""

    def __init__(self, filename):
        self._filename = filename
        self._dem_data = None
        self._tiff_tags = None

    def _ensure_loaded(self):
        # Read the file on first access only; later accesses hit the cache.
        if self._dem_data is None or self._tiff_tags is None:
            self._read()

    @property
    def filename(self):
        return self._filename

    @property
    def dem_data(self):
        self._ensure_loaded()  # pragma no cover
        return self._dem_data

    @property
    def tiff_tags(self):
        self._ensure_loaded()  # pragma no cover
        return self._tiff_tags

    def _read(self):
        # Note: the dem_data must have dtype=np.float64 otherwise the interpolator
        # created by RegularGridInterpolator will raise a TypeError exception.
        with Image.open(self._filename) as img:
            self._tiff_tags = {TiffTags.TAGS[tag_id]: value for tag_id, value in img.tag.items()}
            self._dem_data = np.asarray(img, dtype=np.float64)
class GeoTIFF1DegInterpolator(DEMInterpolator):
    """
    This class contains methods used to read DEM data from GeoTIFF files and interpolate the height values, as needed.

    Args
    ----
    dem_filename_pattern : str
        This is a format string that provides a glob pattern that will uniquely specify a DEM file from
        the Lat/Lon of the SW corner of the DEM tile. See the GeoTIFF1DegList docstring for more details.
    ref_surface: str (default: "EGM2008")
        A case-insensitive string specifying the DEM reference surface. (eg., "WGS84" | "EGM2008" | "EGM96" | "EGM84")
    geoid_path: str | pathlib.Path | None (default: None)
        Optional filename of a specific Geoid file or a directory containing geoid files to choose from.
        If a directory is specified, then one or more of the following geoid files (in order of preference)
        will be chosen from this directory.
        'egm2008-1.pgm', 'egm2008-2_5.pgm', 'egm2008-5.pgm',
        'egm96-5.pgm', 'egm96-15.pgm', 'egm84-15.pgm', 'egm84-30.pgm'
    missing_error: bool (default: False)
        Optional flag indicating whether an exception will be raised when missing DEM data files are encountered.
        If True then a ValueError will be raised when a needed DEM data file can not be found.
        If False then a DEM value of zero will be used when a needed DEM data file is not found.
    interp_method: str (default: 'linear')
        Optional interpolation method. Any scipy.interpolate.RegularGridInterpolator method is valid here.
    max_readers: init (default: 4)
        Optional maximum number of DEM file readers. A DEM file reader will read a DEM file and cache the results.
        DEM file readers can use a lot of memory (~8 bytes x number-of-DEM-samples), but will make processing faster.
    """

    # '_geoid_obj' was assigned in __init__ but missing from __slots__; it is
    # included here so the declaration actually covers every attribute.
    __slots__ = ('_geoid_path', '_interp_method', '_ref_surface', '_geotiff_list_obj',
                 '_geoid_obj', '_bounding_box_cache', '_max_readers', '_readers')

    def __init__(self, dem_filename_pattern, ref_surface='EGM2008', geoid_path=None, *,
                 missing_error=False, interp_method="linear", max_readers=4):
        self._geoid_path = pathlib.Path(geoid_path) if geoid_path else None
        self._interp_method = str(interp_method)
        self._ref_surface = str(ref_surface).upper()
        self._geotiff_list_obj = GeoTIFF1DegList(dem_filename_pattern, missing_error=missing_error)
        self._bounding_box_cache = {}
        self._max_readers = max(1, int(max_readers))
        self._readers = []

        # get the geoid object - we prefer egm2008*.pgm files, but in reality, it makes very little difference.
        if self._geoid_path and self._geoid_path.is_file():
            self._geoid_obj = GeoidHeight(str(self._geoid_path))
        elif self._geoid_path and self._geoid_path.is_dir():
            search_files = ('egm2008-1.pgm', 'egm2008-2_5.pgm', 'egm2008-5.pgm',
                            'egm96-5.pgm', 'egm96-15.pgm', 'egm84-15.pgm', 'egm84-30.pgm')
            self._geoid_obj = GeoidHeight.from_directory(str(self._geoid_path), search_files=search_files)
        else:
            self._geoid_obj = None

    @property
    def interp_method(self):
        return self._interp_method

    @interp_method.setter
    def interp_method(self, val):
        self._interp_method = str(val)

    def _read_dem_file(self, filename):
        """
        Get the DEM values and TIFF tags from the reader cache, if possible,
        otherwise create a new reader object in the reader cache and
        return its DEM data and TIFF tags.
        """
        for rdr in self._readers:
            if filename == rdr.filename:
                reader = rdr
                break
        else:
            # cache miss: evict the oldest reader (FIFO) when the cache is full
            if len(self._readers) >= self._max_readers:
                self._readers.pop(0)
            reader = GeoTIFF1DegReader(filename)
            self._readers.append(reader)

        return reader.tiff_tags, reader.dem_data

    def get_elevation_native(self, lat, lon, block_size=None):
        """
        Get the elevation value relative to the DEM file's reference surface.

        Parameters
        ----------
        lat : numpy.ndarray | list | tuple | int | float
        lon : numpy.ndarray | list | tuple | int | float
        block_size : int | None (default: None)
            Block processing is not supported; this argument is present to maintain a common interface with
            the DEMInterpolator parent class. A value other than None will result in a warning.

        Returns
        -------
        numpy.ndarray
            The elevation relative to the reference surface of the DEM.
        """
        if block_size is not None:
            warnings.warn("Block processing is not implemented. Full size processing will be used.")  # pragma nocover

        lat = np.atleast_1d(lat)
        lon = np.atleast_1d(lon)
        if lat.shape != lon.shape:
            raise ValueError("The lat and lon arrays are not the same shape.")

        lat_lon_pairs = np.stack([lat.flatten(), lon.flatten()], axis=-1)
        unique_sw_corners = np.unique(np.floor(lat_lon_pairs), axis=0)

        # Get the list of filenames for the unique SW corners, if they exist.
        filename_info = []
        for sw_lat, sw_lon in unique_sw_corners:
            # Adding a fractional offset to the otherwise integer SW corner Lat/Lon values
            # will guarantee that no more than one filename will be found.
            files = self._geotiff_list_obj.find_dem_files(sw_lat + 0.1, sw_lon + 0.1)
            if files:
                filename_info.append({"filename": files[0], "sw_lat": sw_lat, "sw_lon": sw_lon})

        # points not covered by any tile keep a height of zero
        height = np.zeros(lat.size)
        for info in filename_info:
            filename = info["filename"]
            tile_sw_lat = info["sw_lat"]
            tile_sw_lon = info["sw_lon"]
            tile_ne_lat = tile_sw_lat + 1
            tile_ne_lon = tile_sw_lon + 1

            tiff_tags, dem_data = self._read_dem_file(filename)

            # warn when the GeoTIFF metadata suggests a reference surface of a
            # different family (EGM vs WGS) than the explicitly configured one
            gpars = tiff_tags.get('GeoAsciiParamsTag', ('',))[0].upper()
            implied_ref_surface = ('EGM84' if any([p in gpars for p in ['EGM84', 'EGM 84', 'EGM-84']]) else
                                   'EGM96' if any([p in gpars for p in ['EGM96', 'EGM 96', 'EGM-96']]) else
                                   'EGM2008' if any([p in gpars for p in ['EGM2008', 'EGM 2008', 'EGM-2008']]) else
                                   'EGM2020' if any([p in gpars for p in ['EGM2020', 'EGM 2020', 'EGM-2020']]) else
                                   'WGS84' if any([p in gpars for p in ['WGS84', 'WGS 84', 'WGS-84']]) else
                                   'Unknown')
            if ((self._ref_surface.startswith('EGM') and implied_ref_surface.startswith('WGS')) or
                    (self._ref_surface.startswith('WGS') and implied_ref_surface.startswith('EGM'))):
                msg = (f"(unknown)\n"
                       f"The GeoAsciiParamsTag tag implies that the reference surface is {implied_ref_surface},\n"
                       f"but the explicit reference surface was defined to be {self._ref_surface}.\n"
                       f"This might cause the elevation values to be calculated incorrectly.\n")
                logger.warning(msg)

            tile_num_lats, tile_num_lons = dem_data.shape
            tile_lats = np.linspace(tile_ne_lat, tile_sw_lat, tile_num_lats)
            tile_lons = np.linspace(tile_sw_lon, tile_ne_lon, tile_num_lons)

            # Old versions of scipy.interpolate require that axis samples be in strictly ascending order.
            # Unfortunately, the tile_lats are in strictly descending order. To get the interpolator to
            # work regardless of package version, we will negate tile_lats and the lat-part of lat_lon_pairs.
            neg_tile_lats = -tile_lats
            neg_lat_lon_pairs = [(-lat, lon) for lat, lon in lat_lon_pairs]
            interp = RegularGridInterpolator((neg_tile_lats, tile_lons), dem_data, method=self._interp_method,
                                             bounds_error=False, fill_value=np.nan)
            interp_height = interp(neg_lat_lon_pairs)
            # only points inside this tile produce non-NaN values; fill those in
            mask = np.logical_not(np.isnan(interp_height))
            height[mask] = interp_height[mask]

        return height.reshape(lat.shape)

    def get_elevation_hae(self, lat, lon, block_size=None):
        """
        Get the elevation value relative to the WGS84 ellipsoid.

        Parameters
        ----------
        lat : numpy.ndarray | list | tuple | int | float
        lon : numpy.ndarray | list | tuple | int | float
        block_size : int | None (default: None)
            Block processing is not supported; this argument is present to maintain a common interface with
            the DEMInterpolator parent class. A value other than None will result in a warning.

        Returns
        -------
        numpy.ndarray
            The elevation relative to the ellipsoid
        """
        height_native = self.get_elevation_native(lat, lon, block_size=block_size)

        if self._ref_surface.startswith('WGS'):
            return height_native
        elif self._ref_surface.startswith("EGM"):
            if self._geoid_obj is None:
                raise ValueError("The geoid_dir parameter was not defined so geoid calculations are disabled.")
            # HAE = geoid-relative height + geoid undulation
            return height_native + self._geoid_obj.get(lat, lon, block_size=block_size)
        else:
            raise ValueError(f"The reference surface is {self._ref_surface}, which is not supported")

    def get_elevation_geoid(self, lat, lon, block_size=None):
        """
        Get the elevation value relative to the geoid.

        Parameters
        ----------
        lat : numpy.ndarray | list | tuple | int | float
        lon : numpy.ndarray | list | tuple | int | float
        block_size : int | None (default: None)
            Block processing is not supported; this argument is present to maintain a common interface with
            the DEMInterpolator parent class. A value other than None will result in a warning.

        Returns
        -------
        numpy.ndarray
            the elevation relative to the geoid
        """
        height_native = self.get_elevation_native(lat, lon, block_size)

        if self._ref_surface.startswith("EGM"):
            return height_native
        elif self._ref_surface.startswith('WGS'):
            if self._geoid_obj is None:
                raise ValueError("The geoid_dir parameter was not defined so geoid calculations are disabled.")
            # geoid-relative height = HAE - geoid undulation
            return height_native - self._geoid_obj.get(lat, lon, block_size=block_size)
        else:
            raise ValueError(f"The reference surface is {self._ref_surface}, which is not supported.")

    def get_max_hae(self, lat_lon_box=None):
        """
        Get the maximum dem value with respect to the ellipsoid, which should be assumed **approximately** correct.

        Parameters
        ----------
        lat_lon_box : list | numpy.ndarray
            Any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """
        result = self.get_min_max_native(lat_lon_box)
        if self._ref_surface.startswith('WGS'):
            return result['max']['height']
        else:
            return self.get_elevation_hae(result['max']['lat'], result['max']['lon'])[0]

    def get_min_hae(self, lat_lon_box=None):
        """
        Get the minimum dem value with respect to the ellipsoid, which should be assumed **approximately** correct.

        Parameters
        ----------
        lat_lon_box : list | numpy.ndarray
            Any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """
        result = self.get_min_max_native(lat_lon_box)
        if self._ref_surface.startswith('WGS'):
            return result['min']['height']
        else:
            return self.get_elevation_hae(result['min']['lat'], result['min']['lon'])[0]

    def get_max_geoid(self, lat_lon_box=None):
        """
        Get the maximum dem value with respect to the geoid, which should be assumed **approximately** correct.

        Parameters
        ----------
        lat_lon_box : list | numpy.ndarray
            Any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """
        result = self.get_min_max_native(lat_lon_box)
        if self._ref_surface.startswith('EGM'):
            return result['max']['height']
        else:
            return self.get_elevation_geoid(result['max']['lat'], result['max']['lon'])[0]

    def get_min_geoid(self, lat_lon_box=None):
        """
        Get the minimum dem value with respect to geoid, which should be assumed **approximately** correct.

        Parameters
        ----------
        lat_lon_box : list | numpy.ndarray
            Any area of interest of the form `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        float
        """
        result = self.get_min_max_native(lat_lon_box)
        if self._ref_surface.startswith('EGM'):
            return result['min']['height']
        else:
            return self.get_elevation_geoid(result['min']['lat'], result['min']['lon'])[0]

    def get_min_max_native(self, lat_lon_box):
        """
        Get the minimum and maximum dem value with respect to the native reference surface of the DEM.

        Parameters
        ----------
        lat_lon_box : List | numpy.ndarray
            The bounding box to search `[lat min lat max, lon min, lon max]`.

        Returns
        -------
        dict: {"box": lat_lon_box,
               "min": {"lat": lat_deg, "lon": lon_deg, "height": height},
               "max": {"lat": lat_deg, "lon": lon_deg, "height": height}
               }
        """
        if np.all(self._bounding_box_cache.get("box", []) == lat_lon_box):
            # If we have already done this calculation then don't do it again.
            return self._bounding_box_cache

        box_lat_min, box_lat_max, box_lon_min, box_lon_max = lat_lon_box

        # collect the tile files whose one-degree footprint intersects the box
        filename_info = []
        for sw_lat in np.arange(np.floor(box_lat_min), np.floor(box_lat_max) + 1):
            for sw_lon in np.arange(np.floor(box_lon_min), np.floor(box_lon_max) + 1):
                files = self._geotiff_list_obj.find_dem_files(sw_lat + 0.1, sw_lon + 0.1)
                if files:
                    filename_info.append({"filename": files[0], "sw_lat": sw_lat, "sw_lon": sw_lon})

        # Initialize so that the global min and max occur at the same lat/lon and have a value of zero.
        # These values will be returned if the bounding box is completely outside the available DEM tiles.
        global_min_lat = box_lat_min
        global_max_lat = box_lat_min
        global_min_lon = box_lon_min
        global_max_lon = box_lon_min
        global_min = np.inf if filename_info else 0
        global_max = -np.inf if filename_info else 0
        for info in filename_info:
            filename = info["filename"]
            tile_sw_lat = info["sw_lat"]
            tile_sw_lon = info["sw_lon"]
            tile_ne_lat = tile_sw_lat + 1

            tiff_tags, dem_data = self._read_dem_file(filename)
            tile_num_lats = tiff_tags['ImageLength'][0]
            tile_num_lons = tiff_tags['ImageWidth'][0]

            # Lat index is in descending order, so calculate the offset from the north edge (lowest index)
            lat_start_offset = max(0, tile_ne_lat - box_lat_max)
            lat_stop_offset = min(1, tile_ne_lat - box_lat_min)

            # Lon index is in ascending order, so calculate the offset from the west edge (lowest index)
            lon_start_offset = max(0, box_lon_min - tile_sw_lon)
            lon_stop_offset = min(1, box_lon_max - tile_sw_lon)

            # Lat index is in descending order, so start at the north edge (lowest index)
            row_start = int(np.ceil(lat_start_offset * (tile_num_lats - 1)))
            row_stop = int(np.floor(lat_stop_offset * (tile_num_lats - 1)))

            # Lon index is in ascending order, so start at the west edge (lowest index)
            col_start = int(np.ceil(lon_start_offset * (tile_num_lons - 1)))
            col_stop = int(np.floor(lon_stop_offset * (tile_num_lons - 1)))

            dem_slice = dem_data[row_start:row_stop + 1, col_start:col_stop + 1]
            max_index = np.unravel_index(np.argmax(dem_slice), shape=dem_slice.shape)
            min_index = np.unravel_index(np.argmin(dem_slice), shape=dem_slice.shape)

            if global_max < dem_slice[max_index]:
                global_max = dem_slice[max_index]
                global_max_lat = tile_ne_lat - lat_start_offset - max_index[0] / (tile_num_lats - 1)
                global_max_lon = tile_sw_lon + lon_start_offset + max_index[1] / (tile_num_lons - 1)

            if global_min > dem_slice[min_index]:
                global_min = dem_slice[min_index]
                global_min_lat = tile_ne_lat - lat_start_offset - min_index[0] / (tile_num_lats - 1)
                global_min_lon = tile_sw_lon + lon_start_offset + min_index[1] / (tile_num_lons - 1)

        self._bounding_box_cache = {"box": lat_lon_box,
                                    "min": {"lat": global_min_lat, "lon": global_min_lon, "height": float(global_min)},
                                    "max": {"lat": global_max_lat, "lon": global_max_lon, "height": float(global_max)}
                                    }
        return self._bounding_box_cache
# ---------------------------------------------------------------------------------------------------------------------
# GeoTIFF1DegList
# ---------------------------------------------------------------------------------------------------------------------
class GeoTIFF1DegList(DEMList):
    """
    GeoTIFF subclass of sarpy.io.DEM.DEMList
    This class contains methods used to determine which GeoTIFF files are needed to cover
    a specified geodetic bounding box.
    Args
    ----
    dem_filename_pattern : str
        This is a format string that specifies the glob pattern that will uniquely specify a DEM file from
        the Lat/Lon of the SW corner of the DEM tile.  See the note below for more details.
    missing_error: bool (default: False)
        Optional flag indicating whether an exception will be raised when missing DEM data files are encountered.
        If True then a ValueError will be raised when a needed DEM data file can not be found.
        If False then a DEM value of zero will be used when a needed DEM data file is not found.
    Notes
    -----
    The DEM files must have the SW corner Lat/Lon encoded in their filenames.
    The dem_filename_pattern argument contains a format string that, when populated,
    will create a glob pattern that will specify the desired DEM file.  The following
    arguments are provided to the format string.
        lat = int(numpy.floor(lat))
        lon = int(numpy.floor(lon))
        abslat = int(abs(numpy.floor(lat)))
        abslon = int(abs(numpy.floor(lon)))
        ns = 's' if lat < 0 else 'n'
        NS = 'S' if lat < 0 else 'N'
        ew = 'w' if lon < 0 else 'e'
        EW = 'W' if lon < 0 else 'E'
    An example (with Linux file separators):
        "/dem_root/tdt_{ns}{abslat:02}{ew}{abslon:03}_*/DEM/TDT_{NS}{abslat:02}{EW}{abslon:03}_*_DEM.tif"
    will match filenames like:
        "/dem_root/tdt_n45e013_02/DEM/TDT_N45E013_02_DEM.tif"
        "/dem_root/tdt_s09w140_01/DEM/TDT_S09W140_01_DEM.tif"
    """
    __slots__ = ('_dem_filename_pattern', '_missing_error')

    def __init__(self, dem_filename_pattern, missing_error=False):
        self._dem_filename_pattern = str(dem_filename_pattern)
        self._missing_error = bool(missing_error)

    @staticmethod
    def filename_from_lat_lon(lat, lon, pattern):
        """
        This method will return the filename glob of the GeoTIFF file that contains the specified latitude/longitude.
        """
        pars = {
            "lat": int(np.floor(lat)),
            "lon": int(np.floor(lon)),
            "abslat": int(abs(np.floor(lat))),
            "abslon": int(abs(np.floor(lon))),
            "ns": 's' if lat < 0 else 'n',
            "ew": 'w' if lon < 0 else 'e',
            "NS": 'S' if lat < 0 else 'N',
            "EW": 'W' if lon < 0 else 'E',
        }

        # Any replacement field in the pattern that is not one of the names
        # above is re-emitted verbatim (as "{key}") rather than raising
        # KeyError, so patterns may carry fields substituted elsewhere.
        class SkipMissing(dict):
            def __missing__(self, key):
                return f'{{{key}}}'

        return pattern.format_map(SkipMissing(**pars))

    def find_dem_files(self, lat, lon):
        """
        Return a list of filenames of GeoTIFF files that contain DEM data for the specified Lat/Lon point.
        Since DEM files overlap, there might be more than one file that contains the specified Lat/Lon point.
        Args
        ----
        lat: int | float
            The latitude in degrees (-90 <= lat <= 90)
        lon: int | float
            The longitude in degrees (-180 <= lon < 180)
        Returns
        -------
        filenames: list(str)
            A list of filenames of DEM data files, if they exists, otherwise []
        """
        msg = [] if -90.0 <= lat <= 90.0 else ["The latitude value must be between [-90, +90]"]
        msg += [] if -180.0 <= lon < 180.0 else ["The longitude value must be between [-180, +180)"]
        if msg:
            raise ValueError('\n'.join(msg))

        sw_lats = [89 if lat == 90 else np.floor(lat)]  # The latitude of the south-west corner in integer degrees
        sw_lons = [np.floor(lon)]  # The longitude of the south-west corner in integer degrees

        # Tiles share one row/column of pixels along integer latitudes and
        # longitudes, so an exactly-integer coordinate belongs to the
        # neighboring tile(s) as well.
        if lat == np.floor(lat) and np.abs(lat) < 90:
            # lat is an integer, so it is in the overlap region of at least two files
            sw_lats.append(np.floor(lat) - 1)

        if lon == np.floor(lon):
            # lon is an integer, so it is in the overlap region of at least two files.
            # At the anti-meridian (-180) the western neighbor wraps to 179.
            sw_lons.append(179 if lon == -180 else np.floor(lon) - 1)

        filenames = []
        for sw_lat in sw_lats:
            for sw_lon in sw_lons:
                glob_pattern = self.filename_from_lat_lon(int(sw_lat), int(sw_lon), self._dem_filename_pattern)

                for filename in glob.glob(glob_pattern):
                    if pathlib.Path(filename).is_file():
                        # The glob should not return more than one filename,
                        # but if it does then keep only the first.
                        filenames.append(filename)
                        break
                # for/else: runs only when the glob produced no usable file
                # for this corner (i.e., the loop completed without break).
                else:
                    msg = f'Missing expected DEM file for tile with lower left lat/lon corner ({sw_lat}, {sw_lon})'
                    if self._missing_error:
                        raise ValueError(msg)
                    else:
                        logger.warning(
                            msg + '\n\tThis should result in the assumption that the altitude in\n\t'
                            'that section is zero relative to the reference surface.')

        return filenames

    def get_file_list(self, lat_lon_box):
        """
        This will return the list of files associated with covering the `lat_lon_box` using a DEM.
        If the bounding box spans the anti-meridian (180th meridian), then the maximum longitude
        will be less than the minimum longitude.
        Args
        ----
        lat_lon_box : numpy.ndarray | list | tuple
            The bounding box of the form `[lat min, lat max, lon min, lon max]` in degrees.
        Returns
        -------
        filenames: List[str]
            A list of filenames, without duplication, of the files needed to cover the bounding box.
        """
        filenames = []

        lat_min, lat_max, lon_min, lon_max = lat_lon_box

        msg = ["The minimum latitude value must be between [-90, +90]"] if not (-90.0 <= lat_min <= 90.0) else []
        msg += ["The maximum latitude value must be between [-90, +90]"] if not (-90.0 <= lat_max <= 90.0) else []
        msg += ["The minimum longitude value must be between [-180, +180)"] if not (-180.0 <= lon_min < 180.0) else []
        msg += ["The maximum longitude value must be between [-180, +180)"] if not (-180.0 <= lon_max < 180.0) else []
        if msg:
            raise ValueError('\n'.join(msg))

        # A box spanning the anti-meridian arrives with lon_max < lon_min;
        # unwrap it into a monotone range so the loop below covers it.
        if lon_max < lon_min:
            lon_max += 360

        for lat_inc in np.arange(np.ceil(lat_max) - np.floor(lat_min)):
            lat = lat_min + lat_inc

            for lon_inc in np.arange(np.ceil(lon_max) - np.floor(lon_min)):
                # re-wrap the unwrapped longitude back into [-180, 180)
                lon = (lon_min + lon_inc + 180) % 360 - 180

                new_filenames = self.find_dem_files(lat, lon)
                new_unique_filenames = [file for file in new_filenames if file not in filenames]
                filenames.extend(new_unique_filenames)

        return filenames
| 27,090 | 42.624799 | 119 | py |
sarpy | sarpy-master/sarpy/io/DEM/utils.py |
import numpy
__author__ = "Thomas McCullough"
__classification__ = "UNCLASSIFIED"
def argument_validation(lat, lon):
    """
    Coerce matching lat/lon inputs to flat numpy arrays.

    Parameters
    ----------
    lat : numpy.ndarray|list|tuple|int|float
    lon : numpy.ndarray|list|tuple|int|float

    Returns
    -------
    (tuple, numpy.ndarray, numpy.ndarray)
        The original common shape, followed by the flattened latitude and
        longitude arrays.
    """

    lat_array = lat if isinstance(lat, numpy.ndarray) else numpy.array(lat)
    lon_array = lon if isinstance(lon, numpy.ndarray) else numpy.array(lon)
    if lat_array.shape != lon_array.shape:
        raise ValueError(
            'lat and lon must have the same shape, got '
            'lat.shape = {}, lon.shape = {}'.format(lat_array.shape, lon_array.shape))
    o_shape = lat_array.shape
    return o_shape, numpy.reshape(lat_array, (-1,)), numpy.reshape(lon_array, (-1,))
| 584 | 25.590909 | 74 | py |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.