idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
58,500
def has_matching_etag(remote_storage, source_storage, path, prefixed_path):
    """Compare the etag of ``path`` in the source storage with the remote copy."""
    remote_hash = get_etag(remote_storage, path, prefixed_path)
    local_hash = get_file_hash(source_storage, path)
    return remote_hash == local_hash
Compare etag of path in source storage with remote .
58,501
def should_copy_file(remote_storage, path, prefixed_path, source_storage):
    """Return True if the file should be copied, otherwise False."""
    if has_matching_etag(remote_storage, source_storage, path, prefixed_path):
        logger.info("%s: Skipping based on matching file hashes" % path)
        return False
    # Hashes differ: drop the cached etag so it is refreshed after the copy.
    destroy_etag(path)
    logger.info("%s: Hashes did not match" % path)
    return True
Returns True if the file should be copied otherwise False .
58,502
def set_options(self, **options):
    """Set options and handle the --ignore-etag deprecation."""
    ignore_etag = options.pop('ignore_etag', False)
    disable = options.pop('disable_collectfast', False)
    if ignore_etag:
        warnings.warn(
            "--ignore-etag is deprecated since 0.5.0, use "
            "--disable-collectfast instead.")
    if ignore_etag or disable:
        self.collectfast_enabled = False
    super(Command, self).set_options(**options)
Set options and handle deprecation .
58,503
def handle(self, **options):
    """Run collectstatic, then return our own one-line copy summary."""
    super(Command, self).handle(**options)
    copied = self.num_copied_files
    return "{} static file{} copied.".format(copied, '' if copied == 1 else 's')
Override handle to suppress the default summary output .
58,504
def do_copy_file(self, args):
    """Determine if the file should be copied and handle exceptions.

    Errors raised by Collectfast are swallowed (unless debug is on) so
    collection can fall back to the default collectstatic behaviour.
    """
    path, prefixed_path, source_storage = args
    # Reset the storage connection to mitigate stale worker connections.
    reset_connection(self.storage)
    if self.collectfast_enabled and not self.dry_run:
        try:
            if not should_copy_file(
                    self.storage, path, prefixed_path, source_storage):
                return False
        except Exception as e:
            if settings.debug:
                raise
            # NOTE(review): the original message literal was corrupted in
            # extraction; restored to the upstream Collectfast wording.
            self.stdout.write(smart_str(
                "Ignored error in Collectfast:\n%s\nFalling back to "
                "default collectstatic." % e))
    self.num_copied_files += 1
    return super(Command, self).copy_file(path, prefixed_path, source_storage)
Determine if file should be copied or not and handle exceptions .
58,505
def copy_file(self, path, prefixed_path, source_storage):
    """Queue the copy when threading is enabled, otherwise copy now (blocking)."""
    task = (path, prefixed_path, source_storage)
    if settings.threads:
        self.tasks.append(task)
    else:
        self.do_copy_file(task)
Appends path to task queue if threads are enabled otherwise copies the file with a blocking call .
58,506
def delete_file(self, path, prefixed_path, source_storage):
    """Delete the remote file, skipping modified-time and exists lookups."""
    if not self.collectfast_enabled:
        return super(Command, self).delete_file(path, prefixed_path, source_storage)
    if self.dry_run:
        self.log("Pretending to delete '%s'" % path)
    else:
        self.log("Deleting '%s'" % path)
        self.storage.delete(prefixed_path)
    return True
Override delete_file to skip modified time and exists lookups .
58,507
def join(rasters):
    """Build one raster constructed from a list of rasters."""
    first = rasters[0]
    mask_band = None
    with first._raster_opener(first.source_file) as r:
        nodata = r.nodata
        per_dataset_mask = all(
            rasterio.enums.MaskFlags.per_dataset in flags
            for flags in r.mask_flag_enums
        )
    if per_dataset_mask and nodata is None:
        mask_band = 0
    return GeoRaster2.from_rasters(
        rasters, relative_to_vrt=False, nodata=nodata, mask_band=mask_band)
This method takes a list of rasters and returns a raster that is constructed of all of them
58,508
def merge_all(rasters, roi=None, dest_resolution=None,
              merge_strategy=MergeStrategy.UNION, shape=None, ul_corner=None,
              crs=None, pixel_strategy=PixelStrategy.FIRST,
              resampling=Resampling.nearest):
    """Merge a list of rasters, cropping by a region of interest.

    When ``roi`` is not precise enough, ``ul_corner``, ``shape`` and
    ``crs`` may be used to define the target area precisely; they are
    ignored when ``roi`` is provided.
    """
    first_raster = rasters[0]
    if roi:
        crs = crs or roi.crs
        dest_resolution = dest_resolution or _dest_resolution(first_raster, crs)

    # Target canvas the merged result is painted onto.
    empty = GeoRaster2.empty_from_roi(
        roi, resolution=dest_resolution, band_names=first_raster.band_names,
        dtype=first_raster.dtype, shape=shape, ul_corner=ul_corner, crs=crs)

    all_band_names, projected_rasters = _prepare_rasters(
        rasters, merge_strategy, empty, resampling=resampling)
    assert len(projected_rasters) == len(rasters)

    prepared_rasters = _apply_pixel_strategy(projected_rasters, pixel_strategy)
    prepared_rasters = _explode_rasters(prepared_rasters, all_band_names)

    if all_band_names:
        prepared_rasters = _merge_common_bands(prepared_rasters)
        raster = reduce(_stack_bands, prepared_rasters)
        return empty.copy_with(image=raster.image, band_names=raster.band_names)
    else:
        raise ValueError("result contains no bands, use another merge strategy")
Merge a list of rasters , cropping by a region of interest . In cases where the roi is not precise enough , one can use the upper-left corner , the shape and the crs to precisely define the roi . When roi is provided , the ul_corner , shape and crs are ignored .
58,509
def _merge_common_bands(rasters):
    """Combine single-band rasters that share the same band name."""
    band_order = IndexedSet(raster.band_names[0] for raster in rasters)

    def position(raster):
        return band_order.index(raster.band_names[0])

    merged = []
    for _, group in groupby(sorted(rasters, key=position), key=position):
        merged.append(reduce(_fill_pixels, group))
    return merged
Combine the common bands .
58,510
def _explode_raster(raster, band_names=None):
    """Split a raster into a list of single-band rasters.

    :param raster: raster to split
    :param band_names: optional subset of band names to keep; all bands
        when omitted. (The original used a mutable default ``[]``; replaced
        with ``None`` per best practice — behaviour is unchanged, since the
        falsy check treats ``None`` and ``[]`` identically.)
    :return: list of single-band ``_Raster`` objects
    """
    if not band_names:
        band_names = raster.band_names
    else:
        # Intersect while preserving the raster's own band order.
        band_names = list(IndexedSet(raster.band_names).intersection(band_names))
    return [
        _Raster(image=raster.bands_data([band_name]), band_names=[band_name])
        for band_name in band_names
    ]
Splits a raster into multiband rasters .
58,511
def _fill_pixels(one, other):
    """Merge two single-band rasters of the same band by fill-by-depth.

    Pixels already valid in ``one`` win; pixels valid only in ``other``
    fill the gaps.
    """
    assert len(one.band_names) == len(other.band_names) == 1, "Rasters are not single band"
    if one.band_names != other.band_names:
        raise ValueError("rasters have no bands in common, use another merge strategy")

    new_image = one.image.copy()
    other_image = other.image

    # True where `other` is masked OR `one` already has data — i.e. the
    # positions we must NOT take from `other`.
    keep_mask = (np.ma.getmaskarray(other_image)[0] |
                 (~np.ma.getmaskarray(one.image)[0]))
    keep_mask = keep_mask[None, ...]

    new_image[~keep_mask] = other_image[~keep_mask]
    return _Raster(image=new_image, band_names=one.band_names)
Merges two single band rasters with the same band by filling the pixels according to depth .
58,512
def _stack_bands(one, other):
    """Stack two rasters with non-overlapping band sets into one raster."""
    assert set(one.band_names).intersection(set(other.band_names)) == set()
    if one.band_names == other.band_names:
        raise ValueError("rasters have the same bands, use another merge strategy")

    # A pixel is masked in the result if it is masked in either input.
    combined_mask = (np.ma.getmaskarray(one.image)[0] |
                     np.ma.getmaskarray(other.image)[0])
    total_bands = one.image.shape[0] + other.image.shape[0]
    stacked = np.ma.masked_array(
        np.concatenate([one.image.data, other.image.data]),
        mask=[combined_mask] * total_bands,
    )
    return _Raster(image=stacked, band_names=one.band_names + other.band_names)
Merges two rasters with non overlapping bands by stacking the bands .
58,513
def merge_two(one, other, merge_strategy=MergeStrategy.UNION, silent=False,
              pixel_strategy=PixelStrategy.FIRST):
    """Merge two rasters into one.

    :param silent: when True, return ``one`` instead of raising when the
        rasters cannot be merged.
    """
    other_res = _prepare_other_raster(one, other)
    if other_res is None:
        if silent:
            return one
        raise ValueError("rasters do not intersect")
    other = other.copy_with(image=other_res.image, band_names=other_res.band_names)

    all_band_names, projected_rasters = _prepare_rasters(
        [other], merge_strategy, first=one)
    if not all_band_names and not silent:
        raise ValueError("rasters have no bands in common, use another merge strategy")

    prepared_rasters = _apply_pixel_strategy(projected_rasters, pixel_strategy)
    prepared_rasters = _explode_rasters(prepared_rasters, all_band_names)
    prepared_rasters = _merge_common_bands(
        _explode_raster(one, all_band_names) + prepared_rasters)

    raster = reduce(_stack_bands, prepared_rasters)
    return one.copy_with(image=raster.image, band_names=raster.band_names)
Merge two rasters into one .
58,514
def _set_image(self, image, nodata=None):
    """Set ``self._image`` from an ndarray or masked array.

    2d arrays are promoted to 3d with a single band; plain ndarrays get a
    mask built from ``nodata``.

    :raises GeoRaster2NotImplementedError: for unsupported image types
    :raises GeoRaster2Error: when the image is neither 2d nor 3d
    """
    if isinstance(image, np.ma.core.MaskedArray):
        masked = image
    elif isinstance(image, np.ndarray):
        # np.core.ndarray is a deprecated alias (removed in NumPy 2);
        # np.ndarray is the same class, so behaviour is unchanged.
        masked = self._build_masked_array(image, nodata)
    else:
        raise GeoRaster2NotImplementedError(
            'only ndarray or masked array supported, got %s' % type(image))

    if len(masked.shape) == 3:
        self._image = masked
    elif len(masked.shape) == 2:
        self._image = masked[np.newaxis, :, :]
    else:
        raise GeoRaster2Error('expected 2d or 3d image, got shape=%s' % masked.shape)

    if self._shape is None:
        self._set_shape(self._image.shape)

    self._image_after_load_validations()

    if self._image_readonly:
        self._image.setflags(write=0)
Set self . _image .
58,515
def from_wms(cls, filename, vector, resolution, destination_file=None):
    """Create a GeoRaster from a web service definition file."""
    doc = wms_vrt(filename, bounds=vector, resolution=resolution).tostring()
    vrt_filename = cls._save_to_destination_file(doc, destination_file)
    return GeoRaster2.open(vrt_filename)
Create georaster from the web service definition file .
58,516
def from_rasters(cls, rasters, relative_to_vrt=True, destination_file=None,
                 nodata=None, mask_band=None):
    """Create a GeoRaster out of a list (or collection) of rasters."""
    if isinstance(rasters, list):
        doc = raster_list_vrt(rasters, relative_to_vrt, nodata, mask_band).tostring()
    else:
        doc = raster_collection_vrt(rasters, relative_to_vrt, nodata, mask_band).tostring()
    vrt_filename = cls._save_to_destination_file(doc, destination_file)
    return GeoRaster2.open(vrt_filename)
Create georaster out of a list of rasters .
58,517
def open(cls, filename, band_names=None, lazy_load=True, mutable=False, **kwargs):
    """Read a GeoRaster from a file.

    :param lazy_load: when False, the image is read eagerly.
    :param mutable: when True, return a MutableGeoRaster instead of ``cls``.
    """
    raster_cls = MutableGeoRaster if mutable else cls
    geo_raster = raster_cls(filename=filename, band_names=band_names, **kwargs)
    if not lazy_load:
        geo_raster._populate_from_rasterio_object(read_image=True)
    return geo_raster
Read a georaster from a file .
58,518
def tags(cls, filename, namespace=None):
    """Extract tags from a raster file.

    Fix: the original leaked the opened dataset; use a context manager so
    the file handle is closed (the opener is already used as a context
    manager elsewhere in this module).
    """
    with cls._raster_opener(filename) as raster:
        return raster.tags(ns=namespace)
Extract tags from file .
58,519
def image(self):
    """Raster bitmap as a numpy array, lazy-loaded on first access."""
    if self._image is not None:
        return self._image
    self._populate_from_rasterio_object(read_image=True)
    return self._image
Raster bitmap in numpy array .
58,520
def crs(self):
    """Raster CRS, lazy-loaded without reading the image."""
    if self._crs is not None:
        return self._crs
    self._populate_from_rasterio_object(read_image=False)
    return self._crs
Raster crs .
58,521
def shape(self):
    """Raster shape, lazy-loaded without reading the image."""
    if self._shape is not None:
        return self._shape
    self._populate_from_rasterio_object(read_image=False)
    return self._shape
Raster shape .
58,522
def source_file(self):
    """Return the backing filename, materializing an in-memory GeoTIFF if needed."""
    if self._filename is None:
        self._filename = self._as_in_memory_geotiff()._filename
    return self._filename
Returns the filename that was used when the raster was opened via open .
58,523
def blockshapes(self):
    """Block shape of every band (lazy)."""
    if self._blockshapes is None:
        if self._filename:
            self._populate_from_rasterio_object(read_image=False)
        else:
            # No file backing: each band is one full-size block.
            self._blockshapes = [(self.height, self.width)] * self.num_bands
    return self._blockshapes
Raster all bands block shape .
58,524
def get(self, point):
    """Get the pixel values at the requested point.

    :param point: a GeoVector of type Point
    :raises TypeError: for anything else
    """
    if not (isinstance(point, GeoVector) and point.type == 'Point'):
        raise TypeError('expect GeoVector(Point), got %s' % (point,))
    pixel = self.to_raster(point)
    return self.image[:, int(pixel.y), int(pixel.x)]
Get the pixel values at the requested point .
58,525
def copy(self, mutable=False):
    """Return a copy of this GeoRaster with no modifications."""
    if not self.not_loaded():
        return self.copy_with(mutable=mutable)
    target_cls = MutableGeoRaster if mutable else self.__class__
    return target_cls.open(self._filename)
Return a copy of this GeoRaster with no modifications .
58,526
def _resize(self, ratio_x, ratio_y, resampling):
    """Return this raster resized by the given per-axis ratios."""
    new_width = int(np.ceil(self.width * ratio_x))
    new_height = int(np.ceil(self.height * ratio_y))
    dest_affine = self.affine * Affine.scale(1 / ratio_x, 1 / ratio_y)
    if self.not_loaded():
        # Let the reader decimate while reading instead of loading full-res.
        full_window = rasterio.windows.Window(0, 0, self.width, self.height)
        return self.get_window(full_window, xsize=new_width, ysize=new_height,
                               resampling=resampling)
    return self._reproject(new_width, new_height, dest_affine,
                           resampling=resampling)
Return raster resized by ratio .
58,527
def to_pillow_image(self, return_mask=False):
    """Return a PIL Image (and optionally the mask as a second Image)."""
    hwc = np.rollaxis(np.rollaxis(self.image.data, 2), 2)
    img = Image.fromarray(hwc[:, :, 0]) if hwc.shape[2] == 1 else Image.fromarray(hwc)
    if not return_mask:
        return img
    mask = np.ma.getmaskarray(self.image)
    mask_img = Image.fromarray(
        np.rollaxis(np.rollaxis(mask, 2), 2).astype(np.uint8)[:, :, 0])
    return img, mask_img
Return Pillow . Image and optionally also mask .
58,528
def from_bytes(cls, image_bytes, affine, crs, band_names=None):
    """Create a GeoRaster2 from an encoded image BytesIO payload."""
    buf = io.BytesIO(image_bytes)
    decoded = imageio.imread(buf)
    bands_first = np.rollaxis(decoded, 2)
    if band_names is None:
        band_names = [0, 1, 2]
    elif isinstance(band_names, str):
        band_names = [band_names]
    # Keep only the first three bands (drops any alpha channel).
    return GeoRaster2(image=bands_first[:3, :, :], affine=affine, crs=crs,
                      band_names=band_names)
Create GeoRaster from image BytesIo object .
58,529
def _repr_html_(self):
    """Show the raster as an interactive folium map in Jupyter notebooks."""
    TileServer.run_tileserver(self, self.footprint())
    caption = "raster: %s" % self._filename
    folium_map = TileServer.folium_client(self, self.footprint(), capture=caption)
    return folium_map._repr_html_()
Required for jupyter notebook to show raster as an interactive map .
58,530
def image_corner(self, corner):
    """Return an image corner in pixels as a shapely Point.

    :param corner: one of ``self.corner_types()``
    :raises GeoRaster2Error: for an unknown corner name
    """
    if corner not in self.corner_types():
        raise GeoRaster2Error('corner %s invalid, expected: %s' % (corner, self.corner_types()))
    x = self.width if corner[1] != 'l' else 0
    y = self.height if corner[0] != 'u' else 0
    return Point(x, y)
Return image corner in pixels as shapely . Point .
58,531
def center(self):
    """Return the footprint center in world coordinates as a GeoVector."""
    midpoint = Point(self.width / 2, self.height / 2)
    return self.to_world(midpoint)
Return footprint center in world coordinates as GeoVector .
58,532
def bounds(self):
    """Return the image rectangle in pixels as a shapely Polygon."""
    points = (self.image_corner(corner) for corner in self.corner_types())
    return Polygon([[pt.x, pt.y] for pt in points])
Return image rectangle in pixels as shapely . Polygon .
58,533
def _calc_footprint(self):
    """Compute the raster footprint in world coordinates as a GeoVector."""
    coords = []
    for corner_type in self.corner_types():
        corner = self.corner(corner_type)
        pt = corner.get_shape(corner.crs)
        coords.append([pt.x, pt.y])
    self._footprint = GeoVector(Polygon(coords), self.crs)
    return self._footprint
Return rectangle in world coordinates as GeoVector .
58,534
def to_raster(self, vector):
    """Return the vector in pixel coordinates as a shapely geometry."""
    world_shape = vector.get_shape(vector.crs)
    return transform(world_shape, vector.crs, self.crs, dst_affine=~self.affine)
Return the vector in pixel coordinates as shapely . Geometry .
58,535
def reduce(self, op):
    """Reduce each band to a scalar using the named ``np.ma`` operation.

    :param op: name of a function in ``np.ma`` (e.g. 'min', 'mean')
    :return: list with one value per band, masked pixels excluded
    """
    func = getattr(np.ma, op)
    mask = np.ma.getmaskarray(self.image)
    return [
        func(self.image.data[band, mask[band, :, :] == np.False_])
        for band in range(self.num_bands)
    ]
Reduce the raster to a score using op operation .
58,536
def mask(self, vector, mask_shape_nodata=False):
    """Set pixels outside ``vector`` as nodata.

    :param mask_shape_nodata: when True, mask the inside of the shapes instead.
    """
    from telluric.collections import BaseCollection

    # Crop first so only the relevant area is rasterized.
    cropped = self.crop(vector)
    if isinstance(vector, BaseCollection):
        shapes = [cropped.to_raster(feature) for feature in vector]
    else:
        shapes = [cropped.to_raster(vector)]
    geom_mask = geometry_mask(shapes, (cropped.height, cropped.width),
                              Affine.identity(), invert=mask_shape_nodata)
    masked = cropped.deepcopy_with()
    masked.image.mask |= geom_mask
    return masked
Set pixels outside vector as nodata .
58,537
def mask_by_value(self, nodata):
    """Return a raster masked wherever the pixel value equals ``nodata``."""
    masked_image = np.ma.masked_array(self.image.data,
                                      mask=self.image.data == nodata)
    return self.copy_with(image=masked_image)
Return raster with a mask calculated based on provided value . Only pixels with value = nodata will be masked .
58,538
def save_cloud_optimized(self, dest_url, resampling=Resampling.gauss,
                         blocksize=256, overview_blocksize=256,
                         creation_options=None):
    """Save a Cloud Optimized GeoTIFF to ``dest_url`` and reopen it."""
    with tempfile.NamedTemporaryFile(suffix='.tif') as tf:
        # Save without overviews first; the COG conversion adds them.
        self.save(tf.name, overviews=False)
        convert_to_cog(tf.name, dest_url, resampling, blocksize,
                       overview_blocksize, creation_options)
    return GeoRaster2.open(dest_url)
Save as Cloud Optimized GeoTiff object to a new file .
58,539
def _get_window_out_shape ( self , bands , window , xsize , ysize ) : if xsize and ysize is None : ratio = window . width / xsize ysize = math . ceil ( window . height / ratio ) elif ysize and xsize is None : ratio = window . height / ysize xsize = math . ceil ( window . width / ratio ) elif xsize is None and ysize is None : ysize = math . ceil ( window . height ) xsize = math . ceil ( window . width ) return ( len ( bands ) , ysize , xsize )
Get the outshape of a window .
58,540
def _read_with_mask ( raster , masked ) : if masked is None : mask_flags = raster . mask_flag_enums per_dataset_mask = all ( [ rasterio . enums . MaskFlags . per_dataset in flags for flags in mask_flags ] ) masked = per_dataset_mask return masked
Returns whether we should read from rasterio in masked mode .
58,541
def get_window(self, window, bands=None, xsize=None, ysize=None,
               resampling=Resampling.cubic, masked=None, affine=None):
    """Read a window from the raster as a new GeoRaster.

    :param window: rasterio Window to read
    :param bands: list of 1-based band indices; all bands by default
    :param xsize/ysize: optional output size (aspect preserved if only one given)
    :raises GeoRaster2IOError: on rasterio I/O failures
    """
    bands = bands or list(range(1, self.num_bands + 1))
    out_shape = self._get_window_out_shape(bands, window, xsize, ysize)
    try:
        read_params = {
            "window": window,
            "resampling": resampling,
            "boundless": True,
            "out_shape": out_shape,
        }
        filename = self._raster_backed_by_a_file()._filename
        with self._raster_opener(filename) as raster:
            read_params["masked"] = self._read_with_mask(raster, masked)
            array = raster.read(bands, **read_params)
        # Unmasked reads fall back to 0 as the nodata value.
        nodata = None if np.ma.isMaskedArray(array) else 0
        affine = affine or self._calculate_new_affine(window, out_shape[2], out_shape[1])
        return self.copy_with(image=array, affine=affine, nodata=nodata)
    except (rasterio.errors.RasterioIOError, rasterio._err.CPLE_HttpResponseError) as e:
        raise GeoRaster2IOError(e)
Get window from raster .
58,542
def _get_tile_when_web_mercator_crs(self, x_tile, y_tile, zoom,
                                    bands=None, masked=None,
                                    resampling=Resampling.cubic):
    """Window-read a mercator tile when the raster is already web mercator.

    Treated specially because an aligned raster lets us read the exact
    window directly instead of cropping and reprojecting.
    """
    roi = GeoVector.from_xyz(x_tile, y_tile, zoom)
    coordinates = roi.get_bounds(WEB_MERCATOR_CRS)
    window = self._window(coordinates, to_round=False)
    bands = bands or list(range(1, self.num_bands + 1))
    ratio = MERCATOR_RESOLUTION_MAPPING[zoom] / self.resolution()
    affine = self.window_transform(window) * Affine.scale(ratio, ratio)
    window = Window(round(window.col_off), round(window.row_off),
                    round(window.width), round(window.height))
    return self.get_window(window, bands=bands, xsize=256, ysize=256,
                           masked=masked, affine=affine)
The reason we want to treat this case in a special way is that there are cases where the raster is aligned , so you need to be precise about which part of the raster you want .
58,543
def get_tile(self, x_tile, y_tile, zoom, bands=None, masked=None,
             resampling=Resampling.cubic):
    """Return a 256x256 raster covering the given mercator tile."""
    if self.crs == WEB_MERCATOR_CRS:
        return self._get_tile_when_web_mercator_crs(
            x_tile, y_tile, zoom, bands, masked, resampling)

    roi = GeoVector.from_xyz(x_tile, y_tile, zoom)
    left, bottom, right, top = roi.get_bounds(WEB_MERCATOR_CRS)
    new_affine = rasterio.warp.calculate_default_transform(
        WEB_MERCATOR_CRS, self.crs, 256, 256, left, bottom, right, top)[0]
    new_resolution = resolution_from_affine(new_affine)

    # Buffer the ROI a little to avoid edge artifacts when reprojecting.
    buffer_ratio = int(os.environ.get("TELLURIC_GET_TILE_BUFFER", 10))
    roi_buffer = roi.buffer(math.sqrt(roi.area * buffer_ratio / 100))

    raster = self.crop(roi_buffer, resolution=new_resolution, masked=masked,
                       bands=bands, resampling=resampling)
    raster = raster.reproject(dst_crs=WEB_MERCATOR_CRS,
                              resolution=MERCATOR_RESOLUTION_MAPPING[zoom],
                              dst_bounds=roi_buffer.get_bounds(WEB_MERCATOR_CRS),
                              resampling=Resampling.cubic_spline)
    return raster.crop(roi).resize(dest_width=256, dest_height=256)
Convert mercator tile to raster window .
58,544
def colorize(self, colormap, band_name=None, vmin=None, vmax=None):
    """Apply a matplotlib colormap to one band, producing an RGB raster.

    :param band_name: band to colorize; defaults to the first band
    :param vmin/vmax: normalization range; defaults to overall min/max
    """
    vmin = vmin if vmin is not None else min(self.min())
    vmax = vmax if vmax is not None else max(self.max())
    cmap = matplotlib.cm.get_cmap(colormap)

    if band_name is None:
        band_index = 0
        if self.num_bands > 1:
            warnings.warn("Using the first band to colorize the raster", GeoRaster2Warning)
    else:
        band_index = self.band_names.index(band_name)

    normalized = (self.image[band_index, :, :] - vmin) / (vmax - vmin)
    # Colormap output is HxWx4 float in [0, 1]; drop alpha, scale to uint8.
    rgb = cmap(normalized)
    rgb = rgb[:, :, 0:3]
    rgb = rgb * 255
    rgb = rgb.astype(np.uint8)
    rgb = np.rollaxis(rgb, 2)

    # Replicate the single-band mask across the three output bands.
    mask = _join_masks_from_masked_array(self.image)
    mask = np.stack([mask[0, :, :]] * 3)
    array = np.ma.array(rgb.data, mask=mask).filled(0)
    array = np.ma.array(array, mask=mask)
    return self.copy_with(image=array, band_names=['red', 'green', 'blue'])
Apply a colormap on a selected band .
58,545
def chunks(self, shape=256, pad=False):
    """Yield RasterChunk windows covering the whole raster.

    :param shape: chunk size in pixels, int or (width, height)
    :param pad: when True, edge chunks keep the full requested shape
    """
    _self = self._raster_backed_by_a_file()
    if isinstance(shape, int):
        shape = (shape, shape)
    (width, height) = shape

    col_steps = int(_self.width / width)
    row_steps = int(_self.height / height)
    # One extra partial step per axis when the size does not divide evenly.
    col_extra_step = 1 if _self.width % width > 0 else 0
    row_extra_step = 1 if _self.height % height > 0 else 0

    for col_step in range(col_steps + col_extra_step):
        col_off = col_step * width
        if not pad and col_step == col_steps:
            window_width = _self.width % width
        else:
            window_width = width
        for row_step in range(row_steps + row_extra_step):
            row_off = row_step * height
            if not pad and row_step == row_steps:
                window_height = _self.height % height
            else:
                window_height = height
            window = Window(col_off=col_off, row_off=row_off,
                            width=window_width, height=window_height)
            yield RasterChunk(raster=_self.get_window(window),
                              offsets=(col_off, row_off))
This method returns GeoRaster chunks out of the original raster .
58,546
def dissolve(collection, aggfunc=None):
    """Dissolve a FeatureCollection into one feature, aggregating properties.

    :param aggfunc: callable applied to the list of values of each property;
        properties whose aggregation raises are silently dropped.
    """
    new_properties = {}
    if aggfunc:
        collected = defaultdict(list)
        for feature in collection:
            for key, value in feature.attributes.items():
                collected[key].append(value)
        for key, values in collected.items():
            try:
                new_properties[key] = aggfunc(values)
            except Exception:
                # Best-effort aggregation: skip properties that fail.
                pass
    return GeoFeature(collection.cascaded_union, new_properties)
Dissolves features contained in a FeatureCollection and applies an aggregation function to its properties .
58,547
def filter(self, intersects):
    """Filter features that intersect the given GeoFeature or GeoVector."""
    try:
        crs = self.crs
        vector = intersects.geometry if isinstance(intersects, GeoFeature) else intersects
        prepared_shape = prep(vector.get_shape(crs))
        hits = []
        for feature in self:
            candidate = feature.geometry.get_shape(crs)
            if prepared_shape.overlaps(candidate) or prepared_shape.intersects(candidate):
                hits.append(feature)
    except IndexError:
        # NOTE(review): presumably raised for an empty collection — the
        # original preserves this fallback to an empty result.
        hits = []
    return FeatureCollection(hits)
Filter results that intersect a given GeoFeature or Vector .
58,548
def sort(self, by, desc=False):
    """Sort features by a property name or a key function."""
    if callable(by):
        key = by
    else:
        def key(feature):
            return feature[by]
    ordered = sorted(list(self), reverse=desc, key=key)
    return self.__class__(ordered)
Sorts by given property or function ascending or descending order .
58,549
def groupby(self, by):
    """Group features by a property name or a key function.

    :param by: property name, or a callable mapping feature -> group key
    :return: _CollectionGroupBy over the grouped features
    """
    groups = OrderedDict()
    for feature in self:
        value = by(feature) if callable(by) else feature[by]
        groups.setdefault(value, []).append(feature)
    # Bug fix: the original only returned when `_schema` existed, so
    # collections without a schema got None (or a NameError on `schema`).
    # Always return the grouping; pass None when there is no schema.
    schema = getattr(self, "_schema", None)
    return _CollectionGroupBy(groups, schema=schema)
Groups collection using a value of a property .
58,550
def dissolve(self, by=None, aggfunc=None):
    """Dissolve geometries, optionally within groups defined by ``by``."""
    if not by:
        return FeatureCollection([dissolve(self, aggfunc)])
    agg = partial(dissolve, aggfunc=aggfunc)
    return self.groupby(by).agg(agg)
Dissolve geometries and rasters within groupby .
58,551
def rasterize(self, dest_resolution, *, polygonize_width=0, crs=WEB_MERCATOR_CRS,
              fill_value=None, bounds=None, dtype=None, **polygonize_kwargs):
    """Rasterize the FeatureCollection at the target resolution.

    :param polygonize_width: width in pixels used to polygonize thin geometries
    :param fill_value: scalar, or callable(feature) -> value for multivalue output
    :param bounds: ROI; defaults to the collection envelope
    """
    from telluric.georaster import merge_all, MergeStrategy
    from telluric.rasterization import rasterize, NODATA_DEPRECATION_WARNING

    if not isinstance(polygonize_width, int):
        raise TypeError("The width in pixels must be an integer")
    if polygonize_kwargs.pop("nodata_value", None):
        warnings.warn(NODATA_DEPRECATION_WARNING, DeprecationWarning)
    if polygonize_width == 1:
        # Square caps keep single-pixel lines exactly one pixel wide.
        polygonize_kwargs.update(cap_style_point=CAP_STYLE.square)

    if self.crs is not None and self.crs != crs:
        reprojected = self.reproject(crs)
    else:
        reprojected = self

    width = polygonize_width * dest_resolution
    polygonized = [feature.polygonize(width, **polygonize_kwargs)
                   for feature in reprojected]
    shapes = [feature.geometry.get_shape(crs)
              for feature in polygonized if not feature.is_empty]

    if bounds is None:
        bounds = self.envelope
    if bounds.area == 0.0:
        raise ValueError("Specify non-empty ROI")

    if not len(self):
        fill_value = None

    if callable(fill_value):
        if dtype is None:
            raise ValueError("dtype must be specified for multivalue rasterization")
        rasters = [
            feature.geometry.rasterize(
                dest_resolution, fill_value=fill_value(feature),
                bounds=bounds, dtype=dtype, crs=crs)
            for feature in self
        ]
        return merge_all(rasters, bounds.reproject(crs), dest_resolution,
                         merge_strategy=MergeStrategy.INTERSECTION)
    return rasterize(shapes, crs, bounds.get_shape(crs), dest_resolution,
                     fill_value=fill_value, dtype=dtype)
Binarize a FeatureCollection and produce a raster with the target resolution .
58,552
def save(self, filename, driver=None, schema=None):
    """Save the collection to a vector file.

    The driver is inferred from the file extension when not given. For
    GeoJSON the output is written in WGS84 and any pre-existing file is
    removed first.
    """
    if driver is None:
        driver = DRIVERS.get(os.path.splitext(filename)[-1])
    if schema is None:
        schema = self.schema
    if driver == "GeoJSON":
        with contextlib.suppress(FileNotFoundError):
            os.remove(filename)
        crs = WGS84_CRS
    else:
        crs = self.crs
    with fiona.open(filename, 'w', driver=driver, schema=schema, crs=crs) as sink:
        for feature in self:
            adapted = self._adapt_feature_before_write(feature)
            sink.write(adapted.to_record(crs))
Saves collection to file .
58,553
def apply(self, **kwargs):
    """Return a new FeatureCollection with each keyword applied per feature.

    Each keyword value is either a constant or a callable applied to the
    feature; the schema is updated with the guessed property types.
    """
    def _apply(feature):
        new_props = copy.deepcopy(feature.properties)
        for prop, value in kwargs.items():
            new_props[prop] = value(feature) if callable(value) else value
        return feature.copy_with(properties=new_props)

    new_fc = self.map(_apply)
    new_schema = self.schema.copy()
    guessed = FeatureCollection.guess_types_by_feature(new_fc[0], kwargs.keys())
    for key, value_type in guessed.items():
        new_schema["properties"][key] = FIELD_TYPES_MAP_REV.get(value_type, 'str')
    new_fc._schema = new_schema
    return new_fc
Return a new FeatureCollection with the results of applying the statements in the arguments to each element .
58,554
def validate(self):
    """Validate against the schema (when present) by writing every
    feature to an in-memory ESRI Shapefile via fiona."""
    if self._schema is None:
        return
    with MemoryFile() as memfile:
        with memfile.open(driver="ESRI Shapefile", schema=self.schema) as target:
            for raw in self._results:
                feature = GeoFeature(raw.geometry, raw.properties)
                target.write(feature.to_record(feature.crs))
If a schema exists , we run fiona's shapefile validation code by trying to save the collection to an in-memory file ( MemoryFile ) .
58,555
def open(cls, filename, crs=None):
    """Create a FileCollection from a file on disk.

    :param crs: optional override for the CRS read from the file
    """
    with fiona.Env():
        with fiona.open(filename, 'r') as source:
            original_crs = CRS(source.crs)
            schema = source.schema
            length = len(source)
    return cls(filename, crs or original_crs, schema, length)
Creates a FileCollection from a file in disk .
58,556
def filter(self, func):
    """Keep only the groups for which ``func(group)`` is truthy."""
    kept = OrderedDict(
        (name, group) for name, group in self if func(group)
    )
    return self.__class__(kept)
Filter out Groups based on filtering function .
58,557
def reset_context(**options):
    """Reset the thread-local context to exactly the given options."""
    local_context._options = dict(options)
    log.debug("New TelluricContext context %r created", local_context._options)
Reset context to default .
58,558
def get_context():
    """Return a copy of the current context options mapping.

    :raises TelluricContextError: when no context is active
    """
    if not local_context._options:
        raise TelluricContextError("TelluricContext context not exists")
    log.debug("Got a copy of context %r options", local_context._options)
    return local_context._options.copy()
Get a mapping of current options .
58,559
def set_context(**options):
    """Update options in the existing context.

    :raises TelluricContextError: when no context is active
    """
    if not local_context._options:
        raise TelluricContextError("TelluricContext context not exists")
    local_context._options.update(options)
    log.debug("Updated existing %r with options %r", local_context._options, options)
Set options in the existing context .
58,560
def transform_properties(properties, schema):
    """Convert temporal property values to time/date/datetime per the schema.

    NOTE(review): relies on ``properties`` and ``schema["properties"]``
    listing properties in the same order (zip pairs them positionally).
    """
    new_properties = properties.copy()
    schema_items = schema["properties"].items()
    for value, (name, prop_type) in zip(new_properties.values(), schema_items):
        if value is None:
            continue
        if prop_type == "time":
            new_properties[name] = parse_date(value).time()
        elif prop_type == "date":
            new_properties[name] = parse_date(value).date()
        elif prop_type == "datetime":
            new_properties[name] = parse_date(value)
    return new_properties
Transform properties types according to a schema .
58,561
def serialize_properties(properties):
    """Make property values JSON-serializable.

    datetimes become ISO strings; other non-primitive values fall back to
    ``str``; primitives (and None) pass through unchanged.
    """
    serialized = properties.copy()
    primitives = (dict, list, tuple, str, int, float, bool, type(None))
    for name, value in serialized.items():
        if isinstance(value, datetime):
            serialized[name] = value.isoformat()
        elif not isinstance(value, primitives):
            serialized[name] = str(value)
    return serialized
Serialize properties .
58,562
def from_record(cls, record, crs, schema=None):
    """Create a GeoFeature from a GeoJSON-like record.

    Legacy 'raster' entries are converted into raster-typed assets.
    """
    properties = cls._to_properties(record, schema)
    vector = GeoVector(shape(record['geometry']), crs)
    raster_entries = record.get('raster')
    if raster_entries:
        assets = {
            key: dict(type=RASTER_TYPE, product='visual', **value)
            for key, value in raster_entries.items()
        }
    else:
        assets = record.get('assets', {})
    return cls(vector, properties, assets)
Create GeoFeature from a record .
58,563
def copy_with(self, geometry=None, properties=None, assets=None):
    """Return a new GeoFeature with copied (optionally overridden) members.

    Fixes two bugs in the original:
    - ``hasattr("copy", obj)`` had its arguments swapped (and would raise
      TypeError for non-str objects); it now tests ``hasattr(obj, "copy")``.
    - ``map(copy_assets_object, ...)`` is lazy in Python 3 and was never
      consumed, so asset objects were never actually copied; a real loop
      is used instead.
    """
    def copy_assets_object(asset):
        # Replace the cached "__object" entry with its own copy when present
        # and copyable, so the new feature does not share it with the source.
        obj = asset.get("__object")
        if obj is not None and hasattr(obj, "copy"):
            asset["__object"] = obj.copy()

    geometry = geometry or self.geometry.copy()
    new_properties = copy.deepcopy(self.properties)
    if properties:
        new_properties.update(properties)
    if not assets:
        assets = copy.deepcopy(self.assets)
        for asset in assets.values():
            copy_assets_object(asset)
    else:
        # NOTE(review): explicitly passed assets are discarded and replaced
        # with {} — behavior preserved from the original; confirm intended.
        assets = {}
    return self.__class__(geometry, new_properties, assets)
Generate a new GeoFeature with different geometry or properties .
58,564
def from_raster(cls, raster, properties, product='visual'):
    """Create a GeoFeature whose geometry is the raster footprint and whose
    assets are derived from the raster."""
    return cls(raster.footprint(), properties, raster.to_assets(product=product))
Initialize a GeoFeature object with a GeoRaster
58,565
def has_raster(self):
    """Return True if at least one asset is of raster type."""
    for asset in self.assets.values():
        if asset.get('type') == RASTER_TYPE:
            return True
    return False
True if any of the assets is type raster .
58,566
def transform(shape, source_crs, destination_crs=None, src_affine=None, dst_affine=None):
    """Reproject *shape* from *source_crs* to *destination_crs* (WGS84 by
    default), optionally applying an inverse affine before and a forward
    affine after the CRS change."""
    if destination_crs is None:
        destination_crs = WGS84_CRS
    if src_affine is not None:
        # Undo the source affine to get back to raw coordinates first.
        shape = ops.transform(lambda x, y: ~src_affine * (x, y), shape)
    reprojected = generate_transform(source_crs, destination_crs)(shape)
    if dst_affine is not None:
        reprojected = ops.transform(lambda x, y: dst_affine * (x, y), reprojected)
    return reprojected
Transforms shape from one CRS to another .
58,567
def simple_plot(feature, *, mp=None, **map_kwargs):
    """Plot a GeoVector/GeoFeature (or collection) on a simple Folium map."""
    from telluric.collections import BaseCollection
    if mp is None:
        mp = folium.Map(tiles="Stamen Terrain", **map_kwargs)
    if feature.is_empty:
        warnings.warn("The geometry is empty.")
        return mp
    # Large collections are truncated to keep the map responsive.
    if isinstance(feature, BaseCollection):
        feature = feature[:SIMPLE_PLOT_MAX_ROWS]
    folium.GeoJson(mapping(feature), name='geojson', overlay=True).add_to(mp)
    envelope = feature.envelope.get_shape(WGS84_CRS)
    # folium expects (lat, lon) corner pairs; bounds are (minx, miny, maxx, maxy).
    mp.fit_bounds([envelope.bounds[:1:-1], envelope.bounds[1::-1]])
    return mp
Plots a GeoVector in a simple Folium map .
58,568
def zoom_level_from_geometry(geometry, splits=4):
    """Pick a zoom level for *geometry*: the low median of the bounding-tile
    zoom levels of a splits x splits grid of sub-regions."""
    from telluric.vectors import generate_tile_coordinates
    zooms = [
        mercantile.bounding_tile(*chunk.get_shape(WGS84_CRS).bounds).z
        for chunk in generate_tile_coordinates(geometry, (splits, splits))
    ]
    return median_low(zooms)
Generate optimum zoom level for geometry .
58,569
def layer_from_element(element, style_function=None):
    """Build an ipyleaflet GeoJSON layer from a feature or a collection."""
    from telluric.collections import BaseCollection
    if isinstance(element, BaseCollection):
        styled = element.map(lambda feat: style_element(feat, style_function))
    else:
        styled = style_element(element, style_function)
    return GeoJSON(data=mapping(styled), name='GeoJSON')
Return Leaflet layer from shape .
58,570
def plot(feature, mp=None, style_function=None, **map_kwargs):
    """Plot a GeoVector in an ipyleaflet map.

    An empty geometry only warns and yields an (unchanged or default) map.
    """
    map_kwargs.setdefault('basemap', basemaps.Stamen.Terrain)
    if feature.is_empty:
        warnings.warn("The geometry is empty.")
        if mp is None:
            mp = Map(**map_kwargs)
    else:
        if mp is None:
            center = feature.envelope.centroid.reproject(WGS84_CRS)
            zoom = zoom_level_from_geometry(feature.envelope)
            mp = Map(center=(center.y, center.x), zoom=zoom, **map_kwargs)
        mp.add_layer(layer_from_element(feature, style_function))
    return mp
Plots a GeoVector in an ipyleaflet map .
58,571
def tileserver_optimized_raster(src, dest):
    """Convert a raster into a tileserver-optimized raster.

    Reprojects the raster to Web Mercator at an XYZ-aligned resolution,
    builds overviews, and arranges the output as a COG so that
    ``GeoRaster2.get_tile`` on a range-capable web server reads minimal data.

    Fix: the original passed ``create_options=`` to :func:`warp`, whose
    parameter is named ``creation_options``; the typo fell into ``**kwargs``
    (forwarded to ``reproject``) and the creation options were silently
    ignored for the intermediate file.
    """
    src_raster = tl.GeoRaster2.open(src)
    bounding_box = src_raster.footprint().get_shape(tl.constants.WGS84_CRS).bounds
    tile = mercantile.bounding_tile(*bounding_box)
    dest_resolution = mercator_upper_zoom_level(src_raster)
    bounds = tl.GeoVector.from_xyz(tile.x, tile.y, tile.z).get_bounds(tl.constants.WEB_MERCATOR_CRS)
    create_options = {"tiled": "YES", "blocksize": 256,
                      "compress": "DEFLATE", "photometric": "MINISBLACK"}
    with TemporaryDirectory() as temp_dir:
        temp_file = os.path.join(temp_dir, 'temp.tif')
        warp(src, temp_file, dst_crs=tl.constants.WEB_MERCATOR_CRS,
             resolution=dest_resolution, dst_bounds=bounds,
             creation_options=create_options)
        with rasterio.Env(GDAL_TIFF_INTERNAL_MASK=True, GDAL_TIFF_OVR_BLOCKSIZE=256):
            resampling = rasterio.enums.Resampling.gauss
            with rasterio.open(temp_file, 'r+') as tmp_raster:
                factors = _calc_overviews_factors(tmp_raster)
                tmp_raster.build_overviews(factors, resampling=resampling)
                tmp_raster.update_tags(ns='rio_overview', resampling=resampling.name)
                telluric_tags = _get_telluric_tags(src)
                if telluric_tags:
                    tmp_raster.update_tags(**telluric_tags)
            # COPY_SRC_OVERVIEWS keeps the overviews just built, producing a COG.
            rasterio_sh.copy(temp_file, dest, COPY_SRC_OVERVIEWS=True,
                             tiled=True, compress='DEFLATE', photometric='MINISBLACK')
This method converts a raster to a tileserver - optimized raster . The method will reproject the raster to align with the XYZ system in resolution and projection . It will also create overviews , and finally it will arrange the raster in a COG layout . You can take the dest file , upload it to a web server that supports range requests , and use GeoRaster . get_tile on it . You are guaranteed to transfer as little data as possible .
58,572
def get_dimension(geometry):
    """Return the coordinate dimension (e.g. 2 or 3) of a Fiona-like geometry.

    Raises ValueError for unknown geometry types.
    """
    coordinates = geometry["coordinates"]
    geom_type = geometry["type"]
    # Depth of the first coordinate tuple depends on the geometry type.
    if geom_type == 'Point':
        return len(coordinates)
    if geom_type in ('LineString', 'MultiPoint'):
        return len(coordinates[0])
    if geom_type in ('Polygon', 'MultiLineString'):
        return len(coordinates[0][0])
    if geom_type == 'MultiPolygon':
        return len(coordinates[0][0][0])
    raise ValueError("Invalid type '{}'".format(geom_type))
Gets the dimension of a Fiona - like geometry element .
58,573
def from_geojson(cls, filename):
    """Load a vector from a GeoJSON file (coordinates assumed WGS84)."""
    with open(filename) as fd:
        geometry = json.load(fd)
    if 'type' not in geometry:
        raise TypeError("%s is not a valid geojson." % (filename,))
    return cls(to_shape(geometry), WGS84_CRS)
Load vector from geojson .
58,574
def to_geojson(self, filename):
    """Write this vector to *filename* as WGS84 GeoJSON."""
    record = self.to_record(WGS84_CRS)
    with open(filename, 'w') as fd:
        json.dump(record, fd)
Save vector as geojson .
58,575
def from_bounds(cls, xmin, ymin, xmax, ymax, crs=DEFAULT_CRS):
    """Create a rectangular GeoVector from the given bounds."""
    rectangle = Polygon.from_bounds(xmin, ymin, xmax, ymax)
    return cls(rectangle, crs)
Creates GeoVector object from bounds .
58,576
def from_xyz(cls, x, y, z):
    """Create a GeoVector covering the given slippy-map (XYZ) tile,
    expressed in Web Mercator."""
    tile_bounds = xy_bounds(x, y, z)
    return cls.from_bounds(xmin=tile_bounds.left, ymin=tile_bounds.bottom,
                           xmax=tile_bounds.right, ymax=tile_bounds.top,
                           crs=WEB_MERCATOR_CRS)
Creates GeoVector from Mercator slippy map values .
58,577
def cascaded_union(cls, vectors, dst_crs, prevalidate=False):
    """Return a GeoVector that is the cascaded union of the input vectors.

    Invalid shapes are always discarded from the union; with
    ``prevalidate=True`` a warning is emitted when any are found.

    Fix: removed the dead ``crs = DEFAULT_CRS`` assignment in the except
    branch — the result was (and is) always built with ``dst_crs``, so the
    assignment only suggested a fallback that never happened.
    """
    try:
        shapes = [geometry.get_shape(dst_crs) for geometry in vectors]
        if prevalidate:
            if not all([sh.is_valid for sh in shapes]):
                warnings.warn("Some invalid shapes found, discarding them.")
    except IndexError:
        shapes = []
    # NOTE: this call resolves to shapely's module-level cascaded_union,
    # not to this classmethod.
    return cls(cascaded_union([sh for sh in shapes if sh.is_valid]).simplify(0),
               crs=dst_crs)
Generate a GeoVector from the cascaded union of the input vectors .
58,578
def from_record(cls, record, crs):
    """Load a vector from a GeoJSON-like record in the given CRS."""
    if 'type' not in record:
        raise TypeError("The data isn't a valid record.")
    return cls(to_shape(record), crs)
Load vector from record .
58,579
def get_bounding_box(self, crs):
    """Return this vector's bounding box as a GeoVector in *crs*."""
    bounds = self.get_bounds(crs)
    return self.from_bounds(*bounds, crs=crs)
Gets bounding box as GeoVector in a specified CRS .
58,580
def polygonize(self, width, cap_style_line=CAP_STYLE.flat, cap_style_point=CAP_STYLE.round):
    """Buffer a line or point geometry into a polygon of the given width.

    Other geometry types are returned unchanged.
    """
    geom = self._shape
    half_width = width / 2
    if isinstance(geom, (LineString, MultiLineString)):
        return self.__class__(geom.buffer(half_width, cap_style=cap_style_line), self.crs)
    if isinstance(geom, (Point, MultiPoint)):
        return self.__class__(geom.buffer(half_width, cap_style=cap_style_point), self.crs)
    return self
Turns line or point into a buffered polygon .
58,581
def tiles(self, zooms, truncate=False):
    """Iterate over the mercantile tiles intersecting this vector's WGS84
    bounding box at the given zoom level(s)."""
    bounds = self.get_bounds(WGS84_CRS)  # (west, south, east, north)
    return tiles(*bounds, zooms, truncate)
Iterator over the tiles intersecting the bounding box of the vector
58,582
def _join_masks_from_masked_array ( data ) : if not isinstance ( data . mask , np . ndarray ) : mask = np . empty ( data . data . shape , dtype = np . bool ) mask . fill ( data . mask ) return mask mask = data . mask [ 0 ] . copy ( ) for i in range ( 1 , len ( data . mask ) ) : mask = np . logical_or ( mask , data . mask [ i ] ) return mask [ np . newaxis , : , : ]
Union of masks .
58,583
def _creation_options_for_cog ( creation_options , source_profile , blocksize ) : if not ( creation_options ) : creation_options = { } creation_options [ "blocksize" ] = blocksize creation_options [ "tiled" ] = True defaults = { "nodata" : None , "compress" : "lzw" } for key in [ "nodata" , "compress" ] : if key not in creation_options : creation_options [ key ] = source_profile . get ( key , defaults . get ( key ) ) return creation_options
it uses the profile of the source raster override anything using the creation_options and guarantees we will have tiled raster and blocksize
58,584
def convert_to_cog(source_file, destination_file, resampling=rasterio.enums.Resampling.gauss,
                   blocksize=256, overview_blocksize=256, creation_options=None):
    """Convert source file to a Cloud Optimized GeoTiff new file.

    :param source_file: path to the input raster
    :param destination_file: path for the COG output
    :param resampling: resampling method used when building overviews
    :param blocksize: internal tile size of the output
    :param overview_blocksize: tile size of the overview levels
    :param creation_options: extra GDAL creation options (tiled/blocksize
        and missing nodata/compress are filled in automatically)
    """
    # Read only the profile; the source is reopened by the copy below.
    with rasterio.open(source_file) as src:
        source_profile = src.profile
    creation_options = _creation_options_for_cog(creation_options, source_profile, blocksize)
    with rasterio.Env(GDAL_TIFF_INTERNAL_MASK=True, GDAL_TIFF_OVR_BLOCKSIZE=overview_blocksize):
        with TemporaryDirectory() as temp_dir:
            temp_file = os.path.join(temp_dir, 'temp.tif')
            rasterio_sh.copy(source_file, temp_file, **creation_options)
            with rasterio.open(temp_file, 'r+') as dest:
                factors = _calc_overviews_factors(dest)
                dest.build_overviews(factors, resampling=resampling)
                dest.update_tags(ns='rio_overview', resampling=resampling.name)
                telluric_tags = _get_telluric_tags(source_file)
                if telluric_tags:
                    dest.update_tags(**telluric_tags)
            # COPY_SRC_OVERVIEWS preserves the overviews just built, which is
            # what makes the final copy a valid COG layout.
            rasterio_sh.copy(temp_file, destination_file, COPY_SRC_OVERVIEWS=True,
                             **creation_options)
Convert source file to a Cloud Optimized GeoTiff new file .
58,585
def warp(source_file, destination_file, dst_crs=None, resolution=None, dimensions=None,
         src_bounds=None, dst_bounds=None, src_nodata=None, dst_nodata=None,
         target_aligned_pixels=False, check_invert_proj=True, creation_options=None,
         resampling=Resampling.cubic, **kwargs):
    """Warp (reproject/resample) a raster dataset into a new file.

    :param dst_crs: target CRS (defaults to the source CRS via calc_transform)
    :param resolution / dimensions / src_bounds / dst_bounds: mutually
        constraining ways to size the output grid
    :param src_nodata / dst_nodata: nodata overrides; dst_nodata requires a
        source nodata to be known
    :param target_aligned_pixels: align output pixels to the resolution grid
    :param creation_options: extra GDAL creation options for the output
    :param kwargs: forwarded verbatim to rasterio's ``reproject``
    :raises ValueError: when dst_nodata is given but no src nodata is known
    """
    with rasterio.Env(CHECK_WITH_INVERT_PROJ=check_invert_proj):
        with rasterio.open(source_file) as src:
            out_kwargs = src.profile.copy()
            dst_crs, dst_transform, dst_width, dst_height = calc_transform(
                src, dst_crs, resolution, dimensions,
                src_bounds, dst_bounds, target_aligned_pixels)
            if src_nodata is not None:
                out_kwargs.update({'nodata': src_nodata})
            if dst_nodata is not None:
                if src_nodata is None and src.meta['nodata'] is None:
                    raise ValueError('src_nodata must be provided because dst_nodata is not None')
                else:
                    out_kwargs.update({'nodata': dst_nodata})
            out_kwargs.update({'crs': dst_crs, 'transform': dst_transform,
                               'width': dst_width, 'height': dst_height})
            # GDAL rejects block sizes larger than the raster; drop them.
            if ('blockxsize' in out_kwargs and dst_width < out_kwargs['blockxsize']):
                del out_kwargs['blockxsize']
            if ('blockysize' in out_kwargs and dst_height < out_kwargs['blockysize']):
                del out_kwargs['blockysize']
            if creation_options is not None:
                out_kwargs.update(**creation_options)
            with rasterio.open(destination_file, 'w', **out_kwargs) as dst:
                reproject(source=rasterio.band(src, src.indexes),
                          destination=rasterio.band(dst, dst.indexes),
                          src_transform=src.transform,
                          src_crs=src.crs,
                          src_nodata=src_nodata,
                          dst_transform=out_kwargs['transform'],
                          dst_crs=out_kwargs['crs'],
                          dst_nodata=dst_nodata,
                          resampling=resampling,
                          **kwargs)
Warp a raster dataset .
58,586
def build_overviews(source_file, factors=None, minsize=256, external=False,
                    blocksize=256, interleave='pixel', compress='lzw',
                    resampling=Resampling.gauss, **kwargs):
    """Build overviews at one or more decimation factors for all bands of the
    dataset; factors are derived from the raster size when not given."""
    with rasterio.open(source_file, 'r+') as dataset:
        if factors is None:
            size = SimpleNamespace(width=dataset.width, height=dataset.height)
            factors = _calc_overviews_factors(size, minsize)
        with rasterio.Env(GDAL_TIFF_OVR_BLOCKSIZE=blocksize,
                          INTERLEAVE_OVERVIEW=interleave,
                          COMPRESS_OVERVIEW=compress,
                          TIFF_USE_OVR=external,
                          **kwargs):
            dataset.build_overviews(factors, resampling)
Build overviews at one or more decimation factors for all bands of the dataset .
58,587
def build_vrt(source_file, destination_file, **kwargs):
    """Render a boundless VRT XML document for *source_file* and write it to
    *destination_file*; returns the destination path."""
    with rasterio.open(source_file) as src:
        document = boundless_vrt_doc(src, **kwargs).tostring()
    with open(destination_file, 'wb') as dst:
        dst.write(document)
    return destination_file
Make a VRT XML document and write it in file .
58,588
def stretch_histogram(img, dark_clip_percentile=None, bright_clip_percentile=None,
                      dark_clip_value=None, bright_clip_value=None, ignore_zero=True):
    """Stretch img histogram linearly between dark and bright clip points.

    Clip points can be given either as percentiles or as absolute values,
    but not both for the same end. Defaults to 0.1% percentile clipping on
    each end. With ignore_zero, zero pixels are excluded from percentile
    computation. Works on 2-D images or (bands, H, W) stacks of an integer
    dtype (np.iinfo is used for the output range).

    :raises KeyError: when both percentile and value are given for one end
    :raises HistogramStretchingError: when the clip points coincide
    """
    if (dark_clip_percentile is not None and dark_clip_value is not None) or (bright_clip_percentile is not None and bright_clip_value is not None):
        raise KeyError('Provided parameters for both by-percentile and by-value stretch, need only one of those.')
    if dark_clip_percentile is None and dark_clip_value is None:
        dark_clip_percentile = 0.001
    if bright_clip_percentile is None and bright_clip_value is None:
        bright_clip_percentile = 0.001
    # Resolve percentile clip points into absolute values.
    if dark_clip_percentile is not None:
        dark_clip_value = np.percentile(img[img != 0] if ignore_zero else img, 100 * dark_clip_percentile)
    if bright_clip_percentile is not None:
        bright_clip_value = np.percentile(img[img != 0] if ignore_zero else img, 100 * (1 - bright_clip_percentile))
    # Output range is the full range of the (integer) input dtype.
    dst_min = np.iinfo(img.dtype).min
    dst_max = np.iinfo(img.dtype).max
    if bright_clip_value == dark_clip_value:
        raise HistogramStretchingError
    # Linear mapping: dark_clip_value -> dst_min, bright_clip_value -> dst_max.
    gain = (dst_max - dst_min) / (bright_clip_value - dark_clip_value)
    offset = -gain * dark_clip_value + dst_min
    stretched = np.empty_like(img, dtype=img.dtype)
    if len(img.shape) == 2:
        stretched[:, :] = np.clip(gain * img[:, :].astype(np.float32) + offset, dst_min, dst_max).astype(img.dtype)
    else:
        # Multi-band: stretch each band with the same global gain/offset.
        for band in range(img.shape[0]):
            stretched[band, :, :] = np.clip(gain * img[band, :, :].astype(np.float32) + offset, dst_min, dst_max).astype(img.dtype)
    return stretched
Stretch img histogram .
58,589
def _distribution_info(self):
    """Look up the CSPICE distribution name and archive extension for the
    current platform in self._dists, keyed by (system, machine)."""
    print('Gathering information...')
    system = platform.system()
    if 'CYGWIN' in system:
        system = 'cygwin'
    processor = platform.processor()
    # Pointer width of the *interpreter*, not the OS, decides 32 vs 64 bit.
    machine = '64bit' if sys.maxsize > 2 ** 32 else '32bit'
    print('SYSTEM: ', system)
    print('PROCESSOR:', processor)
    print('MACHINE: ', machine)
    return self._dists[(system, machine)]
Creates the distribution name and the expected extension for the CSPICE package and returns it .
58,590
def _download(self):
    """Download the CSPICE package into the in-memory self._local BytesIO stream.

    Old OpenSSL builds (< 1.0.1g) go through urllib3 + pyOpenSSL with
    certificate verification; newer builds use plain urllib.
    """
    # NOTE(review): string comparison of version strings is lexicographic —
    # works for this particular threshold but is fragile in general.
    if ssl.OPENSSL_VERSION < 'OpenSSL 1.0.1g':
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
        import certifi
        import urllib3
        try:
            # Collect *_proxy environment variables (http_proxy, https_proxy, ...).
            proxies = {}
            for key, value in os.environ.items():
                if '_proxy' in key.lower():
                    proxies[key.lower().replace('_proxy', '')] = value
            if 'https' in proxies:
                https = urllib3.ProxyManager(proxies['https'], cert_reqs='CERT_REQUIRED',
                                             ca_certs=certifi.where())
            elif 'http' in proxies:
                https = urllib3.ProxyManager(proxies['http'], cert_reqs='CERT_REQUIRED',
                                             ca_certs=certifi.where())
            else:
                https = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                            ca_certs=certifi.where())
            response = https.request('GET', self._rcspice, timeout=urllib3.Timeout(10))
        except urllib3.exceptions.HTTPError as err:
            raise RuntimeError(err.message)
        self._local = io.BytesIO(response.data)
    else:
        try:
            response = urllib.request.urlopen(self._rcspice, timeout=10)
        except urllib.error.URLError as err:
            raise RuntimeError(err.reason)
        self._local = io.BytesIO(response.read())
Support function that encapsulates the OpenSSL transfer of the CSPICE package to the self . _local io . BytesIO stream .
58,591
def _unpack(self):
    """Unpack the downloaded CSPICE archive into self._root.

    Windows packages are zipfiles; other platforms receive a gzipped
    tarball that is piped through ``gunzip | tar x``.

    Fix: the original never closed the subprocess' stdin nor waited for it,
    so tar could still be running (or blocked waiting for EOF) when this
    method returned, leaving the extraction incomplete.
    """
    if self._ext == 'zip':
        with ZipFile(self._local, 'r') as archive:
            archive.extractall(self._root)
    else:
        cmd = 'gunzip | tar xC ' + self._root
        proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
        proc.stdin.write(self._local.read())
        # Signal EOF to the pipeline and wait for extraction to finish.
        proc.stdin.close()
        proc.wait()
    self._local.close()
Unpacks the CSPICE package into the given root directory . Note that the package could either be the zipfile . ZipFile class for Windows platforms or tarfile . TarFile for other platforms .
58,592
def spiceErrorCheck(f):
    """Decorator hooking spiceypy functions into the SPICE error system.

    After each call, checkForSpiceError raises if SPICE has flagged an error.

    Fix: removed the no-op ``try: ... except: raise`` wrapper — a bare
    re-raise adds nothing and the bare ``except`` is an anti-pattern.
    """
    @functools.wraps(f)
    def with_errcheck(*args, **kwargs):
        res = f(*args, **kwargs)
        # Raises a SpiceyError if the SPICE failure flag is set.
        checkForSpiceError(f)
        return res
    return with_errcheck
Decorator for spiceypy hooking into spice error system . If an error is detected an output similar to outmsg
58,593
def spiceFoundExceptionThrower(f):
    """Decorator for functions whose last return element is a "found" flag.

    When config.catch_false_founds is set, a false flag (or any false entry
    in an iterable of flags) raises SpiceyError; otherwise the flag is
    stripped from the result.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        res = f(*args, **kwargs)
        if not config.catch_false_founds:
            return res
        found = res[-1]
        if isinstance(found, bool) and not found:
            raise stypes.SpiceyError(
                "Spice returns not found for function: {}".format(f.__name__),
                found=found)
        if hasattr(found, '__iter__') and not all(found):
            raise stypes.SpiceyError(
                "Spice returns not found in a series of calls for function: {}".format(f.__name__),
                found=found)
        actualres = res[0:-1]
        return actualres[0] if len(actualres) == 1 else actualres
    return wrapper
Decorator for wrapping functions that use status codes
58,594
def appndc(item, cell):
    """Append a string (or each string of a list) to a character cell."""
    assert isinstance(cell, stypes.SpiceCell)
    if isinstance(item, list):
        for element in item:
            libspice.appndc_c(stypes.stringToCharP(element), cell)
    else:
        libspice.appndc_c(stypes.stringToCharP(item), cell)
Append an item to a character cell .
58,595
def appndd(item, cell):
    """Append a double (or each element of an iterable) to a double cell."""
    assert isinstance(cell, stypes.SpiceCell)
    values = item if hasattr(item, "__iter__") else [item]
    for value in values:
        libspice.appndd_c(ctypes.c_double(value), cell)
Append an item to a double precision cell .
58,596
def appndi(item, cell):
    """Append an integer (or each element of an iterable) to an integer cell."""
    assert isinstance(cell, stypes.SpiceCell)
    values = item if hasattr(item, "__iter__") else [item]
    for value in values:
        libspice.appndi_c(ctypes.c_int(value), cell)
Append an item to an integer cell .
58,597
def axisar(axis, angle):
    """Build the rotation matrix that rotates vectors by *angle* (radians)
    about *axis*; returned as a NumPy matrix."""
    rotation = stypes.emptyDoubleMatrix()
    libspice.axisar_c(stypes.toDoubleVector(axis), ctypes.c_double(angle), rotation)
    return stypes.cMatrixToNumpy(rotation)
Construct a rotation matrix that rotates vectors by a specified angle about a specified axis .
58,598
def badkpv(caller, name, comp, insize, divby, intype):
    """Check that a kernel pool variable exists with the expected size and
    type; returns True when the variable fails the check."""
    result = libspice.badkpv_c(
        stypes.stringToCharP(caller),
        stypes.stringToCharP(name),
        stypes.stringToCharP(comp),
        ctypes.c_int(insize),
        ctypes.c_int(divby),
        ctypes.c_char(intype.encode(encoding='UTF-8')),
    )
    return bool(result)
Determine if a kernel pool variable is present and if so that it has the correct size and type .
58,599
def bltfrm(frmcls, outCell=None):
    """Return a SPICE cell of the frame IDs of all built-in frames of the
    given class; a fresh 1000-slot integer cell is allocated when none is
    supplied."""
    if not outCell:
        outCell = stypes.SPICEINT_CELL(1000)
    libspice.bltfrm_c(ctypes.c_int(frmcls), outCell)
    return outCell
Return a SPICE set containing the frame IDs of all built - in frames of a specified class .