idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
13,600 | def setup ( level = logging . WARNING , outputs = [ output . STDERR ] , program_name = None , capture_warnings = True ) : root_logger = logging . getLogger ( None ) for handler in list ( root_logger . handlers ) : root_logger . removeHandler ( handler ) for out in outputs : if isinstance ( out , str ) : out = output . ... | Setup Python logging . |
13,601 | def set_default_log_levels ( loggers_and_log_levels ) : for logger , level in loggers_and_log_levels : if isinstance ( level , str ) : level = level . upper ( ) logging . getLogger ( logger ) . setLevel ( level ) | Set default log levels for some loggers . |
13,602 | def create_swag_from_ctx ( ctx ) : swag_opts = { } if ctx . type == 'file' : swag_opts = { 'swag.type' : 'file' , 'swag.data_dir' : ctx . data_dir , 'swag.data_file' : ctx . data_file } elif ctx . type == 's3' : swag_opts = { 'swag.type' : 's3' , 'swag.bucket_name' : ctx . bucket_name , 'swag.data_file' : ctx . data_fi... | Creates SWAG client from the current context . |
13,603 | def file ( ctx , data_dir , data_file ) : if not ctx . file : ctx . data_file = data_file if not ctx . data_dir : ctx . data_dir = data_dir ctx . type = 'file' | Use the File SWAG Backend |
13,604 | def s3 ( ctx , bucket_name , data_file , region ) : if not ctx . data_file : ctx . data_file = data_file if not ctx . bucket_name : ctx . bucket_name = bucket_name if not ctx . region : ctx . region = region ctx . type = 's3' | Use the S3 SWAG backend . |
13,605 | def list ( ctx ) : if ctx . namespace != 'accounts' : click . echo ( click . style ( 'Only account data is available for listing.' , fg = 'red' ) ) return swag = create_swag_from_ctx ( ctx ) accounts = swag . get_all ( ) _table = [ [ result [ 'name' ] , result . get ( 'id' ) ] for result in accounts ] click . echo ( ta... | List SWAG account info . |
13,606 | def list_service ( ctx , name ) : swag = create_swag_from_ctx ( ctx ) accounts = swag . get_service_enabled ( name ) _table = [ [ result [ 'name' ] , result . get ( 'id' ) ] for result in accounts ] click . echo ( tabulate ( _table , headers = [ "Account Name" , "Account Number" ] ) ) | Retrieve accounts pertaining to named service . |
13,607 | def migrate ( ctx , start_version , end_version ) : if ctx . type == 'file' : if ctx . data_file : file_path = ctx . data_file else : file_path = os . path . join ( ctx . data_file , ctx . namespace + '.json' ) with open ( file_path , 'r' ) as f : data = json . loads ( f . read ( ) ) data = run_migration ( data , start... | Transition from one SWAG schema to another . |
13,608 | def propagate ( ctx ) : data = [ ] if ctx . type == 'file' : if ctx . data_file : file_path = ctx . data_file else : file_path = os . path . join ( ctx . data_dir , ctx . namespace + '.json' ) with open ( file_path , 'r' ) as f : data = json . loads ( f . read ( ) ) swag_opts = { 'swag.type' : 'dynamodb' } swag = SWAGM... | Transfers SWAG data from one backend to another |
13,609 | def create ( ctx , data ) : swag = create_swag_from_ctx ( ctx ) data = json . loads ( data . read ( ) ) for account in data : swag . create ( account , dry_run = ctx . dry_run ) | Create a new SWAG item . |
13,610 | def deploy_service ( ctx , path , name , regions , disabled ) : enabled = False if disabled else True swag = create_swag_from_ctx ( ctx ) accounts = swag . get_all ( search_filter = path ) log . debug ( 'Searching for accounts. Found: {} JMESPath: `{}`' . format ( len ( accounts ) , path ) ) for a in accounts : try : i... | Deploys a new service JSON to multiple accounts . NAME is the service name you wish to deploy . |
13,611 | def seed_aws_data ( ctx , data ) : swag = create_swag_from_ctx ( ctx ) for k , v in json . loads ( data . read ( ) ) . items ( ) : for account in v [ 'accounts' ] : data = { 'description' : 'This is an AWS owned account used for {}' . format ( k ) , 'id' : account [ 'account_id' ] , 'contacts' : [ ] , 'owner' : 'aws' ,... | Seeds SWAG from a list of known AWS accounts . |
13,612 | def seed_aws_organization ( ctx , owner ) : swag = create_swag_from_ctx ( ctx ) accounts = swag . get_all ( ) _ids = [ result . get ( 'id' ) for result in accounts ] client = boto3 . client ( 'organizations' ) paginator = client . get_paginator ( 'list_accounts' ) response_iterator = paginator . paginate ( ) count = 0 ... | Seeds SWAG from an AWS organization . |
13,613 | def load_file ( client , bucket , data_file ) : logger . debug ( 'Loading item from s3. Bucket: {bucket} Key: {key}' . format ( bucket = bucket , key = data_file ) ) try : data = _get_from_s3 ( client , bucket , data_file ) except ClientError as ce : if ce . response [ 'Error' ] [ 'Code' ] == 'NoSuchKey' : return { } e... | Tries to load JSON data from S3 . |
13,614 | def save_file ( client , bucket , data_file , items , dry_run = None ) : logger . debug ( 'Writing {number_items} items to s3. Bucket: {bucket} Key: {key}' . format ( number_items = len ( items ) , bucket = bucket , key = data_file ) ) if not dry_run : return _put_to_s3 ( client , bucket , data_file , json . dumps ( it... | Tries to write JSON data to data file in S3 . |
13,615 | def create ( self , item , dry_run = None ) : logger . debug ( 'Creating new item. Item: {item} Path: {data_file}' . format ( item = item , data_file = self . data_file ) ) items = load_file ( self . client , self . bucket_name , self . data_file ) items = append_item ( self . namespace , self . version , item , items ... | Creates a new item in file . |
13,616 | def health_check ( self ) : logger . debug ( 'Health Check on S3 file for: {namespace}' . format ( namespace = self . namespace ) ) try : self . client . head_object ( Bucket = self . bucket_name , Key = self . data_file ) return True except ClientError as e : logger . debug ( 'Error encountered with S3. Assume unheal... | Uses head object to make sure the file exists in S3 . |
13,617 | def health_check ( self ) : logger . debug ( 'Health Check on Table: {namespace}' . format ( namespace = self . namespace ) ) try : self . get_all ( ) return True except ClientError as e : logger . exception ( e ) logger . error ( 'Error encountered with Database. Assume unhealthy' ) return False | Gets a single item to determine if Dynamo is functioning . |
13,618 | def parse_swag_config_options ( config ) : options = { } for key , val in config . items ( ) : if key . startswith ( 'swag.backend.' ) : options [ key [ 12 : ] ] = val if key . startswith ( 'swag.' ) : options [ key [ 5 : ] ] = val if options . get ( 'type' ) == 's3' : return S3OptionsSchema ( strict = True ) . load ( ... | Ensures that options passed to the backend are valid . |
13,619 | def deprecated ( message ) : def wrapper ( fn ) : def deprecated_method ( * args , ** kargs ) : warnings . warn ( message , DeprecationWarning , 2 ) return fn ( * args , ** kargs ) deprecated_method . __name__ = fn . __name__ deprecated_method . __doc__ = "%s\n\n%s" % ( message , fn . __doc__ ) return deprecated_method... | Deprecated function decorator . |
13,620 | def is_sub_dict ( sub_dict , dictionary ) : for key in sub_dict . keys ( ) : if key not in dictionary : return False if ( type ( sub_dict [ key ] ) is not dict ) and ( sub_dict [ key ] != dictionary [ key ] ) : return False if ( type ( sub_dict [ key ] ) is dict ) and ( not is_sub_dict ( sub_dict [ key ] , dictionary [... | Legacy filter for determining if a given dict is present . |
13,621 | def get_by_name ( account_name , bucket , region = 'us-west-2' , json_path = 'accounts.json' , alias = None ) : for account in get_all_accounts ( bucket , region , json_path ) [ 'accounts' ] : if 'aws' in account [ 'type' ] : if account [ 'name' ] == account_name : return account elif alias : for a in account [ 'alias'... | Given an account name attempts to retrieve associated account info . |
13,622 | def get_all_accounts ( bucket , region = 'us-west-2' , json_path = 'accounts.json' , ** filters ) : swag_opts = { 'swag.type' : 's3' , 'swag.bucket_name' : bucket , 'swag.bucket_region' : region , 'swag.data_file' : json_path , 'swag.schema_version' : 1 } swag = SWAGManager ( ** parse_swag_config_options ( swag_opts ) ... | Fetches all the accounts from SWAG . |
13,623 | def load_file ( data_file ) : try : with open ( data_file , 'r' , encoding = 'utf-8' ) as f : return json . loads ( f . read ( ) ) except JSONDecodeError as e : return [ ] | Tries to load JSON from data file . |
13,624 | def save_file ( data_file , data , dry_run = None ) : if dry_run : return with open ( data_file , 'w' , encoding = 'utf-8' ) as f : if sys . version_info > ( 3 , 0 ) : f . write ( json . dumps ( data ) ) else : f . write ( json . dumps ( data ) . decode ( 'utf-8' ) ) | Writes JSON data to data file . |
13,625 | def health_check ( self ) : logger . debug ( 'Health Check on file for: {namespace}' . format ( namespace = self . namespace ) ) return os . path . isfile ( self . data_file ) | Checks to make sure the file is there . |
13,626 | def configure ( self , * args , ** kwargs ) : self . version = kwargs [ 'schema_version' ] self . namespace = kwargs [ 'namespace' ] self . backend = get ( kwargs [ 'type' ] ) ( * args , ** kwargs ) self . context = kwargs . pop ( 'schema_context' , { } ) | Configures a SWAG manager . Overrides existing configuration . |
13,627 | def create ( self , item , dry_run = None ) : return self . backend . create ( validate ( item , version = self . version , context = self . context ) , dry_run = dry_run ) | Create a new item in backend . |
13,628 | def delete ( self , item , dry_run = None ) : return self . backend . delete ( item , dry_run = dry_run ) | Delete an item in backend . |
13,629 | def update ( self , item , dry_run = None ) : return self . backend . update ( validate ( item , version = self . version , context = self . context ) , dry_run = dry_run ) | Update an item in backend . |
13,630 | def get_all ( self , search_filter = None ) : items = self . backend . get_all ( ) if not items : if self . version == 1 : return { self . namespace : [ ] } return [ ] if search_filter : items = jmespath . search ( search_filter , items ) return items | Fetch all data from backend . |
13,631 | def get_service_enabled ( self , name , accounts_list = None , search_filter = None , region = None ) : if not accounts_list : accounts = self . get_all ( search_filter = search_filter ) else : accounts = accounts_list if self . version == 1 : accounts = accounts [ 'accounts' ] enabled = [ ] for account in accounts : i... | Get a list of accounts where a service has been enabled . |
13,632 | def get_service ( self , name , search_filter ) : if self . version == 1 : service_filter = "service.{name}" . format ( name = name ) return jmespath . search ( service_filter , self . get ( search_filter ) ) else : service_filter = "services[?name=='{}']" . format ( name ) return one ( jmespath . search ( service_filt... | Fetch service metadata . |
13,633 | def get_service_name ( self , name , search_filter ) : service_filter = "services[?name=='{}'].metadata.name" . format ( name ) return one ( jmespath . search ( service_filter , self . get ( search_filter ) ) ) | Fetch account name as referenced by a particular service . |
13,634 | def get_by_name ( self , name , alias = None ) : search_filter = "[?name=='{}']" . format ( name ) if alias : if self . version == 1 : search_filter = "accounts[?name=='{name}' || contains(alias, '{name}')]" . format ( name = name ) elif self . version == 2 : search_filter = "[?name=='{name}' || contains(aliases, '{nam... | Fetch all accounts with name specified optionally include aliases . |
13,635 | def run_migration ( data , version_start , version_end ) : items = [ ] if version_start == 1 and version_end == 2 : for item in data [ 'accounts' ] : items . append ( v2 . upgrade ( item ) ) if version_start == 2 and version_end == 1 : for item in data : items . append ( v2 . downgrade ( item ) ) items = { 'accounts' :... | Runs migration against a data set . |
13,636 | def validate_type ( self , data ) : fields_to_validate = [ 'type' , 'environment' , 'owner' ] for field in fields_to_validate : value = data . get ( field ) allowed_values = self . context . get ( field ) if allowed_values and value not in allowed_values : raise ValidationError ( 'Must be one of {}' . format ( allowed_... | Performs field validation against the schema context if values have been provided to SWAGManager via the swag . schema_context config object . |
13,637 | def validate_account_status ( self , data ) : deleted_status = 'deleted' region_status = data . get ( 'status' ) account_status = data . get ( 'account_status' ) for region in region_status : if region [ 'status' ] != deleted_status and account_status == deleted_status : raise ValidationError ( 'Account Status cannot b... | Performs field validation for account_status . If any region is not deleted account_status cannot be deleted |
13,638 | def validate_regions_schema ( self , data ) : region_schema = RegionSchema ( ) supplied_regions = data . get ( 'regions' , { } ) for region in supplied_regions . keys ( ) : result = region_schema . validate ( supplied_regions [ region ] ) if len ( result . keys ( ) ) > 0 : raise ValidationError ( result ) | Performs field validation for regions . This should be a dict with region names as the key and RegionSchema as the value |
13,639 | def coord_to_dimension ( coord ) : kwargs = { } if coord . units . is_time_reference ( ) : kwargs [ 'value_format' ] = get_date_format ( coord ) else : kwargs [ 'unit' ] = str ( coord . units ) return Dimension ( coord . name ( ) , ** kwargs ) | Converts an iris coordinate to a HoloViews dimension . |
13,640 | def sort_coords ( coord ) : import iris order = { 'T' : - 2 , 'Z' : - 1 , 'X' : 1 , 'Y' : 2 } axis = iris . util . guess_coord_axis ( coord ) return ( order . get ( axis , 0 ) , coord and coord . name ( ) ) | Sorts a list of DimCoords trying to ensure that dates and pressure levels appear first and the longitude and latitude appear last in the correct order . |
13,641 | def values ( cls , dataset , dim , expanded = True , flat = True , compute = True ) : dim = dataset . get_dimension ( dim , strict = True ) if dim in dataset . vdims : coord_names = [ c . name ( ) for c in dataset . data . dim_coords ] data = dataset . data . copy ( ) . data data = cls . canonicalize ( dataset , data ,... | Returns an array of the values along the supplied dimension . |
13,642 | def groupby ( cls , dataset , dims , container_type = HoloMap , group_type = None , ** kwargs ) : import iris if not isinstance ( dims , list ) : dims = [ dims ] dims = [ dataset . get_dimension ( d , strict = True ) for d in dims ] constraints = [ d . name for d in dims ] slice_dims = [ d for d in dataset . kdims if d... | Groups the data by one or more dimensions returning a container indexed by the grouped dimensions containing slices of the cube wrapped in the group_type . This makes it very easy to break up a high - dimensional dataset into smaller viewable chunks . |
13,643 | def concat_dim ( cls , datasets , dim , vdims ) : import iris from iris . experimental . equalise_cubes import equalise_attributes cubes = [ ] for c , cube in datasets . items ( ) : cube = cube . copy ( ) cube . add_aux_coord ( iris . coords . DimCoord ( [ c ] , var_name = dim . name ) ) cubes . append ( cube ) cubes =... | Concatenates datasets along one dimension |
13,644 | def range ( cls , dataset , dimension ) : dim = dataset . get_dimension ( dimension , strict = True ) values = dataset . dimension_values ( dim . name , False ) return ( np . nanmin ( values ) , np . nanmax ( values ) ) | Computes the range along a particular dimension . |
13,645 | def redim ( cls , dataset , dimensions ) : new_dataset = dataset . data . copy ( ) for name , new_dim in dimensions . items ( ) : if name == new_dataset . name ( ) : new_dataset . rename ( new_dim . name ) for coord in new_dataset . dim_coords : if name == coord . name ( ) : coord . rename ( new_dim . name ) return new... | Rename coords on the Cube . |
13,646 | def length ( cls , dataset ) : return np . product ( [ len ( d . points ) for d in dataset . data . coords ( dim_coords = True ) ] , dtype = np . intp ) | Returns the total number of samples in the dataset . |
13,647 | def add_dimension ( cls , columns , dimension , dim_pos , values , vdim ) : if not vdim : raise Exception ( "Cannot add key dimension to a dense representation." ) raise NotImplementedError | Adding value dimensions not currently supported by iris interface . Adding key dimensions not possible on dense interfaces . |
13,648 | def select_to_constraint ( cls , dataset , selection ) : import iris def get_slicer ( start , end ) : def slicer ( cell ) : return start <= cell . point < end return slicer constraint_kwargs = { } for dim , constraint in selection . items ( ) : if isinstance ( constraint , slice ) : constraint = ( constraint . start , ... | Transform a selection dictionary to an iris Constraint . |
13,649 | def select ( cls , dataset , selection_mask = None , ** selection ) : import iris constraint = cls . select_to_constraint ( dataset , selection ) pre_dim_coords = [ c . name ( ) for c in dataset . data . dim_coords ] indexed = cls . indexed ( dataset , selection ) extracted = dataset . data . extract ( constraint ) if ... | Apply a selection to the data . |
13,650 | def convert_to_geotype ( element , crs = None ) : geotype = getattr ( gv_element , type ( element ) . __name__ , None ) if crs is None or geotype is None or isinstance ( element , _Element ) : return element return geotype ( element , crs = crs ) | Converts a HoloViews element type to the equivalent GeoViews element if given a coordinate reference system . |
13,651 | def add_crs ( op , element , ** kwargs ) : return element . map ( lambda x : convert_to_geotype ( x , kwargs . get ( 'crs' ) ) , Element ) | Converts any elements in the input to their equivalent geotypes if given a coordinate reference system . |
13,652 | def is_geographic ( element , kdims = None ) : if isinstance ( element , ( Overlay , NdOverlay ) ) : return any ( element . traverse ( is_geographic , [ _Element ] ) ) if kdims : kdims = [ element . get_dimension ( d ) for d in kdims ] else : kdims = element . kdims if len ( kdims ) != 2 and not isinstance ( element , ... | Utility to determine whether the supplied element optionally a subset of its key dimensions represent a geographic coordinate system . |
13,653 | def geoms ( self , scale = None , bounds = None , as_element = True ) : feature = self . data if scale is not None : feature = feature . with_scale ( scale ) if bounds : extent = ( bounds [ 0 ] , bounds [ 2 ] , bounds [ 1 ] , bounds [ 3 ] ) else : extent = None geoms = [ g for g in feature . intersecting_geometries ( e... | Returns the geometries held by the Feature . |
13,654 | def from_shapefile ( cls , shapefile , * args , ** kwargs ) : reader = Reader ( shapefile ) return cls . from_records ( reader . records ( ) , * args , ** kwargs ) | Loads a shapefile from disk and optionally merges it with a dataset . See from_records for full signature . |
13,655 | def from_records ( cls , records , dataset = None , on = None , value = None , index = [ ] , drop_missing = False , element = None , ** kwargs ) : if dataset is not None and not on : raise ValueError ( 'To merge dataset with shapes mapping ' 'must define attribute(s) to merge on.' ) if util . pd and isinstance ( datase... | Load data from a collection of cartopy . io . shapereader . Record objects and optionally merge it with a dataset to assign values to each polygon and form a choropleth . Supplying just records will return an NdOverlay of Shape Elements with a numeric index . If a dataset is supplied a mapping between the attribute nam... |
13,656 | def get_cb_plot ( cb , plot = None ) : plot = plot or cb . plot if isinstance ( plot , GeoOverlayPlot ) : plots = [ get_cb_plot ( cb , p ) for p in plot . subplots . values ( ) ] plots = [ p for p in plots if any ( s in cb . streams and getattr ( s , '_triggering' , False ) for s in p . streams ) ] if plots : plot = pl... | Finds the subplot with the corresponding stream . |
13,657 | def skip ( cb , msg , attributes ) : if not all ( a in msg for a in attributes ) : return True plot = get_cb_plot ( cb ) return ( not getattr ( plot , 'geographic' , False ) or not hasattr ( plot . current_frame , 'crs' ) ) | Skips applying transforms if data is not geographic . |
13,658 | def project_ranges ( cb , msg , attributes ) : if skip ( cb , msg , attributes ) : return msg plot = get_cb_plot ( cb ) x0 , x1 = msg . get ( 'x_range' , ( 0 , 1000 ) ) y0 , y1 = msg . get ( 'y_range' , ( 0 , 1000 ) ) extents = x0 , y0 , x1 , y1 x0 , y0 , x1 , y1 = project_extents ( extents , plot . projection , plot .... | Projects ranges supplied by a callback . |
13,659 | def project_point ( cb , msg , attributes = ( 'x' , 'y' ) ) : if skip ( cb , msg , attributes ) : return msg plot = get_cb_plot ( cb ) x , y = msg . get ( 'x' , 0 ) , msg . get ( 'y' , 0 ) crs = plot . current_frame . crs coordinates = crs . transform_points ( plot . projection , np . array ( [ x ] ) , np . array ( [ y... | Projects a single point supplied by a callback |
13,660 | def project_drawn ( cb , msg ) : stream = cb . streams [ 0 ] old_data = stream . data stream . update ( data = msg [ 'data' ] ) element = stream . element stream . update ( data = old_data ) proj = cb . plot . projection if not isinstance ( element , _Element ) or element . crs == proj : return None crs = element . crs... | Projects a drawn element to the declared coordinate system |
13,661 | def clean_weight_files ( cls ) : deleted = [ ] for f in cls . _files : try : os . remove ( f ) deleted . append ( f ) except FileNotFoundError : pass print ( 'Deleted %d weight files' % len ( deleted ) ) cls . _files = [ ] | Cleans existing weight files . |
13,662 | def _get_projection ( el ) : result = None if hasattr ( el , 'crs' ) : result = ( int ( el . _auxiliary_component ) , el . crs ) return result | Get coordinate reference system from non - auxiliary elements . Return value is a tuple of a precedence integer and the projection to allow non - auxiliary components to take precedence . |
13,663 | def get_extents ( self , element , ranges , range_type = 'combined' ) : proj = self . projection if self . global_extent and range_type in ( 'combined' , 'data' ) : ( x0 , x1 ) , ( y0 , y1 ) = proj . x_limits , proj . y_limits return ( x0 , y0 , x1 , y1 ) extents = super ( ProjectionPlot , self ) . get_extents ( elemen... | Subclasses the get_extents method using the GeoAxes set_extent method to project the extents to the Elements coordinate reference system . |
13,664 | def wrap_lons ( lons , base , period ) : lons = lons . astype ( np . float64 ) return ( ( lons - base + period * 2 ) % period ) + base | Wrap longitude values into the range between base and base + period . |
13,665 | def geom_dict_to_array_dict ( geom_dict , coord_names = [ 'Longitude' , 'Latitude' ] ) : x , y = coord_names geom = geom_dict [ 'geometry' ] new_dict = { k : v for k , v in geom_dict . items ( ) if k != 'geometry' } array = geom_to_array ( geom ) new_dict [ x ] = array [ : , 0 ] new_dict [ y ] = array [ : , 1 ] if geom... | Converts a dictionary containing an geometry key to a dictionary of x - and y - coordinate arrays and if present a list - of - lists of hole array . |
13,666 | def polygons_to_geom_dicts ( polygons , skip_invalid = True ) : interface = polygons . interface . datatype if interface == 'geodataframe' : return [ row . to_dict ( ) for _ , row in polygons . data . iterrows ( ) ] elif interface == 'geom_dictionary' : return polygons . data polys = [ ] xdim , ydim = polygons . kdims ... | Converts a Polygons element into a list of geometry dictionaries preserving all value dimensions . |
13,667 | def path_to_geom_dicts ( path , skip_invalid = True ) : interface = path . interface . datatype if interface == 'geodataframe' : return [ row . to_dict ( ) for _ , row in path . data . iterrows ( ) ] elif interface == 'geom_dictionary' : return path . data geoms = [ ] invalid = False xdim , ydim = path . kdims for i , ... | Converts a Path element into a list of geometry dictionaries preserving all value dimensions . |
13,668 | def to_ccw ( geom ) : if isinstance ( geom , sgeom . Polygon ) and not geom . exterior . is_ccw : geom = sgeom . polygon . orient ( geom ) return geom | Reorients polygon to be wound counter - clockwise . |
13,669 | def geom_length ( geom ) : if geom . geom_type == 'Point' : return 1 if hasattr ( geom , 'exterior' ) : geom = geom . exterior if not geom . geom_type . startswith ( 'Multi' ) and hasattr ( geom , 'array_interface_base' ) : return len ( geom . array_interface_base [ 'data' ] ) // 2 else : length = 0 for g in geom : len... | Calculates the length of coordinates in a shapely geometry . |
13,670 | def geo_mesh ( element ) : if len ( element . vdims ) > 1 : xs , ys = ( element . dimension_values ( i , False , False ) for i in range ( 2 ) ) zs = np . dstack ( [ element . dimension_values ( i , False , False ) for i in range ( 2 , 2 + len ( element . vdims ) ) ] ) else : xs , ys , zs = ( element . dimension_values ... | Get mesh data from a 2D Element ensuring that if the data is on a cylindrical coordinate system and wraps globally that data actually wraps around . |
13,671 | def check_crs ( crs ) : import pyproj if isinstance ( crs , pyproj . Proj ) : out = crs elif isinstance ( crs , dict ) or isinstance ( crs , basestring ) : try : out = pyproj . Proj ( crs ) except RuntimeError : try : out = pyproj . Proj ( init = crs ) except RuntimeError : out = None else : out = None return out | Checks if the crs represents a valid grid projection or ESPG string . |
13,672 | def proj_to_cartopy ( proj ) : import cartopy . crs as ccrs try : from osgeo import osr has_gdal = True except ImportError : has_gdal = False proj = check_crs ( proj ) if proj . is_latlong ( ) : return ccrs . PlateCarree ( ) srs = proj . srs if has_gdal : s1 = osr . SpatialReference ( ) s1 . ImportFromProj4 ( proj . sr... | Converts a pyproj . Proj to a cartopy . crs . Projection |
13,673 | def load_tiff ( filename , crs = None , apply_transform = False , nan_nodata = False , ** kwargs ) : try : import xarray as xr except : raise ImportError ( 'Loading tiffs requires xarray to be installed' ) with warnings . catch_warnings ( ) : warnings . filterwarnings ( 'ignore' ) da = xr . open_rasterio ( filename ) r... | Returns an RGB or Image element loaded from a geotiff file . |
13,674 | def teardown_handles ( self ) : if not isinstance ( self . handles . get ( 'artist' ) , GoogleTiles ) : self . handles [ 'artist' ] . remove ( ) | If no custom update_handles method is supplied this method is called to tear down any previous handles before replacing them . |
13,675 | def find_geom ( geom , geoms ) : for i , g in enumerate ( geoms ) : if g is geom : return i | Returns the index of a geometry in a list of geometries avoiding expensive equality checks of in operator . |
13,676 | def compute_zoom_level ( bounds , domain , levels ) : area_fraction = min ( bounds . area / domain . area , 1 ) return int ( min ( round ( np . log2 ( 1 / area_fraction ) ) , levels ) ) | Computes a zoom level given a bounds polygon a polygon of the overall domain and the number of zoom levels to divide the data into . |
13,677 | def bounds_to_poly ( bounds ) : x0 , y0 , x1 , y1 = bounds return Polygon ( [ ( x0 , y0 ) , ( x1 , y0 ) , ( x1 , y1 ) , ( x0 , y1 ) ] ) | Constructs a shapely Polygon from the provided bounds tuple . |
13,678 | def get_assignment ( self ) : try : plan = json . loads ( open ( self . args . plan_file_path ) . read ( ) ) return plan_to_assignment ( plan ) except IOError : self . log . exception ( 'Given json file {file} not found.' . format ( file = self . args . plan_file_path ) , ) raise except ValueError : self . log . except... | Parse the given json plan in dict format . |
13,679 | def generate_requests ( hosts , jolokia_port , jolokia_prefix ) : session = FuturesSession ( ) for host in hosts : url = "http://{host}:{port}/{prefix}/read/{key}" . format ( host = host , port = jolokia_port , prefix = jolokia_prefix , key = UNDER_REPL_KEY , ) yield host , session . get ( url ) | Return a generator of requests to fetch the under replicated partition number from the specified hosts . |
13,680 | def read_cluster_status ( hosts , jolokia_port , jolokia_prefix ) : under_replicated = 0 missing_brokers = 0 for host , request in generate_requests ( hosts , jolokia_port , jolokia_prefix ) : try : response = request . result ( ) if 400 <= response . status_code <= 599 : print ( "Got status code {0}. Exiting." . forma... | Read and return the number of under replicated partitions and missing brokers from the specified hosts . |
13,681 | def print_brokers ( cluster_config , brokers ) : print ( "Will restart the following brokers in {0}:" . format ( cluster_config . name ) ) for id , host in brokers : print ( " {0}: {1}" . format ( id , host ) ) | Print the list of brokers that will be restarted . |
13,682 | def ask_confirmation ( ) : while True : print ( "Do you want to restart these brokers? " , end = "" ) choice = input ( ) . lower ( ) if choice in [ 'yes' , 'y' ] : return True elif choice in [ 'no' , 'n' ] : return False else : print ( "Please respond with 'yes' or 'no'" ) | Ask for confirmation to the user . Return true if the user confirmed the execution false otherwise . |
13,683 | def start_broker ( host , connection , start_command , verbose ) : _ , stdout , stderr = connection . sudo_command ( start_command ) if verbose : report_stdout ( host , stdout ) report_stderr ( host , stderr ) | Execute the start |
13,684 | def stop_broker ( host , connection , stop_command , verbose ) : _ , stdout , stderr = connection . sudo_command ( stop_command ) if verbose : report_stdout ( host , stdout ) report_stderr ( host , stderr ) | Execute the stop |
13,685 | def wait_for_stable_cluster ( hosts , jolokia_port , jolokia_prefix , check_interval , check_count , unhealthy_time_limit , ) : stable_counter = 0 max_checks = int ( math . ceil ( unhealthy_time_limit / check_interval ) ) for i in itertools . count ( ) : partitions , brokers = read_cluster_status ( hosts , jolokia_port... | Block the caller until the cluster can be considered stable . |
13,686 | def execute_rolling_restart ( brokers , jolokia_port , jolokia_prefix , check_interval , check_count , unhealthy_time_limit , skip , verbose , pre_stop_task , post_stop_task , start_command , stop_command , ssh_password = None ) : all_hosts = [ b [ 1 ] for b in brokers ] for n , host in enumerate ( all_hosts [ skip : ]... | Execute the rolling restart on the specified brokers . It checks the number of under replicated partitions on each broker using Jolokia . |
13,687 | def validate_opts ( opts , brokers_num ) : if opts . skip < 0 or opts . skip >= brokers_num : print ( "Error: --skip must be >= 0 and < #brokers" ) return True if opts . check_count < 0 : print ( "Error: --check-count must be >= 0" ) return True if opts . unhealthy_time_limit < 0 : print ( "Error: --unhealthy-time-limi... | Basic option validation . Returns True if the options are not valid False otherwise . |
13,688 | def validate_broker_ids_subset ( broker_ids , subset_ids ) : all_ids = set ( broker_ids ) valid = True for subset_id in subset_ids : valid = valid and subset_id in all_ids if subset_id not in all_ids : print ( "Error: user specified broker id {0} does not exist in cluster." . format ( subset_id ) ) return valid | Validate that user specified broker ids to restart exist in the broker ids retrieved from cluster config . |
13,689 | def run ( self , cluster_config , rg_parser , partition_measurer , cluster_balancer , args , ) : self . cluster_config = cluster_config self . args = args with ZK ( self . cluster_config ) as self . zk : self . log . debug ( 'Starting %s for cluster: %s and zookeeper: %s' , self . __class__ . __name__ , self . cluster_... | Initialize cluster_config args and zk then call run_command . |
13,690 | def execute_plan ( self , plan , allow_rf_change = False ) : if self . should_execute ( ) : result = self . zk . execute_plan ( plan , allow_rf_change = allow_rf_change ) if not result : self . log . error ( 'Plan execution unsuccessful.' ) sys . exit ( 1 ) else : self . log . info ( 'Plan sent to zookeeper for reassig... | Save proposed - plan and execute the same if requested . |
13,691 | def should_execute ( self ) : return self . args . apply and ( self . args . no_confirm or self . confirm_execution ( ) ) | Confirm if proposed - plan should be executed . |
13,692 | def is_reassignment_pending ( self ) : in_progress_plan = self . zk . get_pending_plan ( ) if in_progress_plan : in_progress_partitions = in_progress_plan [ 'partitions' ] self . log . info ( 'Previous re-assignment in progress for {count} partitions.' ' Current partitions in re-assignment queue: {partitions}' . format... | Return True if there are reassignment tasks pending . |
13,693 | def get_reduced_assignment ( self , original_assignment , cluster_topology , max_partition_movements , max_leader_only_changes , max_movement_size = DEFAULT_MAX_MOVEMENT_SIZE , force_progress = False , ) : new_assignment = cluster_topology . assignment if ( not original_assignment or not new_assignment or max_partition... | Reduce the assignment based on the total actions . |
13,694 | def _extract_actions_unique_topics ( self , movement_counts , max_movements , cluster_topology , max_movement_size ) : topic_actions = defaultdict ( list ) for t_p , replica_change_cnt in movement_counts : topic_actions [ t_p [ 0 ] ] . append ( ( t_p , replica_change_cnt ) ) extracted_actions = [ ] curr_movements = 0 c... | Extract actions limiting to given max value such that the resultant has the minimum possible number of duplicate topics . |
13,695 | def confirm_execution ( self ) : permit = '' while permit . lower ( ) not in ( 'yes' , 'no' ) : permit = input ( 'Execute Proposed Plan? [yes/no] ' ) if permit . lower ( ) == 'yes' : return True else : return False | Confirm from the user whether the proposed - plan should be executed . |
13,696 | def write_json_plan ( self , proposed_layout , proposed_plan_file ) : with open ( proposed_plan_file , 'w' ) as output : json . dump ( proposed_layout , output ) | Dump proposed json plan to given output file for future usage . |
13,697 | def swap_leader ( self , new_leader ) : assert ( new_leader in self . _replicas ) curr_leader = self . leader idx = self . _replicas . index ( new_leader ) self . _replicas [ 0 ] , self . _replicas [ idx ] = self . _replicas [ idx ] , self . _replicas [ 0 ] return curr_leader | Change the preferred leader with one of given replicas . |
13,698 | def replace ( self , source , dest ) : for i , broker in enumerate ( self . replicas ) : if broker == source : self . replicas [ i ] = dest return | Replace source broker with destination broker in replica set if found . |
13,699 | def count_siblings ( self , partitions ) : count = sum ( int ( self . topic == partition . topic ) for partition in partitions ) return count | Count siblings of partition in given partition - list . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.