idx: int64 (0 to 63k)
question: string (lengths 61 to 4.03k)
target: string (lengths 6 to 1.23k)
4,400
def content(self, **args):
    self.gist_name = ''
    if 'name' in args:
        self.gist_name = args['name']
        self.gist_id = self.getMyID(self.gist_name)
    elif 'id' in args:
        self.gist_id = args['id']
    else:
        raise Exception("Either provide the authenticated user's unambiguous gist name or a unique gist ID")
    if self.gist_id:
        r = requests.get('%s/gists/%s' % (BASE_URL, self.gist_id), headers=self.gist.header)
        if r.status_code == 200:
            if self.gist_name != '':
                content = r.json()['files'][self.gist_name]['content']
            else:
                # no name given: fall back to the content of the last file listed
                for key, value in r.json()['files'].items():  # .iteritems() is Python 2; use .items()
                    content = r.json()['files'][value['filename']]['content']
            return content
    raise Exception('No such gist found')
Doesn't require manual fetching of the gistID of a gist; passing gistName will return the content of the gist. In case names are ambiguous, provide the GistID, or it will return the contents of the most recent gist with that ambiguous name.
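A minimal usage sketch of the two lookup paths (the client object and its construction are assumptions; only the content() keywords come from the code above):

    # hypothetical authenticated client exposing the content() method shown above
    text = client.content(name='notes.md')            # resolves the gist ID internally via getMyID
    text = client.content(id='aa5a315d61ae9438b18d')  # skips the name lookup entirely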
4,401
def edit(self, **args):
    self.gist_name = ''
    if 'description' in args:
        self.description = args['description']
    else:
        self.description = ''
    if 'name' in args and 'id' in args:
        self.gist_name = args['name']
        self.gist_id = args['id']
    elif 'name' in args:
        self.gist_name = args['name']
        self.gist_id = self.getMyID(self.gist_name)
    elif 'id' in args:
        self.gist_id = args['id']
    else:
        raise Exception('Gist Name/ID must be provided')
    if 'content' in args:
        self.content = args['content']
    else:
        raise Exception("Gist content can't be empty")
    if self.gist_name == '':
        self.gist_name = self.getgist(id=self.gist_id)
    data = {"description": self.description,
            "files": {self.gist_name: {"content": self.content}}}
    if self.gist_id:
        r = requests.patch('%s/gists/%s' % (BASE_URL, self.gist_id),
                           headers=self.gist.header, data=json.dumps(data))
        if r.status_code == 200:
            response = {'updated_content': self.content,
                        'created_at': r.json()['created_at'],
                        'comments': r.json()['comments']}
            return response
    raise Exception('No such gist found')
Doesn't require manual fetching of the gistID of a gist; passing gistName will edit the gist.
4,402
def starred(self, **args):
    ids = []
    r = requests.get('%s/gists/starred' % BASE_URL, headers=self.gist.header)
    if 'limit' in args:
        limit = args['limit']
    else:
        limit = len(r.json())
    if r.status_code == 200:
        for g in range(0, limit):
            ids.append('%s/%s/%s' % (GIST_URL, r.json()[g]['user']['login'], r.json()[g]['id']))
        return ids
    raise Exception('Username not found')
List the authenticated user's starred gists.
4,403
def links(self, **args):
    if 'name' in args:
        self.gist_name = args['name']
        self.gist_id = self.getMyID(self.gist_name)
    elif 'id' in args:
        self.gist_id = args['id']
    else:
        raise Exception('Gist Name/ID must be provided')
    if self.gist_id:
        r = requests.get('%s/gists/%s' % (BASE_URL, self.gist_id), headers=self.gist.header)
        if r.status_code == 200:
            content = {
                'Github-User': r.json()['user']['login'],
                'GistID': r.json()['id'],
                'Gist-Link': '%s/%s/%s' % (GIST_URL, self.gist.username, r.json()['id']),
                'Clone-Link': '%s/%s.git' % (GIST_URL, r.json()['id']),
                # original string was missing the closing ">" of the opening tag
                'Embed-Script': '<script src="%s/%s/%s.js"></script>' % (GIST_URL, self.gist.username, r.json()['id']),
            }
            return content
    raise Exception('No such gist found')
Return the Gist-Link, Clone-Link, and Embed-Script link for a gist.
4,404
def load_forecasts(self):
    run_date_str = self.run_date.strftime("%Y%m%d")
    forecast_file = self.forecast_path + "{0}/{1}_{2}_{3}_consensus_{0}.nc".format(
        run_date_str, self.ensemble_name, self.model_name, self.forecast_variable)
    print("Forecast file: " + forecast_file)
    forecast_data = Dataset(forecast_file)
    for size_threshold in self.size_thresholds:
        for smoothing_radius in self.smoothing_radii:
            for neighbor_radius in self.neighbor_radii:
                hour_var = "neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}".format(
                    neighbor_radius, smoothing_radius, self.forecast_variable, float(size_threshold))
                period_var = "neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}".format(
                    self.end_hour - self.start_hour + 1, neighbor_radius, smoothing_radius,
                    self.forecast_variable, float(size_threshold))
                print("Loading forecasts {0} {1} {2} {3} {4}".format(
                    self.run_date, self.model_name, self.forecast_variable, size_threshold, smoothing_radius))
                if hour_var in forecast_data.variables.keys():
                    self.hourly_forecasts[hour_var] = forecast_data.variables[hour_var][:]
                if period_var in forecast_data.variables.keys():
                    self.period_forecasts[period_var] = forecast_data.variables[period_var][:]
    forecast_data.close()
Load neighborhood probability forecasts.
4,405
def load_coordinates(self):
    coord_file = Dataset(self.coordinate_file)
    if "lon" in coord_file.variables.keys():
        self.coordinates["lon"] = coord_file.variables["lon"][:]
        self.coordinates["lat"] = coord_file.variables["lat"][:]
    else:
        # WRF-style files store 2D coordinates as XLONG/XLAT with a leading time dimension
        self.coordinates["lon"] = coord_file.variables["XLONG"][0]
        self.coordinates["lat"] = coord_file.variables["XLAT"][0]
    coord_file.close()
Loads lat-lon coordinates from a netCDF file.
4,406
def evaluate_hourly_forecasts(self):
    score_columns = ["Run_Date", "Forecast_Hour", "Ensemble Name", "Model_Name", "Forecast_Variable",
                     "Neighbor_Radius", "Smoothing_Radius", "Size_Threshold", "ROC", "Reliability"]
    all_scores = pd.DataFrame(columns=score_columns)
    for h, hour in enumerate(range(self.start_hour, self.end_hour + 1)):
        for neighbor_radius in self.neighbor_radii:
            n_filter = disk(neighbor_radius)
            for s, size_threshold in enumerate(self.size_thresholds):
                print("Eval hourly forecast {0:02d} {1} {2} {3} {4:d} {5:d}".format(
                    hour, self.model_name, self.forecast_variable, self.run_date,
                    neighbor_radius, size_threshold))
                # dilate the observed exceedances out to the neighborhood radius
                hour_obs = fftconvolve(self.raw_obs[self.mrms_variable][h] >= self.obs_thresholds[s],
                                       n_filter, mode="same")
                hour_obs[hour_obs > 1] = 1
                hour_obs[hour_obs < 1] = 0
                if self.obs_mask:
                    hour_obs = hour_obs[self.raw_obs[self.mask_variable][h] > 0]
                for smoothing_radius in self.smoothing_radii:
                    hour_var = "neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}".format(
                        neighbor_radius, smoothing_radius, self.forecast_variable, size_threshold)
                    if self.obs_mask:
                        hour_forecast = self.hourly_forecasts[hour_var][h][self.raw_obs[self.mask_variable][h] > 0]
                    else:
                        hour_forecast = self.hourly_forecasts[hour_var][h]
                    roc = DistributedROC(thresholds=self.probability_levels, obs_threshold=0.5)
                    roc.update(hour_forecast, hour_obs)
                    rel = DistributedReliability(thresholds=self.probability_levels, obs_threshold=0.5)
                    rel.update(hour_forecast, hour_obs)
                    row = [self.run_date, hour, self.ensemble_name, self.model_name, self.forecast_variable,
                           neighbor_radius, smoothing_radius, size_threshold, roc, rel]
                    all_scores.loc[hour_var + "_{0:d}".format(hour)] = row
    return all_scores
Calculates ROC curves and Reliability scores for each forecast hour.
4,407
def evaluate_period_forecasts(self):
    score_columns = ["Run_Date", "Ensemble Name", "Model_Name", "Forecast_Variable",
                     "Neighbor_Radius", "Smoothing_Radius", "Size_Threshold", "ROC", "Reliability"]
    all_scores = pd.DataFrame(columns=score_columns)
    if self.coordinate_file is not None:
        coord_mask = np.where((self.coordinates["lon"] >= self.lon_bounds[0]) &
                              (self.coordinates["lon"] <= self.lon_bounds[1]) &
                              (self.coordinates["lat"] >= self.lat_bounds[0]) &
                              (self.coordinates["lat"] <= self.lat_bounds[1]) &
                              (self.period_obs[self.mask_variable] > 0))
    else:
        coord_mask = None
    for neighbor_radius in self.neighbor_radii:
        n_filter = disk(neighbor_radius)
        for s, size_threshold in enumerate(self.size_thresholds):
            period_obs = fftconvolve(self.period_obs[self.mrms_variable] >= self.obs_thresholds[s],
                                     n_filter, mode="same")
            period_obs[period_obs > 1] = 1
            if self.obs_mask and self.coordinate_file is None:
                period_obs = period_obs[self.period_obs[self.mask_variable] > 0]
            elif self.obs_mask and self.coordinate_file is not None:
                period_obs = period_obs[coord_mask[0], coord_mask[1]]
            else:
                period_obs = period_obs.ravel()
            for smoothing_radius in self.smoothing_radii:
                print("Eval period forecast {0} {1} {2} {3} {4} {5}".format(
                    self.model_name, self.forecast_variable, self.run_date,
                    neighbor_radius, size_threshold, smoothing_radius))
                period_var = "neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}".format(
                    self.end_hour - self.start_hour + 1, neighbor_radius, smoothing_radius,
                    self.forecast_variable, size_threshold)
                if self.obs_mask and self.coordinate_file is None:
                    period_forecast = self.period_forecasts[period_var][self.period_obs[self.mask_variable] > 0]
                elif self.obs_mask and self.coordinate_file is not None:
                    period_forecast = self.period_forecasts[period_var][coord_mask[0], coord_mask[1]]
                else:
                    period_forecast = self.period_forecasts[period_var].ravel()
                roc = DistributedROC(thresholds=self.probability_levels, obs_threshold=0.5)
                roc.update(period_forecast, period_obs)
                rel = DistributedReliability(thresholds=self.probability_levels, obs_threshold=0.5)
                rel.update(period_forecast, period_obs)
                row = [self.run_date, self.ensemble_name, self.model_name, self.forecast_variable,
                       neighbor_radius, smoothing_radius, size_threshold, roc, rel]
                all_scores.loc[period_var] = row
    return all_scores
Evaluates ROC and Reliability scores for forecasts over the full period from the start hour to the end hour.
4,408
def bootstrap_main(args):
    version_info = sys.version_info
    if version_info.major != 3 or version_info.minor < 6:
        return None, "python36"
    main_fn = load_module_as_package("nionui_app.nionswift")
    if main_fn:
        return main_fn(["nionui_app.nionswift"] + args, {"pyqt": None}), None
    return None, "main"
Main function explicitly called from the C++ code. Return the main application object.
4,409
def _migrate_library(workspace_dir: pathlib.Path, do_logging: bool = True) -> pathlib.Path:
    library_path_11 = workspace_dir / "Nion Swift Workspace.nslib"
    library_path_12 = workspace_dir / "Nion Swift Library 12.nslib"
    library_path_13 = workspace_dir / "Nion Swift Library 13.nslib"
    library_paths = (library_path_11, library_path_12)
    library_path_latest = library_path_13
    if not os.path.exists(library_path_latest):
        # copy the newest existing library file forward to the latest name
        for library_path in reversed(library_paths):
            if os.path.exists(library_path):
                if do_logging:
                    logging.info("Migrating library: %s -> %s", library_path, library_path_latest)
                shutil.copyfile(library_path, library_path_latest)
                break
    return library_path_latest
Migrate library to latest version.
4,410
def merge_input_csv_forecast_json(input_csv_file, forecast_json_path, condition_models, dist_models):
    try:
        run_date = input_csv_file[:-4].split("_")[-1]
        print(run_date)
        ens_member = "_".join(input_csv_file.split("/")[-1][:-4].split("_")[3:-1])
        ens_name = input_csv_file.split("/")[-1].split("_")[2]
        input_data = pd.read_csv(input_csv_file, index_col="Step_ID")
        full_json_path = forecast_json_path + "{0}/{1}/".format(run_date, ens_member)
        track_ids = sorted(input_data["Track_ID"].unique())
        model_pred_cols = []
        condition_models_ns = []
        dist_models_ns = []
        gamma_params = ["Shape", "Location", "Scale"]
        for condition_model in condition_models:
            model_pred_cols.append(condition_model.replace(" ", "-") + "_Condition")
            condition_models_ns.append(condition_model.replace(" ", "-"))
        for dist_model in dist_models:
            dist_models_ns.append(dist_model.replace(" ", "-"))
            for param in gamma_params:
                model_pred_cols.append(dist_model.replace(" ", "-") + "_" + param)
        pred_data = pd.DataFrame(index=input_data.index, columns=model_pred_cols, dtype=float)
        for track_id in track_ids:
            track_id_num = track_id.split("_")[-1]
            json_filename = full_json_path + "{0}_{1}_{2}_model_track_{3}.json".format(
                ens_name, run_date, ens_member, track_id_num)
            with open(json_filename) as json_file:
                json_data = json.load(json_file)
            for s, step in enumerate(json_data["features"]):
                step_id = track_id + "_{0:02d}".format(s)
                for cond_model in condition_models_ns:
                    pred_data.loc[step_id, cond_model + "_Condition"] = step["properties"]["condition_" + cond_model]
                for dist_model in dist_models_ns:
                    cols = [dist_model + "_" + p for p in gamma_params]
                    pred_data.loc[step_id, cols] = step["properties"]["dist_" + dist_model]
        out_data = input_data.merge(pred_data, left_index=True, right_index=True)
        return out_data, ens_name, ens_member
    except Exception:
        print(traceback.format_exc())
        raise
Reads forecasts from JSON files and merges them with the input data from the step CSV files.
4,411
def mark_data_dirty(self):
    self.__cache.set_cached_value_dirty(self.__display_item, self.__cache_property_name)
    self.__initialize_cache()
    self.__cached_value_dirty = True
Called from item to indicate its data or metadata has changed.
4,412
def recompute_if_necessary(self, ui):
    self.__initialize_cache()
    if self.__cached_value_dirty:
        with self.__is_recomputing_lock:
            is_recomputing = self.__is_recomputing
            self.__is_recomputing = True
        if is_recomputing:
            pass  # another thread is already recomputing
        else:
            def recompute():
                try:
                    if self.__recompute_thread_cancel.wait(0.01):
                        return
                    # throttle recomputation to at most once per minimum_time
                    minimum_time = 0.5
                    current_time = time.time()
                    if current_time < self.__cached_value_time + minimum_time:
                        if self.__recompute_thread_cancel.wait(self.__cached_value_time + minimum_time - current_time):
                            return
                    self.recompute_data(ui)
                finally:
                    self.__is_recomputing = False
                    self.__recompute_thread = None
            with self.__is_recomputing_lock:
                self.__recompute_thread = threading.Thread(target=recompute)
                self.__recompute_thread.start()
Recompute the data on a thread if necessary.
4,413
def recompute_data(self, ui):
    self.__initialize_cache()
    with self.__recompute_lock:
        if self.__cached_value_dirty:
            try:
                calculated_data = self.get_calculated_data(ui)
            except Exception:
                import traceback
                traceback.print_exc()
                traceback.print_stack()
                raise
            self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
            self.__cached_value = calculated_data
            self.__cached_value_dirty = False
            self.__cached_value_time = time.time()
        else:
            calculated_data = None
        if calculated_data is None:
            calculated_data = self.get_default_data()
            if calculated_data is not None:
                self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
                self.__cached_value = calculated_data
                self.__cached_value_dirty = False
                self.__cached_value_time = time.time()
            else:
                self.__cache.remove_cached_value(self.__display_item, self.__cache_property_name)
                self.__cached_value = None
                self.__cached_value_dirty = None
                self.__cached_value_time = 0
        # temporarily release the lock so the listener isn't called while holding it
        self.__recompute_lock.release()
        if callable(self.on_thumbnail_updated):
            self.on_thumbnail_updated()
        self.__recompute_lock.acquire()
Compute the data associated with this processor.
4,414
def thumbnail_source_for_display_item(self, ui, display_item: DisplayItem.DisplayItem) -> ThumbnailSource:
    with self.__lock:
        thumbnail_source = self.__thumbnail_sources.get(display_item)
        if not thumbnail_source:
            thumbnail_source = ThumbnailSource(ui, display_item)
            self.__thumbnail_sources[display_item] = thumbnail_source

            def will_delete(thumbnail_source):
                del self.__thumbnail_sources[thumbnail_source._display_item]

            thumbnail_source._on_will_delete = will_delete
        else:
            assert thumbnail_source._ui == ui
        return thumbnail_source.add_ref()
Returned ThumbnailSource must be closed.
4,415
def getMyID(self, gist_name):
    r = requests.get('%s/users/%s/gists' % (BASE_URL, self.user), headers=self.gist.header)
    if r.status_code == 200:
        r_text = json.loads(r.text)
        limit = len(r.json())
        for g, no in zip(r_text, range(0, limit)):
            for ka, va in r.json()[no]['files'].items():  # .iteritems() is Python 2; use .items()
                if str(va['filename']) == str(gist_name):
                    return r.json()[no]['id']
        return 0
    raise Exception('Username not found')
Get the gistID of a gist in order to make the workflow easy and uninterrupted.
4,416
def close(self):
    assert self.__closed == False
    self.__closed = True
    self.finish_periodic()
    # request close on any open dialogs
    for weak_dialog in self.__dialogs:
        dialog = weak_dialog()
        if dialog:
            try:
                dialog.request_close()
            except Exception:
                pass
    self._file_menu = None
    self._edit_menu = None
    self._processing_menu = None
    self._view_menu = None
    self._window_menu = None
    self._help_menu = None
    self._library_menu = None
    self._processing_arithmetic_menu = None
    self._processing_reduce_menu = None
    self._processing_transform_menu = None
    self._processing_filter_menu = None
    self._processing_fourier_menu = None
    self._processing_graphics_menu = None
    self._processing_sequence_menu = None
    self._processing_redimension_menu = None
    self._display_type_menu = None
    if self.__workspace_controller:
        self.__workspace_controller.close()
        self.__workspace_controller = None
    self.__call_soon_event_listener.close()
    self.__call_soon_event_listener = None
    self.__filtered_display_items_model.close()
    self.__filtered_display_items_model = None
    self.filter_controller.close()
    self.filter_controller = None
    self.__display_items_model.close()
    self.__display_items_model = None
    self.document_model.remove_ref()
    self.document_model = None
    self.did_close_event.fire(self)
    self.did_close_event = None
    super().close()
Close the document controller.
4,417
def add_periodic(self, interval: float, listener_fn):
    class PeriodicListener:
        def __init__(self, interval: float, listener_fn):
            self.interval = interval
            self.__listener_fn = listener_fn
            if callable(listener_fn):
                self.call = self.__listener_fn
            else:
                def void(*args, **kwargs):
                    pass
                self.call = void
            self.next_scheduled_time = time.time() + interval

        def close(self):
            self.__listener_fn = None
            def void(*args, **kwargs):
                pass
            self.call = void

    listener = PeriodicListener(interval, listener_fn)

    def remove_listener(weak_listener):
        with self.__weak_periodic_listeners_mutex:
            self.__weak_periodic_listeners.remove(weak_listener)

    # hold only a weak reference so a dropped token unregisters itself
    weak_listener = weakref.ref(listener, remove_listener)
    with self.__weak_periodic_listeners_mutex:
        self.__weak_periodic_listeners.append(weak_listener)
    return listener
Add a listener function and return a listener token. The token can be closed or deleted to unlisten.
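A small usage sketch (the owning object `dispatcher` is a stand-in; only add_periodic's contract comes from the code above):

    listener = dispatcher.add_periodic(0.5, lambda: print("tick"))
    # ... later: stop listening by closing (or simply dropping) the token
    listener.close()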
4,418
def __update_display_items_model(self, display_items_model: ListModel.FilteredListModel,
                                 data_group: typing.Optional[DataGroup.DataGroup],
                                 filter_id: typing.Optional[str]) -> None:
    with display_items_model.changes():  # change container, filter, and sort together
        if data_group is not None:
            display_items_model.container = data_group
            display_items_model.filter = ListModel.Filter(True)
            display_items_model.sort_key = None
            display_items_model.filter_id = None
        elif filter_id == "latest-session":
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.EqFilter("session_id", self.document_model.session_id)
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = filter_id
        elif filter_id == "temporary":
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.NotEqFilter("category", "persistent")
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = filter_id
        elif filter_id == "none":
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.Filter(False)
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = filter_id
        else:
            # default: persistent items only
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.EqFilter("category", "persistent")
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = None
Update the display items model with a new container, filter, and sorting.
4,419
def focused_data_item(self) -> typing.Optional[DataItem.DataItem]:
    return self.__focused_display_item.data_item if self.__focused_display_item else None
Return the data item with keyboard focus.
4,420
def selected_display_item(self) -> typing.Optional[DisplayItem.DisplayItem]:
    display_item = self.focused_display_item
    if not display_item:
        selected_display_panel = self.selected_display_panel
        display_item = selected_display_panel.display_item if selected_display_panel else None
    return display_item
Return the selected display item.
4,421
def _get_two_data_sources(self):
    selected_display_items = self.selected_display_items
    if len(selected_display_items) < 2:
        selected_display_items = list()
        display_item = self.selected_display_item
        if display_item:
            selected_display_items.append(display_item)
    if len(selected_display_items) == 1:
        display_item = selected_display_items[0]
        data_item = display_item.data_item if display_item else None
        if display_item and len(display_item.graphic_selection.indexes) == 2:
            # two graphics selected on a single display: use them as the two crops
            index1 = display_item.graphic_selection.anchor_index
            index2 = list(display_item.graphic_selection.indexes.difference({index1}))[0]
            graphic1 = display_item.graphics[index1]
            graphic2 = display_item.graphics[index2]
            if data_item:
                if data_item.is_datum_1d and isinstance(graphic1, Graphics.IntervalGraphic) and isinstance(graphic2, Graphics.IntervalGraphic):
                    crop_graphic1 = graphic1
                    crop_graphic2 = graphic2
                elif data_item.is_datum_2d and isinstance(graphic1, Graphics.RectangleTypeGraphic) and isinstance(graphic2, Graphics.RectangleTypeGraphic):
                    crop_graphic1 = graphic1
                    crop_graphic2 = graphic2
                else:
                    crop_graphic1 = self.__get_crop_graphic(display_item)
                    crop_graphic2 = crop_graphic1
            else:
                crop_graphic1 = self.__get_crop_graphic(display_item)
                crop_graphic2 = crop_graphic1
        else:
            crop_graphic1 = self.__get_crop_graphic(display_item)
            crop_graphic2 = crop_graphic1
        return (display_item, crop_graphic1), (display_item, crop_graphic2)
    if len(selected_display_items) == 2:
        display_item1 = selected_display_items[0]
        crop_graphic1 = self.__get_crop_graphic(display_item1)
        display_item2 = selected_display_items[1]
        crop_graphic2 = self.__get_crop_graphic(display_item2)
        return (display_item1, crop_graphic1), (display_item2, crop_graphic2)
    return None
Get two sensible data sources, which may be the same.
4,422
def calculate_origin_and_size(canvas_size, data_shape, image_canvas_mode, image_zoom, image_position) -> typing.Tuple[typing.Any, typing.Any]:
    if data_shape is None:
        return None, None
    if image_canvas_mode == "fill":
        scale_h = float(data_shape[1]) / canvas_size[1]
        scale_v = float(data_shape[0]) / canvas_size[0]
        if scale_v < scale_h:
            image_canvas_size = (canvas_size[0], canvas_size[0] * data_shape[1] / data_shape[0])
        else:
            image_canvas_size = (canvas_size[1] * data_shape[0] / data_shape[1], canvas_size[1])
        image_canvas_origin = (canvas_size[0] * 0.5 - image_canvas_size[0] * 0.5,
                               canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5)
    elif image_canvas_mode == "fit":
        image_canvas_size = canvas_size
        image_canvas_origin = (0, 0)
    elif image_canvas_mode == "1:1":
        image_canvas_size = data_shape
        image_canvas_origin = (canvas_size[0] * 0.5 - image_canvas_size[0] * 0.5,
                               canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5)
    elif image_canvas_mode == "2:1":
        image_canvas_size = (data_shape[0] * 0.5, data_shape[1] * 0.5)
        image_canvas_origin = (canvas_size[0] * 0.5 - image_canvas_size[0] * 0.5,
                               canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5)
    else:
        # custom zoom/position mode
        image_canvas_size = (canvas_size[0] * image_zoom, canvas_size[1] * image_zoom)
        canvas_rect = Geometry.fit_to_size(((0, 0), image_canvas_size), data_shape)
        image_canvas_origin_y = (canvas_size[0] * 0.5) - image_position[0] * canvas_rect[1][0] - canvas_rect[0][0]
        image_canvas_origin_x = (canvas_size[1] * 0.5) - image_position[1] * canvas_rect[1][1] - canvas_rect[0][1]
        image_canvas_origin = (image_canvas_origin_y, image_canvas_origin_x)
    return image_canvas_origin, image_canvas_size
Calculate origin and size for the canvas size, data shape, and image display parameters.
4,423
def auto_migrate_storage_system(*, persistent_storage_system=None, new_persistent_storage_system=None,
                                data_item_uuids=None, deletions: typing.List[uuid.UUID] = None,
                                utilized_deletions: typing.Set[uuid.UUID] = None, ignore_older_files: bool = True):
    storage_handlers = persistent_storage_system.find_data_items()
    ReaderInfo = collections.namedtuple("ReaderInfo",
                                        ["properties", "changed_ref", "large_format", "storage_handler", "identifier"])
    reader_info_list = list()
    for storage_handler in storage_handlers:
        try:
            large_format = isinstance(storage_handler, HDF5Handler.HDF5Handler)
            properties = Migration.transform_to_latest(storage_handler.read_properties())
            reader_info = ReaderInfo(properties, [False], large_format, storage_handler, storage_handler.reference)
            reader_info_list.append(reader_info)
        except Exception:
            logging.debug("Error reading %s", storage_handler.reference)
            import traceback
            traceback.print_exc()
            traceback.print_stack()
    library_storage_properties = persistent_storage_system.library_storage_properties
    for deletion in copy.deepcopy(library_storage_properties.get("data_item_deletions", list())):
        if deletion not in deletions:
            deletions.append(deletion)
    preliminary_library_updates = dict()
    library_updates = dict()
    if not ignore_older_files:
        Migration.migrate_to_latest(reader_info_list, preliminary_library_updates)
    good_reader_info_list = list()
    count = len(reader_info_list)
    for index, reader_info in enumerate(reader_info_list):
        storage_handler = reader_info.storage_handler
        properties = reader_info.properties
        try:
            version = properties.get("version", 0)
            if version == DataItem.DataItem.writer_version:
                data_item_uuid = uuid.UUID(properties["uuid"])
                if data_item_uuid not in data_item_uuids:
                    if str(data_item_uuid) in deletions:
                        utilized_deletions.add(data_item_uuid)
                    else:
                        auto_migrate_data_item(reader_info, persistent_storage_system,
                                               new_persistent_storage_system, index, count)
                        good_reader_info_list.append(reader_info)
                        data_item_uuids.add(data_item_uuid)
                        library_update = preliminary_library_updates.get(data_item_uuid)
                        if library_update:
                            library_updates[data_item_uuid] = library_update
        except Exception:
            logging.debug("Error reading %s", storage_handler.reference)
            import traceback
            traceback.print_exc()
            traceback.print_stack()
    return good_reader_info_list, library_updates
Migrate items from the storage system to the object context.
4,424
def from_pypirc(pypi_repository):
    ret = {}
    pypirc_locations = PYPIRC_LOCATIONS
    for pypirc_path in pypirc_locations:
        pypirc_path = os.path.expanduser(pypirc_path)
        if os.path.isfile(pypirc_path):
            # SafeConfigParser is a deprecated Python 2 alias; ConfigParser is the Python 3 name
            parser = configparser.ConfigParser()
            parser.read(pypirc_path)
            if 'distutils' not in parser.sections():
                continue
            if 'index-servers' not in parser.options('distutils'):
                continue
            if pypi_repository not in parser.get('distutils', 'index-servers'):
                continue
            if pypi_repository in parser.sections():
                for option in parser.options(pypi_repository):
                    ret[option] = parser.get(pypi_repository, option)
    if not ret:
        raise ConfigError('repository does not appear to be configured in pypirc ({})'.format(pypi_repository)
                          + ', remember that it needs an entry in [distutils] and its own section')
    return ret
Load configuration from a .pypirc file; cached to only run once.
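For reference, a call against a minimal .pypirc of the shape this function expects (repository name, URL, and credentials are placeholders):

    # ~/.pypirc
    # [distutils]
    # index-servers =
    #     myrepo
    #
    # [myrepo]
    # repository = https://pypi.example.org/
    # username = ci-bot
    # password = <token>
    config = from_pypirc('myrepo')
    # -> {'repository': 'https://pypi.example.org/', 'username': 'ci-bot', 'password': '<token>'}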
4,425
def pypirc_temp(index_url):
    pypirc_file = tempfile.NamedTemporaryFile(suffix='.pypirc', delete=False)
    print(pypirc_file.name)
    with open(pypirc_file.name, 'w') as fh:
        fh.write(PYPIRC_TEMPLATE.format(index_name=PYPIRC_TEMP_INDEX_NAME, index_url=index_url))
    return pypirc_file.name
Create a temporary pypirc file for interaction with twine.
4,426
def get_api(version: str, ui_version: str = None) -> API_1:
    ui_version = ui_version if ui_version else "~1.0"
    return _get_api_with_app(version, ui_version, ApplicationModule.app)
Get a versioned interface matching the given version and ui_version.
4,427
def mask_xdata_with_shape(self, shape: DataAndMetadata.ShapeType) -> DataAndMetadata.DataAndMetadata:
    mask = self._graphic.get_mask(shape)
    return DataAndMetadata.DataAndMetadata.from_data(mask)
Return the mask created by this graphic as extended data.
4,428
def data(self, data: numpy.ndarray) -> None:
    self.__data_item.set_data(numpy.copy(data))
Set the data.
4,429
def display_xdata(self) -> DataAndMetadata.DataAndMetadata:
    display_data_channel = self.__display_item.display_data_channel
    return display_data_channel.get_calculated_display_values(True).display_data_and_metadata
Return the extended data of this data item display.
4,430
def set_dimensional_calibrations(self, dimensional_calibrations: typing.List[CalibrationModule.Calibration]) -> None:
    self.__data_item.set_dimensional_calibrations(dimensional_calibrations)
Set the dimensional calibrations.
4,431
def graphics(self) -> typing.List[Graphic]:
    return [Graphic(graphic) for graphic in self.__display_item.graphics]
Return the graphics attached to this data item.
4,432
def add_point_region(self, y: float, x: float) -> Graphic:
    graphic = Graphics.PointGraphic()
    graphic.position = Geometry.FloatPoint(y, x)
    self.__display_item.add_graphic(graphic)
    return Graphic(graphic)
Add a point graphic to the data item.
4,433
def mask_xdata(self) -> DataAndMetadata.DataAndMetadata:
    display_data_channel = self.__display_item.display_data_channel
    shape = display_data_channel.display_data_shape
    mask = numpy.zeros(shape)
    for graphic in self.__display_item.graphics:
        if isinstance(graphic, (Graphics.SpotGraphic, Graphics.WedgeGraphic, Graphics.RingGraphic, Graphics.LatticeGraphic)):
            mask = numpy.logical_or(mask, graphic.get_mask(shape))
    return DataAndMetadata.DataAndMetadata.from_data(mask)
Return the mask, combining any mask graphics on this data item, as extended data.
4,434
def data_item(self) -> DataItem:
    display_panel = self.__display_panel
    if not display_panel:
        return None
    data_item = display_panel.data_item
    return DataItem(data_item) if data_item else None
Return the data item associated with this display panel.
4,435
def set_data_item(self, data_item: DataItem) -> None:
    display_panel = self.__display_panel
    if display_panel:
        display_item = data_item._data_item.container.get_display_item_for_data_item(data_item._data_item) if data_item._data_item.container else None
        display_panel.set_display_panel_display_item(display_item)
Set the data item associated with this display panel.
4,436
def add_data_item(self, data_item: DataItem) -> None:
    display_item = data_item._data_item.container.get_display_item_for_data_item(data_item._data_item) if data_item._data_item.container else None
    if display_item:
        self.__data_group.append_display_item(display_item)
Add a data item to the group.
4,437
def close(self) -> None:
    self.__data_channel_buffer.stop()
    self.__data_channel_buffer.close()
    self.__data_channel_buffer = None
    if not self.__was_playing:
        self.__hardware_source.stop_playing()
Close the task.
4,438
def record(self, frame_parameters: dict = None, channels_enabled: typing.List[bool] = None,
           timeout: float = None) -> typing.List[DataAndMetadata.DataAndMetadata]:
    if frame_parameters:
        self.__hardware_source.set_record_frame_parameters(
            self.__hardware_source.get_frame_parameters_from_dict(frame_parameters))
    if channels_enabled is not None:
        for channel_index, channel_enabled in enumerate(channels_enabled):
            self.__hardware_source.set_channel_enabled(channel_index, channel_enabled)
    self.__hardware_source.start_recording()
    return self.__hardware_source.get_next_xdatas_to_finish(timeout)
Record data and return a list of data_and_metadata objects.
4,439
def create_record_task(self, frame_parameters: dict = None,
                       channels_enabled: typing.List[bool] = None) -> RecordTask:
    return RecordTask(self.__hardware_source, frame_parameters, channels_enabled)
Create a record task for this hardware source.
4,440
def grab_next_to_finish(self, timeout: float = None) -> typing.List[DataAndMetadata.DataAndMetadata]:
    self.start_playing()
    return self.__hardware_source.get_next_xdatas_to_finish(timeout)
Grabs the next frame to finish and returns it as data and metadata.
4,441
def set_control_output(self, name: str, value: float, *, options: dict = None) -> None:
    self.__instrument.set_control_output(name, value, options)
Set the value of a control asynchronously.
4,442
def get_property_as_float(self, name: str) -> float:
    return float(self.__instrument.get_property(name))
Return the value of a float property.
4,443
def set_property_as_float(self, name: str, value: float) -> None:
    self.__instrument.set_property(name, float(value))
Set the value of a float property.
4,444
def data_items(self) -> typing.List[DataItem]:
    return [DataItem(data_item) for data_item in self.__document_model.data_items]
Return the list of data items.
4,445
def display_items(self) -> typing.List[Display]:
    return [Display(display_item) for display_item in self.__document_model.display_items]
Return the list of display items.
4,446
def get_source_data_items(self, data_item: DataItem) -> typing.List[DataItem]:
    # loop variable renamed to avoid shadowing the parameter
    return [DataItem(source) for source in self._document_model.get_source_data_items(data_item._data_item)] if data_item else None
Return the list of data items that are data sources for the data item.
4,447
def get_dependent_data_items(self, data_item: DataItem) -> typing.List[DataItem]:
    # loop variable renamed to avoid shadowing the parameter
    return [DataItem(dependent) for dependent in self._document_model.get_dependent_data_items(data_item._data_item)] if data_item else None
Return the list of data items that are dependent on the data item argument.
4,448
def create_data_item(self, title: str = None) -> DataItem:
    data_item = DataItemModule.DataItem()
    data_item.ensure_data_source()
    if title is not None:
        data_item.title = title
    self.__document_model.append_data_item(data_item)
    return DataItem(data_item)
Create an empty data item in the library.
4,449
def create_data_item_from_data(self, data: numpy.ndarray, title: str = None) -> DataItem:
    return self.create_data_item_from_data_and_metadata(DataAndMetadata.DataAndMetadata.from_data(data), title)
Create a data item in the library from an ndarray.
4,450
def create_data_item_from_data_and_metadata(self, data_and_metadata: DataAndMetadata.DataAndMetadata,
                                            title: str = None) -> DataItem:
    data_item = DataItemModule.new_data_item(data_and_metadata)
    if title is not None:
        data_item.title = title
    self.__document_model.append_data_item(data_item)
    return DataItem(data_item)
Create a data item in the library from a data and metadata object.
4,451
def copy_data_item(self, data_item: DataItem) -> DataItem:
    data_item_copy = copy.deepcopy(data_item._data_item)
    self.__document_model.append_data_item(data_item_copy)
    return DataItem(data_item_copy)
Copy a data item.
4,452
def snapshot_data_item(self, data_item: DataItem) -> DataItem:
    snapshot = data_item._data_item.snapshot()
    self.__document_model.append_data_item(snapshot)
    return DataItem(snapshot)
Snapshot a data item. Similar to copy but with a data snapshot.
4,453
def get_data_item_by_uuid(self, data_item_uuid: uuid_module.UUID) -> DataItem:
    data_item = self._document_model.get_data_item_by_uuid(data_item_uuid)
    return DataItem(data_item) if data_item else None
Get the data item with the given UUID.
4,454
def get_graphic_by_uuid(self, graphic_uuid: uuid_module.UUID) -> Graphic:
    for display_item in self._document_model.display_items:
        for graphic in display_item.graphics:
            if graphic.uuid == graphic_uuid:
                return Graphic(graphic)
    return None
Get the graphic with the given UUID.
4,455
def has_library_value(self, key: str) -> bool:
    desc = Metadata.session_key_map.get(key)
    if desc is not None:
        field_id = desc['path'][-1]
        return bool(getattr(ApplicationData.get_session_metadata_model(), field_id, None))
    return False
Return whether the library value for the given key exists.
4,456
def get_library_value(self, key: str) -> typing.Any:
    desc = Metadata.session_key_map.get(key)
    if desc is not None:
        field_id = desc['path'][-1]
        return getattr(ApplicationData.get_session_metadata_model(), field_id)
    raise KeyError()
Get the library value for the given key.
4,457
def set_library_value(self, key: str, value: typing.Any) -> None:
    desc = Metadata.session_key_map.get(key)
    if desc is not None:
        field_id = desc['path'][-1]
        setattr(ApplicationData.get_session_metadata_model(), field_id, value)
        return
    raise KeyError()
Set the library value for the given key.
4,458
def delete_library_value(self, key: str) -> None:
    desc = Metadata.session_key_map.get(key)
    if desc is not None:
        field_id = desc['path'][-1]
        setattr(ApplicationData.get_session_metadata_model(), field_id, None)
        return
    raise KeyError()
Delete the library value for the given key.
4,459
def all_display_panels(self) -> typing.List[DisplayPanel]:
    return [DisplayPanel(display_panel) for display_panel in self.__document_controller.workspace_controller.display_panels]
Return the list of display panels currently visible.
4,460
def get_display_panel_by_id(self, identifier: str) -> DisplayPanel:
    display_panel = next((display_panel for display_panel in self.__document_controller.workspace_controller.display_panels
                          if display_panel.identifier.lower() == identifier.lower()), None)
    return DisplayPanel(display_panel) if display_panel else None
Return the display panel with the given identifier.
4,461
def display_data_item(self, data_item: DataItem, source_display_panel=None, source_data_item=None):
    # reuse an existing display panel if the data item is already shown
    for display_panel in self.__document_controller.workspace_controller.display_panels:
        if display_panel.data_item == data_item._data_item:
            display_panel.request_focus()
            return DisplayPanel(display_panel)
    result_display_panel = self.__document_controller.next_result_display_panel()
    if result_display_panel:
        display_item = self.__document_controller.document_model.get_display_item_for_data_item(data_item._data_item)
        result_display_panel.set_display_panel_display_item(display_item)
        result_display_panel.request_focus()
        return DisplayPanel(result_display_panel)
    return None
Display a new data item and give it keyboard focus. Uses an existing display if the item is already displayed.
4,462
def create_data_item_from_data(self, data: numpy.ndarray, title: str = None) -> DataItem:
    return DataItem(self.__document_controller.add_data(data, title))
Create a data item in the library from data.
4,463
def create_data_item_from_data_and_metadata(self, data_and_metadata: DataAndMetadata.DataAndMetadata,
                                            title: str = None) -> DataItem:
    data_item = DataItemModule.new_data_item(data_and_metadata)
    if title is not None:
        data_item.title = title
    self.__document_controller.document_model.append_data_item(data_item)
    return DataItem(data_item)
Create a data item in the library from the data and metadata.
4,464
def document_windows(self) -> typing.List[DocumentWindow]:
    return [DocumentWindow(document_controller) for document_controller in self.__application.document_controllers]
Return the document windows.
4,465
def create_panel(self, panel_delegate):
    panel_id = panel_delegate.panel_id
    panel_name = panel_delegate.panel_name
    panel_positions = getattr(panel_delegate, "panel_positions", ["left", "right"])
    panel_position = getattr(panel_delegate, "panel_position", "none")
    properties = getattr(panel_delegate, "panel_properties", None)
    workspace_manager = Workspace.WorkspaceManager()

    def create_facade_panel(document_controller, panel_id, properties):
        panel = Panel(document_controller, panel_id, properties)
        ui = UserInterface(self.__ui_version, document_controller.ui)
        document_controller = DocumentWindow(document_controller)
        panel.widget = panel_delegate.create_panel_widget(ui, document_controller)._widget
        return panel

    class PanelReference:
        def __init__(self):
            self.__panel_delegate = panel_delegate
            workspace_manager.register_panel(create_facade_panel, panel_id, panel_name,
                                             panel_positions, panel_position, properties)

        def __del__(self):
            self.close()

        def close(self):
            if self.__panel_delegate:
                panel_delegate_close_fn = getattr(self.__panel_delegate, "close", None)
                if panel_delegate_close_fn:
                    panel_delegate_close_fn()
                workspace_manager.unregister_panel(panel_id)
                self.__panel_delegate = None

    return PanelReference()
Create a utility panel that can be attached to a window.
4,466
def get_hardware_source_by_id(self, hardware_source_id: str, version: str):
    actual_version = "1.0.0"
    if Utility.compare_versions(version, actual_version) > 0:
        raise NotImplementedError("Hardware API requested version %s is greater than %s." % (version, actual_version))
    hardware_source = HardwareSourceModule.HardwareSourceManager().get_hardware_source_for_hardware_source_id(hardware_source_id)
    return HardwareSource(hardware_source) if hardware_source else None
Return the hardware source API matching the hardware_source_id and version.
4,467
def library(self) -> Library:
    assert self.__app.document_model
    return Library(self.__app.document_model)
Return the library object.
4,468
def pad_matrix(self, matrix, pad_value=0):
    max_columns = 0
    total_rows = len(matrix)
    for row in matrix:
        max_columns = max(max_columns, len(row))
    total_rows = max(max_columns, total_rows)
    new_matrix = []
    for row in matrix:
        row_len = len(row)
        new_row = row[:]
        if total_rows > row_len:
            # pad the row out to the square dimension
            new_row += [pad_value] * (total_rows - row_len)
        new_matrix += [new_row]
    while len(new_matrix) < total_rows:
        new_matrix += [[pad_value] * total_rows]
    return new_matrix
Pad a possibly non-square matrix to make it square.
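A quick illustration of the padding (hypothetical Munkres-style instance `m`, since the host class isn't shown here):

    m.pad_matrix([[1, 2, 3],
                  [4, 5]])
    # -> [[1, 2, 3],
    #     [4, 5, 0],
    #     [0, 0, 0]]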
4,469
def __step1(self):
    C = self.C
    n = self.n
    for i in range(n):
        minval = min(self.C[i])
        # subtract the row minimum from every element in the row
        for j in range(n):
            self.C[i][j] -= minval
    return 2
For each row of the matrix, find the smallest element and subtract it from every element in its row. Go to Step 2.
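A worked example of this row reduction (values chosen arbitrarily):

    # before step 1:          row minima:
    # [[4, 1, 3],             1
    #  [2, 0, 5],             0
    #  [3, 2, 2]]             2
    #
    # after subtracting each row's minimum:
    # [[3, 0, 2],
    #  [2, 0, 5],
    #  [1, 0, 0]]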
4,470
def __step3(self):
    n = self.n
    count = 0
    for i in range(n):
        for j in range(n):
            if self.marked[i][j] == 1:
                self.col_covered[j] = True
                count += 1
    if count >= n:
        step = 7  # done
    else:
        step = 4
    return step
Cover each column containing a starred zero. If K columns are covered, the starred zeros describe a complete set of unique assignments; in this case go to DONE, otherwise go to Step 4.
4,471
def __step4(self):
    step = 0
    done = False
    row = -1
    col = -1
    star_col = -1
    while not done:
        (row, col) = self.__find_a_zero()
        if row < 0:
            done = True
            step = 6
        else:
            self.marked[row][col] = 2  # prime the zero
            star_col = self.__find_star_in_row(row)
            if star_col >= 0:
                col = star_col
                self.row_covered[row] = True
                self.col_covered[col] = False
            else:
                done = True
                self.Z0_r = row
                self.Z0_c = col
                step = 5
    return step
Find a noncovered zero and prime it. If there is no starred zero in the row containing this primed zero, go to Step 5. Otherwise, cover this row and uncover the column containing the starred zero. Continue in this manner until there are no uncovered zeros left. Save the smallest uncovered value and go to Step 6.
4,472
def __step6(self):
    minval = self.__find_smallest()
    for i in range(self.n):
        for j in range(self.n):
            if self.row_covered[i]:
                self.C[i][j] += minval
            if not self.col_covered[j]:
                self.C[i][j] -= minval
    return 4
Add the value found in Step 4 to every element of each covered row, and subtract it from every element of each uncovered column. Return to Step 4 without altering any stars, primes, or covered lines.
4,473
def __find_smallest(self):
    minval = sys.maxsize
    for i in range(self.n):
        for j in range(self.n):
            if (not self.row_covered[i]) and (not self.col_covered[j]):
                if minval > self.C[i][j]:
                    minval = self.C[i][j]
    return minval
Find the smallest uncovered value in the matrix.
4,474
def __find_a_zero(self):
    row = -1
    col = -1
    i = 0
    n = self.n
    done = False
    while not done:
        j = 0
        while True:
            if (self.C[i][j] == 0) and (not self.row_covered[i]) and (not self.col_covered[j]):
                row = i
                col = j
                done = True
            j += 1
            if j >= n:
                break
        i += 1
        if i >= n:
            done = True
    return (row, col)
Find the first uncovered element with value 0.
4,475
def __find_star_in_row(self, row):
    col = -1
    for j in range(self.n):
        if self.marked[row][j] == 1:
            col = j
            break
    return col
Find the first starred element in the specified row. Returns the column index, or -1 if no starred element was found.
4,476
def __find_star_in_col(self, col):
    row = -1
    for i in range(self.n):
        if self.marked[i][col] == 1:
            row = i
            break
    return row
Find the first starred element in the specified column. Returns the row index, or -1 if no starred element was found.
4,477
def __find_prime_in_row(self, row):
    col = -1
    for j in range(self.n):
        if self.marked[row][j] == 2:
            col = j
            break
    return col
Find the first prime element in the specified row. Returns the column index, or -1 if no prime element was found.
4,478
def __clear_covers(self):
    for i in range(self.n):
        self.row_covered[i] = False
        self.col_covered[i] = False
Clear all covered matrix cells.
4,479
def __erase_primes(self):
    for i in range(self.n):
        for j in range(self.n):
            if self.marked[i][j] == 2:
                self.marked[i][j] = 0
Erase all prime markings.
4,480
def update(self, a, b, c, d):
    self.table.ravel()[:] = [a, b, c, d]  # overwrite in place; no new array allocated
    self.N = self.table.sum()
Update the contingency table with new values without creating a new object.
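A minimal self-contained sketch (the 2x2 hit/false-alarm/miss/correct-negative layout is assumed from the a, b, c, d argument order):

    import numpy as np

    class ContingencyTable:  # stand-in for the host class
        def __init__(self):
            self.table = np.zeros((2, 2))
            self.N = 0

        def update(self, a, b, c, d):
            self.table.ravel()[:] = [a, b, c, d]
            self.N = self.table.sum()

    ct = ContingencyTable()
    ct.update(10, 3, 2, 85)
    # ct.table -> [[10, 3], [2, 85]]; ct.N -> 100.0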
4,481
def output_tree_ensemble(tree_ensemble_obj, output_filename, attribute_names=None):
    for t, tree in enumerate(tree_ensemble_obj.estimators_):
        print("Writing Tree {0:d}".format(t))
        # original omitted .format(t), writing every tree to a literal "{0:d}" file
        out_file = open(output_filename + ".{0:d}.tree".format(t), "w")
        tree_str = print_tree_recursive(tree.tree_, 0, attribute_names)
        out_file.write(tree_str)
        out_file.close()
    return
Write each decision tree in an ensemble to a file.
4,482
def print_tree_recursive(tree_obj, node_index, attribute_names=None):
    tree_str = ""
    if node_index == 0:
        tree_str += "{0:d}\n".format(tree_obj.node_count)
    if tree_obj.feature[node_index] >= 0:
        # branch node
        if attribute_names is None:
            attr_val = "{0:d}".format(tree_obj.feature[node_index])
        else:
            attr_val = attribute_names[tree_obj.feature[node_index]]
        tree_str += "b {0:d} {1} {2:0.4f} {3:d} {4:1.5e}\n".format(
            node_index, attr_val, tree_obj.weighted_n_node_samples[node_index],
            tree_obj.n_node_samples[node_index], tree_obj.threshold[node_index])
    else:
        # leaf node
        if tree_obj.max_n_classes > 1:
            leaf_value = "{0:d}".format(tree_obj.value[node_index].argmax())
        else:
            leaf_value = "{0}".format(tree_obj.value[node_index][0][0])
        tree_str += "l {0:d} {1} {2:0.4f} {3:d}\n".format(
            node_index, leaf_value, tree_obj.weighted_n_node_samples[node_index],
            tree_obj.n_node_samples[node_index])
    if tree_obj.children_left[node_index] > 0:
        tree_str += print_tree_recursive(tree_obj, tree_obj.children_left[node_index], attribute_names)
    if tree_obj.children_right[node_index] > 0:
        tree_str += print_tree_recursive(tree_obj, tree_obj.children_right[node_index], attribute_names)
    return tree_str
Recursively writes a string representation of a decision tree object.
4,483
def fitness_vs(self, v):
    base = self._base
    if base._classifier:
        if base._multiple_outputs:
            v.fitness_vs = v._error
        else:
            # negative error on the validation mask
            v.fitness_vs = -((base.y - v.hy.sign()).sign().fabs() * base._mask_vs).sum()
    else:
        mask = base._mask
        y = base.y
        hy = v.hy
        if not isinstance(mask, list):
            mask = [mask]
            y = [y]
            hy = [hy]
        fit = []
        for _mask, _y, _hy in zip(mask, y, hy):
            m = (_mask + -1).fabs()  # invert the training mask to select validation points
            x = _y * m
            pred = _hy * m
            a = (x - pred).sq().sum()
            b = (x + -x.sum() / x.size()).sq().sum()
            fit.append(-a / b)  # negative relative squared error
        v.fitness_vs = np.mean(fit)
Fitness function on the validation set. Uses BER in classification and RSE in regression.
4,484
def set_fitness(self, v):
    base = self._base
    self.fitness(v)
    if not np.isfinite(v.fitness):
        self.del_error(v)
        return False
    if base._tr_fraction < 1:
        self.fitness_vs(v)
        if not np.isfinite(v.fitness_vs):
            self.del_error(v)
            return False
    self.del_error(v)
    return True
Set the fitness on a new node. Returns False in case the fitness is not finite.
4,485
def output_sector_csv(self, csv_path, file_dict_key, out_path):
    csv_file = csv_path + "{0}_{1}_{2}_{3}.csv".format(file_dict_key, self.ensemble_name, self.member,
                                                       self.run_date.strftime(self.date_format))
    if exists(csv_file):
        csv_data = pd.read_csv(csv_file)
        if self.inds is None:
            lon_obj = csv_data.loc[:, "Centroid_Lon"]
            lat_obj = csv_data.loc[:, "Centroid_Lat"]
            self.inds = np.where((self.ne_lat >= lat_obj) & (self.sw_lat <= lat_obj) &
                                 (self.ne_lon >= lon_obj) & (self.sw_lon <= lon_obj))[0]
        if np.shape(self.inds)[0] > 0:
            csv_data = csv_data.reindex(np.array(self.inds))
            sector_csv_filename = out_path + "{0}_{1}_{2}_{3}.csv".format(
                file_dict_key, self.ensemble_name, self.member, self.run_date.strftime(self.date_format))
            print("Output sector csv file " + sector_csv_filename)
            csv_data.to_csv(sector_csv_filename, na_rep="nan", float_format="%0.5f", index=False)
            os.chmod(sector_csv_filename, 0o666)
        else:
            print('No {0} {1} sector data found'.format(self.member, self.run_date.strftime("%Y%m%d")))
    else:
        print('No {0} {1} csv file found'.format(self.member, self.run_date.strftime("%Y%m%d")))
    return
Segment forecast tracks to only output data contained within a region of the CONUS, as defined by the map file.
4,486
def clean_dict(d0, clean_item_fn=None):
    clean_item_fn = clean_item_fn if clean_item_fn else clean_item
    d = dict()
    for key in d0:
        cleaned_item = clean_item_fn(d0[key])
        if cleaned_item is not None:
            d[key] = cleaned_item
    return d
Return a JSON-clean dict. Will log an info message for failures.
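A short sketch of how the cleaner composes (this pass-through clean_item is a stand-in; the real default lives elsewhere in the module):

    def clean_item(item):
        # stand-in: keep only values that are trivially JSON-serializable
        return item if isinstance(item, (str, int, float, bool)) else None

    clean_dict({"a": 1, "b": object(), "c": "ok"}, clean_item)
    # -> {"a": 1, "c": "ok"}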
4,487
def clean_list(l0, clean_item_fn=None):
    clean_item_fn = clean_item_fn if clean_item_fn else clean_item
    l = list()
    for item in l0:
        cleaned_item = clean_item_fn(item)
        l.append(cleaned_item)
    return l
Return a JSON-clean list. Will log an info message for failures.
4,488
def clean_tuple(t0, clean_item_fn=None):
    clean_item_fn = clean_item_fn if clean_item_fn else clean_item
    l = list()
    for item in t0:
        cleaned_item = clean_item_fn(item)
        l.append(cleaned_item)
    return tuple(l)
Return a JSON-clean tuple. Will log an info message for failures.
4,489
def sample_stack_all(count=10, interval=0.1):
    def print_stack_all(l, ll):
        l1 = list()
        l1.append("*** STACKTRACE - START ***")
        code = []
        for threadId, stack in sys._current_frames().items():
            sub_code = []
            sub_code.append("# ThreadID: %s" % threadId)
            for filename, lineno, name, line in traceback.extract_stack(stack):
                sub_code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
                if line:
                    sub_code.append("  %s" % (line.strip()))
            # skip frames that are just waiting or belong to the sampler itself
            if not "in select" in sub_code[-2] and \
               not "in wait" in sub_code[-2] and \
               not "in print_stack_all" in sub_code[-2] and \
               not "in sample_stack_all" in sub_code[-2] and \
               not "in checkcache" in sub_code[-2] and \
               not "do_sleep" in sub_code[-2] and \
               not "sleep" in sub_code[-1] and \
               not any(["in do_sample" in s for s in sub_code]):
                code.extend(sub_code)
        for line in code:
            l1.append(line)
        l1.append("*** STACKTRACE - END ***")
        with l:
            ll.extend(l1)

    def do_sample():
        l = threading.RLock()
        ll = list()
        for i in range(count):
            print_stack_all(l, ll)
            time.sleep(interval)
        with l:
            print("\n".join(ll))

    threading.Thread(target=do_sample).start()
Sample the stack in a thread and print it at regular intervals.
4,490
def _eval(self):
    "Evaluates an individual using recursion and self._pos as pointer"
    pos = self._pos
    self._pos += 1
    node = self._ind[pos]
    if isinstance(node, Function):
        args = [self._eval() for x in range(node.nargs)]
        node.eval(args)
        # free intermediate results
        for x in args:
            x.hy = None
            x.hy_test = None
    else:
        node.eval(self._X)
    return node
Evaluates an individual using recursion and self._pos as pointer.
4,491
def create_random_ind_full(self, depth=0):
    "Random individual using full method"
    lst = []
    self._create_random_ind_full(depth=depth, output=lst)
    return lst
Random individual using the full method.
4,492
def grow_use_function(self, depth=0):
    "Select either function or terminal in grow method"
    if depth == 0:
        return False
    if depth == self._depth:
        return True
    return np.random.random() < 0.5
Select either a function or a terminal in the grow method.
4,493
def create_random_ind_grow(self, depth=0):
    "Random individual using grow method"
    lst = []
    self._depth = depth
    self._create_random_ind_grow(depth=depth, output=lst)
    return lst
Random individual using the grow method.
4,494
def create_population(self, popsize=1000, min_depth=2, max_depth=4, X=None):
    "Creates random population using ramped half-and-half method"
    import itertools
    # cycle through (depth, full-method?) combinations
    args = [x for x in itertools.product(range(min_depth, max_depth + 1), [True, False])]
    index = 0
    output = []
    while len(output) < popsize:
        depth, full = args[index]
        index += 1
        if index >= len(args):
            index = 0
        if full:
            ind = self.create_random_ind_full(depth=depth)
        else:
            ind = self.create_random_ind_grow(depth=depth)
        flag = True
        if X is not None:
            # only keep individuals whose output is finite on X
            x = Individual(ind)
            x.decision_function(X)
            flag = x.individual[0].isfinite()
        l_vars = (flag, len(output), full, depth, len(ind))
        l_str = " flag: %s len(output): %s full: %s depth: %s len(ind): %s"
        self._logger.debug(l_str % l_vars)
        if flag:
            output.append(ind)
    return output
Creates a random population using the ramped half-and-half method.
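The ramp over (depth, method) pairs comes from itertools.product; for the default min_depth=2, max_depth=4 it cycles through:

    import itertools
    list(itertools.product(range(2, 5), [True, False]))
    # -> [(2, True), (2, False), (3, True), (3, False), (4, True), (4, False)]
    #    True selects the full method, False the grow method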
4,495
def fitness_vs(self):
    "Median fitness in the validation set"
    l = [x.fitness_vs for x in self.models]
    return np.median(l)
Median fitness in the validation set.
4,496
def graphviz(self, directory, **kwargs):
    "Directory to store the graphviz models"
    import os
    if not os.path.isdir(directory):
        os.mkdir(directory)
    output = os.path.join(directory, 'evodag-%s')
    for k, m in enumerate(self.models):
        m.graphviz(output % k, **kwargs)
Directory to store the graphviz models.
4,497
def neighborhood_probability(self, threshold, radius):
    weights = disk(radius, dtype=np.uint8)
    thresh_data = np.zeros(self.data.shape[1:], dtype=np.uint8)
    neighbor_prob = np.zeros(self.data.shape, dtype=np.float32)
    for t in np.arange(self.data.shape[0]):
        thresh_data[self.data[t] >= threshold] = 1
        # binary dilation: any point within the radius of an exceedance becomes 1
        maximized = fftconvolve(thresh_data, weights, mode="same")
        maximized[maximized > 1] = 1
        maximized[maximized < 1] = 0
        neighbor_prob[t] = fftconvolve(maximized, weights, mode="same")
        thresh_data[:] = 0
    neighbor_prob[neighbor_prob < 1] = 0
    neighbor_prob /= weights.sum()
    return neighbor_prob
Calculate a probability based on the number of grid points in an area that exceed a threshold.
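A stand-alone sketch of the core step for a single time slice (synthetic data; the disk/fftconvolve helpers mirror the imports the method above appears to use):

    import numpy as np
    from scipy.signal import fftconvolve
    from skimage.morphology import disk

    data = np.random.rand(50, 50) * 50        # synthetic field, e.g. reflectivity
    weights = disk(3, dtype=np.uint8)         # circular neighborhood footprint
    exceed = np.zeros(data.shape, dtype=np.uint8)
    exceed[data >= 40] = 1                    # threshold exceedances
    dilated = fftconvolve(exceed, weights, mode="same")
    dilated[dilated > 1] = 1                  # binarize: within radius of an exceedance
    dilated[dilated < 1] = 0
    prob = fftconvolve(dilated, weights, mode="same") / weights.sum()
    # prob approximates, at each point, the fraction of its neighborhood that is "on"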
4,498
def load_data(self):
    for m, member in enumerate(self.members):
        mo = ModelOutput(self.ensemble_name, member, self.run_date, self.variable,
                         self.start_date, self.end_date, self.path, self.map_file, self.single_step)
        mo.load_data()
        if self.data is None:
            self.data = np.zeros((len(self.members), mo.data.shape[0], mo.data.shape[1], mo.data.shape[2]),
                                 dtype=np.float32)
        if mo.units == "m":
            # convert meters to millimeters
            self.data[m] = mo.data * 1000
            self.units = "mm"
        else:
            self.data[m] = mo.data
        if self.units == "":
            self.units = mo.units
        del mo.data
        del mo
Loads data from each ensemble member.
4,499
def point_consensus(self, consensus_type):
    if "mean" in consensus_type:
        consensus_data = np.mean(self.data, axis=0)
    elif "std" in consensus_type:
        consensus_data = np.std(self.data, axis=0)
    elif "median" in consensus_type:
        consensus_data = np.median(self.data, axis=0)
    elif "max" in consensus_type:
        consensus_data = np.max(self.data, axis=0)
    elif "percentile" in consensus_type:
        # e.g. "percentile_90" -> 90th percentile across members
        percentile = int(consensus_type.split("_")[1])
        consensus_data = np.percentile(self.data, percentile, axis=0)
    else:
        consensus_data = np.zeros(self.data.shape[1:])
    consensus = EnsembleConsensus(consensus_data, consensus_type, self.ensemble_name,
                                  self.run_date, self.variable, self.start_date, self.end_date, self.units)
    return consensus
Calculate grid-point statistics across ensemble members.
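The consensus_type string doubles as a mini-spec; given an instance (here a hypothetical `grid` object holding the member data):

    consensus_mean = grid.point_consensus("mean")           # member mean at each grid point
    consensus_p90 = grid.point_consensus("percentile_90")   # 90th percentile across members
    # unrecognized types fall through to an all-zero field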