Columns: idx (int64, range 0-63k), question (string, lengths 61-4.03k), target (string, lengths 6-1.23k)
4,500
def point_probability ( self , threshold ) : point_prob = np . zeros ( self . data . shape [ 1 : ] ) for t in range ( self . data . shape [ 1 ] ) : point_prob [ t ] = np . where ( self . data [ : , t ] >= threshold , 1.0 , 0.0 ) . mean ( axis = 0 ) return EnsembleConsensus ( point_prob , "point_probability" , self . ensemble_name , self . run_date , self . variable + "_{0:0.2f}_{1}" . format ( threshold , self . units . replace ( " " , "_" ) ) , self . start_date , self . end_date , "" )
Determine the probability of exceeding a threshold at a grid point based on the ensemble forecasts at that point .
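A minimal sketch of the exceedance calculation above, using a bare NumPy array in place of the ensemble object (the shape and values here are made up for illustration):

import numpy as np
members = np.array([[18.0, 27.0], [35.0, 41.0], [29.0, 33.0], [12.0, 55.0]])  # 4 members x 2 grid points
threshold = 30.0
point_prob = np.where(members >= threshold, 1.0, 0.0).mean(axis=0)  # fraction of members exceeding the threshold
print(point_prob)  # [0.25 0.75]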
4,501
def neighborhood_probability ( self , threshold , radius , sigmas = None ) : if sigmas is None : sigmas = [ 0 ] weights = disk ( radius ) filtered_prob = [ ] for sigma in sigmas : filtered_prob . append ( EnsembleConsensus ( np . zeros ( self . data . shape [ 1 : ] , dtype = np . float32 ) , "neighbor_prob_r_{0:d}_s_{1:d}" . format ( radius , sigma ) , self . ensemble_name , self . run_date , self . variable + "_{0:0.2f}" . format ( threshold ) , self . start_date , self . end_date , "" ) ) thresh_data = np . zeros ( self . data . shape [ 2 : ] , dtype = np . uint8 ) neighbor_prob = np . zeros ( self . data . shape [ 2 : ] , dtype = np . float32 ) for t in range ( self . data . shape [ 1 ] ) : for m in range ( self . data . shape [ 0 ] ) : thresh_data [ self . data [ m , t ] >= threshold ] = 1 maximized = fftconvolve ( thresh_data , weights , mode = "same" ) maximized [ maximized > 1 ] = 1 maximized [ maximized < 1 ] = 0 neighbor_prob += fftconvolve ( maximized , weights , mode = "same" ) neighbor_prob [ neighbor_prob < 1 ] = 0 thresh_data [ : ] = 0 neighbor_prob /= ( self . data . shape [ 0 ] * float ( weights . sum ( ) ) ) for s , sigma in enumerate ( sigmas ) : if sigma > 0 : filtered_prob [ s ] . data [ t ] = gaussian_filter ( neighbor_prob , sigma = sigma ) else : filtered_prob [ s ] . data [ t ] = neighbor_prob neighbor_prob [ : ] = 0 return filtered_prob
Hourly probability of exceeding a threshold based on model values within a specified radius of a point .
4,502
def period_max_neighborhood_probability ( self , threshold , radius , sigmas = None ) : if sigmas is None : sigmas = [ 0 ] weights = disk ( radius ) neighborhood_prob = np . zeros ( self . data . shape [ 2 : ] , dtype = np . float32 ) thresh_data = np . zeros ( self . data . shape [ 2 : ] , dtype = np . uint8 ) for m in range ( self . data . shape [ 0 ] ) : thresh_data [ self . data [ m ] . max ( axis = 0 ) >= threshold ] = 1 maximized = fftconvolve ( thresh_data , weights , mode = "same" ) maximized [ maximized > 1 ] = 1 neighborhood_prob += fftconvolve ( maximized , weights , mode = "same" ) neighborhood_prob [ neighborhood_prob < 1 ] = 0 neighborhood_prob /= ( self . data . shape [ 0 ] * float ( weights . sum ( ) ) ) consensus_probs = [ ] for sigma in sigmas : if sigma > 0 : filtered_prob = gaussian_filter ( neighborhood_prob , sigma = sigma ) else : filtered_prob = neighborhood_prob ec = EnsembleConsensus ( filtered_prob , "neighbor_prob_{0:02d}-hour_r_{1:d}_s_{2:d}" . format ( self . data . shape [ 1 ] , radius , sigma ) , self . ensemble_name , self . run_date , self . variable + "_{0:0.2f}" . format ( float ( threshold ) ) , self . start_date , self . end_date , "" ) consensus_probs . append ( ec ) return consensus_probs
Calculates the neighborhood probability of exceeding a threshold at any time over the period loaded .
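Both neighborhood methods rely on the same dilation step: the binary exceedance field is convolved with a circular footprint so that any exceedance within the radius counts at a point. A minimal sketch of that step, assuming the disk footprint comes from scikit-image and fftconvolve from SciPy (those imports are not shown in the code above):

import numpy as np
from scipy.signal import fftconvolve
from skimage.morphology import disk

exceed = np.zeros((50, 50), dtype=np.uint8)
exceed[25, 25] = 1                           # a single grid point exceeds the threshold
footprint = disk(3)                          # circular neighborhood of radius 3
dilated = fftconvolve(exceed, footprint, mode="same")
dilated = (dilated > 0.5).astype(float)      # every point within the radius is now flagged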
4,503
def init_file ( self , filename , time_units = "seconds since 1970-01-01T00:00" ) : if os . access ( filename , os . R_OK ) : out_data = Dataset ( filename , "r+" ) else : out_data = Dataset ( filename , "w" ) if len ( self . data . shape ) == 2 : for d , dim in enumerate ( [ "y" , "x" ] ) : out_data . createDimension ( dim , self . data . shape [ d ] ) else : for d , dim in enumerate ( [ "y" , "x" ] ) : out_data . createDimension ( dim , self . data . shape [ d + 1 ] ) out_data . createDimension ( "time" , len ( self . times ) ) time_var = out_data . createVariable ( "time" , "i8" , ( "time" , ) ) time_var [ : ] = date2num ( self . times . to_pydatetime ( ) , time_units ) time_var . units = time_units out_data . Conventions = "CF-1.6" return out_data
Initializes netCDF file for writing
4,504
def write_to_file ( self , out_data ) : full_var_name = self . consensus_type + "_" + self . variable if "-hour" in self . consensus_type : if full_var_name not in out_data . variables . keys ( ) : var = out_data . createVariable ( full_var_name , "f4" , ( "y" , "x" ) , zlib = True , least_significant_digit = 3 , shuffle = True ) else : var = out_data . variables [ full_var_name ] var . coordinates = "y x" else : if full_var_name not in out_data . variables . keys ( ) : var = out_data . createVariable ( full_var_name , "f4" , ( "time" , "y" , "x" ) , zlib = True , least_significant_digit = 3 , shuffle = True ) else : var = out_data . variables [ full_var_name ] var . coordinates = "time y x" var [ : ] = self . data var . units = self . units var . long_name = self . consensus_type + "_" + self . variable return
Outputs data to a netCDF file. If the file does not exist, it will be created; otherwise additional variables are appended to the existing file.
4,505
def restore ( self , workspace_uuid ) : workspace = next ( ( workspace for workspace in self . document_model . workspaces if workspace . uuid == workspace_uuid ) , None ) if workspace is None : workspace = self . new_workspace ( ) self . _change_workspace ( workspace )
Restore the workspace to the given workspace_uuid .
4,506
def new_workspace ( self , name = None , layout = None , workspace_id = None , index = None ) -> WorkspaceLayout . WorkspaceLayout : workspace = WorkspaceLayout . WorkspaceLayout ( ) self . document_model . insert_workspace ( index if index is not None else len ( self . document_model . workspaces ) , workspace ) d = create_image_desc ( ) d [ "selected" ] = True workspace . layout = layout if layout is not None else d workspace . name = name if name is not None else _ ( "Workspace" ) if workspace_id : workspace . workspace_id = workspace_id return workspace
Create a new workspace, insert it into document_model, and return it.
4,507
def ensure_workspace ( self , name , layout , workspace_id ) : workspace = next ( ( workspace for workspace in self . document_model . workspaces if workspace . workspace_id == workspace_id ) , None ) if not workspace : workspace = self . new_workspace ( name = name , layout = layout , workspace_id = workspace_id ) self . _change_workspace ( workspace )
Looks for a workspace with the given workspace_id, creates it if not found, and switches to it.
4,508
def create_workspace ( self ) -> None : def create_clicked ( text ) : if text : command = Workspace . CreateWorkspaceCommand ( self , text ) command . perform ( ) self . document_controller . push_undo_command ( command ) self . pose_get_string_message_box ( caption = _ ( "Enter a name for the workspace" ) , text = _ ( "Workspace" ) , accepted_fn = create_clicked , accepted_text = _ ( "Create" ) , message_box_id = "create_workspace" )
Pose a dialog to name and create a workspace .
4,509
def rename_workspace ( self ) -> None : def rename_clicked ( text ) : if len ( text ) > 0 : command = Workspace . RenameWorkspaceCommand ( self , text ) command . perform ( ) self . document_controller . push_undo_command ( command ) self . pose_get_string_message_box ( caption = _ ( "Enter new name for workspace" ) , text = self . __workspace . name , accepted_fn = rename_clicked , accepted_text = _ ( "Rename" ) , message_box_id = "rename_workspace" )
Pose a dialog to rename the workspace .
4,510
def remove_workspace ( self ) : def confirm_clicked ( ) : if len ( self . document_model . workspaces ) > 1 : command = Workspace . RemoveWorkspaceCommand ( self ) command . perform ( ) self . document_controller . push_undo_command ( command ) caption = _ ( "Remove workspace named '{0}'?" ) . format ( self . __workspace . name ) self . pose_confirmation_message_box ( caption , confirm_clicked , accepted_text = _ ( "Remove Workspace" ) , message_box_id = "remove_workspace" )
Pose a dialog to confirm removal then remove workspace .
4,511
def clone_workspace ( self ) -> None : def clone_clicked ( text ) : if text : command = Workspace . CloneWorkspaceCommand ( self , text ) command . perform ( ) self . document_controller . push_undo_command ( command ) self . pose_get_string_message_box ( caption = _ ( "Enter a name for the workspace" ) , text = self . __workspace . name , accepted_fn = clone_clicked , accepted_text = _ ( "Clone" ) , message_box_id = "clone_workspace" )
Pose a dialog to name and clone a workspace .
4,512
def bootstrap ( score_objs , n_boot = 1000 ) : all_samples = np . random . choice ( score_objs , size = ( n_boot , len ( score_objs ) ) , replace = True ) return all_samples . sum ( axis = 1 )
Given a set of DistributedROC or DistributedReliability objects this function performs a bootstrap resampling of the objects and returns n_boot aggregations of them .
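A minimal illustration of the resampling mechanics, with plain floats standing in for the score objects (the real objects aggregate through their own addition/merging behavior when summed):

import numpy as np
scores = np.array([0.2, 0.4, 0.6, 0.8])  # stand-ins for DistributedROC objects
n_boot = 1000
boot_samples = np.random.choice(scores, size=(n_boot, scores.size), replace=True)
aggregates = boot_samples.sum(axis=1)    # one aggregate per bootstrap replicate
print(aggregates.shape)                  # (1000,)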
4,513
def update ( self , forecasts , observations ) : for t , threshold in enumerate ( self . thresholds ) : tp = np . count_nonzero ( ( forecasts >= threshold ) & ( observations >= self . obs_threshold ) ) fp = np . count_nonzero ( ( forecasts >= threshold ) & ( observations < self . obs_threshold ) ) fn = np . count_nonzero ( ( forecasts < threshold ) & ( observations >= self . obs_threshold ) ) tn = np . count_nonzero ( ( forecasts < threshold ) & ( observations < self . obs_threshold ) ) self . contingency_tables . iloc [ t ] += [ tp , fp , fn , tn ]
Update the ROC curve with a set of forecasts and observations
4,514
def merge ( self , other_roc ) : if other_roc . thresholds . size == self . thresholds . size and np . all ( other_roc . thresholds == self . thresholds ) : self . contingency_tables += other_roc . contingency_tables else : print ( "Input table thresholds do not match." )
Ingest the values of another DistributedROC object into this one and update the statistics in place.
4,515
def performance_curve ( self ) : pod = self . contingency_tables [ "TP" ] / ( self . contingency_tables [ "TP" ] + self . contingency_tables [ "FN" ] ) far = self . contingency_tables [ "FP" ] / ( self . contingency_tables [ "FP" ] + self . contingency_tables [ "TP" ] ) far [ ( self . contingency_tables [ "FP" ] + self . contingency_tables [ "TP" ] ) == 0 ] = np . nan return pd . DataFrame ( { "POD" : pod , "FAR" : far , "Thresholds" : self . thresholds } , columns = [ "POD" , "FAR" , "Thresholds" ] )
Calculate the Probability of Detection and False Alarm Ratio in order to output a performance diagram .
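In contingency-table terms, the two curves computed above are POD = TP / (TP + FN) and FAR = FP / (FP + TP), with FAR set to NaN wherever FP + TP is zero.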
4,516
def max_csi ( self ) : csi = self . contingency_tables [ "TP" ] / ( self . contingency_tables [ "TP" ] + self . contingency_tables [ "FN" ] + self . contingency_tables [ "FP" ] ) return csi . max ( )
Calculate the maximum Critical Success Index across all probability thresholds
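Here CSI = TP / (TP + FN + FP) is evaluated at every probability threshold and the largest value across thresholds is returned.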
4,517
def get_contingency_tables ( self ) : return np . array ( [ ContingencyTable ( * ct ) for ct in self . contingency_tables . values ] )
Create an array of ContingencyTable objects, one for each probability threshold.
4,518
def from_str ( self , in_str ) : parts = in_str . split ( ";" ) for part in parts : var_name , value = part . split ( ":" ) if var_name == "Obs_Threshold" : self . obs_threshold = float ( value ) elif var_name == "Thresholds" : self . thresholds = np . array ( value . split ( ) , dtype = float ) self . contingency_tables = pd . DataFrame ( columns = self . contingency_tables . columns , data = np . zeros ( ( self . thresholds . size , self . contingency_tables . columns . size ) ) ) elif var_name in self . contingency_tables . columns : self . contingency_tables [ var_name ] = np . array ( value . split ( ) , dtype = int )
Read the DistributedROC string and parse the contingency table values from it .
4,519
def update ( self , forecasts , observations ) : for t , threshold in enumerate ( self . thresholds [ : - 1 ] ) : self . frequencies . loc [ t , "Positive_Freq" ] += np . count_nonzero ( ( threshold <= forecasts ) & ( forecasts < self . thresholds [ t + 1 ] ) & ( observations >= self . obs_threshold ) ) self . frequencies . loc [ t , "Total_Freq" ] += np . count_nonzero ( ( threshold <= forecasts ) & ( forecasts < self . thresholds [ t + 1 ] ) )
Update the statistics with a set of forecasts and observations .
4,520
def merge ( self , other_rel ) : if other_rel . thresholds . size == self . thresholds . size and np . all ( other_rel . thresholds == self . thresholds ) : self . frequencies += other_rel . frequencies else : print ( "Input table thresholds do not match." )
Ingest another DistributedReliability and add its contents to the current object .
4,521
def reliability_curve ( self ) : total = self . frequencies [ "Total_Freq" ] . sum ( ) curve = pd . DataFrame ( columns = [ "Bin_Start" , "Bin_End" , "Bin_Center" , "Positive_Relative_Freq" , "Total_Relative_Freq" ] ) curve [ "Bin_Start" ] = self . thresholds [ : - 1 ] curve [ "Bin_End" ] = self . thresholds [ 1 : ] curve [ "Bin_Center" ] = 0.5 * ( self . thresholds [ : - 1 ] + self . thresholds [ 1 : ] ) curve [ "Positive_Relative_Freq" ] = self . frequencies [ "Positive_Freq" ] / self . frequencies [ "Total_Freq" ] curve [ "Total_Relative_Freq" ] = self . frequencies [ "Total_Freq" ] / total return curve
Calculates the reliability diagram statistics . The key columns are Bin_Start and Positive_Relative_Freq
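Positive_Relative_Freq is the observed relative frequency of the event within each forecast-probability bin, so plotting it against Bin_Start or Bin_Center yields the reliability diagram; Total_Relative_Freq is the fraction of all forecasts that fall in each bin.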
4,522
def brier_score ( self ) : reliability , resolution , uncertainty = self . brier_score_components ( ) return reliability - resolution + uncertainty
Calculate the Brier Score
4,523
def brier_skill_score ( self ) : reliability , resolution , uncertainty = self . brier_score_components ( ) return ( resolution - reliability ) / uncertainty
Calculate the Brier Skill Score
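With the decomposition used by brier_score_components, the two scores above follow the standard forms BS = reliability - resolution + uncertainty and BSS = (resolution - reliability) / uncertainty.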
4,524
def update ( self , forecasts , observations ) : if len ( observations . shape ) == 1 : obs_cdfs = np . zeros ( ( observations . size , self . thresholds . size ) ) for o , observation in enumerate ( observations ) : obs_cdfs [ o , self . thresholds >= observation ] = 1 else : obs_cdfs = observations self . errors [ "F_2" ] += np . sum ( forecasts ** 2 , axis = 0 ) self . errors [ "F_O" ] += np . sum ( forecasts * obs_cdfs , axis = 0 ) self . errors [ "O_2" ] += np . sum ( obs_cdfs ** 2 , axis = 0 ) self . errors [ "O" ] += np . sum ( obs_cdfs , axis = 0 ) self . num_forecasts += forecasts . shape [ 0 ]
Update the statistics with forecasts and observations .
4,525
def crps ( self ) : return np . sum ( self . errors [ "F_2" ] . values - self . errors [ "F_O" ] . values * 2.0 + self . errors [ "O_2" ] . values ) / ( self . thresholds . size * self . num_forecasts )
Calculates the continuous ranked probability score .
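In terms of the accumulated sums kept by update, this evaluates CRPS = sum(F_2 - 2 * F_O + O_2) / (n_thresholds * n_forecasts), i.e. the mean squared difference between the forecast and observed CDFs across thresholds.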
4,526
def crps_climo ( self ) : o_bar = self . errors [ "O" ] . values / float ( self . num_forecasts ) crps_c = np . sum ( self . num_forecasts * ( o_bar ** 2 ) - o_bar * self . errors [ "O" ] . values * 2.0 + self . errors [ "O_2" ] . values ) / float ( self . thresholds . size * self . num_forecasts ) return crps_c
Calculate the climatological CRPS .
4,527
def crpss ( self ) : crps_f = self . crps ( ) crps_c = self . crps_climo ( ) return 1.0 - float ( crps_f ) / float ( crps_c )
Calculate the continuous ranked probability skill score from existing data.
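This is the usual skill-score form, CRPSS = 1 - CRPS_forecast / CRPS_climatology, built from the two methods above.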
4,528
def has_metadata_value ( metadata_source , key : str ) -> bool : desc = session_key_map . get ( key ) if desc is not None : d = getattr ( metadata_source , "session_metadata" , dict ( ) ) for k in desc [ 'path' ] [ : - 1 ] : d = d . setdefault ( k , dict ( ) ) if d is not None else None if d is not None : return desc [ 'path' ] [ - 1 ] in d desc = key_map . get ( key ) if desc is not None : d = getattr ( metadata_source , "metadata" , dict ( ) ) for k in desc [ 'path' ] [ : - 1 ] : d = d . setdefault ( k , dict ( ) ) if d is not None else None if d is not None : return desc [ 'path' ] [ - 1 ] in d return False
Return whether the metadata value for the given key exists .
4,529
def delete_metadata_value ( metadata_source , key : str ) -> None : desc = session_key_map . get ( key ) if desc is not None : d0 = getattr ( metadata_source , "session_metadata" , dict ( ) ) d = d0 for k in desc [ 'path' ] [ : - 1 ] : d = d . setdefault ( k , dict ( ) ) if d is not None else None if d is not None and desc [ 'path' ] [ - 1 ] in d : d . pop ( desc [ 'path' ] [ - 1 ] , None ) metadata_source . session_metadata = d0 return desc = key_map . get ( key ) if desc is not None : d0 = getattr ( metadata_source , "metadata" , dict ( ) ) d = d0 for k in desc [ 'path' ] [ : - 1 ] : d = d . setdefault ( k , dict ( ) ) if d is not None else None if d is not None and desc [ 'path' ] [ - 1 ] in d : d . pop ( desc [ 'path' ] [ - 1 ] , None ) metadata_source . metadata = d0 return
Delete the metadata value for the given key .
4,530
def calculate_y_ticks ( self , plot_height ) : calibrated_data_min = self . calibrated_data_min calibrated_data_max = self . calibrated_data_max calibrated_data_range = calibrated_data_max - calibrated_data_min ticker = self . y_ticker y_ticks = list ( ) for tick_value , tick_label in zip ( ticker . values , ticker . labels ) : if calibrated_data_range != 0.0 : y_tick = plot_height - plot_height * ( tick_value - calibrated_data_min ) / calibrated_data_range else : y_tick = plot_height - plot_height * 0.5 if y_tick >= 0 and y_tick <= plot_height : y_ticks . append ( ( y_tick , tick_label ) ) return y_ticks
Calculate the y-axis items dependent on the plot height.
4,531
def calculate_x_ticks ( self , plot_width ) : x_calibration = self . x_calibration uncalibrated_data_left = self . __uncalibrated_left_channel uncalibrated_data_right = self . __uncalibrated_right_channel calibrated_data_left = x_calibration . convert_to_calibrated_value ( uncalibrated_data_left ) if x_calibration is not None else uncalibrated_data_left calibrated_data_right = x_calibration . convert_to_calibrated_value ( uncalibrated_data_right ) if x_calibration is not None else uncalibrated_data_right calibrated_data_left , calibrated_data_right = min ( calibrated_data_left , calibrated_data_right ) , max ( calibrated_data_left , calibrated_data_right ) graph_left , graph_right , tick_values , division , precision = Geometry . make_pretty_range ( calibrated_data_left , calibrated_data_right ) drawn_data_width = self . drawn_right_channel - self . drawn_left_channel x_ticks = list ( ) if drawn_data_width > 0.0 : for tick_value in tick_values : label = nice_label ( tick_value , precision ) data_tick = x_calibration . convert_from_calibrated_value ( tick_value ) if x_calibration else tick_value x_tick = plot_width * ( data_tick - self . drawn_left_channel ) / drawn_data_width if x_tick >= 0 and x_tick <= plot_width : x_ticks . append ( ( x_tick , label ) ) return x_ticks
Calculate the x-axis items dependent on the plot width.
4,532
def size_to_content ( self ) : new_sizing = self . copy_sizing ( ) new_sizing . minimum_height = 0 new_sizing . maximum_height = 0 axes = self . __axes if axes and axes . is_valid : if axes . x_calibration and axes . x_calibration . units : new_sizing . minimum_height = self . font_size + 4 new_sizing . maximum_height = self . font_size + 4 self . update_sizing ( new_sizing )
Size the canvas item to the proper height .
4,533
def size_to_content ( self , get_font_metrics_fn ) : new_sizing = self . copy_sizing ( ) new_sizing . minimum_width = 0 new_sizing . maximum_width = 0 axes = self . __axes if axes and axes . is_valid : font = "{0:d}px" . format ( self . font_size ) max_width = 0 y_range = axes . calibrated_data_max - axes . calibrated_data_min label = axes . y_ticker . value_label ( axes . calibrated_data_max + y_range * 5 ) max_width = max ( max_width , get_font_metrics_fn ( font , label ) . width ) label = axes . y_ticker . value_label ( axes . calibrated_data_min - y_range * 5 ) max_width = max ( max_width , get_font_metrics_fn ( font , label ) . width ) new_sizing . minimum_width = max_width new_sizing . maximum_width = max_width self . update_sizing ( new_sizing )
Size the canvas item to the proper width, the maximum width of any label.
4,534
def size_to_content ( self ) : new_sizing = self . copy_sizing ( ) new_sizing . minimum_width = 0 new_sizing . maximum_width = 0 axes = self . __axes if axes and axes . is_valid : if axes . y_calibration and axes . y_calibration . units : new_sizing . minimum_width = self . font_size + 4 new_sizing . maximum_width = self . font_size + 4 self . update_sizing ( new_sizing )
Size the canvas item to the proper width .
4,535
def get_snippet_content ( snippet_name , ** format_kwargs ) : filename = snippet_name + '.snippet' snippet_file = os . path . join ( SNIPPETS_ROOT , filename ) if not os . path . isfile ( snippet_file ) : raise ValueError ( 'could not find snippet with name ' + filename ) ret = helpers . get_file_content ( snippet_file ) if format_kwargs : ret = ret . format ( ** format_kwargs ) return ret
Load the content from a snippet file which exists in SNIPPETS_ROOT
4,536
def update_display_properties ( self , display_calibration_info , display_properties : typing . Mapping , display_layers : typing . Sequence [ typing . Mapping ] ) -> None : with self . __closing_lock : if self . __closed : return displayed_dimensional_scales = display_calibration_info . displayed_dimensional_scales displayed_dimensional_calibrations = display_calibration_info . displayed_dimensional_calibrations self . __data_scale = displayed_dimensional_scales [ - 1 ] if len ( displayed_dimensional_scales ) > 0 else 1 self . __displayed_dimensional_calibration = displayed_dimensional_calibrations [ - 1 ] if len ( displayed_dimensional_calibrations ) > 0 else Calibration . Calibration ( scale = displayed_dimensional_scales [ - 1 ] ) self . __intensity_calibration = display_calibration_info . displayed_intensity_calibration self . __calibration_style = display_calibration_info . calibration_style self . __y_min = display_properties . get ( "y_min" ) self . __y_max = display_properties . get ( "y_max" ) self . __y_style = display_properties . get ( "y_style" , "linear" ) self . __left_channel = display_properties . get ( "left_channel" ) self . __right_channel = display_properties . get ( "right_channel" ) self . __legend_position = display_properties . get ( "legend_position" ) self . __display_layers = display_layers if self . __display_values_list and len ( self . __display_values_list ) > 0 : self . __xdata_list = [ display_values . display_data_and_metadata if display_values else None for display_values in self . __display_values_list ] xdata0 = self . __xdata_list [ 0 ] if xdata0 : self . __update_frame ( xdata0 . metadata ) else : self . __xdata_list = list ( ) self . __update_cursor_info ( ) self . update ( )
Update the display values . Called from display panel .
4,537
def __view_to_selected_graphics ( self , data_and_metadata : DataAndMetadata . DataAndMetadata ) -> None : all_graphics = self . __graphics graphics = [ graphic for graphic_index , graphic in enumerate ( all_graphics ) if self . __graphic_selection . contains ( graphic_index ) ] intervals = list ( ) for graphic in graphics : if isinstance ( graphic , Graphics . IntervalGraphic ) : intervals . append ( graphic . interval ) self . __view_to_intervals ( data_and_metadata , intervals )
Change the view to encompass the selected graphic intervals .
4,538
def __update_cursor_info ( self ) : if not self . delegate : return if self . __mouse_in and self . __last_mouse : pos_1d = None axes = self . __axes line_graph_canvas_item = self . line_graph_canvas_item if axes and axes . is_valid and line_graph_canvas_item : mouse = self . map_to_canvas_item ( self . __last_mouse , line_graph_canvas_item ) plot_rect = line_graph_canvas_item . canvas_bounds if plot_rect . contains_point ( mouse ) : mouse = mouse - plot_rect . origin x = float ( mouse . x ) / plot_rect . width px = axes . drawn_left_channel + x * ( axes . drawn_right_channel - axes . drawn_left_channel ) pos_1d = px , self . delegate . cursor_changed ( pos_1d )
Map the mouse to the 1-d position within the line graph.
4,539
def find_model_patch_tracks ( self ) : self . model_grid . load_data ( ) tracked_model_objects = [ ] model_objects = [ ] if self . model_grid . data is None : print ( "No model output found" ) return tracked_model_objects min_orig = self . model_ew . min_thresh max_orig = self . model_ew . max_thresh data_increment_orig = self . model_ew . data_increment self . model_ew . min_thresh = 0 self . model_ew . data_increment = 1 self . model_ew . max_thresh = 100 for h , hour in enumerate ( self . hours ) : print ( "Finding {0} objects for run {1} Hour: {2:02d}" . format ( self . ensemble_member , self . run_date . strftime ( "%Y%m%d%H" ) , hour ) ) if self . mask is not None : model_data = self . model_grid . data [ h ] * self . mask else : model_data = self . model_grid . data [ h ] model_data [ : self . patch_radius ] = 0 model_data [ - self . patch_radius : ] = 0 model_data [ : , : self . patch_radius ] = 0 model_data [ : , - self . patch_radius : ] = 0 scaled_data = np . array ( rescale_data ( model_data , min_orig , max_orig ) ) hour_labels = label_storm_objects ( scaled_data , "ew" , self . model_ew . min_thresh , self . model_ew . max_thresh , min_area = self . size_filter , max_area = self . model_ew . max_size , max_range = self . model_ew . delta , increment = self . model_ew . data_increment , gaussian_sd = self . gaussian_window ) model_objects . extend ( extract_storm_patches ( hour_labels , model_data , self . model_grid . x , self . model_grid . y , [ hour ] , dx = self . model_grid . dx , patch_radius = self . patch_radius ) ) for model_obj in model_objects [ - 1 ] : dims = model_obj . timesteps [ - 1 ] . shape if h > 0 : model_obj . estimate_motion ( hour , self . model_grid . data [ h - 1 ] , dims [ 1 ] , dims [ 0 ] ) del scaled_data del model_data del hour_labels tracked_model_objects . extend ( track_storms ( model_objects , self . hours , self . object_matcher . cost_function_components , self . object_matcher . max_values , self . object_matcher . weights ) ) self . model_ew . min_thresh = min_orig self . model_ew . max_thresh = max_orig self . model_ew . data_increment = data_increment_orig return tracked_model_objects
Identify storms in gridded model output and extract uniform sized patches around the storm centers of mass .
4,540
def find_mrms_tracks ( self ) : obs_objects = [ ] tracked_obs_objects = [ ] if self . mrms_ew is not None : self . mrms_grid . load_data ( ) if len ( self . mrms_grid . data ) != len ( self . hours ) : print ( 'Less than 24 hours of observation data found' ) return tracked_obs_objects for h , hour in enumerate ( self . hours ) : mrms_data = np . zeros ( self . mrms_grid . data [ h ] . shape ) mrms_data [ : ] = np . array ( self . mrms_grid . data [ h ] ) mrms_data [ mrms_data < 0 ] = 0 hour_labels = self . mrms_ew . size_filter ( self . mrms_ew . label ( gaussian_filter ( mrms_data , self . gaussian_window ) ) , self . size_filter ) hour_labels [ mrms_data < self . mrms_ew . min_thresh ] = 0 obj_slices = find_objects ( hour_labels ) num_slices = len ( obj_slices ) obs_objects . append ( [ ] ) if num_slices > 0 : for sl in obj_slices : obs_objects [ - 1 ] . append ( STObject ( mrms_data [ sl ] , np . where ( hour_labels [ sl ] > 0 , 1 , 0 ) , self . model_grid . x [ sl ] , self . model_grid . y [ sl ] , self . model_grid . i [ sl ] , self . model_grid . j [ sl ] , hour , hour , dx = self . model_grid . dx ) ) if h > 0 : dims = obs_objects [ - 1 ] [ - 1 ] . timesteps [ 0 ] . shape obs_objects [ - 1 ] [ - 1 ] . estimate_motion ( hour , self . mrms_grid . data [ h - 1 ] , dims [ 1 ] , dims [ 0 ] ) for h , hour in enumerate ( self . hours ) : past_time_objs = [ ] for obj in tracked_obs_objects : if obj . end_time == hour - 1 : past_time_objs . append ( obj ) if len ( past_time_objs ) == 0 : tracked_obs_objects . extend ( obs_objects [ h ] ) elif len ( past_time_objs ) > 0 and len ( obs_objects [ h ] ) > 0 : assignments = self . object_matcher . match_objects ( past_time_objs , obs_objects [ h ] , hour - 1 , hour ) unpaired = list ( range ( len ( obs_objects [ h ] ) ) ) for pair in assignments : past_time_objs [ pair [ 0 ] ] . extend ( obs_objects [ h ] [ pair [ 1 ] ] ) unpaired . remove ( pair [ 1 ] ) if len ( unpaired ) > 0 : for up in unpaired : tracked_obs_objects . append ( obs_objects [ h ] [ up ] ) print ( "Tracked Obs Objects: {0:03d} Hour: {1:02d}" . format ( len ( tracked_obs_objects ) , hour ) ) return tracked_obs_objects
Identify objects from MRMS timesteps and link them together with object matching .
4,541
def match_tracks ( self , model_tracks , obs_tracks , unique_matches = True , closest_matches = False ) : if unique_matches : pairings = self . track_matcher . match_tracks ( model_tracks , obs_tracks , closest_matches = closest_matches ) else : pairings = self . track_matcher . neighbor_matches ( model_tracks , obs_tracks ) return pairings
Match forecast and observed tracks .
4,542
def match_hail_sizes ( model_tracks , obs_tracks , track_pairings ) : unpaired = list ( range ( len ( model_tracks ) ) ) for p , pair in enumerate ( track_pairings ) : model_track = model_tracks [ pair [ 0 ] ] unpaired . remove ( pair [ 0 ] ) obs_track = obs_tracks [ pair [ 1 ] ] obs_hail_sizes = np . array ( [ step [ obs_track . masks [ t ] == 1 ] . max ( ) for t , step in enumerate ( obs_track . timesteps ) ] ) if obs_track . times . size > 1 and model_track . times . size > 1 : normalized_obs_times = 1.0 / ( obs_track . times . max ( ) - obs_track . times . min ( ) ) * ( obs_track . times - obs_track . times . min ( ) ) normalized_model_times = 1.0 / ( model_track . times . max ( ) - model_track . times . min ( ) ) * ( model_track . times - model_track . times . min ( ) ) hail_interp = interp1d ( normalized_obs_times , obs_hail_sizes , kind = "nearest" , bounds_error = False , fill_value = 0 ) model_track . observations = hail_interp ( normalized_model_times ) elif obs_track . times . size == 1 : model_track . observations = np . ones ( model_track . times . shape ) * obs_hail_sizes [ 0 ] elif model_track . times . size == 1 : model_track . observations = np . array ( [ obs_hail_sizes . max ( ) ] ) print ( pair [ 0 ] , "obs" , obs_hail_sizes ) print ( pair [ 0 ] , "model" , model_track . observations ) for u in unpaired : model_tracks [ u ] . observations = np . zeros ( model_tracks [ u ] . times . shape )
Given forecast and observed track pairings, maximum hail sizes are associated with each paired forecast storm track timestep. If the durations of the forecast and observed tracks differ, interpolation is used for the intermediate timesteps.
4,543
def calc_track_errors ( model_tracks , obs_tracks , track_pairings ) : columns = [ 'obs_track_id' , 'translation_error_x' , 'translation_error_y' , 'start_time_difference' , 'end_time_difference' , ] track_errors = pd . DataFrame ( index = list ( range ( len ( model_tracks ) ) ) , columns = columns ) for p , pair in enumerate ( track_pairings ) : model_track = model_tracks [ pair [ 0 ] ] if type ( pair [ 1 ] ) in [ int , np . int64 ] : obs_track = obs_tracks [ pair [ 1 ] ] else : obs_track = obs_tracks [ pair [ 1 ] [ 0 ] ] model_com = model_track . center_of_mass ( model_track . start_time ) obs_com = obs_track . center_of_mass ( obs_track . start_time ) track_errors . loc [ pair [ 0 ] , 'obs_track_id' ] = pair [ 1 ] if type ( pair [ 1 ] ) in [ int , np . int64 ] else pair [ 1 ] [ 0 ] track_errors . loc [ pair [ 0 ] , 'translation_error_x' ] = model_com [ 0 ] - obs_com [ 0 ] track_errors . loc [ pair [ 0 ] , 'translation_error_y' ] = model_com [ 1 ] - obs_com [ 1 ] track_errors . loc [ pair [ 0 ] , 'start_time_difference' ] = model_track . start_time - obs_track . start_time track_errors . loc [ pair [ 0 ] , 'end_time_difference' ] = model_track . end_time - obs_track . end_time return track_errors
Calculates spatial and temporal translation errors between matched forecast and observed tracks .
4,544
def __display_for_tree_node ( self , tree_node ) : keys = tree_node . keys if len ( keys ) == 1 : return "{0} ({1})" . format ( tree_node . keys [ - 1 ] , tree_node . count ) elif len ( keys ) == 2 : months = ( _ ( "January" ) , _ ( "February" ) , _ ( "March" ) , _ ( "April" ) , _ ( "May" ) , _ ( "June" ) , _ ( "July" ) , _ ( "August" ) , _ ( "September" ) , _ ( "October" ) , _ ( "November" ) , _ ( "December" ) ) return "{0} ({1})" . format ( months [ max ( min ( tree_node . keys [ 1 ] - 1 , 11 ) , 0 ) ] , tree_node . count ) else : weekdays = ( _ ( "Monday" ) , _ ( "Tuesday" ) , _ ( "Wednesday" ) , _ ( "Thursday" ) , _ ( "Friday" ) , _ ( "Saturday" ) , _ ( "Sunday" ) ) date = datetime . date ( tree_node . keys [ 0 ] , tree_node . keys [ 1 ] , tree_node . keys [ 2 ] ) return "{0} - {1} ({2})" . format ( tree_node . keys [ 2 ] , weekdays [ date . weekday ( ) ] , tree_node . count )
Return the text display for the given tree node, based on the number of keys associated with the tree node.
4,545
def __insert_child ( self , parent_tree_node , index , tree_node ) : parent_item = self . __mapping [ id ( parent_tree_node ) ] self . item_model_controller . begin_insert ( index , index , parent_item . row , parent_item . id ) properties = { "display" : self . __display_for_tree_node ( tree_node ) , "tree_node" : tree_node } item = self . item_model_controller . create_item ( properties ) parent_item . insert_child ( index , item ) self . __mapping [ id ( tree_node ) ] = item self . item_model_controller . end_insert ( )
Called from the root tree node when a new node is inserted into tree . This method creates properties to represent the node for display and inserts it into the item model controller .
4,546
def __remove_child ( self , parent_tree_node , index ) : parent_item = self . __mapping [ id ( parent_tree_node ) ] self . item_model_controller . begin_remove ( index , index , parent_item . row , parent_item . id ) child_item = parent_item . children [ index ] parent_item . remove_child ( child_item ) self . __mapping . pop ( id ( child_item . data [ "tree_node" ] ) ) self . item_model_controller . end_remove ( )
Called from the root tree node when a node is removed from the tree. This method removes it from the item model controller.
4,547
def update_all_nodes ( self ) : item_model_controller = self . item_model_controller if item_model_controller : if self . __node_counts_dirty : for item in self . __mapping . values ( ) : if "tree_node" in item . data : tree_node = item . data [ "tree_node" ] item . data [ "display" ] = self . __display_for_tree_node ( tree_node ) item_model_controller . data_changed ( item . row , item . parent . row , item . parent . id ) self . __node_counts_dirty = False
Update all tree item displays if needed . Usually for count updates .
4,548
def date_browser_selection_changed ( self , selected_indexes ) : partial_date_filters = list ( ) for index , parent_row , parent_id in selected_indexes : item_model_controller = self . item_model_controller tree_node = item_model_controller . item_value ( "tree_node" , index , parent_id ) partial_date_filters . append ( ListModel . PartialDateFilter ( "created_local" , * tree_node . keys ) ) if len ( partial_date_filters ) > 0 : self . __date_filter = ListModel . OrFilter ( partial_date_filters ) else : self . __date_filter = None self . __update_filter ( )
Called to handle selection changes in the tree widget .
4,549
def text_filter_changed ( self , text ) : text = text . strip ( ) if text else None if text is not None : self . __text_filter = ListModel . TextFilter ( "text_for_filter" , text ) else : self . __text_filter = None self . __update_filter ( )
Called to handle changes to the text filter .
4,550
def __update_filter ( self ) : filters = list ( ) if self . __date_filter : filters . append ( self . __date_filter ) if self . __text_filter : filters . append ( self . __text_filter ) self . document_controller . display_filter = ListModel . AndFilter ( filters )
Create a combined filter . Set the resulting filter into the document controller .
4,551
def __get_keys ( self ) : keys = list ( ) tree_node = self while tree_node is not None and tree_node . key is not None : keys . insert ( 0 , tree_node . key ) tree_node = tree_node . parent return keys
Return the keys associated with this node by adding its key and then adding parent keys recursively .
4,552
def label_storm_objects ( data , method , min_intensity , max_intensity , min_area = 1 , max_area = 100 , max_range = 1 , increment = 1 , gaussian_sd = 0 ) : if method . lower ( ) in [ "ew" , "watershed" ] : labeler = EnhancedWatershed ( min_intensity , increment , max_intensity , max_area , max_range ) else : labeler = Hysteresis ( min_intensity , max_intensity ) if len ( data . shape ) == 2 : label_grid = labeler . label ( gaussian_filter ( data , gaussian_sd ) ) label_grid [ data < min_intensity ] = 0 if min_area > 1 : label_grid = labeler . size_filter ( label_grid , min_area ) else : label_grid = np . zeros ( data . shape , dtype = int ) for t in range ( data . shape [ 0 ] ) : label_grid [ t ] = labeler . label ( gaussian_filter ( data [ t ] , gaussian_sd ) ) label_grid [ t ] [ data [ t ] < min_intensity ] = 0 if min_area > 1 : label_grid [ t ] = labeler . size_filter ( label_grid [ t ] , min_area ) return label_grid
From a 2D grid or time series of 2D grids this method labels storm objects with either the Enhanced Watershed or Hysteresis methods .
4,553
def extract_storm_objects ( label_grid , data , x_grid , y_grid , times , dx = 1 , dt = 1 , obj_buffer = 0 ) : storm_objects = [ ] if len ( label_grid . shape ) == 3 : ij_grid = np . indices ( label_grid . shape [ 1 : ] ) for t , time in enumerate ( times ) : storm_objects . append ( [ ] ) object_slices = list ( find_objects ( label_grid [ t ] , label_grid [ t ] . max ( ) ) ) if len ( object_slices ) > 0 : for o , obj_slice in enumerate ( object_slices ) : if obj_buffer > 0 : obj_slice_buff = [ slice ( np . maximum ( 0 , osl . start - obj_buffer ) , np . minimum ( osl . stop + obj_buffer , label_grid . shape [ l + 1 ] ) ) for l , osl in enumerate ( obj_slice ) ] else : obj_slice_buff = obj_slice storm_objects [ - 1 ] . append ( STObject ( data [ t ] [ obj_slice_buff ] , np . where ( label_grid [ t ] [ obj_slice_buff ] == o + 1 , 1 , 0 ) , x_grid [ obj_slice_buff ] , y_grid [ obj_slice_buff ] , ij_grid [ 0 ] [ obj_slice_buff ] , ij_grid [ 1 ] [ obj_slice_buff ] , time , time , dx = dx , step = dt ) ) if t > 0 : dims = storm_objects [ - 1 ] [ - 1 ] . timesteps [ 0 ] . shape storm_objects [ - 1 ] [ - 1 ] . estimate_motion ( time , data [ t - 1 ] , dims [ 1 ] , dims [ 0 ] ) else : ij_grid = np . indices ( label_grid . shape ) storm_objects . append ( [ ] ) object_slices = list ( find_objects ( label_grid , label_grid . max ( ) ) ) if len ( object_slices ) > 0 : for o , obj_slice in enumerate ( object_slices ) : if obj_buffer > 0 : obj_slice_buff = [ slice ( np . maximum ( 0 , osl . start - obj_buffer ) , np . minimum ( osl . stop + obj_buffer , label_grid . shape [ l + 1 ] ) ) for l , osl in enumerate ( obj_slice ) ] else : obj_slice_buff = obj_slice storm_objects [ - 1 ] . append ( STObject ( data [ obj_slice_buff ] , np . where ( label_grid [ obj_slice_buff ] == o + 1 , 1 , 0 ) , x_grid [ obj_slice_buff ] , y_grid [ obj_slice_buff ] , ij_grid [ 0 ] [ obj_slice_buff ] , ij_grid [ 1 ] [ obj_slice_buff ] , times , times , dx = dx , step = dt ) ) return storm_objects
After storms are labeled, this method extracts the storm objects from the grid and places them into STObjects. The STObjects contain intensity, location, and shape information about each storm at each timestep.
4,554
def extract_storm_patches ( label_grid , data , x_grid , y_grid , times , dx = 1 , dt = 1 , patch_radius = 16 ) : storm_objects = [ ] if len ( label_grid . shape ) == 3 : ij_grid = np . indices ( label_grid . shape [ 1 : ] ) for t , time in enumerate ( times ) : storm_objects . append ( [ ] ) centers = list ( center_of_mass ( data [ t ] , labels = label_grid [ t ] , index = np . arange ( 1 , label_grid [ t ] . max ( ) + 1 ) ) ) if len ( centers ) > 0 : for o , center in enumerate ( centers ) : int_center = np . round ( center ) . astype ( int ) obj_slice_buff = [ slice ( int_center [ 0 ] - patch_radius , int_center [ 0 ] + patch_radius ) , slice ( int_center [ 1 ] - patch_radius , int_center [ 1 ] + patch_radius ) ] storm_objects [ - 1 ] . append ( STObject ( data [ t ] [ obj_slice_buff ] , np . where ( label_grid [ t ] [ obj_slice_buff ] == o + 1 , 1 , 0 ) , x_grid [ obj_slice_buff ] , y_grid [ obj_slice_buff ] , ij_grid [ 0 ] [ obj_slice_buff ] , ij_grid [ 1 ] [ obj_slice_buff ] , time , time , dx = dx , step = dt ) ) if t > 0 : dims = storm_objects [ - 1 ] [ - 1 ] . timesteps [ 0 ] . shape storm_objects [ - 1 ] [ - 1 ] . estimate_motion ( time , data [ t - 1 ] , dims [ 1 ] , dims [ 0 ] ) else : ij_grid = np . indices ( label_grid . shape ) storm_objects . append ( [ ] ) centers = list ( center_of_mass ( data , labels = label_grid , index = np . arange ( 1 , label_grid . max ( ) + 1 ) ) ) if len ( centers ) > 0 : for o , center in enumerate ( centers ) : int_center = np . round ( center ) . astype ( int ) obj_slice_buff = ( slice ( int_center [ 0 ] - patch_radius , int_center [ 0 ] + patch_radius ) , slice ( int_center [ 1 ] - patch_radius , int_center [ 1 ] + patch_radius ) ) storm_objects [ - 1 ] . append ( STObject ( data [ obj_slice_buff ] , np . where ( label_grid [ obj_slice_buff ] == o + 1 , 1 , 0 ) , x_grid [ obj_slice_buff ] , y_grid [ obj_slice_buff ] , ij_grid [ 0 ] [ obj_slice_buff ] , ij_grid [ 1 ] [ obj_slice_buff ] , times [ 0 ] , times [ 0 ] , dx = dx , step = dt ) ) return storm_objects
After storms are labeled, this method extracts boxes of equal size centered on each storm from the grid and places them into STObjects. The STObjects contain intensity, location, and shape information about each storm at each timestep.
4,555
def track_storms ( storm_objects , times , distance_components , distance_maxima , distance_weights , tracked_objects = None ) : obj_matcher = ObjectMatcher ( distance_components , distance_weights , distance_maxima ) if tracked_objects is None : tracked_objects = [ ] for t , time in enumerate ( times ) : past_time_objects = [ ] for obj in tracked_objects : if obj . end_time == time - obj . step : past_time_objects . append ( obj ) if len ( past_time_objects ) == 0 : tracked_objects . extend ( storm_objects [ t ] ) elif len ( past_time_objects ) > 0 and len ( storm_objects [ t ] ) > 0 : assignments = obj_matcher . match_objects ( past_time_objects , storm_objects [ t ] , times [ t - 1 ] , times [ t ] ) unpaired = list ( range ( len ( storm_objects [ t ] ) ) ) for pair in assignments : past_time_objects [ pair [ 0 ] ] . extend ( storm_objects [ t ] [ pair [ 1 ] ] ) unpaired . remove ( pair [ 1 ] ) if len ( unpaired ) > 0 : for up in unpaired : tracked_objects . append ( storm_objects [ t ] [ up ] ) return tracked_objects
Given the output of extract_storm_objects this method tracks storms through time and merges individual STObjects into a set of tracks .
4,556
def centroid_distance ( item_a , time_a , item_b , time_b , max_value ) : ax , ay = item_a . center_of_mass ( time_a ) bx , by = item_b . center_of_mass ( time_b ) return np . minimum ( np . sqrt ( ( ax - bx ) ** 2 + ( ay - by ) ** 2 ) , max_value ) / float ( max_value )
Euclidean distance between the centroids of item_a and item_b .
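Like the other cost-function components below, the distance is capped at max_value and divided by it, so every component returns a value in [0, 1] that can be combined in a weighted sum.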
4,557
def shifted_centroid_distance ( item_a , time_a , item_b , time_b , max_value ) : ax , ay = item_a . center_of_mass ( time_a ) bx , by = item_b . center_of_mass ( time_b ) if time_a < time_b : bx = bx - item_b . u by = by - item_b . v else : ax = ax - item_a . u ay = ay - item_a . v return np . minimum ( np . sqrt ( ( ax - bx ) ** 2 + ( ay - by ) ** 2 ) , max_value ) / float ( max_value )
Centroid distance with motion corrections .
4,558
def closest_distance ( item_a , time_a , item_b , time_b , max_value ) : return np . minimum ( item_a . closest_distance ( time_a , item_b , time_b ) , max_value ) / float ( max_value )
Euclidean distance between the pixels in item_a and item_b closest to each other .
4,559
def ellipse_distance ( item_a , time_a , item_b , time_b , max_value ) : ts = np . array ( [ 0 , np . pi ] ) ell_a = item_a . get_ellipse_model ( time_a ) ell_b = item_b . get_ellipse_model ( time_b ) ends_a = ell_a . predict_xy ( ts ) ends_b = ell_b . predict_xy ( ts ) distances = np . sqrt ( ( ends_a [ : , 0 : 1 ] - ends_b [ : , 0 : 1 ] . T ) ** 2 + ( ends_a [ : , 1 : ] - ends_b [ : , 1 : ] . T ) ** 2 ) return np . minimum ( distances [ 0 , 1 ] , max_value ) / float ( max_value )
Calculate differences in the properties of ellipses fitted to each object .
4,560
def nonoverlap ( item_a , time_a , item_b , time_b , max_value ) : return np . minimum ( 1 - item_a . count_overlap ( time_a , item_b , time_b ) , max_value ) / float ( max_value )
Percentage of pixels in each object that do not overlap with the other object
4,561
def max_intensity ( item_a , time_a , item_b , time_b , max_value ) : intensity_a = item_a . max_intensity ( time_a ) intensity_b = item_b . max_intensity ( time_b ) diff = np . sqrt ( ( intensity_a - intensity_b ) ** 2 ) return np . minimum ( diff , max_value ) / float ( max_value )
RMS difference in maximum intensity
4,562
def area_difference ( item_a , time_a , item_b , time_b , max_value ) : size_a = item_a . size ( time_a ) size_b = item_b . size ( time_b ) diff = np . sqrt ( ( size_a - size_b ) ** 2 ) return np . minimum ( diff , max_value ) / float ( max_value )
RMS Difference in object areas .
4,563
def mean_minimum_centroid_distance ( item_a , item_b , max_value ) : centroids_a = np . array ( [ item_a . center_of_mass ( t ) for t in item_a . times ] ) centroids_b = np . array ( [ item_b . center_of_mass ( t ) for t in item_b . times ] ) distance_matrix = ( centroids_a [ : , 0 : 1 ] - centroids_b . T [ 0 : 1 ] ) ** 2 + ( centroids_a [ : , 1 : ] - centroids_b . T [ 1 : ] ) ** 2 mean_min_distances = np . sqrt ( distance_matrix . min ( axis = 0 ) . mean ( ) + distance_matrix . min ( axis = 1 ) . mean ( ) ) return np . minimum ( mean_min_distances , max_value ) / float ( max_value )
RMS difference in the minimum distances from the centroids of one track to the centroids of another track
4,564
def mean_min_time_distance ( item_a , item_b , max_value ) : times_a = item_a . times . reshape ( ( item_a . times . size , 1 ) ) times_b = item_b . times . reshape ( ( 1 , item_b . times . size ) ) distance_matrix = ( times_a - times_b ) ** 2 mean_min_distances = np . sqrt ( distance_matrix . min ( axis = 0 ) . mean ( ) + distance_matrix . min ( axis = 1 ) . mean ( ) ) return np . minimum ( mean_min_distances , max_value ) / float ( max_value )
Calculate the mean time difference among the time steps in each object .
4,565
def start_centroid_distance ( item_a , item_b , max_value ) : start_a = item_a . center_of_mass ( item_a . times [ 0 ] ) start_b = item_b . center_of_mass ( item_b . times [ 0 ] ) start_distance = np . sqrt ( ( start_a [ 0 ] - start_b [ 0 ] ) ** 2 + ( start_a [ 1 ] - start_b [ 1 ] ) ** 2 ) return np . minimum ( start_distance , max_value ) / float ( max_value )
Distance between the centroids of the first step in each object .
4,566
def start_time_distance ( item_a , item_b , max_value ) : start_time_diff = np . abs ( item_a . times [ 0 ] - item_b . times [ 0 ] ) return np . minimum ( start_time_diff , max_value ) / float ( max_value )
Absolute difference between the starting times of each item .
4,567
def duration_distance ( item_a , item_b , max_value ) : duration_a = item_a . times . size duration_b = item_b . times . size return np . minimum ( np . abs ( duration_a - duration_b ) , max_value ) / float ( max_value )
Absolute difference in the duration of two items
4,568
def mean_area_distance ( item_a , item_b , max_value ) : mean_area_a = np . mean ( [ item_a . size ( t ) for t in item_a . times ] ) mean_area_b = np . mean ( [ item_b . size ( t ) for t in item_b . times ] ) return np . abs ( mean_area_a - mean_area_b ) / float ( max_value )
Absolute difference in the means of the areas of each track over time .
4,569
def match_objects ( self , set_a , set_b , time_a , time_b ) : costs = self . cost_matrix ( set_a , set_b , time_a , time_b ) * 100 min_row_costs = costs . min ( axis = 1 ) min_col_costs = costs . min ( axis = 0 ) good_rows = np . where ( min_row_costs < 100 ) [ 0 ] good_cols = np . where ( min_col_costs < 100 ) [ 0 ] assignments = [ ] if len ( good_rows ) > 0 and len ( good_cols ) > 0 : munk = Munkres ( ) initial_assignments = munk . compute ( costs [ tuple ( np . meshgrid ( good_rows , good_cols , indexing = 'ij' ) ) ] . tolist ( ) ) initial_assignments = [ ( good_rows [ x [ 0 ] ] , good_cols [ x [ 1 ] ] ) for x in initial_assignments ] for a in initial_assignments : if costs [ a [ 0 ] , a [ 1 ] ] < 100 : assignments . append ( a ) return assignments
Match two sets of objects at particular times .
4,570
def total_cost_function ( self , item_a , item_b , time_a , time_b ) : distances = np . zeros ( len ( self . weights ) ) for c , component in enumerate ( self . cost_function_components ) : distances [ c ] = component ( item_a , time_a , item_b , time_b , self . max_values [ c ] ) total_distance = np . sum ( self . weights * distances ) return total_distance
Calculate total cost function between two items .
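A small sketch of the weighted combination with made-up component values (lower total cost indicates a better match):

import numpy as np
weights = np.array([2.0, 1.0])            # hypothetical weights for two distance components
distances = np.array([0.3, 0.6])          # normalized component distances in [0, 1]
total_cost = np.sum(weights * distances)  # 1.2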
4,571
def variable_specifier ( self ) -> dict : if self . value_type is not None : return { "type" : "variable" , "version" : 1 , "uuid" : str ( self . uuid ) , "x-name" : self . name , "x-value" : self . value } else : return self . specifier
Return the variable specifier for this variable .
4,572
def bound_variable ( self ) : class BoundVariable : def __init__ ( self , variable ) : self . __variable = variable self . changed_event = Event . Event ( ) self . needs_rebind_event = Event . Event ( ) def property_changed ( key ) : if key == "value" : self . changed_event . fire ( ) self . __variable_property_changed_listener = variable . property_changed_event . listen ( property_changed ) @ property def value ( self ) : return self . __variable . value def close ( self ) : self . __variable_property_changed_listener . close ( ) self . __variable_property_changed_listener = None return BoundVariable ( self )
Return an object with a value property and a changed_event .
4,573
def resolve_object_specifier ( self , object_specifier , secondary_specifier = None , property_name = None , objects_model = None ) : variable = self . __computation ( ) . resolve_variable ( object_specifier ) if not variable : return self . __context . resolve_object_specifier ( object_specifier , secondary_specifier , property_name , objects_model ) elif variable . specifier is None : return variable . bound_variable return None
Resolve the object specifier .
4,574
def parse_names ( cls , expression ) : names = set ( ) try : ast_node = ast . parse ( expression , "ast" ) class Visitor ( ast . NodeVisitor ) : def visit_Name ( self , node ) : names . add ( node . id ) Visitor ( ) . visit ( ast_node ) except Exception : pass return names
Return the set of identifiers used in the expression.
4,575
def bind ( self , context ) -> None : self . __computation_context = ComputationContext ( self , context ) for variable in self . variables : assert variable . bound_item is None for result in self . results : assert result . bound_item is None for variable in self . variables : self . __bind_variable ( variable ) for result in self . results : self . __bind_result ( result )
Bind a context to this computation .
4,576
def unbind ( self ) : for variable in self . variables : self . __unbind_variable ( variable ) for result in self . results : self . __unbind_result ( result )
Unlisten and close each bound item .
4,577
def sort_by_date_key ( data_item ) : return data_item . title + str ( data_item . uuid ) if data_item . is_live else str ( ) , data_item . date_for_sorting , str ( data_item . uuid )
A sort key for the created field of a data item. The sort by uuid makes it deterministic.
4,578
def set_r_value ( self , r_var : str , * , notify_changed = True ) -> None : self . r_var = r_var self . _description_changed ( ) if notify_changed : self . __notify_description_changed ( )
Used to signal changes to the ref var, which is kept in the document controller.
4,579
def set_data_and_metadata ( self , data_and_metadata , data_modified = None ) : self . increment_data_ref_count ( ) try : if data_and_metadata : data = data_and_metadata . data data_shape_and_dtype = data_and_metadata . data_shape_and_dtype intensity_calibration = data_and_metadata . intensity_calibration dimensional_calibrations = data_and_metadata . dimensional_calibrations metadata = data_and_metadata . metadata timestamp = data_and_metadata . timestamp data_descriptor = data_and_metadata . data_descriptor timezone = data_and_metadata . timezone or Utility . get_local_timezone ( ) timezone_offset = data_and_metadata . timezone_offset or Utility . TimezoneMinutesToStringConverter ( ) . convert ( Utility . local_utcoffset_minutes ( ) ) new_data_and_metadata = DataAndMetadata . DataAndMetadata ( self . __load_data , data_shape_and_dtype , intensity_calibration , dimensional_calibrations , metadata , timestamp , data , data_descriptor , timezone , timezone_offset ) else : new_data_and_metadata = None self . __set_data_metadata_direct ( new_data_and_metadata , data_modified ) if self . __data_and_metadata is not None : if self . persistent_object_context and not self . persistent_object_context . is_write_delayed ( self ) : self . persistent_object_context . write_external_data ( self , "data" , self . __data_and_metadata . data ) self . __data_and_metadata . unloadable = True finally : self . decrement_data_ref_count ( )
Sets the underlying data and data-metadata to the data_and_metadata.
4,580
def get_calculated_display_values ( self , immediate : bool = False ) -> DisplayValues : if not immediate or not self . __is_master or not self . __last_display_values : if not self . __current_display_values and self . __data_item : self . __current_display_values = DisplayValues ( self . __data_item . xdata , self . sequence_index , self . collection_index , self . slice_center , self . slice_width , self . display_limits , self . complex_display_type , self . __color_map_data ) def finalize ( display_values ) : self . __last_display_values = display_values self . display_values_changed_event . fire ( ) self . __current_display_values . on_finalize = finalize return self . __current_display_values return self . __last_display_values
Return the display values .
4,581
def auto_display_limits ( self ) : display_data_and_metadata = self . get_calculated_display_values ( True ) . display_data_and_metadata data = display_data_and_metadata . data if display_data_and_metadata else None if data is not None : mn , mx = numpy . nanmin ( data ) , numpy . nanmax ( data ) self . display_limits = mn , mx
Calculate best display limits and set them .
4,582
def remove_graphic ( self , graphic : Graphics . Graphic , * , safe : bool = False ) -> typing . Optional [ typing . Sequence ] : return self . remove_model_item ( self , "graphics" , graphic , safe = safe )
Remove a graphic but do it through the container so dependencies can be tracked .
4,583
def dimensional_shape ( self ) -> typing . Optional [ typing . Tuple [ int , ... ] ] : if not self . __data_and_metadata : return None return self . __data_and_metadata . dimensional_shape
Shape of the underlying data if only one .
4,584
def write_local_file ( fp , name_bytes , writer , dt ) : fp . write ( struct . pack ( 'I' , 0x04034b50 ) ) fp . write ( struct . pack ( 'H' , 10 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) msdos_date = int ( dt . year - 1980 ) << 9 | int ( dt . month ) << 5 | int ( dt . day ) msdos_time = int ( dt . hour ) << 11 | int ( dt . minute ) << 5 | int ( dt . second ) fp . write ( struct . pack ( 'H' , msdos_time ) ) fp . write ( struct . pack ( 'H' , msdos_date ) ) crc32_pos = fp . tell ( ) fp . write ( struct . pack ( 'I' , 0 ) ) data_len_pos = fp . tell ( ) fp . write ( struct . pack ( 'I' , 0 ) ) fp . write ( struct . pack ( 'I' , 0 ) ) fp . write ( struct . pack ( 'H' , len ( name_bytes ) ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( name_bytes ) data_start_pos = fp . tell ( ) crc32 = writer ( fp ) data_end_pos = fp . tell ( ) data_len = data_end_pos - data_start_pos fp . seek ( crc32_pos ) fp . write ( struct . pack ( 'I' , crc32 ) ) fp . seek ( data_len_pos ) fp . write ( struct . pack ( 'I' , data_len ) ) fp . write ( struct . pack ( 'I' , data_len ) ) fp . seek ( data_end_pos ) return data_len , crc32
Writes a zip file local file header structure at the current file position .
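The least obvious arithmetic in the header layout above is the MS-DOS date/time packing: the date word holds (year - 1980) << 9 | month << 5 | day and the time word holds hour << 11 | minute << 5 | second. (The zip specification stores second // 2 in the low five bits; the code above writes the raw seconds value.) A small worked sketch of that packing; the timestamp is arbitrary:

import datetime

def msdos_date_time(dt):
    # Same bit packing used by write_local_file above.
    msdos_date = (dt.year - 1980) << 9 | dt.month << 5 | dt.day
    msdos_time = dt.hour << 11 | dt.minute << 5 | dt.second
    return msdos_date, msdos_time

d, t = msdos_date_time(datetime.datetime(2020, 6, 15, 13, 45, 10))
print(hex(d), hex(t))  # 0x50cf 0x6daa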
4,585
def write_directory_data ( fp , offset , name_bytes , data_len , crc32 , dt ) : fp . write ( struct . pack ( 'I' , 0x02014b50 ) ) fp . write ( struct . pack ( 'H' , 10 ) ) fp . write ( struct . pack ( 'H' , 10 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) msdos_date = int ( dt . year - 1980 ) << 9 | int ( dt . month ) << 5 | int ( dt . day ) msdos_time = int ( dt . hour ) << 11 | int ( dt . minute ) << 5 | int ( dt . second ) fp . write ( struct . pack ( 'H' , msdos_time ) ) fp . write ( struct . pack ( 'H' , msdos_date ) ) fp . write ( struct . pack ( 'I' , crc32 ) ) fp . write ( struct . pack ( 'I' , data_len ) ) fp . write ( struct . pack ( 'I' , data_len ) ) fp . write ( struct . pack ( 'H' , len ( name_bytes ) ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'I' , 0 ) ) fp . write ( struct . pack ( 'I' , offset ) ) fp . write ( name_bytes )
Write a zip file directory entry at the current file position .
4,586
def write_end_of_directory ( fp , dir_size , dir_offset , count ) : fp . write ( struct . pack ( 'I' , 0x06054b50 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , count ) ) fp . write ( struct . pack ( 'H' , count ) ) fp . write ( struct . pack ( 'I' , dir_size ) ) fp . write ( struct . pack ( 'I' , dir_offset ) ) fp . write ( struct . pack ( 'H' , 0 ) )
Write the zip file end-of-directory header at the current file position .
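Together, the three writers above produce a complete stored (uncompressed) archive: one or more local file records, a central directory entry per member pointing back at its local header, and a single end-of-directory record. A sketch of a one-member archive built from them, assuming write_local_file, write_directory_data, and write_end_of_directory from above are in scope; the member name and payload are made up:

import binascii
import datetime
import io

def write_single_member_zip(fp, name_bytes, payload):
    dt = datetime.datetime.now()
    local_offset = fp.tell()
    def writer(out):
        # The writer callback emits the member data and returns its CRC-32.
        out.write(payload)
        return binascii.crc32(payload) & 0xFFFFFFFF
    data_len, crc32 = write_local_file(fp, name_bytes, writer, dt)
    dir_offset = fp.tell()
    write_directory_data(fp, local_offset, name_bytes, data_len, crc32, dt)
    dir_size = fp.tell() - dir_offset
    write_end_of_directory(fp, dir_size, dir_offset, 1)

buf = io.BytesIO()
write_single_member_zip(buf, b"hello.txt", b"hello zip")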
4,587
def write_zip_fp ( fp , data , properties , dir_data_list = None ) : assert data is not None or properties is not None dir_data_list = list ( ) if dir_data_list is None else dir_data_list dt = datetime . datetime . now ( ) if data is not None : offset_data = fp . tell ( ) def write_data ( fp ) : numpy_start_pos = fp . tell ( ) numpy . save ( fp , data ) numpy_end_pos = fp . tell ( ) fp . seek ( numpy_start_pos ) data_c = numpy . require ( data , dtype = data . dtype , requirements = [ "C_CONTIGUOUS" ] ) header_data = fp . read ( ( numpy_end_pos - numpy_start_pos ) - data_c . nbytes ) data_crc32 = binascii . crc32 ( data_c . data , binascii . crc32 ( header_data ) ) & 0xFFFFFFFF fp . seek ( numpy_end_pos ) return data_crc32 data_len , crc32 = write_local_file ( fp , b"data.npy" , write_data , dt ) dir_data_list . append ( ( offset_data , b"data.npy" , data_len , crc32 ) ) if properties is not None : json_str = str ( ) try : class JSONEncoder ( json . JSONEncoder ) : def default ( self , obj ) : if isinstance ( obj , Geometry . IntPoint ) or isinstance ( obj , Geometry . IntSize ) or isinstance ( obj , Geometry . IntRect ) or isinstance ( obj , Geometry . FloatPoint ) or isinstance ( obj , Geometry . FloatSize ) or isinstance ( obj , Geometry . FloatRect ) : return tuple ( obj ) else : return json . JSONEncoder . default ( self , obj ) json_io = io . StringIO ( ) json . dump ( properties , json_io , cls = JSONEncoder ) json_str = json_io . getvalue ( ) except Exception as e : import traceback logging . error ( "Exception writing zip file %s" + str ( e ) ) traceback . print_exc ( ) traceback . print_stack ( ) def write_json ( fp ) : json_bytes = bytes ( json_str , 'ISO-8859-1' ) fp . write ( json_bytes ) return binascii . crc32 ( json_bytes ) & 0xFFFFFFFF offset_json = fp . tell ( ) json_len , json_crc32 = write_local_file ( fp , b"metadata.json" , write_json , dt ) dir_data_list . append ( ( offset_json , b"metadata.json" , json_len , json_crc32 ) ) dir_offset = fp . tell ( ) for offset , name_bytes , data_len , crc32 in dir_data_list : write_directory_data ( fp , offset , name_bytes , data_len , crc32 , dt ) dir_size = fp . tell ( ) - dir_offset write_end_of_directory ( fp , dir_size , dir_offset , len ( dir_data_list ) ) fp . truncate ( )
Write a custom zip file of data and properties to fp .
4,588
def write_zip ( file_path , data , properties ) : with open ( file_path , "w+b" ) as fp : write_zip_fp ( fp , data , properties )
Write a custom zip file to the given file path .
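In use, write_zip takes a path, an optional numpy array, and a JSON-serializable properties dict, and produces a two-member archive (data.npy plus metadata.json). A minimal sketch, assuming the functions above are importable; the file name and property values are placeholders:

import numpy

data = numpy.arange(12, dtype=numpy.float32).reshape(3, 4)
properties = {"title": "example", "version": 1}
write_zip("example.ndata", data, properties)  # writes data.npy and metadata.json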
4,589
def parse_zip ( fp ) : local_files = { } dir_files = { } eocd = None fp . seek ( 0 ) while True : pos = fp . tell ( ) signature = struct . unpack ( 'I' , fp . read ( 4 ) ) [ 0 ] if signature == 0x04034b50 : fp . seek ( pos + 14 ) crc32 = struct . unpack ( 'I' , fp . read ( 4 ) ) [ 0 ] fp . seek ( pos + 18 ) data_len = struct . unpack ( 'I' , fp . read ( 4 ) ) [ 0 ] fp . seek ( pos + 26 ) name_len = struct . unpack ( 'H' , fp . read ( 2 ) ) [ 0 ] extra_len = struct . unpack ( 'H' , fp . read ( 2 ) ) [ 0 ] name_bytes = fp . read ( name_len ) fp . seek ( extra_len , os . SEEK_CUR ) data_pos = fp . tell ( ) fp . seek ( data_len , os . SEEK_CUR ) local_files [ pos ] = ( name_bytes , data_pos , data_len , crc32 ) elif signature == 0x02014b50 : fp . seek ( pos + 28 ) name_len = struct . unpack ( 'H' , fp . read ( 2 ) ) [ 0 ] extra_len = struct . unpack ( 'H' , fp . read ( 2 ) ) [ 0 ] comment_len = struct . unpack ( 'H' , fp . read ( 2 ) ) [ 0 ] fp . seek ( pos + 42 ) pos2 = struct . unpack ( 'I' , fp . read ( 4 ) ) [ 0 ] name_bytes = fp . read ( name_len ) fp . seek ( pos + 46 + name_len + extra_len + comment_len ) dir_files [ name_bytes ] = ( pos , pos2 ) elif signature == 0x06054b50 : fp . seek ( pos + 16 ) pos2 = struct . unpack ( 'I' , fp . read ( 4 ) ) [ 0 ] eocd = ( pos , pos2 ) break else : raise IOError ( ) return local_files , dir_files , eocd
Parse the zip file headers at fp
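parse_zip scans the headers from the start of the file and returns local file records keyed by local-header offset, directory entries keyed by member name (each value a pair of central-directory position and local-header offset), and the end-of-directory record. A short sketch listing the members of a file produced by write_zip above; the path is a placeholder:

with open("example.ndata", "rb") as fp:
    local_files, dir_files, eocd = parse_zip(fp)
    for name_bytes, (dir_pos, local_pos) in dir_files.items():
        _name, data_pos, data_len, crc32 = local_files[local_pos]
        print(name_bytes.decode("ascii"), "offset:", data_pos, "length:", data_len, "crc32:", hex(crc32))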
4,590
def read_data ( fp , local_files , dir_files , name_bytes ) : if name_bytes in dir_files : fp . seek ( local_files [ dir_files [ name_bytes ] [ 1 ] ] [ 1 ] ) return numpy . load ( fp ) return None
Read a numpy data array from the zip file
4,591
def read_json ( fp , local_files , dir_files , name_bytes ) : if name_bytes in dir_files : json_pos = local_files [ dir_files [ name_bytes ] [ 1 ] ] [ 1 ] json_len = local_files [ dir_files [ name_bytes ] [ 1 ] ] [ 2 ] fp . seek ( json_pos ) json_properties = fp . read ( json_len ) return json . loads ( json_properties . decode ( "utf-8" ) ) return None
Read json properties from the zip file
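Reading a file back combines parse_zip with the two readers above. A round-trip sketch, assuming read_data and read_json are the module-level helpers shown here (not the methods further below) and the path is a placeholder:

with open("example.ndata", "rb") as fp:
    local_files, dir_files, eocd = parse_zip(fp)
    data = read_data(fp, local_files, dir_files, b"data.npy")
    properties = read_json(fp, local_files, dir_files, b"metadata.json")
print(None if data is None else data.shape, properties)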
4,592
def rewrite_zip ( file_path , properties ) : with open ( file_path , "r+b" ) as fp : local_files , dir_files , eocd = parse_zip ( fp ) if len ( dir_files ) == 2 and b"data.npy" in dir_files and b"metadata.json" in dir_files and dir_files [ b"data.npy" ] [ 1 ] == 0 : fp . seek ( dir_files [ b"metadata.json" ] [ 1 ] ) dir_data_list = list ( ) local_file_pos = dir_files [ b"data.npy" ] [ 1 ] local_file = local_files [ local_file_pos ] dir_data_list . append ( ( local_file_pos , b"data.npy" , local_file [ 2 ] , local_file [ 3 ] ) ) write_zip_fp ( fp , None , properties , dir_data_list ) else : data = None if b"data.npy" in dir_files : fp . seek ( local_files [ dir_files [ b"data.npy" ] [ 1 ] ] [ 1 ] ) data = numpy . load ( fp ) fp . seek ( 0 ) write_zip_fp ( fp , data , properties )
Rewrite the json properties in the zip file
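rewrite_zip relies on the layout produced by write_zip_fp: when data.npy is the first member it seeks to the old metadata.json member and rewrites only the JSON plus the directory; otherwise it reloads the data and rewrites the whole file. A usage sketch for updating properties in place (path and values are placeholders):

new_properties = {"title": "renamed", "caption": "updated in place"}
rewrite_zip("example.ndata", new_properties)

with open("example.ndata", "rb") as fp:
    local_files, dir_files, _ = parse_zip(fp)
    print(read_json(fp, local_files, dir_files, b"metadata.json"))  # reflects new_properties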
4,593
def is_matching ( cls , file_path ) : if file_path . endswith ( ".ndata" ) and os . path . exists ( file_path ) : try : with open ( file_path , "r+b" ) as fp : local_files , dir_files , eocd = parse_zip ( fp ) contains_data = b"data.npy" in dir_files contains_metadata = b"metadata.json" in dir_files file_count = contains_data + contains_metadata if len ( dir_files ) != file_count or file_count == 0 : return False return True except Exception as e : logging . error ( "Exception parsing ndata file: %s" , file_path ) logging . error ( str ( e ) ) return False
Return whether the given absolute file path is an ndata file .
4,594
def write_data ( self , data , file_datetime ) : with self . __lock : assert data is not None absolute_file_path = self . __file_path make_directory_if_needed ( os . path . dirname ( absolute_file_path ) ) properties = self . read_properties ( ) if os . path . exists ( absolute_file_path ) else dict ( ) write_zip ( absolute_file_path , data , properties ) tz_minutes = Utility . local_utcoffset_minutes ( file_datetime ) timestamp = calendar . timegm ( file_datetime . timetuple ( ) ) - tz_minutes * 60 os . utime ( absolute_file_path , ( time . time ( ) , timestamp ) )
Write data to the ndata file specified by reference .
4,595
def write_properties ( self , properties , file_datetime ) : with self . __lock : absolute_file_path = self . __file_path make_directory_if_needed ( os . path . dirname ( absolute_file_path ) ) exists = os . path . exists ( absolute_file_path ) if exists : rewrite_zip ( absolute_file_path , Utility . clean_dict ( properties ) ) else : write_zip ( absolute_file_path , None , Utility . clean_dict ( properties ) ) tz_minutes = Utility . local_utcoffset_minutes ( file_datetime ) timestamp = calendar . timegm ( file_datetime . timetuple ( ) ) - tz_minutes * 60 os . utime ( absolute_file_path , ( time . time ( ) , timestamp ) )
Write properties to the ndata file specified by reference .
4,596
def read_properties ( self ) : with self . __lock : absolute_file_path = self . __file_path with open ( absolute_file_path , "rb" ) as fp : local_files , dir_files , eocd = parse_zip ( fp ) properties = read_json ( fp , local_files , dir_files , b"metadata.json" ) return properties
Read properties from the ndata file reference
4,597
def read_data ( self ) : with self . __lock : absolute_file_path = self . __file_path with open ( absolute_file_path , "rb" ) as fp : local_files , dir_files , eocd = parse_zip ( fp ) return read_data ( fp , local_files , dir_files , b"data.npy" ) return None
Read data from the ndata file reference
4,598
def remove ( self ) : with self . __lock : absolute_file_path = self . __file_path if os . path . isfile ( absolute_file_path ) : os . remove ( absolute_file_path )
Remove the ndata file reference
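The methods from is_matching through remove appear to belong to a file-handler class built around a single .ndata path. A usage sketch under that assumption; the class name NDataHandler, its constructor signature, and the file path are assumptions not confirmed by the excerpt:

import datetime
import os
import numpy

handler = NDataHandler("example.ndata")  # hypothetical constructor taking a file path
now = datetime.datetime.now()
handler.write_data(numpy.zeros((4, 4), dtype=numpy.float32), now)   # creates the file with data.npy
handler.write_properties({"title": "example"}, now)                 # rewrites metadata.json in place
print(NDataHandler.is_matching(os.path.abspath("example.ndata")))   # file-format check
print(handler.read_properties())                                    # properties dict read back
print(handler.read_data().shape)                                    # numpy array read back
handler.remove()                                                    # deletes the backing .ndata file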
4,599
def build_menu ( self , display_type_menu , document_controller , display_panel ) : dynamic_live_actions = list ( ) def switch_to_display_content ( display_panel_type ) : self . switch_to_display_content ( document_controller , display_panel , display_panel_type , display_panel . display_item ) empty_action = display_type_menu . add_menu_item ( _ ( "Clear Display Panel" ) , functools . partial ( switch_to_display_content , "empty-display-panel" ) ) display_type_menu . add_separator ( ) data_item_display_action = display_type_menu . add_menu_item ( _ ( "Display Item" ) , functools . partial ( switch_to_display_content , "data-display-panel" ) ) thumbnail_browser_action = display_type_menu . add_menu_item ( _ ( "Thumbnail Browser" ) , functools . partial ( switch_to_display_content , "thumbnail-browser-display-panel" ) ) grid_browser_action = display_type_menu . add_menu_item ( _ ( "Grid Browser" ) , functools . partial ( switch_to_display_content , "browser-display-panel" ) ) display_type_menu . add_separator ( ) display_panel_type = display_panel . display_panel_type empty_action . checked = display_panel_type == "empty" and display_panel . display_panel_controller is None data_item_display_action . checked = display_panel_type == "data_item" thumbnail_browser_action . checked = display_panel_type == "horizontal" grid_browser_action . checked = display_panel_type == "grid" dynamic_live_actions . append ( empty_action ) dynamic_live_actions . append ( data_item_display_action ) dynamic_live_actions . append ( thumbnail_browser_action ) dynamic_live_actions . append ( grid_browser_action ) for factory in self . __display_controller_factories . values ( ) : dynamic_live_actions . extend ( factory . build_menu ( display_type_menu , display_panel ) ) return dynamic_live_actions
Build the dynamic menu for the selected display panel .