idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
4,500
def point_probability ( self , threshold ) : point_prob = np . zeros ( self . data . shape [ 1 : ] ) for t in range ( self . data . shape [ 1 ] ) : point_prob [ t ] = np . where ( self . data [ : , t ] >= threshold , 1.0 , 0.0 ) . mean ( axis = 0 ) return EnsembleConsensus ( point_prob , "point_probability" , self . en...
Determine the probability of exceeding a threshold at a grid point based on the ensemble forecasts at that point .
4,501
def neighborhood_probability ( self , threshold , radius , sigmas = None ) : if sigmas is None : sigmas = [ 0 ] weights = disk ( radius ) filtered_prob = [ ] for sigma in sigmas : filtered_prob . append ( EnsembleConsensus ( np . zeros ( self . data . shape [ 1 : ] , dtype = np . float32 ) , "neighbor_prob_r_{0:d}_s_{1...
Hourly probability of exceeding a threshold based on model values within a specified radius of a point .
4,502
def period_max_neighborhood_probability ( self , threshold , radius , sigmas = None ) : if sigmas is None : sigmas = [ 0 ] weights = disk ( radius ) neighborhood_prob = np . zeros ( self . data . shape [ 2 : ] , dtype = np . float32 ) thresh_data = np . zeros ( self . data . shape [ 2 : ] , dtype = np . uint8 ) for m i...
Calculates the neighborhood probability of exceeding a threshold at any time over the period loaded .
4,503
def init_file ( self , filename , time_units = "seconds since 1970-01-01T00:00" ) : if os . access ( filename , os . R_OK ) : out_data = Dataset ( filename , "r+" ) else : out_data = Dataset ( filename , "w" ) if len ( self . data . shape ) == 2 : for d , dim in enumerate ( [ "y" , "x" ] ) : out_data . createDimension ...
Initializes netCDF file for writing
4,504
def write_to_file ( self , out_data ) : full_var_name = self . consensus_type + "_" + self . variable if "-hour" in self . consensus_type : if full_var_name not in out_data . variables . keys ( ) : var = out_data . createVariable ( full_var_name , "f4" , ( "y" , "x" ) , zlib = True , least_significant_digit = 3 , shuff...
Outputs data to a netCDF file . If the file does not exist it will be created . Otherwise additional variables are appended to the current file
4,505
def restore ( self , workspace_uuid ) : workspace = next ( ( workspace for workspace in self . document_model . workspaces if workspace . uuid == workspace_uuid ) , None ) if workspace is None : workspace = self . new_workspace ( ) self . _change_workspace ( workspace )
Restore the workspace to the given workspace_uuid .
4,506
def new_workspace ( self , name = None , layout = None , workspace_id = None , index = None ) -> WorkspaceLayout . WorkspaceLayout : workspace = WorkspaceLayout . WorkspaceLayout ( ) self . document_model . insert_workspace ( index if index is not None else len ( self . document_model . workspaces ) , workspace ) d = c...
Create a new workspace , insert it into document_model and return it .
4,507
def ensure_workspace ( self , name , layout , workspace_id ) : workspace = next ( ( workspace for workspace in self . document_model . workspaces if workspace . workspace_id == workspace_id ) , None ) if not workspace : workspace = self . new_workspace ( name = name , layout = layout , workspace_id = workspace_id ) sel...
Looks for a workspace with workspace_id .
4,508
def create_workspace ( self ) -> None : def create_clicked ( text ) : if text : command = Workspace . CreateWorkspaceCommand ( self , text ) command . perform ( ) self . document_controller . push_undo_command ( command ) self . pose_get_string_message_box ( caption = _ ( "Enter a name for the workspace" ) , text = _ (...
Pose a dialog to name and create a workspace .
4,509
def rename_workspace ( self ) -> None : def rename_clicked ( text ) : if len ( text ) > 0 : command = Workspace . RenameWorkspaceCommand ( self , text ) command . perform ( ) self . document_controller . push_undo_command ( command ) self . pose_get_string_message_box ( caption = _ ( "Enter new name for workspace" ) , ...
Pose a dialog to rename the workspace .
4,510
def remove_workspace ( self ) : def confirm_clicked ( ) : if len ( self . document_model . workspaces ) > 1 : command = Workspace . RemoveWorkspaceCommand ( self ) command . perform ( ) self . document_controller . push_undo_command ( command ) caption = _ ( "Remove workspace named '{0}'?" ) . format ( self . __workspa...
Pose a dialog to confirm removal then remove workspace .
4,511
def clone_workspace ( self ) -> None : def clone_clicked ( text ) : if text : command = Workspace . CloneWorkspaceCommand ( self , text ) command . perform ( ) self . document_controller . push_undo_command ( command ) self . pose_get_string_message_box ( caption = _ ( "Enter a name for the workspace" ) , text = self ....
Pose a dialog to name and clone a workspace .
4,512
def bootstrap ( score_objs , n_boot = 1000 ) : all_samples = np . random . choice ( score_objs , size = ( n_boot , len ( score_objs ) ) , replace = True ) return all_samples . sum ( axis = 1 )
Given a set of DistributedROC or DistributedReliability objects this function performs a bootstrap resampling of the objects and returns n_boot aggregations of them .
4,513
def update ( self , forecasts , observations ) : for t , threshold in enumerate ( self . thresholds ) : tp = np . count_nonzero ( ( forecasts >= threshold ) & ( observations >= self . obs_threshold ) ) fp = np . count_nonzero ( ( forecasts >= threshold ) & ( observations < self . obs_threshold ) ) fn = np . count_nonze...
Update the ROC curve with a set of forecasts and observations
4,514
def merge ( self , other_roc ) : if other_roc . thresholds . size == self . thresholds . size and np . all ( other_roc . thresholds == self . thresholds ) : self . contingency_tables += other_roc . contingency_tables else : print ( "Input table thresholds do not match." )
Ingest the values of another DistributedROC object into this one and update the statistics in place .
4,515
def performance_curve ( self ) : pod = self . contingency_tables [ "TP" ] / ( self . contingency_tables [ "TP" ] + self . contingency_tables [ "FN" ] ) far = self . contingency_tables [ "FP" ] / ( self . contingency_tables [ "FP" ] + self . contingency_tables [ "TP" ] ) far [ ( self . contingency_tables [ "FP" ] + self...
Calculate the Probability of Detection and False Alarm Ratio in order to output a performance diagram .
4,516
def max_csi ( self ) : csi = self . contingency_tables [ "TP" ] / ( self . contingency_tables [ "TP" ] + self . contingency_tables [ "FN" ] + self . contingency_tables [ "FP" ] ) return csi . max ( )
Calculate the maximum Critical Success Index across all probability thresholds
4,517
def get_contingency_tables ( self ) : return np . array ( [ ContingencyTable ( * ct ) for ct in self . contingency_tables . values ] )
Create an Array of ContingencyTable objects for each probability threshold .
4,518
def from_str ( self , in_str ) : parts = in_str . split ( ";" ) for part in parts : var_name , value = part . split ( ":" ) if var_name == "Obs_Threshold" : self . obs_threshold = float ( value ) elif var_name == "Thresholds" : self . thresholds = np . array ( value . split ( ) , dtype = float ) self . contingency_tabl...
Read the DistributedROC string and parse the contingency table values from it .
4,519
def update ( self , forecasts , observations ) : for t , threshold in enumerate ( self . thresholds [ : - 1 ] ) : self . frequencies . loc [ t , "Positive_Freq" ] += np . count_nonzero ( ( threshold <= forecasts ) & ( forecasts < self . thresholds [ t + 1 ] ) & ( observations >= self . obs_threshold ) ) self . frequenc...
Update the statistics with a set of forecasts and observations .
4,520
def merge ( self , other_rel ) : if other_rel . thresholds . size == self . thresholds . size and np . all ( other_rel . thresholds == self . thresholds ) : self . frequencies += other_rel . frequencies else : print ( "Input table thresholds do not match." )
Ingest another DistributedReliability and add its contents to the current object .
4,521
def reliability_curve ( self ) : total = self . frequencies [ "Total_Freq" ] . sum ( ) curve = pd . DataFrame ( columns = [ "Bin_Start" , "Bin_End" , "Bin_Center" , "Positive_Relative_Freq" , "Total_Relative_Freq" ] ) curve [ "Bin_Start" ] = self . thresholds [ : - 1 ] curve [ "Bin_End" ] = self . thresholds [ 1 : ] cu...
Calculates the reliability diagram statistics . The key columns are Bin_Start and Positive_Relative_Freq
4,522
def brier_score ( self ) : reliability , resolution , uncertainty = self . brier_score_components ( ) return reliability - resolution + uncertainty
Calculate the Brier Score
4,523
def brier_skill_score ( self ) : reliability , resolution , uncertainty = self . brier_score_components ( ) return ( resolution - reliability ) / uncertainty
Calculate the Brier Skill Score
4,524
def update ( self , forecasts , observations ) : if len ( observations . shape ) == 1 : obs_cdfs = np . zeros ( ( observations . size , self . thresholds . size ) ) for o , observation in enumerate ( observations ) : obs_cdfs [ o , self . thresholds >= observation ] = 1 else : obs_cdfs = observations self . errors [ "F...
Update the statistics with forecasts and observations .
4,525
def crps ( self ) : return np . sum ( self . errors [ "F_2" ] . values - self . errors [ "F_O" ] . values * 2.0 + self . errors [ "O_2" ] . values ) / ( self . thresholds . size * self . num_forecasts )
Calculates the continuous ranked probability score .
4,526
def crps_climo ( self ) : o_bar = self . errors [ "O" ] . values / float ( self . num_forecasts ) crps_c = np . sum ( self . num_forecasts * ( o_bar ** 2 ) - o_bar * self . errors [ "O" ] . values * 2.0 + self . errors [ "O_2" ] . values ) / float ( self . thresholds . size * self . num_forecasts ) return crps_c
Calculate the climatological CRPS .
4,527
def crpss ( self ) : crps_f = self . crps ( ) crps_c = self . crps_climo ( ) return 1.0 - float ( crps_f ) / float ( crps_c )
Calculate the continuous ranked probability skill score from existing data .
4,528
def has_metadata_value ( metadata_source , key : str ) -> bool : desc = session_key_map . get ( key ) if desc is not None : d = getattr ( metadata_source , "session_metadata" , dict ( ) ) for k in desc [ 'path' ] [ : - 1 ] : d = d . setdefault ( k , dict ( ) ) if d is not None else None if d is not None : return desc [...
Return whether the metadata value for the given key exists .
4,529
def delete_metadata_value ( metadata_source , key : str ) -> None : desc = session_key_map . get ( key ) if desc is not None : d0 = getattr ( metadata_source , "session_metadata" , dict ( ) ) d = d0 for k in desc [ 'path' ] [ : - 1 ] : d = d . setdefault ( k , dict ( ) ) if d is not None else None if d is not None and ...
Delete the metadata value for the given key .
4,530
def calculate_y_ticks ( self , plot_height ) : calibrated_data_min = self . calibrated_data_min calibrated_data_max = self . calibrated_data_max calibrated_data_range = calibrated_data_max - calibrated_data_min ticker = self . y_ticker y_ticks = list ( ) for tick_value , tick_label in zip ( ticker . values , ticker . l...
Calculate the y - axis items dependent on the plot height .
4,531
def calculate_x_ticks ( self , plot_width ) : x_calibration = self . x_calibration uncalibrated_data_left = self . __uncalibrated_left_channel uncalibrated_data_right = self . __uncalibrated_right_channel calibrated_data_left = x_calibration . convert_to_calibrated_value ( uncalibrated_data_left ) if x_calibration is n...
Calculate the x - axis items dependent on the plot width .
4,532
def size_to_content ( self ) : new_sizing = self . copy_sizing ( ) new_sizing . minimum_height = 0 new_sizing . maximum_height = 0 axes = self . __axes if axes and axes . is_valid : if axes . x_calibration and axes . x_calibration . units : new_sizing . minimum_height = self . font_size + 4 new_sizing . maximum_height ...
Size the canvas item to the proper height .
4,533
def size_to_content ( self , get_font_metrics_fn ) : new_sizing = self . copy_sizing ( ) new_sizing . minimum_width = 0 new_sizing . maximum_width = 0 axes = self . __axes if axes and axes . is_valid : font = "{0:d}px" . format ( self . font_size ) max_width = 0 y_range = axes . calibrated_data_max - axes . calibrated_...
Size the canvas item to the proper width the maximum of any label .
4,534
def size_to_content ( self ) : new_sizing = self . copy_sizing ( ) new_sizing . minimum_width = 0 new_sizing . maximum_width = 0 axes = self . __axes if axes and axes . is_valid : if axes . y_calibration and axes . y_calibration . units : new_sizing . minimum_width = self . font_size + 4 new_sizing . maximum_width = se...
Size the canvas item to the proper width .
4,535
def get_snippet_content ( snippet_name , ** format_kwargs ) : filename = snippet_name + '.snippet' snippet_file = os . path . join ( SNIPPETS_ROOT , filename ) if not os . path . isfile ( snippet_file ) : raise ValueError ( 'could not find snippet with name ' + filename ) ret = helpers . get_file_content ( snippet_file...
Load the content from a snippet file which exists in SNIPPETS_ROOT
4,536
def update_display_properties ( self , display_calibration_info , display_properties : typing . Mapping , display_layers : typing . Sequence [ typing . Mapping ] ) -> None : with self . __closing_lock : if self . __closed : return displayed_dimensional_scales = display_calibration_info . displayed_dimensional_scales di...
Update the display values . Called from display panel .
4,537
def __view_to_selected_graphics ( self , data_and_metadata : DataAndMetadata . DataAndMetadata ) -> None : all_graphics = self . __graphics graphics = [ graphic for graphic_index , graphic in enumerate ( all_graphics ) if self . __graphic_selection . contains ( graphic_index ) ] intervals = list ( ) for graphic in grap...
Change the view to encompass the selected graphic intervals .
4,538
def __update_cursor_info ( self ) : if not self . delegate : return if self . __mouse_in and self . __last_mouse : pos_1d = None axes = self . __axes line_graph_canvas_item = self . line_graph_canvas_item if axes and axes . is_valid and line_graph_canvas_item : mouse = self . map_to_canvas_item ( self . __last_mouse , ...
Map the mouse to the 1 - d position within the line graph .
4,539
def find_model_patch_tracks ( self ) : self . model_grid . load_data ( ) tracked_model_objects = [ ] model_objects = [ ] if self . model_grid . data is None : print ( "No model output found" ) return tracked_model_objects min_orig = self . model_ew . min_thresh max_orig = self . model_ew . max_thresh data_increment_ori...
Identify storms in gridded model output and extract uniform sized patches around the storm centers of mass .
4,540
def find_mrms_tracks ( self ) : obs_objects = [ ] tracked_obs_objects = [ ] if self . mrms_ew is not None : self . mrms_grid . load_data ( ) if len ( self . mrms_grid . data ) != len ( self . hours ) : print ( 'Less than 24 hours of observation data found' ) return tracked_obs_objects for h , hour in enumerate ( self ....
Identify objects from MRMS timesteps and link them together with object matching .
4,541
def match_tracks ( self , model_tracks , obs_tracks , unique_matches = True , closest_matches = False ) : if unique_matches : pairings = self . track_matcher . match_tracks ( model_tracks , obs_tracks , closest_matches = closest_matches ) else : pairings = self . track_matcher . neighbor_matches ( model_tracks , obs_tr...
Match forecast and observed tracks .
4,542
def match_hail_sizes ( model_tracks , obs_tracks , track_pairings ) : unpaired = list ( range ( len ( model_tracks ) ) ) for p , pair in enumerate ( track_pairings ) : model_track = model_tracks [ pair [ 0 ] ] unpaired . remove ( pair [ 0 ] ) obs_track = obs_tracks [ pair [ 1 ] ] obs_hail_sizes = np . array ( [ step [ ...
Given forecast and observed track pairings maximum hail sizes are associated with each paired forecast storm track timestep . If the duration of the forecast and observed tracks differ then interpolation is used for the intermediate timesteps .
4,543
def calc_track_errors ( model_tracks , obs_tracks , track_pairings ) : columns = [ 'obs_track_id' , 'translation_error_x' , 'translation_error_y' , 'start_time_difference' , 'end_time_difference' , ] track_errors = pd . DataFrame ( index = list ( range ( len ( model_tracks ) ) ) , columns = columns ) for p , pair in en...
Calculates spatial and temporal translation errors between matched forecast and observed tracks .
4,544
def __display_for_tree_node ( self , tree_node ) : keys = tree_node . keys if len ( keys ) == 1 : return "{0} ({1})" . format ( tree_node . keys [ - 1 ] , tree_node . count ) elif len ( keys ) == 2 : months = ( _ ( "January" ) , _ ( "February" ) , _ ( "March" ) , _ ( "April" ) , _ ( "May" ) , _ ( "June" ) , _ ( "July" ...
Return the text display for the given tree node . Based on number of keys associated with tree node .
4,545
def __insert_child ( self , parent_tree_node , index , tree_node ) : parent_item = self . __mapping [ id ( parent_tree_node ) ] self . item_model_controller . begin_insert ( index , index , parent_item . row , parent_item . id ) properties = { "display" : self . __display_for_tree_node ( tree_node ) , "tree_node" : tre...
Called from the root tree node when a new node is inserted into tree . This method creates properties to represent the node for display and inserts it into the item model controller .
4,546
def __remove_child ( self , parent_tree_node , index ) : parent_item = self . __mapping [ id ( parent_tree_node ) ] self . item_model_controller . begin_remove ( index , index , parent_item . row , parent_item . id ) child_item = parent_item . children [ index ] parent_item . remove_child ( child_item ) self . __mappin...
Called from the root tree node when a node is removed from the tree . This method removes it from the item model controller .
4,547
def update_all_nodes ( self ) : item_model_controller = self . item_model_controller if item_model_controller : if self . __node_counts_dirty : for item in self . __mapping . values ( ) : if "tree_node" in item . data : tree_node = item . data [ "tree_node" ] item . data [ "display" ] = self . __display_for_tree_node (...
Update all tree item displays if needed . Usually for count updates .
4,548
def date_browser_selection_changed ( self , selected_indexes ) : partial_date_filters = list ( ) for index , parent_row , parent_id in selected_indexes : item_model_controller = self . item_model_controller tree_node = item_model_controller . item_value ( "tree_node" , index , parent_id ) partial_date_filters . append ...
Called to handle selection changes in the tree widget .
4,549
def text_filter_changed ( self , text ) : text = text . strip ( ) if text else None if text is not None : self . __text_filter = ListModel . TextFilter ( "text_for_filter" , text ) else : self . __text_filter = None self . __update_filter ( )
Called to handle changes to the text filter .
4,550
def __update_filter ( self ) : filters = list ( ) if self . __date_filter : filters . append ( self . __date_filter ) if self . __text_filter : filters . append ( self . __text_filter ) self . document_controller . display_filter = ListModel . AndFilter ( filters )
Create a combined filter . Set the resulting filter into the document controller .
4,551
def __get_keys ( self ) : keys = list ( ) tree_node = self while tree_node is not None and tree_node . key is not None : keys . insert ( 0 , tree_node . key ) tree_node = tree_node . parent return keys
Return the keys associated with this node by adding its key and then adding parent keys recursively .
4,552
def label_storm_objects ( data , method , min_intensity , max_intensity , min_area = 1 , max_area = 100 , max_range = 1 , increment = 1 , gaussian_sd = 0 ) : if method . lower ( ) in [ "ew" , "watershed" ] : labeler = EnhancedWatershed ( min_intensity , increment , max_intensity , max_area , max_range ) else : labeler ...
From a 2D grid or time series of 2D grids this method labels storm objects with either the Enhanced Watershed or Hysteresis methods .
4,553
def extract_storm_objects ( label_grid , data , x_grid , y_grid , times , dx = 1 , dt = 1 , obj_buffer = 0 ) : storm_objects = [ ] if len ( label_grid . shape ) == 3 : ij_grid = np . indices ( label_grid . shape [ 1 : ] ) for t , time in enumerate ( times ) : storm_objects . append ( [ ] ) object_slices = list ( find_o...
After storms are labeled this method extracts the storm objects from the grid and places them into STObjects . The STObjects contain intensity location and shape information about each storm at each timestep .
4,554
def extract_storm_patches ( label_grid , data , x_grid , y_grid , times , dx = 1 , dt = 1 , patch_radius = 16 ) : storm_objects = [ ] if len ( label_grid . shape ) == 3 : ij_grid = np . indices ( label_grid . shape [ 1 : ] ) for t , time in enumerate ( times ) : storm_objects . append ( [ ] ) centers = list ( center_of...
After storms are labeled this method extracts boxes of equal size centered on each storm from the grid and places them into STObjects . The STObjects contain intensity location and shape information about each storm at each timestep .
4,555
def track_storms ( storm_objects , times , distance_components , distance_maxima , distance_weights , tracked_objects = None ) : obj_matcher = ObjectMatcher ( distance_components , distance_weights , distance_maxima ) if tracked_objects is None : tracked_objects = [ ] for t , time in enumerate ( times ) : past_time_obj...
Given the output of extract_storm_objects this method tracks storms through time and merges individual STObjects into a set of tracks .
4,556
def centroid_distance ( item_a , time_a , item_b , time_b , max_value ) : ax , ay = item_a . center_of_mass ( time_a ) bx , by = item_b . center_of_mass ( time_b ) return np . minimum ( np . sqrt ( ( ax - bx ) ** 2 + ( ay - by ) ** 2 ) , max_value ) / float ( max_value )
Euclidean distance between the centroids of item_a and item_b .
4,557
def shifted_centroid_distance ( item_a , time_a , item_b , time_b , max_value ) : ax , ay = item_a . center_of_mass ( time_a ) bx , by = item_b . center_of_mass ( time_b ) if time_a < time_b : bx = bx - item_b . u by = by - item_b . v else : ax = ax - item_a . u ay = ay - item_a . v return np . minimum ( np . sqrt ( ( ...
Centroid distance with motion corrections .
4,558
def closest_distance ( item_a , time_a , item_b , time_b , max_value ) : return np . minimum ( item_a . closest_distance ( time_a , item_b , time_b ) , max_value ) / float ( max_value )
Euclidean distance between the pixels in item_a and item_b closest to each other .
4,559
def ellipse_distance ( item_a , time_a , item_b , time_b , max_value ) : ts = np . array ( [ 0 , np . pi ] ) ell_a = item_a . get_ellipse_model ( time_a ) ell_b = item_b . get_ellipse_model ( time_b ) ends_a = ell_a . predict_xy ( ts ) ends_b = ell_b . predict_xy ( ts ) distances = np . sqrt ( ( ends_a [ : , 0 : 1 ] - ...
Calculate differences in the properties of ellipses fitted to each object .
4,560
def nonoverlap ( item_a , time_a , item_b , time_b , max_value ) : return np . minimum ( 1 - item_a . count_overlap ( time_a , item_b , time_b ) , max_value ) / float ( max_value )
Percentage of pixels in each object that do not overlap with the other object
4,561
def max_intensity ( item_a , time_a , item_b , time_b , max_value ) : intensity_a = item_a . max_intensity ( time_a ) intensity_b = item_b . max_intensity ( time_b ) diff = np . sqrt ( ( intensity_a - intensity_b ) ** 2 ) return np . minimum ( diff , max_value ) / float ( max_value )
RMS difference in maximum intensity
4,562
def area_difference ( item_a , time_a , item_b , time_b , max_value ) : size_a = item_a . size ( time_a ) size_b = item_b . size ( time_b ) diff = np . sqrt ( ( size_a - size_b ) ** 2 ) return np . minimum ( diff , max_value ) / float ( max_value )
RMS Difference in object areas .
4,563
def mean_minimum_centroid_distance ( item_a , item_b , max_value ) : centroids_a = np . array ( [ item_a . center_of_mass ( t ) for t in item_a . times ] ) centroids_b = np . array ( [ item_b . center_of_mass ( t ) for t in item_b . times ] ) distance_matrix = ( centroids_a [ : , 0 : 1 ] - centroids_b . T [ 0 : 1 ] ) *...
RMS difference in the minimum distances from the centroids of one track to the centroids of another track
4,564
def mean_min_time_distance ( item_a , item_b , max_value ) : times_a = item_a . times . reshape ( ( item_a . times . size , 1 ) ) times_b = item_b . times . reshape ( ( 1 , item_b . times . size ) ) distance_matrix = ( times_a - times_b ) ** 2 mean_min_distances = np . sqrt ( distance_matrix . min ( axis = 0 ) . mean (...
Calculate the mean time difference among the time steps in each object .
4,565
def start_centroid_distance ( item_a , item_b , max_value ) : start_a = item_a . center_of_mass ( item_a . times [ 0 ] ) start_b = item_b . center_of_mass ( item_b . times [ 0 ] ) start_distance = np . sqrt ( ( start_a [ 0 ] - start_b [ 0 ] ) ** 2 + ( start_a [ 1 ] - start_b [ 1 ] ) ** 2 ) return np . minimum ( start_d...
Distance between the centroids of the first step in each object .
4,566
def start_time_distance ( item_a , item_b , max_value ) : start_time_diff = np . abs ( item_a . times [ 0 ] - item_b . times [ 0 ] ) return np . minimum ( start_time_diff , max_value ) / float ( max_value )
Absolute difference between the starting times of each item .
4,567
def duration_distance ( item_a , item_b , max_value ) : duration_a = item_a . times . size duration_b = item_b . times . size return np . minimum ( np . abs ( duration_a - duration_b ) , max_value ) / float ( max_value )
Absolute difference in the duration of two items
4,568
def mean_area_distance ( item_a , item_b , max_value ) : mean_area_a = np . mean ( [ item_a . size ( t ) for t in item_a . times ] ) mean_area_b = np . mean ( [ item_b . size ( t ) for t in item_b . times ] ) return np . abs ( mean_area_a - mean_area_b ) / float ( max_value )
Absolute difference in the means of the areas of each track over time .
4,569
def match_objects ( self , set_a , set_b , time_a , time_b ) : costs = self . cost_matrix ( set_a , set_b , time_a , time_b ) * 100 min_row_costs = costs . min ( axis = 1 ) min_col_costs = costs . min ( axis = 0 ) good_rows = np . where ( min_row_costs < 100 ) [ 0 ] good_cols = np . where ( min_col_costs < 100 ) [ 0 ] ...
Match two sets of objects at particular times .
4,570
def total_cost_function ( self , item_a , item_b , time_a , time_b ) : distances = np . zeros ( len ( self . weights ) ) for c , component in enumerate ( self . cost_function_components ) : distances [ c ] = component ( item_a , time_a , item_b , time_b , self . max_values [ c ] ) total_distance = np . sum ( self . wei...
Calculate total cost function between two items .
4,571
def variable_specifier ( self ) -> dict : if self . value_type is not None : return { "type" : "variable" , "version" : 1 , "uuid" : str ( self . uuid ) , "x-name" : self . name , "x-value" : self . value } else : return self . specifier
Return the variable specifier for this variable .
4,572
def bound_variable ( self ) : class BoundVariable : def __init__ ( self , variable ) : self . __variable = variable self . changed_event = Event . Event ( ) self . needs_rebind_event = Event . Event ( ) def property_changed ( key ) : if key == "value" : self . changed_event . fire ( ) self . __variable_property_changed...
Return an object with a value property and a changed_event .
4,573
def resolve_object_specifier ( self , object_specifier , secondary_specifier = None , property_name = None , objects_model = None ) : variable = self . __computation ( ) . resolve_variable ( object_specifier ) if not variable : return self . __context . resolve_object_specifier ( object_specifier , secondary_specifier ...
Resolve the object specifier .
4,574
def parse_names ( cls , expression ) : names = set ( ) try : ast_node = ast . parse ( expression , "ast" ) class Visitor ( ast . NodeVisitor ) : def visit_Name ( self , node ) : names . add ( node . id ) Visitor ( ) . visit ( ast_node ) except Exception : pass return names
Return the list of identifiers used in the expression .
4,575
def bind ( self , context ) -> None : self . __computation_context = ComputationContext ( self , context ) for variable in self . variables : assert variable . bound_item is None for result in self . results : assert result . bound_item is None for variable in self . variables : self . __bind_variable ( variable ) for ...
Bind a context to this computation .
4,576
def unbind ( self ) : for variable in self . variables : self . __unbind_variable ( variable ) for result in self . results : self . __unbind_result ( result )
Unlisten and close each bound item .
4,577
def sort_by_date_key ( data_item ) : return data_item . title + str ( data_item . uuid ) if data_item . is_live else str ( ) , data_item . date_for_sorting , str ( data_item . uuid )
A sort key for the created field of a data item . The sort by uuid makes it deterministic .
4,578
def set_r_value ( self , r_var : str , * , notify_changed = True ) -> None : self . r_var = r_var self . _description_changed ( ) if notify_changed : self . __notify_description_changed ( )
Used to signal changes to the ref var which are kept in document controller . ugh .
4,579
def set_data_and_metadata ( self , data_and_metadata , data_modified = None ) : self . increment_data_ref_count ( ) try : if data_and_metadata : data = data_and_metadata . data data_shape_and_dtype = data_and_metadata . data_shape_and_dtype intensity_calibration = data_and_metadata . intensity_calibration dimensional_c...
Sets the underlying data and data - metadata to the data_and_metadata .
4,580
def get_calculated_display_values ( self , immediate : bool = False ) -> DisplayValues : if not immediate or not self . __is_master or not self . __last_display_values : if not self . __current_display_values and self . __data_item : self . __current_display_values = DisplayValues ( self . __data_item . xdata , self . ...
Return the display values .
4,581
def auto_display_limits ( self ) : display_data_and_metadata = self . get_calculated_display_values ( True ) . display_data_and_metadata data = display_data_and_metadata . data if display_data_and_metadata else None if data is not None : mn , mx = numpy . nanmin ( data ) , numpy . nanmax ( data ) self . display_limits ...
Calculate best display limits and set them .
4,582
def remove_graphic ( self , graphic : Graphics . Graphic , * , safe : bool = False ) -> typing . Optional [ typing . Sequence ] : return self . remove_model_item ( self , "graphics" , graphic , safe = safe )
Remove a graphic but do it through the container so dependencies can be tracked .
4,583
def dimensional_shape ( self ) -> typing . Optional [ typing . Tuple [ int , ... ] ] : if not self . __data_and_metadata : return None return self . __data_and_metadata . dimensional_shape
Shape of the underlying data if only one .
4,584
def write_local_file ( fp , name_bytes , writer , dt ) : fp . write ( struct . pack ( 'I' , 0x04034b50 ) ) fp . write ( struct . pack ( 'H' , 10 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) msdos_date = int ( dt . year - 1980 ) << 9 | int ( dt . month ) << 5 | int ( dt . day ) m...
Writes a zip file local file header structure at the current file position .
4,585
def write_directory_data ( fp , offset , name_bytes , data_len , crc32 , dt ) : fp . write ( struct . pack ( 'I' , 0x02014b50 ) ) fp . write ( struct . pack ( 'H' , 10 ) ) fp . write ( struct . pack ( 'H' , 10 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) msdos_date = int ( dt . ...
Write a zip file directory entry at the current file position
4,586
def write_end_of_directory ( fp , dir_size , dir_offset , count ) : fp . write ( struct . pack ( 'I' , 0x06054b50 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , 0 ) ) fp . write ( struct . pack ( 'H' , count ) ) fp . write ( struct . pack ( 'H' , count ) ) fp . write ( struct . pack ( '...
Write zip file end of directory header at the current file position
4,587
def write_zip_fp ( fp , data , properties , dir_data_list = None ) : assert data is not None or properties is not None dir_data_list = list ( ) if dir_data_list is None else dir_data_list dt = datetime . datetime . now ( ) if data is not None : offset_data = fp . tell ( ) def write_data ( fp ) : numpy_start_pos = fp . ...
Write custom zip file of data and properties to fp
4,588
def write_zip(file_path, data, properties):
    """Write the custom ndata zip container holding data/properties to file_path.

    Opens (creating or truncating) the file in binary read/write mode and
    hands the stream to write_zip_fp, which does the actual serialization.
    """
    fp = open(file_path, "w+b")
    try:
        write_zip_fp(fp, data, properties)
    finally:
        fp.close()
Write custom zip file to the file path
4,589
def parse_zip ( fp ) : local_files = { } dir_files = { } eocd = None fp . seek ( 0 ) while True : pos = fp . tell ( ) signature = struct . unpack ( 'I' , fp . read ( 4 ) ) [ 0 ] if signature == 0x04034b50 : fp . seek ( pos + 14 ) crc32 = struct . unpack ( 'I' , fp . read ( 4 ) ) [ 0 ] fp . seek ( pos + 18 ) data_len = ...
Parse the zip file headers at fp
4,590
def read_data(fp, local_files, dir_files, name_bytes):
    """Read a numpy array stored under name_bytes from a parsed ndata zip.

    dir_files maps the entry name to a tuple whose index 1 is the key into
    local_files; local_files maps that key to a tuple whose index 1 is the
    absolute file offset of the array payload. Returns None when the entry
    is not present in the directory.
    """
    if name_bytes not in dir_files:
        return None
    local_header_key = dir_files[name_bytes][1]
    payload_offset = local_files[local_header_key][1]
    fp.seek(payload_offset)
    return numpy.load(fp)
Read a numpy data array from the zip file
4,591
def read_json ( fp , local_files , dir_files , name_bytes ) : if name_bytes in dir_files : json_pos = local_files [ dir_files [ name_bytes ] [ 1 ] ] [ 1 ] json_len = local_files [ dir_files [ name_bytes ] [ 1 ] ] [ 2 ] fp . seek ( json_pos ) json_properties = fp . read ( json_len ) return json . loads ( json_properties...
Read json properties from the zip file
4,592
def rewrite_zip ( file_path , properties ) : with open ( file_path , "r+b" ) as fp : local_files , dir_files , eocd = parse_zip ( fp ) if len ( dir_files ) == 2 and b"data.npy" in dir_files and b"metadata.json" in dir_files and dir_files [ b"data.npy" ] [ 1 ] == 0 : fp . seek ( dir_files [ b"metadata.json" ] [ 1 ] ) di...
Rewrite the json properties in the zip file
4,593
def is_matching ( cls , file_path ) : if file_path . endswith ( ".ndata" ) and os . path . exists ( file_path ) : try : with open ( file_path , "r+b" ) as fp : local_files , dir_files , eocd = parse_zip ( fp ) contains_data = b"data.npy" in dir_files contains_metadata = b"metadata.json" in dir_files file_count = contai...
Return whether the given absolute file path is an ndata file .
4,594
def write_data ( self , data , file_datetime ) : with self . __lock : assert data is not None absolute_file_path = self . __file_path make_directory_if_needed ( os . path . dirname ( absolute_file_path ) ) properties = self . read_properties ( ) if os . path . exists ( absolute_file_path ) else dict ( ) write_zip ( abs...
Write data to the ndata file specified by reference .
4,595
def write_properties ( self , properties , file_datetime ) : with self . __lock : absolute_file_path = self . __file_path make_directory_if_needed ( os . path . dirname ( absolute_file_path ) ) exists = os . path . exists ( absolute_file_path ) if exists : rewrite_zip ( absolute_file_path , Utility . clean_dict ( prope...
Write properties to the ndata file specified by reference .
4,596
def read_properties(self):
    """Parse the ndata file and return its JSON metadata properties.

    Holds the instance lock while reading; the metadata is the
    "metadata.json" entry of the zip container at self.__file_path.
    """
    with self.__lock:
        path = self.__file_path
        with open(path, "rb") as fp:
            local_files, dir_files, _eocd = parse_zip(fp)
            return read_json(fp, local_files, dir_files, b"metadata.json")
Read properties from the ndata file reference
4,597
def read_data(self):
    """Read the numpy data array from the ndata file.

    Holds the instance lock while reading. Returns the array stored as the
    "data.npy" entry, or None if the archive contains no such entry (the
    module-level read_data helper already returns None in that case, so the
    original trailing `return None` was unreachable and has been removed).
    """
    with self.__lock:
        absolute_file_path = self.__file_path
        with open(absolute_file_path, "rb") as fp:
            local_files, dir_files, _eocd = parse_zip(fp)
            return read_data(fp, local_files, dir_files, b"data.npy")
Read data from the ndata file reference
4,598
def remove(self):
    """Delete the backing ndata file from disk, if it exists.

    Holds the instance lock for the check-and-delete; a missing file is
    not an error (the isfile guard makes the call idempotent).
    """
    with self.__lock:
        path = self.__file_path
        if os.path.isfile(path):
            os.remove(path)
Remove the ndata file reference
4,599
def build_menu ( self , display_type_menu , document_controller , display_panel ) : dynamic_live_actions = list ( ) def switch_to_display_content ( display_panel_type ) : self . switch_to_display_content ( document_controller , display_panel , display_panel_type , display_panel . display_item ) empty_action = display_t...
Build the dynamic menu for the selected display panel .