idx
int64 0
63k
| question
stringlengths 53
5.28k
| target
stringlengths 5
805
|
|---|---|---|
4,600
|
def _set_subset_indices(self, y_min, y_max, x_min, x_max):
    """Compute and store index slices of the LSM grid covering the given extent.

    Finds the grid cells whose coordinates fall within the requested
    bounds, padded by two grid cells on each side, and stores the result
    as ``self.xslice`` / ``self.yslice`` for later subsetting.
    """
    y_coords, x_coords = self.xd.lsm.coords
    dx = self.xd.lsm.dx
    dy = self.xd.lsm.dy
    # pad the extent by two cells in each direction so data near the
    # boundary is still available for interpolation/resampling
    lsm_y_indices_from_y, lsm_x_indices_from_y = np.where((y_coords >= (y_min - 2 * dy)) & (y_coords <= (y_max + 2 * dy)))
    lsm_y_indices_from_x, lsm_x_indices_from_x = np.where((x_coords >= (x_min - 2 * dx)) & (x_coords <= (x_max + 2 * dx)))
    # keep only indices satisfying both the x- and the y-constraint
    lsm_y_indices = np.intersect1d(lsm_y_indices_from_y, lsm_y_indices_from_x)
    lsm_x_indices = np.intersect1d(lsm_x_indices_from_y, lsm_x_indices_from_x)
    # slices are end-exclusive, hence the +1
    self.xslice = slice(np.amin(lsm_x_indices), np.amax(lsm_x_indices) + 1)
    self.yslice = slice(np.amin(lsm_y_indices), np.amax(lsm_y_indices) + 1)
|
load subset based on extent
|
4,601
|
def _time_to_string ( self , dt , conversion_string = "%Y %m %d %H %M" ) : if self . output_timezone is not None : dt = dt . replace ( tzinfo = utc ) . astimezone ( self . output_timezone ) return dt . strftime ( conversion_string )
|
This converts a UTC time integer to a string
|
4,602
|
def _load_lsm_data(self, data_var, conversion_factor=1, calc_4d_method=None, calc_4d_dim=None, time_step=None):
    """Extract an LSM variable subset from the loaded dataset.

    Optionally selects a single time step, fills NaNs with 0 and applies a
    unit conversion factor in place. Returns the resulting DataArray.
    """
    data = self.xd.lsm.getvar(data_var,
                              yslice=self.yslice,
                              xslice=self.xslice,
                              calc_4d_method=calc_4d_method,
                              calc_4d_dim=calc_4d_dim)
    if isinstance(time_step, datetime):
        # a datetime selects by label on the time dimension
        data = data.loc[{self.lsm_time_dim: [pd.to_datetime(time_step)]}]
    elif time_step is not None:
        # anything else is treated as a positional index
        data = data[{self.lsm_time_dim: [time_step]}]
    data = data.fillna(0)
    data.values *= conversion_factor
    return data
|
This extracts the LSM data from a folder of netcdf files
|
4,603
|
def _check_lsm_input ( self , data_var_map_array ) : REQUIRED_HMET_VAR_LIST = [ 'Prcp' , 'Pres' , 'Temp' , 'Clod' , 'RlHm' , 'Drad' , 'Grad' , 'WndS' ] given_hmet_var_list = [ ] for gssha_data_var , lsm_data_var in data_var_map_array : gssha_data_hmet_name = self . netcdf_attributes [ gssha_data_var ] [ 'hmet_name' ] if gssha_data_hmet_name in given_hmet_var_list : raise ValueError ( "Duplicate parameter for HMET variable {0}" . format ( gssha_data_hmet_name ) ) else : given_hmet_var_list . append ( gssha_data_hmet_name ) for REQUIRED_HMET_VAR in REQUIRED_HMET_VAR_LIST : if REQUIRED_HMET_VAR not in given_hmet_var_list : raise ValueError ( "ERROR: HMET param is required to continue " "{0} ..." . format ( REQUIRED_HMET_VAR ) )
|
This function checks the input var map array to ensure the required input variables exist
|
4,604
|
def _resample_data(self, gssha_var):
    """Resample ``self.data`` for *gssha_var* onto the GSSHA grid (in place)."""
    self.data = self.data.lsm.resample(gssha_var, self.gssha_grid)
|
This function resamples the data to match the GSSHA grid IN TESTING MODE
|
4,605
|
def _convert_data_to_hourly(self, gssha_data_var):
    """Resample ``self.data`` for *gssha_data_var* to an hourly time step.

    Sub-hourly data is aggregated with the variable's calculation method;
    coarser-than-hourly data is upsampled and filled by linear
    interpolation (or a mean split for 'mean' variables). Used when
    generating HMET data.

    NOTE(review): the original source was flattened to one line, so the
    exact nesting of the interpolation loop is reconstructed here
    (inside the ``time_step_hours > 1`` branch) — confirm against upstream.
    """
    # length of the native time step, in hours
    time_step_hours = np.diff(self.data.time)[0] / np.timedelta64(1, 'h')
    calc_function = self._get_calc_function(gssha_data_var)
    resampled_data = None
    if time_step_hours < 1:
        # finer than hourly: aggregate down to 1H
        resampled_data = self.data.resample('1H', dim='time', how=calc_function, keep_attrs=True)
    elif time_step_hours > 1:
        # coarser than hourly: upsample, then fill the gaps below
        resampled_data = self.data.resample('1H', dim='time', keep_attrs=True)
        for time_idx in range(self.data.dims['time']):
            if time_idx + 1 < self.data.dims['time']:
                # interior interval: linearly interpolate between the two
                # original time steps
                start_time = self.data.time[time_idx].values
                end_time = self.data.time[time_idx + 1].values
                slope_timeslice = slice(str(start_time), str(end_time))
                slice_size = resampled_data.sel(time=slope_timeslice).dims['time'] - 1
                first_timestep = resampled_data.sel(time=str(start_time))[gssha_data_var]
                slope = (resampled_data.sel(time=str(end_time))[gssha_data_var] - first_timestep) / float(slice_size)
                # shift the slice by one minute so the endpoints are excluded
                data_timeslice = slice(str(start_time + np.timedelta64(1, 'm')), str(end_time - np.timedelta64(1, 'm')))
                data_subset = resampled_data.sel(time=data_timeslice)
                for xidx in range(data_subset.dims['time']):
                    data_subset[gssha_data_var][xidx] = first_timestep + slope * (xidx + 1)
            else:
                # trailing interval past the last original time step
                start_time = self.data.time[time_idx].values
                end_time = resampled_data.time[-1].values
                if end_time > start_time:
                    first_timestep = resampled_data.sel(time=str(start_time))[gssha_data_var]
                    data_timeslice = slice(str(start_time), str(end_time))
                    data_subset = resampled_data.sel(time=data_timeslice)
                    slice_size = 1
                    if calc_function == "mean":
                        # spread the value evenly over the filled steps
                        slice_size = data_subset.dims['time']
                    for xidx in range(data_subset.dims['time']):
                        data_subset[gssha_data_var][xidx] = first_timestep / float(slice_size)
    if resampled_data is not None:
        # resample can drop non-dimension coordinates; restore them
        if self.data.lsm.x_var not in resampled_data.coords:
            resampled_data.coords[self.data.lsm.x_var] = self.data.coords[self.data.lsm.x_var]
        if self.data.lsm.y_var not in resampled_data.coords:
            resampled_data.coords[self.data.lsm.y_var] = self.data.coords[self.data.lsm.y_var]
        self.data = resampled_data
|
This function converts the data to hourly data and then puts it into the data_np_array USED WHEN GENERATING HMET DATA ONLY
|
4,606
|
def lsm_var_to_grid(self, out_grid_file, lsm_data_var, gssha_convert_var, time_step=0, ascii_format='grass'):
    """Convert one LSM variable at one time step to a GSSHA ASCII grid file.

    Args:
        out_grid_file: path of the ASCII grid to write.
        lsm_data_var: name of the variable in the LSM dataset.
        gssha_convert_var: GSSHA variable key used for conversion metadata.
        time_step: time index to extract (default 0).
        ascii_format: 'grass' or 'arc' output flavour.

    Raises:
        ValueError: if ascii_format is not 'grass' or 'arc'.
    """
    self._load_converted_gssha_data_from_lsm(gssha_convert_var, lsm_data_var, 'grid', time_step)
    gssha_data_var_name = self.netcdf_attributes[gssha_convert_var]['gssha_name']
    # reproject onto the GSSHA grid's projection, then match its resolution
    self.data = self.data.lsm.to_projection(gssha_data_var_name, projection=self.gssha_grid.projection)
    self._resample_data(gssha_data_var_name)
    arr_grid = ArrayGrid(in_array=self.data[gssha_data_var_name].values,
                         wkt_projection=self.data.lsm.projection.ExportToWkt(),
                         geotransform=self.data.lsm.geotransform)
    if ascii_format.strip().lower() == 'grass':
        arr_grid.to_grass_ascii(out_grid_file)
    elif ascii_format.strip().lower() == 'arc':
        arr_grid.to_arc_ascii(out_grid_file)
    else:
        raise ValueError("Invalid argument for 'ascii_format'. Only 'grass' or 'arc' allowed.")
|
This function takes array data and writes out a GSSHA ascii grid .
|
4,607
|
def _write_hmet_card_file(self, hmet_card_file_path, main_output_folder):
    """Write the HMET_ASCII card file for GSSHA.

    One line per hourly timestamp in the data, each pointing at the
    corresponding ASCII data path inside *main_output_folder*.
    """
    with io_open(hmet_card_file_path, 'w') as out_hmet_list_file:
        for hour_time in self.data.lsm.datetime:
            date_str = self._time_to_string(hour_time, "%Y%m%d%H")
            out_hmet_list_file.write(u"{0}\n".format(path.join(main_output_folder, date_str)))
|
This function writes the HMET_ASCII card file with ASCII file list for input to GSSHA
|
4,608
|
def lsm_data_to_arc_ascii(self, data_var_map_array, main_output_folder=""):
    """Write extracted LSM data as hourly Arc ASCII grids for GSSHA.

    One .asc file is written per hour per HMET variable, and an
    ``hmet_file_list.txt`` card file listing them is generated in the
    output folder.
    """
    self._check_lsm_input(data_var_map_array)
    if not main_output_folder:
        main_output_folder = path.join(self.gssha_project_folder, "hmet_ascii_data")
    try:
        mkdir(main_output_folder)
    except OSError:
        # output folder already exists
        pass
    log.info("Outputting HMET data to {0}".format(main_output_folder))
    for data_var_map in data_var_map_array:
        gssha_data_var, lsm_data_var = data_var_map
        gssha_data_hmet_name = self.netcdf_attributes[gssha_data_var]['hmet_name']
        gssha_data_var_name = self.netcdf_attributes[gssha_data_var]['gssha_name']
        self._load_converted_gssha_data_from_lsm(gssha_data_var, lsm_data_var, 'ascii')
        self._convert_data_to_hourly(gssha_data_var_name)
        self.data = self.data.lsm.to_projection(gssha_data_var_name, projection=self.gssha_grid.projection)
        for time_idx in range(self.data.dims['time']):
            arr_grid = ArrayGrid(in_array=self.data[gssha_data_var_name][time_idx].values,
                                 wkt_projection=self.data.lsm.projection.ExportToWkt(),
                                 geotransform=self.data.lsm.geotransform,
                                 nodata_value=-9999)
            date_str = self._time_to_string(self.data.lsm.datetime[time_idx], "%Y%m%d%H")
            ascii_file_path = path.join(main_output_folder, "{0}_{1}.asc".format(date_str, gssha_data_hmet_name))
            arr_grid.to_arc_ascii(ascii_file_path)
    # write the card file GSSHA uses to locate the hourly ASCII files
    hmet_card_file_path = path.join(main_output_folder, 'hmet_file_list.txt')
    self._write_hmet_card_file(hmet_card_file_path, main_output_folder)
|
Writes extracted data to Arc ASCII file format into folder to be read in by GSSHA . Also generates the HMET_ASCII card file for GSSHA in the folder named hmet_file_list . txt .
|
4,609
|
def lsm_data_to_subset_netcdf(self, netcdf_file_path, data_var_map_array, resample_method=None):
    """Write the extracted LSM data subset to a single NetCDF file.

    Each mapped variable is converted to hourly data, reprojected (or
    resampled when *resample_method* is truthy), merged into one dataset
    and written to *netcdf_file_path*.

    Raises:
        ValueError: if a GSSHA variable name is not recognised.
    """
    self._check_lsm_input(data_var_map_array)
    output_datasets = []
    for gssha_var, lsm_var in data_var_map_array:
        if gssha_var in self.netcdf_attributes:
            self._load_converted_gssha_data_from_lsm(gssha_var, lsm_var, 'netcdf')
            gssha_data_var_name = self.netcdf_attributes[gssha_var]['gssha_name']
            self._convert_data_to_hourly(gssha_data_var_name)
            if resample_method:
                self._resample_data(gssha_data_var_name)
            else:
                self.data = self.data.lsm.to_projection(gssha_data_var_name, projection=self.gssha_grid.projection)
            output_datasets.append(self.data)
        else:
            raise ValueError("Invalid GSSHA variable name: {0} ...".format(gssha_var))
    output_dataset = xr.merge(output_datasets)
    # NOTE(review): the CF attribute is conventionally spelled
    # 'Conventions' (plural) — confirm whether 'Convention' is intended
    output_dataset.attrs['Convention'] = 'CF-1.6'
    output_dataset.attrs['title'] = 'GSSHA LSM Input'
    output_dataset.attrs['history'] = 'date_created: {0}'.format(datetime.utcnow())
    output_dataset.attrs['proj4'] = self.data.attrs['proj4']
    output_dataset.attrs['geotransform'] = self.data.attrs['geotransform']
    output_dataset.to_netcdf(netcdf_file_path)
|
Writes extracted data to the NetCDF file format
|
4,610
|
def export(self, **kwargs):
    """Create a CSV export from a query of devices/attributes over a time range.

    Recognised kwargs: applicationId (path), query (body), losantdomain
    (header), and the _actions/_links/_embedded query flags.
    Returns the API client's response to the POST request.
    """
    query_params = {"_actions": "false", "_links": "true", "_embedded": "true"}
    path_params = {}
    headers = {}
    body = None
    if "applicationId" in kwargs:
        path_params["applicationId"] = kwargs["applicationId"]
    if "query" in kwargs:
        body = kwargs["query"]
    if "losantdomain" in kwargs:
        headers["losantdomain"] = kwargs["losantdomain"]
    # caller-supplied flags override the defaults
    for flag in ("_actions", "_links", "_embedded"):
        if flag in kwargs:
            query_params[flag] = kwargs[flag]
    path = "/applications/{applicationId}/data/export".format(**path_params)
    return self.client.request("POST", path, params=query_params, headers=headers, body=body)
|
Creates a csv file from a query of devices and attributes over a time range .
|
4,611
|
def _generate(self, message):
    """POST *message* to the MARY TTS ``/process`` endpoint and return the raw body.

    Python 2 code (``httplib``). Raises Exception on any non-200 response.
    """
    raw_params = {"INPUT_TEXT": message.encode('UTF8'),
                  "INPUT_TYPE": self.input_type,
                  "OUTPUT_TYPE": self.output_type,
                  "LOCALE": self._locale,
                  "AUDIO": self.audio,
                  "VOICE": self._voice, }
    params = urlencode(raw_params)
    headers = {}
    logging.debug('maryclient: generate, raw_params=%s' % repr(raw_params))
    conn = httplib.HTTPConnection(self._host, self._port)
    conn.request("POST", "/process", params, headers)
    response = conn.getresponse()
    if response.status != 200:
        # surface the server's headers before failing
        logging.error(response.getheaders())
        raise Exception("{0}: {1}".format(response.status, response.reason))
    return response.read()
|
Given a message in message return a response in the appropriate format .
|
4,612
|
def receive(self, msg):
    """Dispatch a queued message to the wrapped object's method.

    A TELL 'stop' message shuts the actor down. Otherwise the named
    method is invoked with the packed args/kwargs; for ASK messages the
    result (or a caught exception) is sent back on the reply channel.
    Python 2 syntax (``except ..., e`` / ``print`` statement).
    """
    if msg[TYPE] == TELL and msg[METHOD] == 'stop':
        self.running = False
        self.future_manager.stop()
    else:
        result = None
        try:
            invoke = getattr(self._obj, msg[METHOD])
            params = msg[PARAMS]
            result = invoke(*params[0], **params[1])
        except Exception, e:
            if msg[TYPE] == TELL:
                # fire-and-forget: nobody to reply to, just report and bail
                print e
                return
            # ASK: deliver the exception as the result
            result = e
        self.send_response(result, msg)
|
The message received from the queue specify a method of the class the actor represents . This invokes it . If the communication is an ASK sends the result back to the channel included in the message as an ASKRESPONSE .
|
4,613
|
def download_file(url):
    """Download the content at *url*.

    Returns:
        The response body as text, or None when the server does not
        answer with HTTP 200.
    """
    response = requests.get(url)
    # Fix: the original used `is not 200`, which compares object identity
    # and only works by accident of CPython's small-int caching; compare
    # the value instead.
    if response.status_code != 200:
        return None
    return response.text
|
Downloads a file from the specified URL .
|
4,614
|
def get_sub_dsp(self, nodes_bunch, edges_bunch=None):
    """Return the sub-dispatcher induced by the given node (and optional edge) bunches.

    Nodes whose declared inputs are not fully contained in the bunch,
    function nodes left without outputs, and isolated nodes are pruned;
    default values are restricted to the surviving nodes.
    """
    # normalise node ids through get_node
    nodes_bunch = [self.get_node(u)[1][0] for u in nodes_bunch]
    sub_dsp = self.copy_structure(dmap=self.dmap.subgraph(nodes_bunch).copy())
    # local aliases for speed/brevity
    nodes, dmap_out_degree = sub_dsp.nodes, sub_dsp.dmap.out_degree
    dmap_dv, dmap_rm_edge = self.default_values, sub_dsp.dmap.remove_edge
    dmap_rm_node = sub_dsp.dmap.remove_node
    # drop nodes whose inputs are not all inside the bunch
    for u in nodes_bunch:
        n = nodes[u].get('inputs', None)
        if n is not None and not set(n).issubset(nodes_bunch):
            dmap_rm_node(u)
    # drop the explicitly excluded edges
    if edges_bunch is not None:
        for e in edges_bunch:
            dmap_rm_edge(*e)
    # drop function nodes that no longer produce anything
    for u in [u for u, n in sub_dsp.dmap.nodes.items() if n['type'] == 'function']:
        if not dmap_out_degree(u):
            dmap_rm_node(u)
    from networkx import isolates
    sub_dsp.dmap.remove_nodes_from(list(isolates(sub_dsp.dmap)))
    # keep only default values for nodes that survived
    sub_dsp.default_values = {k: dmap_dv[k] for k in dmap_dv if k in nodes}
    return sub_dsp
|
Returns the sub - dispatcher induced by given node and edge bunches .
|
4,615
|
def data_nodes(self):
    """Return the dispatcher's data nodes, keyed by node id."""
    selected = {}
    for node_id, attrs in self.nodes.items():
        if attrs['type'] == 'data':
            selected[node_id] = attrs
    return selected
|
Returns all data nodes of the dispatcher .
|
4,616
|
def function_nodes(self):
    """Return the dispatcher's function nodes, keyed by node id."""
    return {node_id: attrs
            for node_id, attrs in self.nodes.items()
            if attrs['type'] == 'function'}
|
Returns all function nodes of the dispatcher .
|
4,617
|
def sub_dsp_nodes(self):
    """Return the dispatcher's sub-dispatcher nodes, keyed by node id."""
    all_nodes = self.nodes.items()
    return {node_id: attrs for node_id, attrs in all_nodes
            if attrs['type'] == 'dispatcher'}
|
Returns all sub - dispatcher nodes of the dispatcher .
|
4,618
|
def blue(self, memo=None):
    """Construct a BlueDispatcher (deferred-construction blueprint) from this dispatcher.

    *memo* maps already-converted dispatchers to their blueprints so
    shared/nested dispatchers are converted only once.
    """
    memo = {} if memo is None else memo
    if self in memo:
        return memo[self]
    from .utils.dsp import map_list
    from .utils.blue import BlueDispatcher, _parent_blue
    # register before recursing so cycles terminate
    memo[self] = blue = BlueDispatcher(executor=self.executor, name=self.name, raises=self.raises, description=self.__doc__)
    dfl = self.default_values
    key_map_data = ['data_id', {'value': 'default_value'}]
    pred, succ = self.dmap.pred, self.dmap.succ

    def _set_weight(n, r, d):
        # collect per-edge weights keyed by neighbour id, if any
        d = {i: j['weight'] for i, j in d.items() if 'weight' in j}
        if d:
            r[n] = d

    # replay nodes in their original insertion order
    for k, v in sorted(self.nodes.items(), key=lambda x: x[1]['index']):
        v = v.copy()
        t = v.pop('type')
        del v['index']
        if t == 'data':
            method = 'add_data'
            combine_dicts(map_list(key_map_data, k, dfl.get(k, {})), base=v)
        elif t in ('function', 'dispatcher'):
            method = 'add_%s' % t
            if t == 'dispatcher':
                t = 'dsp'
            v['%s_id' % t] = k
            del v['wait_inputs']
            _set_weight('inp_weight', v, pred[k])
            _set_weight('out_weight', v, succ[k])
            if 'function' in v:
                # recurse into nested dispatchers/functions
                v[t] = _parent_blue(v.pop('function'), memo)
        blue.deferred.append((method, v))
    return blue
|
Constructs a BlueDispatcher out of the current object .
|
4,619
|
def extend(self, *blues, memo=None):
    """Build a BlueDispatcher from the given Blueprints and register this dispatcher on it."""
    from .utils.blue import BlueDispatcher
    blueprint = BlueDispatcher().extend(*blues, memo=memo)
    return blueprint.register(self, memo=memo)
|
Extends Dispatcher calling each deferred operation of given Blueprints .
|
4,620
|
def dispatch(self, inputs=None, outputs=None, cutoff=None, inputs_dist=None,
             wildcard=False, no_call=False, shrink=False, rm_unused_nds=False,
             select_output_kw=None, _wait_in=None, stopper=None, executor=False,
             sol_name=()):
    """Evaluate the minimum workflow and data outputs from the given inputs.

    Optionally shrinks the dispatcher (or restricts it to the workflow
    reaching *outputs*) before solving. Stores the solution on
    ``self.solution`` and returns it, optionally filtered through
    ``selector`` when *select_output_kw* is given.
    """
    dsp = self
    if not no_call:
        if shrink:
            # reduce the model to what the inputs/outputs actually need
            dsp = self.shrink_dsp(inputs, outputs, cutoff, inputs_dist, wildcard)
        elif outputs:
            # restrict to the reversed workflow that can reach the outputs
            dsp = self.get_sub_dsp_from_workflow(outputs, self.dmap, reverse=True, blockers=inputs, wildcard=wildcard)
    self.solution = sol = self.solution.__class__(dsp, inputs, outputs, wildcard, cutoff, inputs_dist, no_call, rm_unused_nds, _wait_in, full_name=sol_name)
    sol._run(stopper=stopper, executor=executor)
    if select_output_kw:
        return selector(dictionary=sol, **select_output_kw)
    return sol
|
Evaluates the minimum workflow and data outputs of the dispatcher model from given inputs .
|
4,621
|
def shrink_dsp(self, inputs=None, outputs=None, cutoff=None, inputs_dist=None, wildcard=True):
    """Return a reduced dispatcher containing only the reachable/needed nodes.

    With *inputs*, repeatedly dispatches (without calling functions) to
    discover the union of reachable workflows, resolving wait-input
    nodes iteratively; without inputs or outputs an empty copy is
    returned.
    """
    bfs = None
    if inputs:
        # first pass: no wait-input constraints
        wait_in = self._get_wait_in(flag=False)
        o = self.dispatch(inputs, outputs, cutoff, inputs_dist, wildcard, True, False, True, _wait_in=wait_in)
        data_nodes = self.data_nodes
        from .utils.alg import _union_workflow, _convert_bfs
        bfs = _union_workflow(o)
        if inputs_dist:
            inputs_dist = combine_dicts(o.dist, inputs_dist)
        else:
            inputs_dist = o.dist
        # iterate with wait-input flags until no more nodes are unlocked
        wait_in = self._get_wait_in(flag=True)
        while True:
            o = self.dispatch(inputs, outputs, cutoff, inputs_dist, wildcard, True, False, False, _wait_in=wait_in)
            _union_workflow(o, bfs=bfs)
            n_d, status = o._remove_wait_in()
            if not status:
                break
            # feed newly-reachable data nodes back in as inputs
            inputs = n_d.intersection(data_nodes).union(inputs)
        outputs, bfs = outputs or o, _convert_bfs(bfs)
    elif not outputs:
        # nothing to shrink against: return an empty structural copy
        return self.copy_structure()
    dsp = self._get_dsp_from_bfs(outputs, bfs_graphs=bfs)
    return dsp
|
Returns a reduced dispatcher .
|
4,622
|
def _get_dsp_from_bfs(self, outputs, bfs_graphs=None):
    """Return the sub-dispatcher induced by the workflow that reaches *outputs*.

    Recursively shrinks nested sub-dispatcher nodes with their own BFS
    graphs and rewires their input/output links accordingly.
    """
    bfs = bfs_graphs[NONE] if bfs_graphs is not None else self.dmap
    # walk backwards from the outputs over the bfs graph
    dsp = self.get_sub_dsp_from_workflow(sources=outputs, graph=bfs, reverse=True, _update_links=False)
    succ, nodes, pred = dsp.dmap.succ, dsp.nodes, dsp.dmap.pred
    rm_edges, nds = dsp.dmap.remove_edges_from, dsp.data_nodes
    from .utils.alg import _nodes, _get_sub_out, _update_io
    for n in dsp.sub_dsp_nodes:
        # copy-on-write: don't mutate the parent's node attributes
        a = nodes[n] = nodes[n].copy()
        bfs = bfs_graphs[n] if bfs_graphs is not None else None
        out = _get_sub_out(a, succ[n])
        if 'input_domain' in a:
            out.update(_nodes(a['inputs'].values()))
        # shrink the nested dispatcher against its own required outputs
        a['function'] = a['function']._get_dsp_from_bfs(out, bfs)
        i, o = _update_io(a, pred[n], succ[n])
        # drop links made dangling by the shrink
        rm_edges({(u, n) for u in i}.union(((n, u) for u in o)))
    return dsp
|
Returns the sub - dispatcher induced by the workflow from outputs .
|
4,623
|
def add_callback(self, method):
    """Attach *method* to be called when this future finishes.

    Must be called from inside an actor. If the future is not yet
    FINISHED the callback is queued; otherwise it is delivered
    immediately as a TELL message to the calling actor.

    Raises:
        FutureError: when called outside an actor context.
    """
    from_actor = get_current()
    if from_actor is not None:
        callback = (method, from_actor.channel, from_actor.url)
        with self.__condition:
            if self.__state is not FINISHED:
                self.__callbacks.append(callback)
                return
        # already finished: fire the callback right away
        msg = {TYPE: TELL, METHOD: method, PARAMS: ([self], {}), TO: from_actor.url}
        from_actor.channel.send(msg)
    else:
        raise FutureError("add_callback only works when called " +
                          "from inside an actor")
|
Attaches a method that will be called when the future finishes .
|
4,624
|
def send_work(self):
    """Send this future's query to the target actor so it starts the work.

    Raises:
        FutureError: if the future is already running.
    """
    if not self.__set_running():
        raise FutureError("Future already running.")
    work_msg = {TYPE: FUTURE,
                METHOD: self.__method,
                PARAMS: self.__params,
                CHANNEL: self.__channel,
                TO: self.__target,
                RPC_ID: self.__id}
    self.__actor_channel.send(work_msg)
|
Sends the query to the actor for it to start executing the work .
|
4,625
|
def set_result(self, result):
    """Set the return value of the work associated with the future.

    Marks the future FINISHED, wakes all waiters and fires registered
    callbacks. Only called internally.
    """
    with self.__condition:
        self.__result = result
        self.__state = FINISHED
        self.__condition.notify_all()
        # NOTE(review): the flattened source is ambiguous about whether
        # callbacks fire inside the lock; reconstructed as inside —
        # confirm against upstream.
        self._invoke_callbacks()
|
Sets the return value of work associated with the future . Only called internally .
|
4,626
|
def set_exception(self, exception):
    """Set the result of the future as being the given exception.

    Marks the future FINISHED, wakes all waiters and fires registered
    callbacks. Only called internally.
    """
    with self.__condition:
        self.__exception = exception
        self.__state = FINISHED
        self.__condition.notify_all()
        # NOTE(review): the flattened source is ambiguous about whether
        # callbacks fire inside the lock; reconstructed as inside —
        # confirm against upstream.
        self._invoke_callbacks()
|
Sets the result of the future as being the given exception . Only called internally .
|
4,627
|
def angle_between_vectors(x, y):
    """Return the angle between vectors *x* and *y*, in degrees.

    Returns 0 when either vector has zero magnitude (the angle is
    undefined there), instead of raising ZeroDivisionError.
    """
    xm = magnitude(x)
    ym = magnitude(y)
    if xm == 0 or ym == 0:
        # degenerate input: keep the original fallback value of 0
        return 0
    # Fix: the original returned 0 whenever the dot product was 0, but a
    # zero dot product between two non-zero vectors means they are
    # perpendicular — the correct angle is 90 degrees (acos(0)).
    dp = dot_product(x, y)
    return math.acos(dp / (xm * ym)) * (180. / math.pi)
|
Compute the angle between vector x and y
|
4,628
|
def _ssh_forward_accept(ssh_session, timeout_ms):
    """Block until a connection arrives on a reverse-forwarded port.

    Raises:
        SshTimeoutException: when no connection is accepted within
            *timeout_ms* milliseconds.
    """
    channel = c_ssh_forward_accept(c_void_p(ssh_session), c_int(timeout_ms))
    if channel is None:
        raise SshTimeoutException()
    return channel
|
Waiting for an incoming connection from a reverse forwarded port . Note that this results in a kernel block until a connection is received .
|
4,629
|
def execute(self, cmd, block_size=DEFAULT_EXECUTE_READ_BLOCK_SIZE):
    """Execute *cmd* on a dedicated SSH channel, yielding output chunks.

    A fresh channel is opened per command because exec does not support
    reuse. This is a generator: output is yielded in blocks of at most
    *block_size* bytes until a short read signals the end of the stream.
    """
    with SshChannel(self) as sc:
        self.__log.debug("Executing command: %s" % (cmd))
        sc.open_session()
        sc.request_exec(cmd)
        # NOTE(review): buffer_ appears unused — confirm before removing
        buffer_ = bytearray()
        while 1:
            bytes = sc.read(block_size)
            yield bytes
            # a short read means the stream is exhausted
            if len(bytes) < block_size:
                break
|
Execute a remote command . This functionality does not support more than one command to be executed on the same channel so we create a dedicated channel at the session level rather than allowing direct access at the channel level .
|
4,630
|
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile, force_relative=True):
    """Read a GSSHA project file from disk into ProjectCard objects.

    Skips blank lines and WMS-style '#' comment cards that are not in
    the recognised card lists; records MAP_TYPE and basic file metadata
    on the instance.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        HEADERS = ('GSSHAPROJECT',)
        # '#' cards that are real cards rather than comments
        WMS_CARDS = ('#INDEXGRID_GUID', '#PROJECTION_FILE', '#LandSoil', '#CHANNEL_POINT_INPUT_WMS')
        GSSHAPY_CARDS = ('#GSSHAPY_EVENT_YML',)
        with open(path, 'r') as f:
            for line in f:
                if not line.strip():
                    # skip blank lines
                    continue
                elif '#' in line.split()[0] and line.split()[0] not in WMS_CARDS + GSSHAPY_CARDS:
                    # skip unrecognised comment cards
                    continue
                try:
                    card = self._extractCard(line, force_relative)
                except:
                    # fall back to directory-style card extraction
                    card = self._extractDirectoryCard(line, force_relative)
                if card['name'] not in HEADERS:
                    prjCard = ProjectCard(name=card['name'], value=card['value'])
                    # associate with this project file (ORM relationship)
                    prjCard.projectFile = self
                if card['name'] == 'MAP_TYPE':
                    self.mapType = int(card['value'])
        self.srid = spatialReferenceID
        self.name = name
        self.fileExtension = extension
|
Project File Read from File Method
|
4,631
|
def _write ( self , session , openFile , replaceParamFile ) : PRIORITY_CARDS = ( 'WMS' , 'MASK_WATERSHED' , 'REPLACE_LINE' , 'REPLACE_PARAMS' , 'REPLACE_VALS' , 'REPLACE_FOLDER' ) filename = os . path . split ( openFile . name ) [ 1 ] name = filename . split ( '.' ) [ 0 ] openFile . write ( 'GSSHAPROJECT\n' ) for card_key in PRIORITY_CARDS : card = self . getCard ( card_key ) if card is not None : openFile . write ( card . write ( originalPrefix = self . name , newPrefix = name ) ) for card in self . projectCards : if card . name not in PRIORITY_CARDS : openFile . write ( card . write ( originalPrefix = self . name , newPrefix = name ) )
|
Project File Write to File Method
|
4,632
|
def appendDirectory(self, directory, projectFilePath):
    """Prepend *directory* to the relative paths in a project file.

    The file is read in full, then rewritten in place with each card's
    path value joined onto *directory*. Card values are padded to a
    25-column layout (minimum two spaces).
    """
    lines = []
    with open(projectFilePath, 'r') as original:
        for l in original:
            lines.append(l)
    with open(projectFilePath, 'w') as new:
        for line in lines:
            card = {}
            try:
                card = self._extractCard(line)
            except:
                # fall back to directory-style card extraction
                card = self._extractDirectoryCard(line)
            numSpaces = max(2, 25 - len(card['name']))
            if card['value'] is None:
                # card with no value: write the name only
                rewriteLine = '%s\n' % (card['name'])
            else:
                if card['name'] == 'WMS':
                    rewriteLine = '%s %s\n' % (card['name'], card['value'])
                elif card['name'] == 'PROJECT_PATH':
                    filePath = '"%s"' % os.path.normpath(directory)
                    rewriteLine = '%s%s%s\n' % (card['name'], ' ' * numSpaces, filePath)
                elif '"' in card['value']:
                    # quoted value is a filename: join it onto the directory
                    filename = card['value'].strip('"')
                    filePath = '"%s"' % os.path.join(directory, filename)
                    rewriteLine = '%s%s%s\n' % (card['name'], ' ' * numSpaces, filePath)
                else:
                    rewriteLine = '%s%s%s\n' % (card['name'], ' ' * numSpaces, card['value'])
            new.write(rewriteLine)
|
Append directory to relative paths in project file . By default the project file paths are read and written as relative paths . Use this method to prepend a directory to all the paths in the project file .
|
4,633
|
def readProject(self, directory, projectFileName, session, spatial=False, spatialReferenceID=None):
    """Read all files for a GSSHA project (inputs, outputs, maps, WMS datasets) into the database."""
    self.project_directory = directory
    with tmp_chdir(directory):
        session.add(self)
        self.read(directory, projectFileName, session, spatial=spatial, spatialReferenceID=spatialReferenceID)
        batchDirectory = self._getBatchDirectory(directory)
        if spatialReferenceID is None:
            # derive SRID from the project's projection file
            spatialReferenceID = self._automaticallyDeriveSpatialReferenceId(directory)
        replaceParamFile = self._readReplacementFiles(directory, session, spatial, spatialReferenceID)
        self._readXput(self.INPUT_FILES, directory, session, spatial=spatial, spatialReferenceID=spatialReferenceID, replaceParamFile=replaceParamFile)
        self._readXput(self.OUTPUT_FILES, batchDirectory, session, spatial=spatial, spatialReferenceID=spatialReferenceID, replaceParamFile=replaceParamFile)
        self._readXputMaps(self.INPUT_MAPS, directory, session, spatial=spatial, spatialReferenceID=spatialReferenceID, replaceParamFile=replaceParamFile)
        self._readWMSDatasets(self.WMS_DATASETS, batchDirectory, session, spatial=spatial, spatialReferenceID=spatialReferenceID)
        self._commit(session, self.COMMIT_ERROR_MESSAGE)
|
Read all files for a GSSHA project into the database .
|
4,634
|
def readInput(self, directory, projectFileName, session, spatial=False, spatialReferenceID=None):
    """Read only the input files for a GSSHA project into the database."""
    self.project_directory = directory
    with tmp_chdir(directory):
        session.add(self)
        self.read(directory, projectFileName, session, spatial, spatialReferenceID)
        if spatialReferenceID is None:
            # derive SRID from the project's projection file
            spatialReferenceID = self._automaticallyDeriveSpatialReferenceId(directory)
        replaceParamFile = self._readReplacementFiles(directory, session, spatial, spatialReferenceID)
        self._readXput(self.INPUT_FILES, directory, session, spatial=spatial, spatialReferenceID=spatialReferenceID, replaceParamFile=replaceParamFile)
        self._readXputMaps(self.INPUT_MAPS, directory, session, spatial=spatial, spatialReferenceID=spatialReferenceID, replaceParamFile=replaceParamFile)
        self._commit(session, self.COMMIT_ERROR_MESSAGE)
|
Read only input files for a GSSHA project into the database .
|
4,635
|
def readOutput(self, directory, projectFileName, session, spatial=False, spatialReferenceID=None):
    """Read only the output files for a GSSHA project into the database.

    Also reads the watershed mask map, which outputs are referenced
    against.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        session.add(self)
        self.read(directory, projectFileName, session, spatial, spatialReferenceID)
        batchDirectory = self._getBatchDirectory(directory)
        # the mask map is needed to georeference the outputs
        maskMap = WatershedMaskFile()
        maskMapFilename = self.getCard('WATERSHED_MASK').value.strip('"')
        maskMap.read(session=session, directory=directory, filename=maskMapFilename, spatial=spatial)
        maskMap.projectFile = self
        if spatialReferenceID is None:
            # derive SRID from the project's projection file
            spatialReferenceID = self._automaticallyDeriveSpatialReferenceId(directory)
        self._readXput(self.OUTPUT_FILES, batchDirectory, session, spatial=spatial, spatialReferenceID=spatialReferenceID)
        self._readWMSDatasets(self.WMS_DATASETS, batchDirectory, session, spatial=spatial, spatialReferenceID=spatialReferenceID)
        self._commit(session, self.COMMIT_ERROR_MESSAGE)
|
Read only output files for a GSSHA project to the database .
|
4,636
|
def _readXputFile(self, file_cards, card_name, directory, session, spatial=False, spatialReferenceID=None, replaceParamFile=None, **kwargs):
    """Read a single project IO file, identified by its card, into the database.

    Returns the result of the file object's read, or None when the card
    is absent from the project file.
    """
    if spatialReferenceID is None:
        # derive SRID from the project's projection file
        spatialReferenceID = self._automaticallyDeriveSpatialReferenceId(directory)
    card = self.getCard(card_name)
    if card:
        fileIO = file_cards[card.name]
        # card values may be quoted with either quote style
        filename = card.value.strip('"').strip("'")
        return self._invokeRead(fileIO=fileIO,
                                directory=directory,
                                filename=filename,
                                session=session,
                                spatial=spatial,
                                spatialReferenceID=spatialReferenceID,
                                replaceParamFile=replaceParamFile,
                                **kwargs)
|
Read specific IO file for a GSSHA project to the database .
|
4,637
|
def writeProject(self, session, directory, name):
    """Write all files for a project from the database to disk.

    Inputs and maps go to *directory*; outputs and WMS datasets go to
    the batch directory.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        batchDirectory = self._getBatchDirectory(directory)
        replaceParamFile = self.replaceParamFile
        self._writeReplacementFiles(session=session, directory=directory, name=name)
        self.write(session=session, directory=directory, name=name)
        self._writeXput(session=session, directory=directory, fileCards=self.INPUT_FILES, name=name, replaceParamFile=replaceParamFile)
        self._writeXput(session=session, directory=batchDirectory, fileCards=self.OUTPUT_FILES, name=name)
        self._writeXputMaps(session=session, directory=directory, mapCards=self.INPUT_MAPS, name=name, replaceParamFile=replaceParamFile)
        self._writeWMSDatasets(session=session, directory=batchDirectory, wmsDatasetCards=self.WMS_DATASETS, name=name)
|
Write all files for a project from the database to file .
|
4,638
|
def writeInput(self, session, directory, name):
    """Write only the input files for a GSSHA project from the database to disk."""
    self.project_directory = directory
    with tmp_chdir(directory):
        replaceParamFile = self.replaceParamFile
        self.write(session=session, directory=directory, name=name)
        self._writeXput(session=session, directory=directory, fileCards=self.INPUT_FILES, name=name, replaceParamFile=replaceParamFile)
        self._writeXputMaps(session=session, directory=directory, mapCards=self.INPUT_MAPS, name=name, replaceParamFile=replaceParamFile)
|
Write only input files for a GSSHA project from the database to file .
|
4,639
|
def writeOutput(self, session, directory, name):
    """Write only the output files for a GSSHA project from the database to disk.

    Outputs and WMS datasets are written to the batch directory.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        batchDirectory = self._getBatchDirectory(directory)
        self._writeReplacementFiles(session=session, directory=directory, name=name)
        self.write(session=session, directory=directory, name=name)
        self._writeXput(session=session, directory=batchDirectory, fileCards=self.OUTPUT_FILES, name=name)
        self._writeWMSDatasets(session=session, directory=batchDirectory, wmsDatasetCards=self.WMS_DATASETS, name=name)
|
Write only output files for a GSSHA project from the database to file .
|
4,640
|
def getFileKeys(self):
    """Return the keys of the file objects that have been read into the database.

    A key is included only when its file object is truthy (i.e. the
    file was actually read in).
    """
    # Fix: dict.iteritems() is Python 2 only; items() works on both 2
    # and 3, and a comprehension replaces the manual append loop.
    return [key for key, value in self.getFileObjects().items() if value]
|
Retrieve a list of file keys that have been read into the database .
|
4,641
|
def getFileObjects(self):
    """Return a dictionary mapping descriptive keys to this project's file objects.

    Values are the ORM relationship attributes; unread files are falsy.
    """
    files = {'project-file': self,
             'mapping-table-file': self.mapTableFile,
             'channel-input-file': self.channelInputFile,
             'precipitation-file': self.precipFile,
             'storm-pipe-network-file': self.stormPipeNetworkFile,
             'hmet-file': self.hmetFile,
             'nwsrfs-file': self.nwsrfsFile,
             'orographic-gage-file': self.orographicGageFile,
             'grid-pipe-file': self.gridPipeFile,
             'grid-stream-file': self.gridStreamFile,
             'time-series-file': self.timeSeriesFiles,
             'projection-file': self.projectionFile,
             'replace-parameters-file': self.replaceParamFile,
             'replace-value-file': self.replaceValFile,
             'output-location-file': self.outputLocationFiles,
             'maps': self.maps,
             'link-node-datasets-file': self.linkNodeDatasets}
    return files
|
Retrieve a dictionary of file objects .
|
4,642
|
def getCard(self, name):
    """
    Retrieve the card object with the given name (case-insensitive).

    Returns:
        The matching ProjectCard, or None when no card matches.
    """
    target = name.upper()
    return next((card for card in self.projectCards
                 if card.name.upper() == target), None)
|
Retrieve card object for given card name .
|
4,643
|
def deleteCard(self, card_name, db_session):
    """
    Remove the named card from the GSSHA project file and commit the change.

    Does nothing when no card with that name exists.
    """
    card = self.getCard(card_name.upper())
    if card is None:
        return
    db_session.delete(card)
    db_session.commit()
|
Removes card from gssha project file
|
4,644
|
def getGridByCard(self, gssha_card_name):
    """
    Return a GDALGrid object for the raster referenced by the given grid card.

    Raises:
        ValueError: if the card name is not a recognized grid card, the card
            is missing from the project, or no #PROJECTION_FILE card exists.
    """
    with tmp_chdir(self.project_directory):
        if gssha_card_name not in (self.INPUT_MAPS + self.WMS_DATASETS):
            raise ValueError("Card {0} not found in valid grid cards ..."
                             .format(gssha_card_name))
        gssha_grid_card = self.getCard(gssha_card_name)
        if gssha_grid_card is None:
            raise ValueError("{0} card not found ...".format(gssha_card_name))
        gssha_pro_card = self.getCard("#PROJECTION_FILE")
        if gssha_pro_card is None:
            raise ValueError("#PROJECTION_FILE card not found ...")
        # Card values may be wrapped in single or double quotes; strip both.
        return GDALGrid(gssha_grid_card.value.strip('"').strip("'"),
                        gssha_pro_card.value.strip('"').strip("'"))
|
Returns GDALGrid object of GSSHA grid
|
4,645
|
def getGrid(self, use_mask=True):
    """
    Return a GDALGrid object describing the GSSHA model bounds.

    Args:
        use_mask: when True use the WATERSHED_MASK grid, otherwise the
            ELEVATION grid.
    """
    card_name = "WATERSHED_MASK" if use_mask else "ELEVATION"
    return self.getGridByCard(card_name)
|
Returns GDALGrid object of GSSHA model bounds
|
4,646
|
def getIndexGrid(self, name):
    """
    Return a GDALGrid object for the index map with the given name.

    Raises:
        ValueError: if no #PROJECTION_FILE card exists.
    """
    index_map = self.mapTableFile.indexMaps.filter_by(name=name).one()
    gssha_pro_card = self.getCard("#PROJECTION_FILE")
    if gssha_pro_card is None:
        raise ValueError("#PROJECTION_FILE card not found ...")
    # Index-map filenames are relative to the project directory.
    with tmp_chdir(self.project_directory):
        return GDALGrid(index_map.filename,
                        gssha_pro_card.value.strip('"').strip("'"))
|
Returns GDALGrid object of index map
|
4,647
|
def getWkt(self):
    """
    Return the GSSHA projection WKT string read from the #PROJECTION_FILE.

    Raises:
        ValueError: if no #PROJECTION_FILE card exists.
    """
    gssha_pro_card = self.getCard("#PROJECTION_FILE")
    if gssha_pro_card is None:
        raise ValueError("#PROJECTION_FILE card not found ...")
    with tmp_chdir(self.project_directory):
        gssha_prj_file = gssha_pro_card.value.strip('"').strip("'")
        with open(gssha_prj_file) as pro_file:
            wkt_string = pro_file.read()
    return wkt_string
|
Returns GSSHA projection WKT string
|
4,648
|
def getOutlet(self):
    """
    Return the outlet (longitude, latitude) of the model.

    OUTROW/OUTCOL cards are 1-based; pixel indices are 0-based, hence the -1.
    """
    outrow = int(self.getCard(name='OUTROW').value) - 1
    outcol = int(self.getCard(name='OUTCOL').value) - 1
    gssha_grid = self.getGrid()
    return gssha_grid.pixel2lonlat(outcol, outrow)
|
Gets the outlet latitude and longitude .
|
4,649
|
def setOutlet(self, col, row, outslope=None):
    """
    Set the outlet grid cell information (OUTROW/OUTCOL/OUTSLOPE cards).

    Args:
        col: 1-based outlet column.
        row: 1-based outlet row.
        outslope: outlet slope; when None it is computed from the grids.
    """
    # NOTE(review): gssha_grid is fetched but not used here — presumably kept
    # to validate that a grid is available; confirm before removing.
    gssha_grid = self.getGrid()
    self.setCard(name='OUTROW', value=str(row))
    self.setCard(name='OUTCOL', value=str(col))
    if outslope is None:
        self.calculateOutletSlope()
    else:
        self.setCard(name='OUTSLOPE', value=str(outslope))
|
Sets the outlet grid cell information in the project file .
|
4,650
|
def findOutlet(self, shapefile_path):
    """
    Locate the model outlet along the watershed boundary and store it.

    Walks the exterior coordinates of the boundary polygon (reprojected to
    EPSG:4326), finds the boundary point with the lowest elevation inside the
    watershed mask, and calls setOutlet with its 1-based cell indices.

    Raises:
        IndexError: if no boundary coordinate maps to a valid outlet cell.
    """
    check_watershed_boundary_geometry(shapefile_path)
    shapefile = ogr.Open(shapefile_path)
    source_layer = shapefile.GetLayer(0)
    source_lyr_proj = source_layer.GetSpatialRef()
    # Reproject the boundary feature to geographic WGS84 (EPSG:4326).
    osr_geographic_proj = osr.SpatialReference()
    osr_geographic_proj.ImportFromEPSG(4326)
    proj_transform = osr.CoordinateTransformation(source_lyr_proj,
                                                  osr_geographic_proj)
    boundary_feature = source_layer.GetFeature(0)
    feat_geom = boundary_feature.GetGeometryRef()
    feat_geom.Transform(proj_transform)
    polygon = shapely_loads(feat_geom.ExportToWkb())
    mask_grid = self.getGrid()
    elevation_grid = self.getGrid(use_mask=False)
    elevation_array = elevation_grid.np_array()
    # Mask out cells that fall outside the watershed (mask value 0).
    ma_elevation_array = np.ma.array(elevation_array,
                                     mask=mask_grid.np_array() == 0)
    min_elevation = sys.maxsize
    outlet_pt = None
    for coord in list(polygon.exterior.coords):
        try:
            col, row = mask_grid.lonlat2pixel(*coord)
        except IndexError:
            # Coordinate falls outside the grid; skip it.
            continue
        elevation_value = ma_elevation_array[row, col]
        if elevation_value is np.ma.masked:
            # Boundary point lands on a masked cell: search a 5-cell
            # neighborhood for the unmasked cell whose elevation is closest
            # to this cell's raw elevation, and use it instead.
            actual_value = elevation_array[row, col]
            max_diff = sys.maxsize
            nrow = None
            ncol = None
            nval = None
            for row_ix in range(max(row - 5, 0), min(row + 5, mask_grid.y_size)):
                for col_ix in range(max(col - 5, 0), min(col + 5, mask_grid.x_size)):
                    val = ma_elevation_array[row_ix, col_ix]
                    if not val is np.ma.masked:
                        val_diff = abs(val - actual_value)
                        if val_diff < max_diff:
                            max_diff = val_diff
                            nval = val
                            nrow = row_ix
                            ncol = col_ix
            if None not in (nrow, ncol, nval):
                row = nrow
                col = ncol
                elevation_value = nval
        if elevation_value < min_elevation:
            min_elevation = elevation_value
            outlet_pt = (col, row)
    if outlet_pt is None:
        raise IndexError('No valid outlet points found on boundary ...')
    outcol, outrow = outlet_pt
    # setOutlet expects 1-based indices.
    self.setOutlet(col=outcol + 1, row=outrow + 1)
|
Calculate outlet location
|
4,651
|
def calculateOutletSlope(self):
    """
    Estimate the slope at the outlet cell and store it in the OUTSLOPE card.

    Averages the positive downstream slopes in the 3x3 neighborhood of the
    outlet, ignoring cells outside the mask; falls back to a minimum slope of
    0.001 when no valid slope can be computed.
    """
    try:
        mask_grid = self.getGrid()
        elevation_grid = self.getGrid(use_mask=False)
        # Cards are 1-based; array indices are 0-based.
        outrow = int(self.getCard("OUTROW").value) - 1
        outcol = int(self.getCard("OUTCOL").value) - 1
        cell_size = float(self.getCard("GRIDSIZE").value)
        # 3x3 window around the outlet, clipped to the grid bounds.
        # NOTE(review): row bounds use x_size and column bounds use y_size —
        # this looks transposed (rows would normally be bounded by y_size);
        # confirm against GDALGrid's size semantics.
        min_row = max(0, outrow - 1)
        max_row = min(mask_grid.x_size, outrow + 2)
        min_col = max(0, outcol - 1)
        max_col = min(mask_grid.y_size, outcol + 2)
        mask_array = mask_grid.np_array()
        # Exclude the outlet cell itself from the slope average.
        mask_array[outrow, outcol] = 0
        mask_array = mask_array[min_row:max_row, min_col:max_col]
        mask_array = (mask_array == 0)
        elevation_array = elevation_grid.np_array()
        original_elevation = elevation_array[outrow, outcol]
        elevation_array = elevation_array[min_row:max_row, min_col:max_col]
        slope_calc_array = (elevation_array - original_elevation) / cell_size
        # Only keep cells that are upslope of the outlet (positive slope).
        mask_array[slope_calc_array <= 0] = True
        slope_mask_array = np.ma.array(slope_calc_array, mask=mask_array)
        outslope = slope_mask_array.mean()
        if outslope is np.ma.masked or outslope < 0.001:
            outslope = 0.001
    except ValueError:
        # Missing/invalid cards fall back to the minimum slope.
        outslope = 0.001
    self.setCard("OUTSLOPE", str(outslope))
|
Attempt to determine the slope at the OUTLET
|
4,652
|
def timezone(self):
    """
    Timezone of the GSSHA model, looked up lazily from its center lat/lon
    and cached on the instance.
    """
    if self._tz is None:
        center_lat, center_lon = self.centerLatLon()
        finder = TimezoneFinder()
        zone_name = finder.timezone_at(lng=center_lon, lat=center_lat)
        self._tz = timezone(zone_name)
    return self._tz
|
timezone of GSSHA model
|
4,653
|
def _getBatchDirectory(self, projectRootDirectory):
    """
    Return the batch output directory for the project.

    If a REPLACE_FOLDER card exists, its value is appended to the project
    root and the directory is created if missing; otherwise the project root
    itself is returned.
    """
    batchDirectory = projectRootDirectory
    replaceFolderCard = self.getCard('REPLACE_FOLDER')
    if replaceFolderCard:
        replaceDir = replaceFolderCard.value.strip('"')
        batchDirectory = os.path.join(batchDirectory, replaceDir)
        if not os.path.isdir(batchDirectory):
            os.mkdir(batchDirectory)
            log.info('Creating directory for batch output: {0}'
                     .format(batchDirectory))
    return batchDirectory
|
Check the project file for the REPLACE_FOLDER card. If it exists, append its value to create the batch directory path. This is the directory output is written to when run in batch mode.
|
4,654
|
def _readXput(self, fileCards, directory, session, spatial=False,
              spatialReferenceID=4236, replaceParamFile=None):
    """
    Read supporting files listed in the project cards into the database.

    Args:
        fileCards: mapping of card name -> file IO class (falsy values skip).
        spatialReferenceID: SRID for spatial reads.
            NOTE(review): the default 4236 looks like a transposition of
            EPSG 4326 (WGS84) — confirm before relying on the default.
    """
    for card in self.projectCards:
        # Only process cards with a filename value and a registered IO class.
        if (card.name in fileCards) and self._noneOrNumValue(card.value) and fileCards[card.name]:
            fileIO = fileCards[card.name]
            filename = card.value.strip('"')
            self._invokeRead(fileIO=fileIO,
                             directory=directory,
                             filename=filename,
                             session=session,
                             spatial=spatial,
                             spatialReferenceID=spatialReferenceID,
                             replaceParamFile=replaceParamFile)
|
GSSHAPY Project Read Files from File Method
|
4,655
|
def _readXputMaps(self, mapCards, directory, session, spatial=False,
                  spatialReferenceID=4236, replaceParamFile=None):
    """
    Read map files listed in the project cards into the database.

    For supported MAP_TYPEs every listed map is read; for unsupported types
    only maps whose extension is in ALWAYS_READ_AND_WRITE_MAPS are read and
    a warning is logged.
    """
    if self.mapType in self.MAP_TYPES_SUPPORTED:
        for card in self.projectCards:
            if (card.name in mapCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')
                self._invokeRead(fileIO=RasterMapFile,
                                 directory=directory,
                                 filename=filename,
                                 session=session,
                                 spatial=spatial,
                                 spatialReferenceID=spatialReferenceID,
                                 replaceParamFile=replaceParamFile)
    else:
        for card in self.projectCards:
            if (card.name in mapCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')
                fileExtension = filename.split('.')[1]
                # Essential maps are read regardless of MAP_TYPE support.
                if fileExtension in self.ALWAYS_READ_AND_WRITE_MAPS:
                    self._invokeRead(fileIO=RasterMapFile,
                                     directory=directory,
                                     filename=filename,
                                     session=session,
                                     spatial=spatial,
                                     spatialReferenceID=spatialReferenceID,
                                     replaceParamFile=replaceParamFile)
        log.warning('Could not read map files. '
                    'MAP_TYPE {0} not supported.'.format(self.mapType))
|
GSSHA Project Read Map Files from File Method
|
4,656
|
def _readWMSDatasets(self, datasetCards, directory, session, spatial=False,
                     spatialReferenceID=4236):
    """
    Read WMS Dataset files listed in the project cards into the database.

    WMS Dataset files cannot be read independently: they require the mask
    map ('msk' extension) already stored for this project. Files that are
    not found on disk are attempted as batch-mode output.
    """
    if self.mapType in self.MAP_TYPES_SUPPORTED:
        # The mask map must already exist in the database for this project.
        maskMap = session.query(RasterMapFile).filter(RasterMapFile.projectFile == self).filter(RasterMapFile.fileExtension == 'msk').one()
        for card in self.projectCards:
            if (card.name in datasetCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')
                path = os.path.join(directory, filename)
                if os.path.isfile(path):
                    wmsDatasetFile = WMSDatasetFile()
                    wmsDatasetFile.projectFile = self
                    wmsDatasetFile.read(directory=directory,
                                        filename=filename,
                                        session=session,
                                        maskMap=maskMap,
                                        spatial=spatial,
                                        spatialReferenceID=spatialReferenceID)
                else:
                    # File absent: may be batch-mode output with prefixed names.
                    self._readBatchOutputForFile(directory, WMSDatasetFile,
                                                 filename, session, spatial,
                                                 spatialReferenceID,
                                                 maskMap=maskMap)
|
Method to handle the special case of WMS Dataset Files. WMS Dataset Files cannot be read in independently as other types of files can. They rely on the Mask Map file for some parameters.
|
4,657
|
def _readBatchOutputForFile(self, directory, fileIO, filename, session,
                            spatial, spatialReferenceID,
                            replaceParamFile=None, maskMap=None):
    """
    Read batch-mode output files matching *filename*.

    When GSSHA runs in batch mode, output files of the same type get an
    integer prepended to avoid name conflicts; this scans the directory for
    any entry containing *filename* and reads each one, logging warnings
    when nothing is found.
    """
    directoryList = os.listdir(directory)
    # Collect every directory entry whose name contains the base filename.
    batchFiles = []
    for thing in directoryList:
        if filename in thing:
            batchFiles.append(thing)
    numFilesRead = 0
    for batchFile in batchFiles:
        instance = fileIO()
        instance.projectFile = self
        if isinstance(instance, WMSDatasetFile):
            # WMS datasets need the mask map and keyword-style read call.
            instance.read(directory=directory,
                          filename=batchFile,
                          session=session,
                          maskMap=maskMap,
                          spatial=spatial,
                          spatialReferenceID=spatialReferenceID)
        else:
            instance.read(directory, batchFile, session,
                          spatial=spatial,
                          spatialReferenceID=spatialReferenceID,
                          replaceParamFile=replaceParamFile)
        numFilesRead += 1
    # Issue appropriate logs for replacement variables / missing files.
    if '[' in filename or ']' in filename:
        log.info('A file cannot be read, because the path to the '
                 'file in the project file has been replaced with '
                 'replacement variable {0}.'.format(filename))
    elif numFilesRead == 0:
        log.warning('{0} listed in project file, but no such '
                    'file exists.'.format(filename))
    else:
        log.info('Batch mode output detected. {0} files read '
                 'for file {1}'.format(numFilesRead, filename))
|
When batch mode is run in GSSHA, the files of the same type are prepended with an integer to avoid filename conflicts. This will attempt to read files in this format and throw warnings if the files aren't found.
|
4,658
|
def _invokeRead(self, fileIO, directory, filename, session, spatial=False,
                spatialReferenceID=4236, replaceParamFile=None, **kwargs):
    """
    Instantiate the given file IO class and read *filename* into the db.

    Returns the instance when the file exists; otherwise falls back to
    batch-mode output scanning (which returns None).
    """
    path = os.path.join(directory, filename)
    if os.path.isfile(path):
        instance = fileIO()
        instance.projectFile = self
        instance.read(directory, filename, session,
                      spatial=spatial,
                      spatialReferenceID=spatialReferenceID,
                      replaceParamFile=replaceParamFile, **kwargs)
        return instance
    else:
        # File missing on disk: it may be batch-mode output with a prefix.
        self._readBatchOutputForFile(directory, fileIO, filename, session,
                                     spatial, spatialReferenceID,
                                     replaceParamFile)
|
Invoke File Read Method on Other Files
|
4,659
|
def _writeXput(self, session, directory, fileCards, name=None,
               replaceParamFile=None):
    """
    Write supporting files listed in the project cards from the db to file.
    """
    for card in self.projectCards:
        if (card.name in fileCards) and self._noneOrNumValue(card.value) and fileCards[card.name]:
            fileIO = fileCards[card.name]
            filename = card.value.strip('"')
            # Check for replacement variables in the filename.
            if '[' in filename or ']' in filename:
                log.info('The file for project card {0} cannot be '
                         'written, because the path has been replaced '
                         'with replacement variable {1}.'.format(card.name, filename))
                # NOTE(review): this returns from the whole method, skipping
                # all remaining file cards; `continue` may have been the
                # intent — confirm before changing.
                return
            filename = self._replaceNewFilename(filename=filename, name=name)
            self._invokeWrite(fileIO=fileIO,
                              session=session,
                              directory=directory,
                              filename=filename,
                              replaceParamFile=replaceParamFile)
|
GSSHA Project Write Files to File Method
|
4,660
|
def _writeXputMaps(self, session, directory, mapCards, name=None,
                   replaceParamFile=None):
    """
    Write map files listed in the project cards from the database to file.

    For supported MAP_TYPEs every listed map is written; for unsupported
    types only maps whose extension is in ALWAYS_READ_AND_WRITE_MAPS are
    written and an error is logged.
    """
    if self.mapType in self.MAP_TYPES_SUPPORTED:
        for card in self.projectCards:
            if (card.name in mapCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')
                filename = self._replaceNewFilename(filename, name)
                self._invokeWrite(fileIO=RasterMapFile,
                                  session=session,
                                  directory=directory,
                                  filename=filename,
                                  replaceParamFile=replaceParamFile)
    else:
        for card in self.projectCards:
            if (card.name in mapCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')
                fileExtension = filename.split('.')[1]
                # Essential maps are written regardless of MAP_TYPE support.
                if fileExtension in self.ALWAYS_READ_AND_WRITE_MAPS:
                    filename = self._replaceNewFilename(filename, name)
                    self._invokeWrite(fileIO=RasterMapFile,
                                      session=session,
                                      directory=directory,
                                      filename=filename,
                                      replaceParamFile=replaceParamFile)
        log.error('Could not write map files. MAP_TYPE {0} '
                  'not supported.'.format(self.mapType))
|
GSSHAPY Project Write Map Files to File Method
|
4,661
|
def _writeWMSDatasets(self, session, directory, wmsDatasetCards, name=None):
    """
    Write WMS Dataset files listed in the project cards from the db to file.

    WMS datasets require the project's mask map ('msk' extension) to write.
    An unsupported MAP_TYPE logs an error and writes nothing.
    """
    if self.mapType in self.MAP_TYPES_SUPPORTED:
        for card in self.projectCards:
            if (card.name in wmsDatasetCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')
                filename = self._replaceNewFilename(filename, name)
                extension = filename.split('.')[1]
                # The mask map is required by the WMS dataset writer.
                maskMap = session.query(RasterMapFile).filter(RasterMapFile.projectFile == self).filter(RasterMapFile.fileExtension == 'msk').one()
                wmsDataset = None
                try:
                    wmsDataset = session.query(WMSDatasetFile).filter(WMSDatasetFile.projectFile == self).filter(WMSDatasetFile.fileExtension == extension).one()
                except NoResultFound:
                    log.warning('{0} listed as card in project file, '
                                'but the file is not found in the database.'.format(filename))
                except MultipleResultsFound:
                    # Multiple matches indicate batch-mode output.
                    self._invokeWriteForMultipleOfType(directory, extension,
                                                       WMSDatasetFile, filename,
                                                       session, maskMap=maskMap)
                    return
                if wmsDataset is not None and maskMap is not None:
                    wmsDataset.write(session=session, directory=directory,
                                     name=filename, maskMap=maskMap)
    else:
        log.error('Could not write WMS Dataset files. '
                  'MAP_TYPE {0} not supported.'.format(self.mapType))
|
GSSHAPY Project Write WMS Datasets to File Method
|
4,662
|
def _writeReplacementFiles(self, session, directory, name):
    """Write the replacement parameter and replacement value files, if present."""
    for replacement_file in (self.replaceParamFile, self.replaceValFile):
        if replacement_file:
            replacement_file.write(session=session, directory=directory, name=name)
|
Write the replacement files
|
4,663
|
def _invokeWrite(self, fileIO, session, directory, filename, replaceParamFile):
    """
    Look up the file object of type *fileIO* for this project in the
    database and write it to *filename*, handling duplicate matches as
    batch-mode output.
    """
    instance = None
    try:
        # Handle the one-of-type file cases.
        instance = session.query(fileIO).filter(fileIO.projectFile == self).one()
    # NOTE(review): bare except — this intentionally(?) falls back to the
    # extension-filtered query on ANY failure (NoResultFound or
    # MultipleResultsFound). Confirm before narrowing.
    except:
        # Handle the many-of-type file cases by filtering on extension.
        extension = filename.split('.')[1]
        try:
            instance = session.query(fileIO).filter(fileIO.projectFile == self).filter(fileIO.fileExtension == extension).one()
        except NoResultFound:
            log.warning('{0} listed as card in project file, but '
                        'the file is not found in the database.'.format(filename))
        except MultipleResultsFound:
            # Multiple matches indicate batch-mode output.
            self._invokeWriteForMultipleOfType(directory, extension, fileIO,
                                               filename, session,
                                               replaceParamFile=replaceParamFile)
            return
    if instance is not None:
        instance.write(session=session, directory=directory, name=filename,
                       replaceParamFile=replaceParamFile)
|
Invoke File Write Method on Other Files
|
4,664
|
def write(self, originalPrefix, newPrefix=None):
    """
    Render this project card as a single project-file line.

    Pads the value to column 25 (minimum two spaces), special-cases the WMS
    card, and optionally swaps originalPrefix for newPrefix in the value.
    """
    padding = ' ' * max(2, 25 - len(self.name))
    if self.value is None:
        # Cards without a value are written as a bare name.
        return '%s\n' % self.name
    if self.name == 'WMS':
        # The WMS card uses a single space instead of column padding.
        return '%s %s\n' % (self.name, self.value)
    value = self.value
    if newPrefix is not None and originalPrefix in value:
        value = value.replace(originalPrefix, newPrefix)
    return '%s%s%s\n' % (self.name, padding, value)
|
Write project card to string .
|
4,665
|
def elapsed(self):
    """
    Seconds between the start time and the latest timing entry.

    Returns 0.0 when timing has not started or no start time is recorded.
    """
    if not self.started or self._start_time is None:
        return 0.0
    latest_timestamp = self._timing_data[-1][0]
    return latest_timestamp - self._start_time
|
Returns the number of seconds it has been since the start until the latest entry .
|
4,666
|
def rate_unstable(self):
    """
    Instantaneous rate computed from the last two timing entries.

    Cheaper than the regression-based rate; returns 0.0 when not started
    or stalled.
    """
    if not self.started or self.stalled:
        return 0.0
    earlier = self._timing_data[-2]
    latest = self._timing_data[-1]
    return (latest[1] - earlier[1]) / (latest[0] - earlier[0])
|
Returns an unstable rate based on the last two entries in the timing data . Less intensive to compute .
|
4,667
|
def rate_overall(self):
    """
    Overall average rate (numerator per second) since the start time.

    Falls back to the regression-based rate when no time has elapsed yet
    (avoids a division by zero).
    """
    elapsed = self.elapsed
    # Reuse the locally captured value: the original re-read the `elapsed`
    # property in the division, evaluating it (and its indexing) twice.
    return self.rate if not elapsed else self.numerator / elapsed
|
Returns the overall average rate based on the start time .
|
4,668
|
def _calculate(self):
    """
    Perform the ETA and rate calculation via simple linear regression.

    Fits a line through the (timestamp, numerator) timing data using the
    Pearson correlation coefficient, stores the slope as the rate, and —
    when the denominator is defined — extrapolates the epoch at which the
    numerator reaches the denominator (self.eta_epoch).
    """
    # Means and sample standard deviations of timestamps (x) and values (y).
    mean_x = sum(i[0] for i in self._timing_data) / len(self._timing_data)
    mean_y = sum(i[1] for i in self._timing_data) / len(self._timing_data)
    std_x = sqrt(sum(pow(i[0] - mean_x, 2) for i in self._timing_data) / (len(self._timing_data) - 1))
    std_y = sqrt(sum(pow(i[1] - mean_y, 2) for i in self._timing_data) / (len(self._timing_data) - 1))
    # Accumulate covariance and squared deviations for Pearson's r.
    sum_xy, sum_sq_v_x, sum_sq_v_y = 0, 0, 0
    for x, y in self._timing_data:
        x -= mean_x
        y -= mean_y
        sum_xy += x * y
        sum_sq_v_x += pow(x, 2)
        sum_sq_v_y += pow(y, 2)
    pearson_r = sum_xy / sqrt(sum_sq_v_x * sum_sq_v_y)
    # Regression slope = r * (std_y / std_x); this is the rate.
    m = self.rate = pearson_r * (std_y / std_x)
    if self.undefined:
        # No denominator: rate only, no ETA to compute.
        return
    y = self.denominator
    b = mean_y - m * mean_x
    # x where the regression line reaches the denominator.
    x = (y - b) / m
    # Re-anchor the line through the latest data point and blend the two
    # predictions, weighted by current progress (numerator/denominator).
    fitted_b = self._timing_data[-1][1] - (m * self._timing_data[-1][0])
    fitted_x = (y - fitted_b) / m
    adjusted_x = ((fitted_x - x) * (self.numerator / self.denominator)) + x
    self.eta_epoch = adjusted_x
|
Perform the ETA and rate calculation .
|
4,669
|
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile):
    """
    Read an HMET WES file: one whitespace-delimited record per line with
    year month day hour followed by eight meteorological values.
    """
    self.fileExtension = extension
    with open(path, 'r') as hmetFile:
        for line in hmetFile:
            sline = line.strip().split()
            try:
                dateTime = datetime(int(sline[0]), int(sline[1]),
                                    int(sline[2]), int(sline[3]))
                hmetRecord = HmetRecord(hmetDateTime=dateTime,
                                        barometricPress=sline[4],
                                        relHumidity=sline[5],
                                        totalSkyCover=sline[6],
                                        windSpeed=sline[7],
                                        dryBulbTemp=sline[8],
                                        directRad=sline[9],
                                        globalRad=sline[10])
                hmetRecord.hmetFile = self
            # NOTE(review): bare except silently skips any line that does not
            # parse — presumably intended to skip headers/blank lines, but it
            # will also hide genuinely malformed data; confirm.
            except:
                pass
|
Read HMET WES from File Method
|
4,670
|
def _write(self, session, openFile, replaceParamFile):
    """
    Write HMET WES records to file, one tab-delimited line per record:
    year month day hour pressure(%.3f) humidity sky-cover wind-speed
    dry-bulb-temp direct-rad(%.2f) global-rad(%.2f).
    """
    hmetRecords = self.hmetRecords
    for record in hmetRecords:
        openFile.write('%s\t%s\t%s\t%s\t%.3f\t%s\t%s\t%s\t%s\t%.2f\t%.2f\n' % (
            record.hmetDateTime.year,
            record.hmetDateTime.month,
            record.hmetDateTime.day,
            record.hmetDateTime.hour,
            record.barometricPress,
            record.relHumidity,
            record.totalSkyCover,
            record.windSpeed,
            record.dryBulbTemp,
            record.directRad,
            record.globalRad))
|
Write HMET WES to File Method
|
4,671
|
def _read(self, directory, filename, session, path, name, extension,
          spatial=None, spatialReferenceID=None, replaceParamFile=None):
    """
    Read project file events from a YAML file into the database.

    Only events whose subfolder exists on disk and that do not already have
    a similar event recorded are added.
    """
    yml_events = []
    with open(path) as fo:
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input (arbitrary object construction) and is deprecated
        # in PyYAML >= 5.1; consider yaml.safe_load if the event file can
        # come from outside the project.
        yml_events = yaml.load(fo)
    for yml_event in yml_events:
        # Skip events whose output subfolder is missing on disk.
        if os.path.exists(os.path.join(directory, yml_event.subfolder)):
            orm_event = yml_event.as_orm()
            if not self._similar_event_exists(orm_event.subfolder):
                session.add(orm_event)
                self.events.append(orm_event)
                session.commit()
|
ProjectFileEvent Read from File Method
|
4,672
|
def as_yml(self):
    """Return a yml-compatible (YmlFileEvent) version of this event."""
    event_name = str(self.name)
    event_subfolder = str(self.subfolder)
    return YmlFileEvent(name=event_name, subfolder=event_subfolder)
|
Return yml compatible version of self
|
4,673
|
def prepare_request(node):
    """
    Prepare a request callable for the node's API route.

    Validates the HTTP method up front, then returns a closure that checks
    each keyword argument against the resource's declared query parameters
    (existence and type) before issuing the HTTP request.

    Raises:
        UnsupportedHTTPMethodError: when the resource's method is not allowed.
    """
    if node.resource.method not in AVAILABLE_METHODS:
        raise UnsupportedHTTPMethodError(node.resource.method)

    def request(data=None, json=None, **kwargs):
        # Validate every query parameter against the resource definition.
        for key, value in kwargs.items():
            param = next((p for p in node.resource.query_params if p.name == key), None)
            if not param:
                raise UnsupportedQueryParameter(node.resource.path, key)
            if not match_type(value, param.type):
                raise TypeError("Resource Query Parameter has type '{0}' but expected type '{1}'"
                                .format(value.__class__.__name__, param.type))
        response = requests.request(node.resource.method,
                                    node.resource.absolute_uri,
                                    params=kwargs, data=data, json=json)
        return response

    return request
|
Prepare request to node s API route
|
4,674
|
def define_plot_data(data, x_name, *y_names):
    """
    Define the data to be plotted.

    Builds one trace dict per y column, each with the shared x values, the
    y values, and the column name.
    """
    return [{'x': data[x_name], 'y': data[column], 'name': column}
            for column in y_names]
|
Defines the data to be plotted .
|
4,675
|
def plot_lines(it):
    """Plot each trace dict in *it* as a plotly line scatter."""
    traces = [go.Scatter(mode='lines', **trace) for trace in it]
    return py.iplot(traces, filename='scatter-mode')
|
Plotting lines .
|
4,676
|
def _ssh_channel_read(ssh_channel_int, count, is_stderr):
    """
    Do a read on an SSH channel via the libssh C API.

    Retries on SSH_AGAIN, raises SshError (with the session's error text)
    on SSH_ERROR, and returns at most *count* raw bytes.
    """
    buffer_ = create_string_buffer(count)
    while 1:
        received_bytes = c_ssh_channel_read(ssh_channel_int,
                                            cast(buffer_, c_void_p),
                                            c_uint32(count),
                                            c_int(int(is_stderr)))
        if received_bytes == SSH_ERROR:
            # Fetch the human-readable error from the owning session.
            ssh_session_int = _ssh_channel_get_session(ssh_channel_int)
            error = ssh_get_error(ssh_session_int)
            raise SshError("Channel read failed: %s" % (error))
        elif received_bytes == SSH_AGAIN:
            # Non-blocking would-block condition: retry.
            continue
        else:
            break
    # Only the bytes actually received are returned.
    return buffer_.raw[0:received_bytes]
|
Do a read on a channel .
|
4,677
|
def eventChunk(key, lines):
    """
    Parse an EVENT chunk from a precipitation file.

    Returns a dict with keys: description, nrgag, nrpds, coords (list of
    gage coordinate dicts), and valLines (list of timestamped value dicts).
    """
    # Constants: recognized cards, numeric cards, and timestamped-value cards.
    KEYWORDS = ('EVENT', 'NRPDS', 'NRGAG', 'COORD', 'GAGES', 'ACCUM',
                'RATES', 'RADAR')
    NUM_CARDS = ('NRPDS', 'NRGAG')
    VALUE_CARDS = ('GAGES', 'ACCUM', 'RATES', 'RADAR')
    result = {'description': None,
              'nrgag': None,
              'nrpds': None,
              'coords': [],
              'valLines': []}
    chunks = pt.chunk(KEYWORDS, lines)
    for card, chunkList in iteritems(chunks):
        for chunk in chunkList:
            schunk = chunk[0].strip().split()
            if card == 'EVENT':
                # splitLine preserves quoted descriptions as single tokens.
                schunk = pt.splitLine(chunk[0])
                result['description'] = schunk[1]
            elif card in NUM_CARDS:
                result[card.lower()] = schunk[1]
            elif card == 'COORD':
                schunk = pt.splitLine(chunk[0])
                # The description token is optional.
                # NOTE(review): bare except — any failure (not just a missing
                # token) yields an empty description; confirm.
                try:
                    desc = schunk[3]
                except:
                    desc = ""
                coord = {'x': schunk[1], 'y': schunk[2], 'description': desc}
                result['coords'].append(coord)
            elif card in VALUE_CARDS:
                # Tokens 1-5 are the timestamp; 6+ are per-gage values.
                dateTime = datetime(year=int(schunk[1]),
                                    month=int(schunk[2]),
                                    day=int(schunk[3]),
                                    hour=int(schunk[4]),
                                    minute=int(schunk[5]))
                values = []
                for index in range(6, len(schunk)):
                    values.append(schunk[index])
                valueLine = {'type': schunk[0],
                             'dateTime': dateTime,
                             'values': values}
                result['valLines'].append(valueLine)
    return result
|
Parse EVENT chunks
|
4,678
|
def request(self, method, path, params=None, headers=None, body=None):
    """
    Base method for making a Losant API request.

    Adds JSON/Accept-Version headers and the bearer token (when set), sends
    the request, and returns the parsed JSON body (falling back to raw text
    when the response is not JSON).

    Raises:
        LosantError: for any response with status code >= 400.
    """
    if not headers:
        headers = {}
    if not params:
        params = {}
    headers["Accept"] = "application/json"
    headers["Accept-Version"] = "^1.15.0"
    if self.auth_token:
        headers["Authorization"] = "Bearer {0}".format(self.auth_token)
    path = self.url + path
    # Nested params are flattened into bracketed keys (a[b]=...).
    params = self.flatten_params(params)
    response = requests.request(method, path, params=params,
                                headers=headers, json=body)
    result = response.text
    try:
        result = response.json()
    except Exception:
        # Non-JSON responses keep the raw text body.
        pass
    if response.status_code >= 400:
        raise LosantError(response.status_code, result)
    return result
|
Base method for making a Losant API request
|
4,679
|
def flatten_params(self, data, base_key=None):
    """
    Flatten nested lists and dicts in query params into bracketed keys,
    e.g. {'a': {'b': 1}} -> {'a[b]': 1} and {'a': [1, 2]} -> {'a[0]': 1,
    'a[1]': 2}.
    """
    result = {}
    if data is None:
        return result
    # BUGFIX: collections.Mapping was removed in Python 3.10; the ABC lives
    # in collections.abc on Python 3 (and in collections on Python 2).
    try:
        from collections.abc import Mapping
    except ImportError:  # Python 2
        from collections import Mapping
    map_data = None
    if not isinstance(data, Mapping):
        # Sequences are enumerated so indices become bracketed keys.
        map_data = []
        for idx, val in enumerate(data):
            map_data.append([str(idx), val])
    else:
        map_data = list(data.items())
    for key, value in map_data:
        if not base_key is None:
            key = base_key + "[" + key + "]"
        # Scalars (strings or non-iterables) are stored directly; any other
        # iterable is recursed into with the bracketed key as the new base.
        if isinstance(value, basestring) or not hasattr(value, "__iter__"):
            result[key] = value
        else:
            result.update(self.flatten_params(value, key))
    return result
|
Flatten out nested arrays and dicts in query params into correct format
|
4,680
|
def read_excel(input_fpath):
    """Read an Excel file into a dict mapping column name -> numpy values array."""
    frame = pd.read_excel(input_fpath)
    return dict((column, series.values) for column, series in frame.items())
|
Reads the excel file .
|
4,681
|
def save_outputs(outputs, output_fpath):
    """Save model outputs as a single sheet in an Excel file."""
    frame = pd.DataFrame(outputs)
    # ExcelWriter is used as a context manager so the file is saved/closed.
    with pd.ExcelWriter(output_fpath) as writer:
        frame.to_excel(writer)
|
Save model outputs in an Excel file .
|
4,682
|
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile):
    """
    Read a precipitation file: split it into EVENT chunks, parse each one,
    and create the corresponding GSSHAPY objects.
    """
    self.fileExtension = extension
    KEYWORDS = ('EVENT',)
    with open(path, 'r') as f:
        chunks = pt.chunk(KEYWORDS, f)
    for key, chunkList in iteritems(chunks):
        for chunk in chunkList:
            result = gak.eventChunk(key, chunk)
            self._createGsshaPyObjects(result)
    # Add the file (and its cascade of events/gages/values) to the session.
    session.add(self)
|
Precipitation Read from File Method
|
4,683
|
def _write(self, session, openFile, replaceParamFile):
    """
    Write the precipitation file: for each event write the EVENT/NRGAG/NRPDS
    header, the gage COORD lines, and the timestamped value lines with one
    column per gage.
    """
    events = self.precipEvents
    for event in events:
        openFile.write('EVENT "%s"\nNRGAG %s\nNRPDS %s\n' % (
            event.description, event.nrGag, event.nrPds))
        if event.nrGag > 0:
            values = event.values
            # Flatten the ORM values so they can be pivoted into one row per
            # (DateTime, ValueType) with one column per gage.
            valList = []
            for value in values:
                valList.append({'ValueType': value.valueType,
                                'DateTime': value.dateTime,
                                'Gage': value.gage.id,
                                'Value': value.value})
            pivotedValues = pivot.pivot(valList,
                                        ('DateTime', 'ValueType'),
                                        ('Gage',),
                                        'Value')
            # Gage order must match the pivoted column order (by id).
            gages = session.query(PrecipGage).filter(PrecipGage.event == event).order_by(PrecipGage.id).all()
            for gage in gages:
                openFile.write('COORD %s %s "%s"\n' % (
                    gage.x, gage.y, gage.description))
            for row in pivotedValues:
                valString = ''
                # Sort gage columns so values line up with the COORD order.
                keys = sorted([key for key in row
                               if key != 'DateTime' and key != 'ValueType'])
                for key in keys:
                    if key != 'DateTime' and key != 'ValueType':
                        valString = '%s %.3f' % (valString, row[key])
                openFile.write('%s %.4d %.2d %.2d %.2d %.2d%s\n' % (
                    row['ValueType'],
                    row['DateTime'].year,
                    row['DateTime'].month,
                    row['DateTime'].day,
                    row['DateTime'].hour,
                    row['DateTime'].minute,
                    valString))
|
Precipitation File Write to File Method
|
4,684
|
def _createGsshaPyObjects(self, eventChunk):
    """
    Create GSSHAPY PrecipEvent, PrecipGage, and PrecipValue objects from a
    parsed event chunk and wire up their relationships.
    """
    event = PrecipEvent(description=eventChunk['description'],
                        nrGag=eventChunk['nrgag'],
                        nrPds=eventChunk['nrpds'])
    event.precipFile = self
    gages = []
    for coord in eventChunk['coords']:
        gage = PrecipGage(description=coord['description'],
                          x=coord['x'],
                          y=coord['y'])
        gage.event = event
        gages.append(gage)
    for valLine in eventChunk['valLines']:
        # Value columns are positional: column index maps to the gage with
        # the same index in the COORD order.
        for index, value in enumerate(valLine['values']):
            val = PrecipValue(valueType=valLine['type'],
                              dateTime=valLine['dateTime'],
                              value=value)
            val.event = event
            val.gage = gages[index]
|
Create GSSHAPY PrecipEvent PrecipValue and PrecipGage Objects Method
|
4,685
|
def lookupSpatialReferenceID(cls, directory, filename):
    """
    Look up the spatial reference system ID from the projection file's
    well-known-text contents.
    """
    projection_path = os.path.join(directory, filename)
    with open(projection_path, 'r') as projection_file:
        wkt_contents = projection_file.read()
    # Delegates to the module-level lookupSpatialReferenceID helper.
    return lookupSpatialReferenceID(wkt_contents)
|
Look up spatial reference system using the projection file .
|
4,686
|
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile):
    """Read the projection file's WKT contents into self.projection."""
    self.fileExtension = extension
    with io_open(path, 'r') as projection_file:
        self.projection = projection_file.read()
|
Projection File Read from File Method
|
4,687
|
def _write(self, session, openFile, replaceParamFile):
    """Write the stored projection string to the open file."""
    projection_text = text(self.projection)
    openFile.write(projection_text)
|
Projection File Write to File Method
|
4,688
|
def numerator(self, value):
    """
    Set a new numerator and (conditionally) regenerate the ETA string.

    Must be greater than or equal to the previous numerator. When
    eta_every > 1 the expensive ETA computation is only run every
    eta_every-th update; intermediate updates record the value without
    recalculating.
    """
    if self.eta_every <= 1:
        # Recalculate on every update.
        self._eta.numerator = value
        self._eta_string = self._generate_eta(self._eta.eta_seconds)
        return
    if self._eta.undefined:
        # No denominator defined: just record the value.
        self._eta.set_numerator(value, calculate=False)
        return
    if self._eta_count >= self.eta_every:
        # Reached the throttle interval: recalculate and reset the counter.
        self._eta_count = 1
        self._eta.numerator = value
        self._eta_string = self._generate_eta(self._eta.eta_seconds)
        return
    # Intermediate update: record without the expensive calculation.
    self._eta_count += 1
    self._eta.set_numerator(value, calculate=False)
|
Sets a new numerator and generates the ETA . Must be greater than or equal to previous numerator .
|
4,689
|
def rate(self):
    """
    Progress rate as a float.

    Uses the cheaper unstable rate when eta_every > 1, otherwise the
    regression-based rate.
    """
    if self.eta_every > 1:
        return float(self._eta.rate_unstable)
    return float(self._eta.rate)
|
Returns the rate of the progress as a float . Selects the unstable rate if eta_every > 1 for performance .
|
4,690
|
def generateFromRaster(self, elevation_raster, shapefile_path=None,
                       out_elevation_grid=None,
                       resample_method=gdalconst.GRA_Average,
                       load_raster_to_db=True):
    """
    Generate an elevation grid for the GSSHA simulation from an elevation
    raster, resampled onto the project's mask grid.

    Args:
        elevation_raster: path to the source elevation raster.
        shapefile_path: optional watershed boundary shapefile; when given,
            the outlet is located along its boundary after the grid is
            written.
        out_elevation_grid: output grid filename (defaults to
            "<project name>.<file extension>").
        resample_method: GDAL resampling algorithm.
        load_raster_to_db: when True, also load the written grid into the db.

    Raises:
        ValueError: when not connected to a project file.
    """
    if not self.projectFile:
        raise ValueError("Must be connected to project file ...")
    elevation_raster = os.path.abspath(elevation_raster)
    # BUGFIX: the original called os.path.abspath(shapefile_path)
    # unconditionally, which raises TypeError whenever the documented
    # default (None) is used. Guard both uses of the optional shapefile.
    if shapefile_path is not None:
        shapefile_path = os.path.abspath(shapefile_path)
    mask_grid = self.projectFile.getGrid()
    if out_elevation_grid is None:
        out_elevation_grid = '{0}.{1}'.format(self.projectFile.name,
                                              self.fileExtension)
    elevation_grid = resample_grid(elevation_raster,
                                   mask_grid,
                                   resample_method=resample_method,
                                   as_gdal_grid=True)
    with tmp_chdir(self.projectFile.project_directory):
        elevation_grid.to_grass_ascii(out_elevation_grid, print_nodata=False)
        if load_raster_to_db:
            self._load_raster_text(out_elevation_grid)
        self.filename = out_elevation_grid
        self.projectFile.setCard("ELEVATION", out_elevation_grid,
                                 add_quotes=True)
        # Only search for the outlet when a boundary shapefile was provided.
        if shapefile_path is not None:
            self.projectFile.findOutlet(shapefile_path)
|
Generates an elevation grid for the GSSHA simulation from an elevation raster
|
4,691
|
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile):
    """Read a Storm Pipe Network file from disk into GSSHAPY objects."""
    self.fileExtension = extension

    # Map each file keyword to its chunk-parsing routine.
    KEYWORDS = {'CONNECT': spc.connectChunk,
                'SJUNC': spc.sjuncChunk,
                'SLINK': spc.slinkChunk}

    # Parsed results collected by keyword.
    results = {'CONNECT': [], 'SJUNC': [], 'SLINK': []}

    with open(path, 'r') as f:
        chunks = pt.chunk(KEYWORDS, f)
        for key, chunkList in iteritems(chunks):
            parser = KEYWORDS[key]
            for chunk in chunkList:
                results[key].append(parser(key, chunk))

    # Instantiate the GSSHAPY objects from the parsed chunks.
    self._createConnection(results['CONNECT'])
    self._createSjunc(results['SJUNC'])
    self._createSlink(results['SLINK'])
|
Storm Pipe Network File Read from File Method
|
4,692
|
def _write ( self , session , openFile , replaceParamFile ) : connections = self . connections self . _writeConnections ( connections = connections , fileObject = openFile ) sjuncs = self . superJunctions self . _writeSuperJunctions ( superJunctions = sjuncs , fileObject = openFile ) slinks = self . superLinks self . _writeSuperLinks ( superLinks = slinks , fileObject = openFile )
|
Storm Pipe Network File Write to File Method
|
4,693
|
def _createConnection(self, connections):
    """Create a GSSHAPY Connection object for each parsed connection."""
    for record in connections:
        conn = Connection(slinkNumber=record['slinkNumber'],
                          upSjuncNumber=record['upSjunc'],
                          downSjuncNumber=record['downSjunc'])
        # Associate the new connection with this file.
        conn.stormPipeNetworkFile = self
|
Create GSSHAPY Connection Objects Method
|
4,694
|
def _createSlink(self, slinks):
    """Create SuperLink, SuperNode, and Pipe objects for each parsed slink."""
    for record in slinks:
        link = SuperLink(slinkNumber=record['slinkNumber'],
                         numPipes=record['numPipes'])
        link.stormPipeNetworkFile = self

        # Nodes belonging to this super link.
        for node in record['nodes']:
            snode = SuperNode(nodeNumber=node['nodeNumber'],
                              groundSurfaceElev=node['groundSurfaceElev'],
                              invertElev=node['invertElev'],
                              manholeSA=node['manholeSA'],
                              nodeInletCode=node['inletCode'],
                              cellI=node['cellI'],
                              cellJ=node['cellJ'],
                              weirSideLength=node['weirSideLength'],
                              orificeDiameter=node['orificeDiameter'])
            snode.superLink = link

        # Pipes belonging to this super link.
        for p in record['pipes']:
            pipe = Pipe(pipeNumber=p['pipeNumber'],
                        xSecType=p['xSecType'],
                        diameterOrHeight=p['diameterOrHeight'],
                        width=p['width'],
                        slope=p['slope'],
                        roughness=p['roughness'],
                        length=p['length'],
                        conductance=p['conductance'],
                        drainSpacing=p['drainSpacing'])
            pipe.superLink = link
|
Create GSSHAPY SuperLink Pipe and SuperNode Objects Method
|
4,695
|
def _createSjunc(self, sjuncs):
    """Create a GSSHAPY SuperJunction object for each parsed sjunc."""
    for record in sjuncs:
        junction = SuperJunction(sjuncNumber=record['sjuncNumber'],
                                 groundSurfaceElev=record['groundSurfaceElev'],
                                 invertElev=record['invertElev'],
                                 manholeSA=record['manholeSA'],
                                 inletCode=record['inletCode'],
                                 linkOrCellI=record['linkOrCellI'],
                                 nodeOrCellJ=record['nodeOrCellJ'],
                                 weirSideLength=record['weirSideLength'],
                                 orificeDiameter=record['orificeDiameter'])
        # Associate the new junction with this file.
        junction.stormPipeNetworkFile = self
|
Create GSSHAPY SuperJunction Objects Method
|
4,696
|
def _writeConnections ( self , connections , fileObject ) : for connection in connections : fileObject . write ( 'CONNECT %s %s %s\n' % ( connection . slinkNumber , connection . upSjuncNumber , connection . downSjuncNumber ) )
|
Write Connections to File Method
|
4,697
|
def _writeSuperJunctions ( self , superJunctions , fileObject ) : for sjunc in superJunctions : fileObject . write ( 'SJUNC %s %.2f %.2f %.6f %s %s %s %.6f %.6f\n' % ( sjunc . sjuncNumber , sjunc . groundSurfaceElev , sjunc . invertElev , sjunc . manholeSA , sjunc . inletCode , sjunc . linkOrCellI , sjunc . nodeOrCellJ , sjunc . weirSideLength , sjunc . orificeDiameter ) )
|
Write SuperJunctions to File Method
|
4,698
|
def _writeSuperLinks ( self , superLinks , fileObject ) : for slink in superLinks : fileObject . write ( 'SLINK %s %s\n' % ( slink . slinkNumber , slink . numPipes ) ) for node in slink . superNodes : fileObject . write ( 'NODE %s %.2f %.2f %.6f %s %s %s %.6f %.6f\n' % ( node . nodeNumber , node . groundSurfaceElev , node . invertElev , node . manholeSA , node . nodeInletCode , node . cellI , node . cellJ , node . weirSideLength , node . orificeDiameter ) ) for pipe in slink . pipes : fileObject . write ( 'PIPE %s %s %.6f %.6f %.6f %.6f %.2f %.6f %.6f\n' % ( pipe . pipeNumber , pipe . xSecType , pipe . diameterOrHeight , pipe . width , pipe . slope , pipe . roughness , pipe . length , pipe . conductance , pipe . drainSpacing ) )
|
Write SuperLinks to File Method
|
4,699
|
def ziegler_nichols(self, ku, tu, control_type='pid'):
    """Tune kp, ki, kd from the ultimate gain and oscillation period.

    ku           -- ultimate gain
    tu           -- period of oscillation at the ultimate gain
    control_type -- tuning rule: 'p', 'pi', 'pd', 'pid', 'pessen',
                    'some_overshoot', or 'no_overshoot'

    Raises KeyError for an unknown control_type (lowercased).
    """
    key = control_type.lower()
    # Classic Ziegler-Nichols tuning table, selected by rule name.
    if key == 'p':
        gains = (.5 * ku, 0, 0)
    elif key == 'pi':
        gains = (.45 * ku, 1.2 * (.45 * ku) / tu, 0)
    elif key == 'pd':
        gains = (.8 * ku, 0, (.8 * ku) * tu / 8)
    elif key == 'pid':
        gains = (.6 * ku, 2 * (.6 * ku) / tu, (.6 * ku) * tu / 8)
    elif key == 'pessen':
        gains = (.7 * ku, 2.5 * (.7 * ku) / tu, 3 * (.7 * ku) * tu / 20)
    elif key == 'some_overshoot':
        gains = (.33 * ku, 2 * (.33 * ku) / tu, (.33 * ku) * tu / 3)
    elif key == 'no_overshoot':
        gains = (.2 * ku, 2 * (.2 * ku) / tu, (.2 * ku) * tu / 3)
    else:
        # Match the original dict-lookup failure mode.
        raise KeyError(key)
    self.kp, self.ki, self.kd = gains
|
ku = ultimate gain; tu = period of oscillation at the ultimate gain.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.