idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars)
|---|---|---|
14,400 | def median_filter ( tr , multiplier = 10 , windowlength = 0.5 , interp_len = 0.05 , debug = 0 ) : num_cores = cpu_count ( ) if debug >= 1 : data_in = tr . copy ( ) filt = tr . copy ( ) filt . detrend ( 'linear' ) try : filt . filter ( 'bandpass' , freqmin = 10.0 , freqmax = ( tr . stats . sampling_rate / 2 ) - 1 ) exce... | Filter out spikes in data above a multiple of MAD of the data . |
14,401 | def _median_window ( window , window_start , multiplier , starttime , sampling_rate , debug = 0 ) : MAD = np . median ( np . abs ( window ) ) thresh = multiplier * MAD if debug >= 2 : print ( 'Threshold for window is: ' + str ( thresh ) + '\nMedian is: ' + str ( MAD ) + '\nMax is: ' + str ( np . max ( window ) ) ) peak... | Internal function to aid parallel processing |
14,402 | def _interp_gap ( data , peak_loc , interp_len ) : start_loc = peak_loc - int ( 0.5 * interp_len ) end_loc = peak_loc + int ( 0.5 * interp_len ) if start_loc < 0 : start_loc = 0 if end_loc > len ( data ) - 1 : end_loc = len ( data ) - 1 fill = np . linspace ( data [ start_loc ] , data [ end_loc ] , end_loc - start_loc ... | Internal function for filling gap with linear interpolation |
14,403 | def template_remove ( tr , template , cc_thresh , windowlength , interp_len , debug = 0 ) : data_in = tr . copy ( ) _interp_len = int ( tr . stats . sampling_rate * interp_len ) if _interp_len < len ( template . data ) : warnings . warn ( 'Interp_len is less than the length of the template,' 'will used the length of th... | Looks for instances of template in the trace and removes the matches . |
14,404 | def read_data ( archive , arc_type , day , stachans , length = 86400 ) : st = [ ] available_stations = _check_available_data ( archive , arc_type , day ) for station in stachans : if len ( station [ 1 ] ) == 2 : station_map = ( station [ 0 ] , station [ 1 ] [ 0 ] + '*' + station [ 1 ] [ 1 ] ) available_stations_map = [... | Function to read the appropriate data from an archive for a day . |
14,405 | def _get_station_file ( path_name , station , channel , debug = 0 ) : wavfiles = glob . glob ( path_name + os . sep + '*' ) out_files = [ _check_data ( wavfile , station , channel , debug = debug ) for wavfile in wavfiles ] out_files = list ( set ( out_files ) ) return out_files | Helper function to find the correct file . |
14,406 | def _check_data ( wavfile , station , channel , debug = 0 ) : if debug > 1 : print ( 'Checking ' + wavfile ) st = read ( wavfile , headonly = True ) for tr in st : if tr . stats . station == station and tr . stats . channel == channel : return wavfile | Inner loop for parallel checks . |
14,407 | def _check_available_data ( archive , arc_type , day ) : available_stations = [ ] if arc_type . lower ( ) == 'day_vols' : wavefiles = glob . glob ( os . path . join ( archive , day . strftime ( 'Y%Y' ) , day . strftime ( 'R%j.01' ) , '*' ) ) for wavefile in wavefiles : header = read ( wavefile , headonly = True ) avail... | Function to check what stations are available in the archive for a given day .
14,408 | def rt_time_log ( logfile , startdate ) : if os . name == 'nt' : f = io . open ( logfile , 'rb' ) else : f = io . open ( logfile , 'rb' ) phase_err = [ ] lock = [ ] for line_binary in f : try : line = line_binary . decode ( "utf8" , "ignore" ) except UnicodeDecodeError : warnings . warn ( 'Cannot decode line, skipping'... | Open and read reftek raw log - file . |
14,409 | def rt_location_log ( logfile ) : if os . name == 'nt' : f = open ( logfile , 'rb' ) else : f = open ( logfile , 'rb' ) locations = [ ] for line_binary in f : try : line = line_binary . decode ( "utf8" , "ignore" ) except UnicodeDecodeError : warnings . warn ( 'Cannot decode line, skipping' ) print ( line_binary ) cont... | Extract location information from a RefTek raw log - file . |
14,410 | def flag_time_err ( phase_err , time_thresh = 0.02 ) : time_err = [ ] for stamp in phase_err : if abs ( stamp [ 1 ] ) > time_thresh : time_err . append ( stamp [ 0 ] ) return time_err | Find large time errors in list . |
14,411 | def check_all_logs ( directory , time_thresh ) : log_files = glob . glob ( directory + '/*/0/000000000_00000000' ) print ( 'I have ' + str ( len ( log_files ) ) + ' log files to scan' ) total_phase_errs = [ ] for i , log_file in enumerate ( log_files ) : startdate = dt . datetime . strptime ( log_file . split ( '/' ) [... | Check all the log - files in a directory tree for timing errors . |
14,412 | def _cc_round ( num , dp ) : num = round ( num , dp ) num = '{0:.{1}f}' . format ( num , dp ) return num | Convenience function to take a float and round it to dp , padding with zeros , to return a string .
14,413 | def readSTATION0 ( path , stations ) : stalist = [ ] f = open ( path + '/STATION0.HYP' , 'r' ) for line in f : if line [ 1 : 6 ] . strip ( ) in stations : station = line [ 1 : 6 ] . strip ( ) lat = line [ 6 : 14 ] if lat [ - 1 ] == 'S' : NS = - 1 else : NS = 1 if lat [ 4 ] == '.' : lat = ( int ( lat [ 0 : 2 ] ) + float... | Read a Seisan STATION0 . HYP file on the path given . |
14,414 | def sfiles_to_event ( sfile_list ) : event_list = [ ] sort_list = [ ( readheader ( sfile ) . origins [ 0 ] . time , sfile ) for sfile in sfile_list ] sort_list . sort ( key = lambda tup : tup [ 0 ] ) sfile_list = [ sfile [ 1 ] for sfile in sort_list ] catalog = Catalog ( ) for i , sfile in enumerate ( sfile_list ) : ev... | Write an event . dat file from a list of Seisan events |
14,415 | def write_event ( catalog ) : f = open ( 'event.dat' , 'w' ) for i , event in enumerate ( catalog ) : try : evinfo = event . origins [ 0 ] except IndexError : raise IOError ( 'No origin' ) try : Mag_1 = event . magnitudes [ 0 ] . mag except IndexError : Mag_1 = 0.0 try : t_RMS = event . origins [ 0 ] . quality [ 'stand... | Write obspy . core . event . Catalog to a hypoDD format event . dat file . |
14,416 | def read_phase ( ph_file ) : ph_catalog = Catalog ( ) f = open ( ph_file , 'r' ) for line in f : if line [ 0 ] == '#' : if 'event_text' not in locals ( ) : event_text = { 'header' : line . rstrip ( ) , 'picks' : [ ] } else : ph_catalog . append ( _phase_to_event ( event_text ) ) event_text = { 'header' : line . rstrip ... | Read hypoDD phase files into Obspy catalog class . |
14,417 | def _phase_to_event ( event_text ) : ph_event = Event ( ) header = event_text [ 'header' ] . split ( ) ph_event . origins . append ( Origin ( ) ) ph_event . origins [ 0 ] . time = UTCDateTime ( year = int ( header [ 1 ] ) , month = int ( header [ 2 ] ) , day = int ( header [ 3 ] ) , hour = int ( header [ 4 ] ) , minute... | Function to convert the text for one event in hypoDD phase format to an event object .
14,418 | def extract_from_stack ( stack , template , length , pre_pick , pre_pad , Z_include = False , pre_processed = True , samp_rate = None , lowcut = None , highcut = None , filt_order = 3 ) : new_template = stack . copy ( ) mintime = min ( [ tr . stats . starttime for tr in template ] ) delays = [ ( tr . stats . station , ... | Extract a multiplexed template from a stack of detections . |
14,419 | def _group_events ( catalog , process_len , template_length , data_pad ) : if len ( catalog ) == 1 : return [ catalog ] sub_catalogs = [ ] catalog . events = sorted ( catalog . events , key = lambda e : ( e . preferred_origin ( ) or e . origins [ 0 ] ) . time ) sub_catalog = Catalog ( [ catalog [ 0 ] ] ) for event in c... | Internal function to group events into sub - catalogs based on process_len . |
14,420 | def multi_template_gen ( catalog , st , length , swin = 'all' , prepick = 0.05 , all_horiz = False , delayed = True , plot = False , debug = 0 , return_event = False , min_snr = None ) : EQcorrscanDeprecationWarning ( "Function is depreciated and will be removed soon. Use " "template_gen.template_gen instead." ) temp_l... | Generate multiple templates from one stream of data . |
14,421 | def from_client ( catalog , client_id , lowcut , highcut , samp_rate , filt_order , length , prepick , swin , process_len = 86400 , data_pad = 90 , all_horiz = False , delayed = True , plot = False , debug = 0 , return_event = False , min_snr = None ) : EQcorrscanDeprecationWarning ( "Function is depreciated and will b... | Generate multiplexed template from FDSN client . |
14,422 | def from_sac ( sac_files , lowcut , highcut , samp_rate , filt_order , length , swin , prepick , all_horiz = False , delayed = True , plot = False , debug = 0 , return_event = False , min_snr = None ) : EQcorrscanDeprecationWarning ( "Function is depreciated and will be removed soon. Use " "template_gen.template_gen in... | Generate a multiplexed template from a list of SAC files . |
14,423 | def time_func ( func , name , * args , ** kwargs ) : tic = time . time ( ) out = func ( * args , ** kwargs ) toc = time . time ( ) print ( '%s took %0.2f seconds' % ( name , toc - tic ) ) return out | Call a func with args and kwargs ; print the name of the func and how long it took .
14,424 | def seis_sim ( sp , amp_ratio = 1.5 , flength = False , phaseout = 'all' ) : if flength and 2.5 * sp < flength and 100 < flength : additional_length = flength elif 2.5 * sp < 100.0 : additional_length = 100 else : additional_length = 2.5 * sp synth = np . zeros ( int ( sp + 10 + additional_length ) ) synth [ 10 ] = 1.0... | Generate a simulated seismogram from a given S - P time . |
14,425 | def SVD_sim ( sp , lowcut , highcut , samp_rate , amp_range = np . arange ( - 10 , 10 , 0.01 ) ) : sp = int ( sp * samp_rate ) synthetics = [ Stream ( Trace ( seis_sim ( sp , a ) ) ) for a in amp_range ] for st in synthetics : for tr in st : tr . stats . station = 'SYNTH' tr . stats . channel = 'SH1' tr . stats . sampl... | Generate basis vectors of a set of simulated seismograms . |
14,426 | def template_grid ( stations , nodes , travel_times , phase , PS_ratio = 1.68 , samp_rate = 100 , flength = False , phaseout = 'all' ) : if phase not in [ 'S' , 'P' ] : raise IOError ( 'Phase is neither P nor S' ) templates = [ ] for i , node in enumerate ( nodes ) : st = [ ] for j , station in enumerate ( stations ) :... | Generate a group of synthetic seismograms for a grid of sources . |
14,427 | def generate_synth_data ( nsta , ntemplates , nseeds , samp_rate , t_length , max_amp , max_lag , debug = 0 ) : t_times = np . abs ( np . random . random ( [ nsta , ntemplates ] ) ) * max_lag lats = np . random . random ( ntemplates ) * 90.0 lons = np . random . random ( ntemplates ) * 90.0 depths = np . abs ( np . ran... | Generate a synthetic dataset to be used for testing . |
14,428 | def linstack ( streams , normalize = True ) : stack = streams [ np . argmax ( [ len ( stream ) for stream in streams ] ) ] . copy ( ) if normalize : for tr in stack : tr . data = tr . data / np . sqrt ( np . mean ( np . square ( tr . data ) ) ) tr . data = np . nan_to_num ( tr . data ) for i in range ( 1 , len ( stream... | Compute the linear stack of a series of seismic streams of multiplexed data .
14,429 | def PWS_stack ( streams , weight = 2 , normalize = True ) : Linstack = linstack ( streams ) instaphases = [ ] print ( "Computing instantaneous phase" ) for stream in streams : instaphase = stream . copy ( ) for tr in instaphase : analytic = hilbert ( tr . data ) envelope = np . sqrt ( np . sum ( ( np . square ( analyti... | Compute the phase weighted stack of a series of streams . |
14,430 | def align_traces ( trace_list , shift_len , master = False , positive = False , plot = False ) : from eqcorrscan . core . match_filter import normxcorr2 from eqcorrscan . utils . plotting import xcorr_plot traces = deepcopy ( trace_list ) if not master : master = traces [ 0 ] MAD_master = np . median ( np . abs ( maste... | Align traces relative to each other based on their cross - correlation value . |
14,431 | def temporary_directory ( ) : dir_name = tempfile . mkdtemp ( ) yield dir_name if os . path . exists ( dir_name ) : shutil . rmtree ( dir_name ) | Make a temporary directory , yield its name , and clean up on exit .
14,432 | def _total_microsec ( t1 , t2 ) : td = t1 - t2 return ( td . seconds + td . days * 24 * 3600 ) * 10 ** 6 + td . microseconds | Calculate difference between two datetime stamps in microseconds . |
14,433 | def _templates_match ( t , family_file ) : return t . name == family_file . split ( os . sep ) [ - 1 ] . split ( '_detections.csv' ) [ 0 ] | Return True if a tribe matches a family file path . |
14,434 | def _group_process ( template_group , parallel , debug , cores , stream , daylong , ignore_length , overlap ) : master = template_group [ 0 ] processed_streams = [ ] kwargs = { 'filt_order' : master . filt_order , 'highcut' : master . highcut , 'lowcut' : master . lowcut , 'samp_rate' : master . samp_rate , 'debug' : d... | Process data into chunks based on template processing length . |
14,435 | def _par_read ( dirname , compressed = True ) : templates = [ ] if compressed : arc = tarfile . open ( dirname , "r:*" ) members = arc . getmembers ( ) _parfile = [ member for member in members if member . name . split ( os . sep ) [ - 1 ] == 'template_parameters.csv' ] if len ( _parfile ) == 0 : arc . close ( ) raise ... | Internal function to read a formatted parameter file .
14,436 | def _badpath ( path , base ) : return not _resolved ( os . path . join ( base , path ) ) . startswith ( base ) | joinpath will ignore base if path is absolute . |
14,437 | def _badlink ( info , base ) : tip = _resolved ( os . path . join ( base , os . path . dirname ( info . name ) ) ) return _badpath ( info . linkname , base = tip ) | Links are interpreted relative to the directory containing the link |
14,438 | def _safemembers ( members ) : base = _resolved ( "." ) for finfo in members : if _badpath ( finfo . name , base ) : print ( finfo . name , "is blocked (illegal path)" ) elif finfo . issym ( ) and _badlink ( finfo , base ) : print ( finfo . name , "is blocked: Hard link to" , finfo . linkname ) elif finfo . islnk ( ) a... | Check members of a tar archive for safety . Ensure that they do not contain paths or links outside of where we need them - this would only happen if the archive wasn't made by eqcorrscan .
14,439 | def _write_family ( family , filename ) : with open ( filename , 'w' ) as f : for detection in family . detections : det_str = '' for key in detection . __dict__ . keys ( ) : if key == 'event' and detection . __dict__ [ key ] is not None : value = str ( detection . event . resource_id ) elif key in [ 'threshold' , 'det... | Write a family to a csv file . |
14,440 | def _read_family ( fname , all_cat , template ) : detections = [ ] with open ( fname , 'r' ) as f : for line in f : det_dict = { } gen_event = False for key_pair in line . rstrip ( ) . split ( ';' ) : key = key_pair . split ( ': ' ) [ 0 ] . strip ( ) value = key_pair . split ( ': ' ) [ - 1 ] . strip ( ) if key == 'even... | Internal function to read csv family files . |
14,441 | def read_party ( fname = None , read_detection_catalog = True ) : party = Party ( ) party . read ( filename = fname , read_detection_catalog = read_detection_catalog ) return party | Read detections and metadata from a tar archive . |
14,442 | def read_detections ( fname ) : f = open ( fname , 'r' ) detections = [ ] for index , line in enumerate ( f ) : if index == 0 : continue if line . rstrip ( ) . split ( '; ' ) [ 0 ] == 'Template name' : continue detection = line . rstrip ( ) . split ( '; ' ) detection [ 1 ] = UTCDateTime ( detection [ 1 ] ) detection [ ... | Read detections from a file to a list of Detection objects . |
14,443 | def write_catalog ( detections , fname , format = "QUAKEML" ) : catalog = get_catalog ( detections ) catalog . write ( filename = fname , format = format ) | Write events contained within detections to a catalog file . |
14,444 | def extract_from_stream ( stream , detections , pad = 5.0 , length = 30.0 ) : streams = [ ] for detection in detections : cut_stream = Stream ( ) for pick in detection . event . picks : tr = stream . select ( station = pick . waveform_id . station_code , channel = pick . waveform_id . channel_code ) if len ( tr ) == 0 ... | Extract waveforms for a list of detections from a stream . |
14,445 | def normxcorr2 ( template , image ) : array_xcorr = get_array_xcorr ( ) if type ( template ) != np . ndarray or type ( image ) != np . ndarray : print ( 'You have not provided numpy arrays, I will not convert them' ) return 'NaN' if len ( template ) > len ( image ) : ccc = array_xcorr ( templates = np . array ( [ image... | Thin wrapper to eqcorrscan . utils . correlate functions . |
14,446 | def select ( self , template_name ) : return [ fam for fam in self . families if fam . template . name == template_name ] [ 0 ] | Select a specific family from the party . |
14,447 | def sort ( self ) : self . families . sort ( key = lambda x : x . template . name ) return self | Sort the families by template name . |
14,448 | def filter ( self , dates = None , min_dets = 1 ) : if dates is None : raise MatchFilterError ( 'Need a list defining a date range' ) new_party = Party ( ) for fam in self . families : new_fam = Family ( template = fam . template , detections = [ det for det in fam if dates [ 0 ] < det . detect_time < dates [ 1 ] ] ) i... | Return a new Party filtered according to conditions . |
14,449 | def plot ( self , plot_grouped = False , dates = None , min_dets = 1 , rate = False , ** kwargs ) : all_dets = [ ] if dates : new_party = self . filter ( dates = dates , min_dets = min_dets ) for fam in new_party . families : all_dets . extend ( fam . detections ) else : for fam in self . families : all_dets . extend (... | Plot the cumulative detections in time . |
14,450 | def rethreshold ( self , new_threshold , new_threshold_type = 'MAD' ) : for family in self . families : rethresh_detections = [ ] for d in family . detections : if new_threshold_type == 'MAD' and d . threshold_type == 'MAD' : new_thresh = ( d . threshold / d . threshold_input ) * new_threshold elif new_threshold_type =... | Remove detections from the Party that are below a new threshold . |
14,451 | def decluster ( self , trig_int , timing = 'detect' , metric = 'avg_cor' ) : all_detections = [ ] for fam in self . families : all_detections . extend ( fam . detections ) if timing == 'detect' : if metric == 'avg_cor' : detect_info = [ ( d . detect_time , d . detect_val / d . no_chans ) for d in all_detections ] elif ... | De - cluster a Party of detections by enforcing a detection separation . |
14,452 | def read ( self , filename = None , read_detection_catalog = True ) : tribe = Tribe ( ) families = [ ] if filename is None : filename = os . path . join ( os . path . dirname ( __file__ ) , '..' , 'tests' , 'test_data' , 'test_party.tgz' ) if isinstance ( filename , list ) : filenames = [ ] for _filename in filename : ... | Read a Party from a file . |
14,453 | def get_catalog ( self ) : catalog = Catalog ( ) for fam in self . families : if len ( fam . catalog ) != 0 : catalog . events . extend ( fam . catalog . events ) return catalog | Get an obspy catalog object from the party . |
14,454 | def min_chans ( self , min_chans ) : declustered = Party ( ) for family in self . families : fam = Family ( family . template ) for d in family . detections : if d . no_chans > min_chans : fam . detections . append ( d ) declustered . families . append ( fam ) self . families = declustered . families return self | Remove detections with fewer channels used than min_chans |
14,455 | def _uniq ( self ) : _detections = [ ] [ _detections . append ( d ) for d in self . detections if not _detections . count ( d ) ] self . detections = _detections return self | Get list of unique detections . Works in place . |
14,456 | def sort ( self ) : self . detections = sorted ( self . detections , key = lambda d : d . detect_time ) return self | Sort by detection time . |
14,457 | def plot ( self , plot_grouped = False ) : cumulative_detections ( detections = self . detections , plot_grouped = plot_grouped ) | Plot the cumulative number of detections in time . |
14,458 | def same_processing ( self , other ) : for key in self . __dict__ . keys ( ) : if key in [ 'name' , 'st' , 'prepick' , 'event' , 'template_info' ] : continue if not self . __dict__ [ key ] == other . __dict__ [ key ] : return False return True | Check if the templates are processed the same .
14,459 | def write ( self , filename , format = 'tar' ) : if format == 'tar' : Tribe ( templates = [ self ] ) . write ( filename = filename ) else : self . st . write ( filename , format = format ) return self | Write template . |
14,460 | def read ( self , filename ) : tribe = Tribe ( ) tribe . read ( filename = filename ) if len ( tribe ) > 1 : raise IOError ( 'Multiple templates in file' ) for key in self . __dict__ . keys ( ) : self . __dict__ [ key ] = tribe [ 0 ] . __dict__ [ key ] return self | Read template from tar format with metadata . |
14,461 | def detect ( self , stream , threshold , threshold_type , trig_int , plotvar , pre_processed = False , daylong = False , parallel_process = True , xcorr_func = None , concurrency = None , cores = None , ignore_length = False , overlap = "calculate" , debug = 0 , full_peaks = False ) : party = _group_detect ( templates ... | Detect using a single template within a continuous stream . |
14,462 | def construct ( self , method , name , lowcut , highcut , samp_rate , filt_order , prepick , ** kwargs ) : if method in [ 'from_meta_file' , 'from_seishub' , 'from_client' , 'multi_template_gen' ] : raise NotImplementedError ( 'Method is not supported, ' 'use Tribe.construct instead.' ) streams , events , process_lengt... | Construct a template using a given method . |
14,463 | def sort ( self ) : self . templates = sorted ( self . templates , key = lambda x : x . name ) return self | Sort the tribe by template name .
14,464 | def select ( self , template_name ) : return [ t for t in self . templates if t . name == template_name ] [ 0 ] | Select a particular template from the tribe . |
14,465 | def remove ( self , template ) : self . templates = [ t for t in self . templates if t != template ] return self | Remove a template from the tribe . |
14,466 | def write ( self , filename , compress = True , catalog_format = "QUAKEML" ) : if catalog_format not in CAT_EXT_MAP . keys ( ) : raise TypeError ( "{0} is not supported" . format ( catalog_format ) ) if not os . path . isdir ( filename ) : os . makedirs ( filename ) self . _par_write ( filename ) tribe_cat = Catalog ( ... | Write the tribe to a file using tar archive formatting . |
14,467 | def _par_write ( self , dirname ) : filename = dirname + '/' + 'template_parameters.csv' with open ( filename , 'w' ) as parfile : for template in self . templates : for key in template . __dict__ . keys ( ) : if key not in [ 'st' , 'event' ] : parfile . write ( key + ': ' + str ( template . __dict__ [ key ] ) + ', ' )... | Internal write function to write a formatted parameter file . |
14,468 | def read ( self , filename ) : with tarfile . open ( filename , "r:*" ) as arc : temp_dir = tempfile . mkdtemp ( ) arc . extractall ( path = temp_dir , members = _safemembers ( arc ) ) tribe_dir = glob . glob ( temp_dir + os . sep + '*' ) [ 0 ] self . _read_from_folder ( dirname = tribe_dir ) shutil . rmtree ( temp_dir... | Read a tribe of templates from a tar formatted file . |
14,469 | def _read_from_folder ( self , dirname ) : templates = _par_read ( dirname = dirname , compressed = False ) t_files = glob . glob ( dirname + os . sep + '*.ms' ) tribe_cat_file = glob . glob ( os . path . join ( dirname , "tribe_cat.*" ) ) if len ( tribe_cat_file ) != 0 : tribe_cat = read_events ( tribe_cat_file [ 0 ] ... | Internal folder reader . |
14,470 | def cluster ( self , method , ** kwargs ) : from eqcorrscan . utils import clustering tribes = [ ] func = getattr ( clustering , method ) if method in [ 'space_cluster' , 'space_time_cluster' ] : cat = Catalog ( [ t . event for t in self . templates ] ) groups = func ( cat , ** kwargs ) for group in groups : new_tribe ... | Cluster the tribe . |
14,471 | def construct ( self , method , lowcut , highcut , samp_rate , filt_order , prepick , save_progress = False , ** kwargs ) : templates , catalog , process_lengths = template_gen . template_gen ( method = method , lowcut = lowcut , highcut = highcut , filt_order = filt_order , samp_rate = samp_rate , prepick = prepick , ... | Generate a Tribe of Templates . |
14,472 | def write ( self , fname , append = True ) : mode = 'w' if append and os . path . isfile ( fname ) : mode = 'a' header = '; ' . join ( [ 'Template name' , 'Detection time (UTC)' , 'Number of channels' , 'Channel list' , 'Detection value' , 'Threshold' , 'Threshold type' , 'Input threshold' , 'Detection type' ] ) print_... | Write detection to csv formatted file . |
14,473 | def _calculate_event ( self , template = None , template_st = None ) : if template is not None and template . name != self . template_name : print ( "Template names do not match: {0}: {1}" . format ( template . name , self . template_name ) ) return det_time = str ( self . detect_time . strftime ( '%Y%m%dT%H%M%S.%f' ) ... | Calculate an event for this detection using a given template . |
14,474 | def mktemplates ( network_code = 'GEONET' , publicIDs = [ '2016p008122' , '2016p008353' , '2016p008155' , '2016p008194' ] , plot = True ) : client = Client ( network_code ) catalog = Catalog ( ) for publicID in publicIDs : if network_code == 'GEONET' : data_stream = client . _download ( 'http://quakeml.geonet.org.nz/qu... | Functional wrapper to make templates |
14,475 | def _read_tt ( path , stations , phase , phaseout = 'S' , ps_ratio = 1.68 , lags_switch = True ) : gridfiles = [ ] stations_out = [ ] for station in stations : gridfiles += ( glob . glob ( path + '*.' + phase + '.' + station + '.time.csv' ) ) if glob . glob ( path + '*.' + phase + '.' + station + '*.csv' ) : stations_o... | Read in . csv files of slowness generated from Grid2Time . |
14,476 | def _resample_grid ( stations , nodes , lags , mindepth , maxdepth , corners ) : resamp_nodes = [ ] resamp_lags = [ ] for i , node in enumerate ( nodes ) : if mindepth < float ( node [ 2 ] ) < maxdepth and corners . contains_point ( node [ 0 : 2 ] ) : resamp_nodes . append ( node ) resamp_lags . append ( [ lags [ : , i... | Resample the lagtime grid to a given volume . |
14,477 | def _rm_similarlags ( stations , nodes , lags , threshold ) : netdif = abs ( ( lags . T - lags . T [ 0 ] ) . sum ( axis = 1 ) . reshape ( 1 , len ( nodes ) ) ) > threshold for i in range ( len ( nodes ) ) : _netdif = abs ( ( lags . T - lags . T [ i ] ) . sum ( axis = 1 ) . reshape ( 1 , len ( nodes ) ) ) > threshold ne... | Remove nodes that have a very similar network moveout to another node . |
14,478 | def _cum_net_resp ( node_lis , instance = 0 ) : cum_net_resp = np . load ( 'tmp' + str ( instance ) + '/node_' + str ( node_lis [ 0 ] ) + '.npy' ) [ 0 ] os . remove ( 'tmp' + str ( instance ) + '/node_' + str ( node_lis [ 0 ] ) + '.npy' ) indices = np . ones ( len ( cum_net_resp ) ) * node_lis [ 0 ] for i in node_lis [... | Compute the cumulative network response by reading saved energy . npy files . |
14,479 | def _find_detections ( cum_net_resp , nodes , threshold , thresh_type , samp_rate , realstations , length ) : cum_net_resp = np . nan_to_num ( cum_net_resp ) if np . isnan ( cum_net_resp ) . any ( ) : raise ValueError ( "Nans present" ) print ( 'Mean of data is: ' + str ( np . median ( cum_net_resp ) ) ) print ( 'RMS o... | Find detections within the cumulative network response . |
14,480 | def coherence ( stream_in , stations = [ 'all' ] , clip = False ) : stream = stream_in . copy ( ) maxlen = np . max ( [ len ( tr . data ) for tr in stream ] ) if maxlen == 0 : warnings . warn ( 'template without data' ) return 0.0 , len ( stream ) if not stations [ 0 ] == 'all' : for tr in stream : if tr . stats . stat... | Determine the average network coherence of a given template or detection . |
14,481 | def _do_ffts ( detector , stream , Nc ) : min_fftlen = int ( stream [ 0 ] [ 0 ] . data . shape [ 0 ] + detector . data [ 0 ] . shape [ 0 ] - Nc ) fftlen = scipy . fftpack . next_fast_len ( min_fftlen ) mplen = stream [ 0 ] [ 0 ] . data . shape [ 0 ] ulen = detector . data [ 0 ] . shape [ 0 ] num_st_fd = [ np . fft . rf... | Perform FFTs on the data , detector , and denominator boxcar .
14,482 | def _det_stat_freq ( det_freq , data_freq_sq , data_freq , w , Nc , ulen , mplen ) : num_cor = np . multiply ( det_freq , data_freq ) den_cor = np . multiply ( w , data_freq_sq ) num_ifft = np . real ( np . fft . irfft ( num_cor ) ) [ : , ulen - 1 : mplen : Nc ] denominator = np . real ( np . fft . irfft ( den_cor ) ) ... | Compute detection statistic in the frequency domain |
14,483 | def multi ( stream ) : stack = stream [ 0 ] . data for tr in stream [ 1 : ] : stack = np . dstack ( np . array ( [ stack , tr . data ] ) ) multiplex = stack . reshape ( stack . size , ) return multiplex | Internal multiplexer for multiplex_detect . |
14,484 | def subspace_detect ( detectors , stream , threshold , trig_int , moveout = 0 , min_trig = 1 , parallel = True , num_cores = None ) : from multiprocessing import Pool , cpu_count parameters = [ ] detections = [ ] for detector in detectors : parameter = ( detector . lowcut , detector . highcut , detector . filt_order , ... | Conduct subspace detection with chosen detectors . |
14,485 | def construct ( self , streams , lowcut , highcut , filt_order , sampling_rate , multiplex , name , align , shift_len = 0 , reject = 0.3 , no_missed = True , plot = False ) : self . lowcut = lowcut self . highcut = highcut self . filt_order = filt_order self . sampling_rate = sampling_rate self . name = name self . mul... | Construct a subspace detector from a list of streams , full rank .
14,486 | def partition ( self , dimension ) : for i , channel in enumerate ( self . u ) : if self . v [ i ] . shape [ 1 ] < dimension : raise IndexError ( 'Channel is max dimension %s' % self . v [ i ] . shape [ 1 ] ) self . data [ i ] = channel [ : , 0 : dimension ] self . dimension = dimension return self | Partition subspace into desired dimension . |
14,487 | def energy_capture ( self , stachans = 'all' , size = ( 10 , 7 ) , show = False ) : if show : return subspace_fc_plot ( detector = self , stachans = stachans , size = size , show = show ) percent_capture = 0 if np . isinf ( self . dimension ) : return 100 for channel in self . sigma : fc = np . sum ( channel [ 0 : self... | Calculate the average percentage energy capture for this subspace . |
14,488 | def write ( self , filename ) : f = h5py . File ( filename , "w" ) data_group = f . create_group ( name = "data" ) for i , data in enumerate ( self . data ) : dset = data_group . create_dataset ( name = "data_" + str ( i ) , shape = data . shape , dtype = data . dtype ) dset [ ... ] = data data_group . attrs [ 'length'... | Write detector to a file - uses HDF5 file format . |
14,489 | def read ( self , filename ) : f = h5py . File ( filename , "r" ) self . data = [ ] for i in range ( f [ 'data' ] . attrs [ 'length' ] ) : self . data . append ( f [ 'data' ] [ 'data_' + str ( i ) ] . value ) self . u = [ ] for i in range ( f [ 'u' ] . attrs [ 'length' ] ) : self . u . append ( f [ 'u' ] [ 'u_' + str (... | Read detector from a file - must be HDF5 format .
14,490 | def plot ( self , stachans = 'all' , size = ( 10 , 7 ) , show = True ) : return subspace_detector_plot ( detector = self , stachans = stachans , size = size , show = show ) | Plot the output basis vectors for the detector at the given dimension . |
14,491 | def export_symbols ( * path ) : lines = open ( os . path . join ( * path ) , 'r' ) . readlines ( ) [ 2 : ] return [ s . strip ( ) for s in lines if s . strip ( ) != '' ] | Required for windows systems - functions defined in libutils . def . |
14,492 | def dist_calc ( loc1 , loc2 ) : R = 6371.009 dlat = np . radians ( abs ( loc1 [ 0 ] - loc2 [ 0 ] ) ) dlong = np . radians ( abs ( loc1 [ 1 ] - loc2 [ 1 ] ) ) ddepth = abs ( loc1 [ 2 ] - loc2 [ 2 ] ) mean_lat = np . radians ( ( loc1 [ 0 ] + loc2 [ 0 ] ) / 2 ) dist = R * np . sqrt ( dlat ** 2 + ( np . cos ( mean_lat ) * ... | Function to calculate the distance in km between two points . |
14,493 | def calc_max_curv ( magnitudes , plotvar = False ) : counts = Counter ( magnitudes ) df = np . zeros ( len ( counts ) ) mag_steps = np . zeros ( len ( counts ) ) grad = np . zeros ( len ( counts ) - 1 ) grad_points = grad . copy ( ) for i , magnitude in enumerate ( sorted ( counts . keys ( ) , reverse = True ) ) : mag_... | Calculate the magnitude of completeness using the maximum curvature method . |
14,494 | def _sim_WA ( trace , PAZ , seedresp , water_level , velocity = False ) : PAZ_WA = { 'poles' : [ - 6.283 + 4.7124j , - 6.283 - 4.7124j ] , 'zeros' : [ 0 + 0j ] , 'gain' : 1.0 , 'sensitivity' : 2080 } if velocity : PAZ_WA [ 'zeros' ] = [ 0 + 0j , 0 + 0j ] trace . detrend ( 'simple' ) if PAZ : trace . data = seis_sim ( t... | Remove the instrument response from a trace and simulate a Wood - Anderson . |
14,495 | def _GSE2_PAZ_read ( gsefile ) : with open ( gsefile , 'r' ) as f : header = f . readline ( ) if not header [ 0 : 4 ] == 'CAL2' : raise IOError ( 'Unknown format for GSE file, only coded for CAL2' ) station = header . split ( ) [ 1 ] channel = header . split ( ) [ 2 ] sensor = header . split ( ) [ 3 ] date = dt . datet... | Read the instrument response information from a GSE Poles and Zeros file . |
14,496 | def _find_resp ( station , channel , network , time , delta , directory ) : possible_respfiles = glob . glob ( directory + os . path . sep + 'RESP.' + network + '.' + station + '.*.' + channel ) possible_respfiles += glob . glob ( directory + os . path . sep + 'RESP.' + network + '.' + channel + '.' + station ) possibl... | Helper function to find the response information . |
14,497 | def _pairwise ( iterable ) : a , b = itertools . tee ( iterable ) next ( b , None ) if sys . version_info . major == 2 : return itertools . izip ( a , b ) else : return zip ( a , b ) | Wrapper on itertools for SVD_magnitude . |
14,498 | def filter_picks ( catalog , stations = None , channels = None , networks = None , locations = None , top_n_picks = None , evaluation_mode = 'all' ) : filtered_catalog = catalog . copy ( ) if stations : for event in filtered_catalog : if len ( event . picks ) == 0 : continue event . picks = [ pick for pick in event . p... | Filter events in the catalog based on a number of parameters . |
14,499 | def spatial_clip ( catalog , corners , mindepth = None , maxdepth = None ) : cat_out = catalog . copy ( ) if mindepth is not None : for event in cat_out : try : origin = _get_origin ( event ) except IOError : continue if origin . depth < mindepth * 1000 : cat_out . events . remove ( event ) if maxdepth is not None : fo... | Clip the catalog to a spatial box , which can be irregular .
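
Rows 14,400–14,402 together describe a MAD-based despiking pattern: flag samples above a multiple of the median absolute deviation, then bridge each spike with linear interpolation. The sketch below re-assembles that pattern from the truncated cells, assuming only numpy; the simplified signatures and defaults are illustrative, not the library's exact implementation.

```python
import numpy as np

def _median_window(window, multiplier=10):
    # Flag samples whose absolute amplitude exceeds multiplier * MAD
    # of the window (the threshold rule quoted in row 14,401).
    mad = np.median(np.abs(window))
    return np.where(np.abs(window) > multiplier * mad)[0]

def _interp_gap(data, peak_loc, interp_len):
    # Replace a window around a spike with a straight line between its
    # edges (row 14,402), clamping the window to the array bounds.
    start = max(peak_loc - int(0.5 * interp_len), 0)
    end = min(peak_loc + int(0.5 * interp_len), len(data) - 1)
    data[start:end] = np.linspace(data[start], data[end], end - start)
    return data

# Despike a synthetic trace: find peaks, interpolate across each one.
data = np.random.randn(1000)
data[500] = 50.0  # synthetic spike
for peak in _median_window(data, multiplier=10):
    data = _interp_gap(data, peak, interp_len=20)
```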
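Rows 14,436–14,438 sketch the safe-extraction check used when reading tar archives (row 14,468). A self-contained re-assembly of that pattern follows; `_resolved` is not shown in the table, so its realpath-based definition here is an assumption from the surrounding names.

```python
import os
import tarfile

def _resolved(path):
    # Assumed helper (not shown in the table): canonicalise the path so
    # symlinks and ".." components cannot point outside the base.
    return os.path.realpath(os.path.abspath(path))

def _badpath(path, base):
    # joinpath will ignore base if path is absolute (row 14,436).
    return not _resolved(os.path.join(base, path)).startswith(base)

def _badlink(info, base):
    # Links are interpreted relative to the directory containing the
    # link (row 14,437).
    tip = _resolved(os.path.join(base, os.path.dirname(info.name)))
    return _badpath(info.linkname, base=tip)

def _safemembers(members):
    # Yield only members whose paths and link targets stay inside "."
    # (row 14,438); blocked members are reported and skipped.
    base = _resolved(".")
    for finfo in members:
        if _badpath(finfo.name, base):
            print(finfo.name, "is blocked (illegal path)")
        elif finfo.issym() and _badlink(finfo, base):
            print(finfo.name, "is blocked: symlink to", finfo.linkname)
        elif finfo.islnk() and _badlink(finfo, base):
            print(finfo.name, "is blocked: hard link to", finfo.linkname)
        else:
            yield finfo

# Usage, mirroring the Tribe.read row (14,468):
# with tarfile.open("tribe.tgz", "r:*") as arc:
#     arc.extractall(path=".", members=_safemembers(arc))
```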