idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
14,500
def _get_origin ( event ) : if event . preferred_origin ( ) is not None : origin = event . preferred_origin ( ) elif len ( event . origins ) > 0 : origin = event . origins [ 0 ] else : raise IndexError ( 'No origin set, cannot constrain' ) return origin
Get the origin of an event .
14,501
def read_parameters(infile='../parameters/EQcorrscan_parameters.txt'):
    """
    Read the default parameters from file.

    Echoes the commented header of the file to stdout, then parses the
    ``[eqcorrscan_pars]`` section into an EQcorrscanParameters object.

    :param infile: Path of the parameter file to read.
    :returns: EQcorrscanParameters populated from the file.
    """
    try:
        import ConfigParser  # Python 2
    except ImportError:
        import configparser as ConfigParser  # Python 3
    import ast
    # Bug-fix: use a context manager so the file handle is closed even if
    # reading raises (the original leaked the handle on error).
    with open(infile, 'r') as f:
        print('Reading parameters with the following header:')
        for line in f:
            if line[0] == '#':
                print(line.rstrip('\n').lstrip('\n'))
    config = ConfigParser.ConfigParser()
    config.read(infile)
    # template_names and tempdir are stored as Python literals.
    template_names = list(ast.literal_eval(
        config.get("eqcorrscan_pars", "template_names")))
    parameters = EQcorrscanParameters(
        template_names=template_names,
        lowcut=config.get("eqcorrscan_pars", "lowcut"),
        highcut=config.get("eqcorrscan_pars", "highcut"),
        filt_order=config.get("eqcorrscan_pars", "filt_order"),
        samp_rate=config.get("eqcorrscan_pars", "samp_rate"),
        debug=config.get("eqcorrscan_pars", "debug"),
        startdate=config.get("eqcorrscan_pars", "startdate"),
        enddate=config.get("eqcorrscan_pars", "enddate"),
        archive=config.get("eqcorrscan_pars", "archive"),
        arc_type=config.get("eqcorrscan_pars", "arc_type"),
        cores=config.get("eqcorrscan_pars", "cores"),
        plotvar=config.getboolean("eqcorrscan_pars", "plotvar"),
        plotdir=config.get("eqcorrscan_pars", "plotdir"),
        plot_format=config.get("eqcorrscan_pars", "plot_format"),
        tempdir=ast.literal_eval(config.get("eqcorrscan_pars", "tempdir")),
        threshold=config.get("eqcorrscan_pars", "threshold"),
        threshold_type=config.get("eqcorrscan_pars", "threshold_type"),
        trigger_interval=config.get("eqcorrscan_pars", "trigger_interval"))
    return parameters
Read the default parameters from file .
14,502
def write(self, outfile='../parameters/EQcorrscan_parameters.txt', overwrite=False):
    """
    Function to write the parameters to a file - user readable.

    :param outfile: Path to write the parameter file to.
    :param overwrite: Overwrite an existing file without prompting.
    :raises IOError: If the output directory does not exist, or the file
        exists and the user declines to overwrite it.
    """
    outpath = os.sep.join(outfile.split(os.sep)[0:-1])
    if len(outpath) > 0 and not os.path.isdir(outpath):
        # Bug-fix: the original passed a list to os.path.join (TypeError);
        # report the missing directory path instead.
        msg = ' '.join([outpath, 'does not exist, check path.'])
        raise IOError(msg)
    if os.path.isfile(outfile) and not overwrite:
        # Bug-fix: raw_input only exists on Python 2; fall back to input
        # on Python 3 instead of raising NameError.
        try:
            get_response = raw_input
        except NameError:
            get_response = input
        responding = True
        while responding:
            print(' '.join([outfile, 'exists. Overwrite? [y/N]']))
            option = get_response()
            if option.upper() == 'N':
                raise IOError('File exists, will not overwrite')
            elif option.upper() == 'Y':
                responding = False
            else:
                print('Must respond with y or n')
    # Context manager guarantees the file is flushed and closed.
    with open(outfile, 'w') as f:
        header = ' '.join(
            ['# User:', getpass.getuser(),
             '\n# Creation date:', str(UTCDateTime()),
             '\n# EQcorrscan version:', str(eqcorrscan.__version__),
             '\n\n\n'])
        f.write(header)
        # __str__ gives one "key: value" per line; skip the class-name line.
        parameters = self.__str__().split('\n')[1:]
        f.write('[eqcorrscan_pars]\n')
        for parameter in parameters:
            f.write(parameter.lstrip() + '\n')
    print('Written parameter file: ' + outfile)
Function to write the parameters to a file - user readable .
14,503
def _check_daylong ( tr ) : if len ( np . nonzero ( tr . data ) [ 0 ] ) < 0.5 * len ( tr . data ) : qual = False else : qual = True return qual
Check the data quality of the daylong file .
14,504
def shortproc(st, lowcut, highcut, filt_order, samp_rate, debug=0,
              parallel=False, num_cores=False, starttime=None, endtime=None,
              seisan_chan_names=False, fill_gaps=True):
    """
    Basic function to bandpass and downsample.

    Optionally trims the data to the starttime/endtime window and hands
    each trace to ``process`` (serially or via a multiprocessing pool).

    :param st: Stream (or single Trace) to process.
    :param lowcut: Low-cut of the filter in Hz (passed to process).
    :param highcut: High-cut of the filter in Hz (passed to process).
    :param filt_order: Filter order (passed to process).
    :param samp_rate: Desired output sampling rate in Hz.
    :param debug: Debug verbosity; above 4 parallelism is disabled so
        debug output stays readable.
    :param parallel: Process traces in parallel.
    :param num_cores: Number of worker processes; defaults to cpu_count().
    :param starttime: Optional trim start.
    :param endtime: Optional trim end.
    :param seisan_chan_names: Passed through to process.
    :param fill_gaps: Passed through to process.
    :returns: Processed Stream (or a Trace if a Trace was given).
    :raises IOError: If highcut is at or above the Nyquist frequency.
    """
    if isinstance(st, Trace):
        tracein = True
        st = Stream(st)
    else:
        tracein = False
    if highcut and highcut >= 0.5 * samp_rate:
        raise IOError('Highcut must be lower than the nyquist')
    if debug > 4:
        parallel = False
    length = None
    clip = False
    if starttime is not None and endtime is not None:
        for tr in st:
            tr.trim(starttime, endtime)
            # Trim can leave one sample too many; drop the first sample.
            if len(tr.data) == ((endtime - starttime) *
                                tr.stats.sampling_rate) + 1:
                tr.data = tr.data[1:len(tr.data)]
        length = endtime - starttime
        clip = True
    elif starttime:
        for tr in st:
            tr.trim(starttime=starttime)
    elif endtime:
        for tr in st:
            tr.trim(endtime=endtime)
    for tr in st:
        if len(tr.data) == 0:
            st.remove(tr)
            debug_print('No data for %s.%s after trim' %
                        (tr.stats.station, tr.stats.channel), 1, debug)
    if parallel:
        if not num_cores:
            num_cores = cpu_count()
        if num_cores > len(st):
            num_cores = len(st)
        pool = Pool(processes=num_cores)
        results = [pool.apply_async(process, (tr,), {
            'lowcut': lowcut, 'highcut': highcut, 'filt_order': filt_order,
            'samp_rate': samp_rate, 'debug': debug, 'starttime': starttime,
            'clip': clip, 'seisan_chan_names': seisan_chan_names,
            'fill_gaps': fill_gaps, 'length': length}) for tr in st]
        pool.close()
        try:
            stream_list = [p.get() for p in results]
        except KeyboardInterrupt as e:
            # Kill the workers before re-raising.
            pool.terminate()
            raise e
        pool.join()
        st = Stream(stream_list)
    else:
        for i, tr in enumerate(st):
            st[i] = process(
                tr=tr, lowcut=lowcut, highcut=highcut,
                filt_order=filt_order, samp_rate=samp_rate, debug=debug,
                starttime=starttime, clip=clip,
                seisan_chan_names=seisan_chan_names, fill_gaps=fill_gaps,
                length=length)
    if tracein:
        st.merge()
        return st[0]
    return st
Basic function to bandpass and downsample .
14,505
def dayproc(st, lowcut, highcut, filt_order, samp_rate, starttime, debug=0,
            parallel=True, num_cores=False, ignore_length=False,
            seisan_chan_names=False, fill_gaps=True):
    """
    Wrapper to process multiple day-long traces in a stream.

    Each trace is handed to ``process`` with clip=True and length=86400
    so the output is exactly one day of data starting at starttime.

    :param st: Stream (or single Trace) to process.
    :param lowcut: Low-cut of the filter in Hz (passed to process).
    :param highcut: High-cut of the filter in Hz (passed to process).
    :param filt_order: Filter order.
    :param samp_rate: Desired output sampling rate in Hz.
    :param starttime: Start of the day; when None it is derived from the
        traces, which must all start on the same day.
    :param debug: Debug verbosity; above 4 parallelism is disabled.
    :param parallel: Process traces in parallel.
    :param num_cores: Number of worker processes; defaults to cpu_count().
    :param ignore_length: Passed through to process.
    :param seisan_chan_names: Passed through to process.
    :param fill_gaps: Passed through to process.
    :returns: Processed Stream (or a Trace if a Trace was given).
    :raises IOError: If highcut is at or above the Nyquist frequency.
    :raises NotImplementedError: If traces start on different days.
    """
    if isinstance(st, Trace):
        st = Stream(st)
        tracein = True
    else:
        tracein = False
    if highcut and highcut >= 0.5 * samp_rate:
        raise IOError('Highcut must be lower than the nyquist')
    if debug > 4:
        parallel = False
    if starttime is None:
        startdates = []
        for tr in st:
            # A trace starting within one sample of midnight is treated
            # as belonging to the next day.
            if abs(tr.stats.starttime - (UTCDateTime(
                    tr.stats.starttime.date) + 86400)) < tr.stats.delta:
                startdates.append((tr.stats.starttime + 86400).date)
                debug_print('{0} starts within 1 sample of the next day, using this '
                            'time {1}'.format(
                                tr.id, (tr.stats.starttime + 86400).date),
                            2, debug)
            else:
                startdates.append(tr.stats.starttime.date)
        if not len(set(startdates)) == 1:
            raise NotImplementedError('Traces start on different days')
        starttime = UTCDateTime(startdates[0])
    if parallel:
        if not num_cores:
            num_cores = cpu_count()
        if num_cores > len(st):
            num_cores = len(st)
        pool = Pool(processes=num_cores)
        results = [pool.apply_async(process, (tr,), {
            'lowcut': lowcut, 'highcut': highcut, 'filt_order': filt_order,
            'samp_rate': samp_rate, 'debug': debug, 'starttime': starttime,
            'clip': True, 'ignore_length': ignore_length, 'length': 86400,
            'seisan_chan_names': seisan_chan_names,
            'fill_gaps': fill_gaps}) for tr in st]
        pool.close()
        try:
            stream_list = [p.get() for p in results]
        except KeyboardInterrupt as e:
            # Kill the workers before re-raising.
            pool.terminate()
            raise e
        pool.join()
        st = Stream(stream_list)
    else:
        for i, tr in enumerate(st):
            st[i] = process(
                tr=tr, lowcut=lowcut, highcut=highcut,
                filt_order=filt_order, samp_rate=samp_rate, debug=debug,
                starttime=starttime, clip=True, length=86400,
                ignore_length=ignore_length,
                seisan_chan_names=seisan_chan_names, fill_gaps=fill_gaps)
    # Drop any traces emptied by processing.
    for tr in st:
        if len(tr.data) == 0:
            st.remove(tr)
    if tracein:
        st.merge()
        return st[0]
    return st
Wrapper for dayproc to process multiple traces in a stream in parallel.
14,506
def _zero_pad_gaps(tr, gaps, fill_gaps=True):
    """
    Replace padded parts of trace with zeros.

    Rebuilds the trace from the slices either side of each gap, then
    optionally zero-fills interior gaps and re-pads the ends so the trace
    spans its original start and end times.

    :param tr: Trace to pad.
    :param gaps: list of dicts with 'starttime' and 'endtime' keys.
    :param fill_gaps: Whether to zero-fill interior gaps and re-pad ends.
    :returns: The padded trace.
    """
    start_in, end_in = (tr.stats.starttime, tr.stats.endtime)
    for gap in gaps:
        stream = Stream()
        if gap['starttime'] > tr.stats.starttime:
            stream += tr.slice(tr.stats.starttime, gap['starttime']).copy()
        if gap['endtime'] < tr.stats.endtime:
            stream += tr.slice(gap['endtime'], tr.stats.endtime).copy()
        tr = stream.merge()[0]
    if fill_gaps:
        tr = tr.split()
        tr = tr.detrend()
        tr = tr.merge(fill_value=0)[0]
        # A gap overlapping the trace start/end is lost by the slicing
        # above; pad the ends back out with zeros.
        if tr.stats.starttime != start_in:
            # NOTE(review): the pad length is int(seconds), not
            # seconds * sampling_rate -- confirm the intended sample count
            # for sampling rates other than 1 Hz.
            tr.data = np.concatenate(
                [np.zeros(int(tr.stats.starttime - start_in)), tr.data])
            tr.stats.starttime = start_in
        if tr.stats.endtime != end_in:
            tr.data = np.concatenate(
                [tr.data, np.zeros(int(end_in - tr.stats.endtime))])
    return tr
Replace padded parts of trace with zeros .
14,507
def _fill_gaps ( tr ) : tr = tr . split ( ) gaps = tr . get_gaps ( ) tr = tr . detrend ( ) . merge ( fill_value = 0 ) [ 0 ] gaps = [ { 'starttime' : gap [ 4 ] , 'endtime' : gap [ 5 ] } for gap in gaps ] return gaps , tr
Interpolate through gaps and work out where the gaps are.
14,508
def find_peaks2_short(arr, thresh, trig_int, debug=0, starttime=False,
                      samp_rate=1.0, full_peaks=False):
    """
    Determine peaks in an array of data above a certain threshold.

    Contiguous runs of samples above threshold are located with
    scipy.ndimage labelling; one peak is kept per run (or several, via
    decluster, when full_peaks is set and the run is longer than
    trig_int), then all peaks are declustered to at least trig_int
    samples apart.

    :param arr: Data array to search (may be signed; threshold is applied
        to the absolute value).
    :param thresh: Threshold to declare a peak.
    :param trig_int: Minimum separation between peaks, in samples.
    :param debug: Debug verbosity; >= 3 also saves a peak plot.
    :param starttime: Start time used for plotting; defaults to epoch 0.
    :param samp_rate: Sampling rate, used only for plotting.
    :param full_peaks: Decluster within long above-threshold runs rather
        than keeping only the run maximum.
    :returns: list of (value, sample-index) tuples sorted by index, or [].
    """
    if not starttime:
        starttime = UTCDateTime(0)
    # Work on a rectified copy; arr itself keeps its signs.
    image = np.copy(arr)
    image = np.abs(image)
    debug_print("Threshold: {0}\tMax: {1}".format(thresh, max(image)),
                2, debug)
    image[image < thresh] = 0
    if len(image[image > thresh]) == 0:
        debug_print("No values over threshold {0}".format(thresh), 0, debug)
        return []
    debug_print('Found {0} samples above the threshold'.format(
        len(image[image > thresh])), 0, debug)
    initial_peaks = []
    # Label each contiguous above-threshold run as one object.
    labeled_image, number_of_objects = ndimage.label(image)
    peak_slices = ndimage.find_objects(labeled_image)
    for peak_slice in peak_slices:
        window = arr[peak_slice[0].start: peak_slice[0].stop]
        if peak_slice[0].stop - peak_slice[0].start > trig_int and full_peaks:
            peaks = decluster(
                peaks=window, trig_int=trig_int,
                index=np.arange(peak_slice[0].start, peak_slice[0].stop))
        else:
            # Keep the largest absolute value in the run (with its sign).
            peaks = [(window[np.argmax(abs(window))],
                      int(peak_slice[0].start + np.argmax(abs(window))))]
        initial_peaks.extend(peaks)
    peaks = decluster(peaks=np.array(list(zip(*initial_peaks))[0]),
                      index=np.array(list(zip(*initial_peaks))[1]),
                      trig_int=trig_int)
    if initial_peaks:
        if debug >= 3:
            from eqcorrscan.utils import plotting
            _fname = ''.join(['peaks_',
                              starttime.datetime.strftime('%Y-%m-%d'),
                              '.pdf'])
            plotting.peaks_plot(data=image, starttime=starttime,
                                samp_rate=samp_rate, save=True,
                                peaks=peaks, savefile=_fname)
        peaks = sorted(peaks, key=lambda time: time[1], reverse=False)
        return peaks
    else:
        print('No peaks for you!')
        return []
Determine peaks in an array of data above a certain threshold .
14,509
def multi_find_peaks(arr, thresh, trig_int, debug=0, starttime=False,
                     samp_rate=1.0, parallel=True, full_peaks=False,
                     cores=None):
    """
    Wrapper around find_peaks2_short for multiple arrays.

    :param arr: 2D array-like, one row of data per channel.
    :param thresh: Iterable of thresholds, one per row of arr.
    :param trig_int: Minimum separation between peaks, in samples.
    :param debug: Debug verbosity (serial mode only).
    :param starttime: Start time for plotting (serial mode only).
    :param samp_rate: Sampling rate for plotting (serial mode only).
    :param parallel: Run one worker per row via a process pool.
    :param full_peaks: Passed through to find_peaks2_short.
    :param cores: Worker count; defaults to the number of rows.
    :returns: list of peak-lists, one per row of arr.
    """
    if not parallel:
        return [
            find_peaks2_short(
                arr=sub_arr, thresh=arr_thresh, trig_int=trig_int,
                debug=debug, starttime=starttime, samp_rate=samp_rate,
                full_peaks=full_peaks)
            for sub_arr, arr_thresh in zip(arr, thresh)]
    if cores is None:
        cores = arr.shape[0]
    with pool_boy(Pool=Pool, traces=cores) as pool:
        # Positional args: arr, thresh, trig_int, debug, starttime,
        # samp_rate, full_peaks (starttime/samp_rate fixed in parallel
        # mode, as in the serial original's parallel branch).
        async_results = [
            pool.apply_async(
                find_peaks2_short,
                (sub_arr, arr_thresh, trig_int, debug, False, 1.0,
                 full_peaks))
            for sub_arr, arr_thresh in zip(arr, thresh)]
        peaks = [result.get() for result in async_results]
    return peaks
Wrapper for find-peaks for multiple arrays.
14,510
def coin_trig(peaks, stachans, samp_rate, moveout, min_trig, trig_int):
    """
    Find network coincidence triggers within peaks of detection statistics.

    :param peaks: list (one entry per station-channel) of lists of
        (value, sample) peak tuples.
    :param stachans: list of (station, channel) pairs, parallel to peaks.
    :param samp_rate: Sampling rate in Hz.
    :param moveout: Allowed moveout between stations, in seconds.
    :param min_trig: Minimum number of station-channels for a trigger.
    :param trig_int: Minimum time between output triggers, in seconds.
    :returns: list of (average value, sample) triggers sorted by time,
        or an empty list.
    """
    # Flatten to (sample, value, "STA.CHAN") tuples.
    all_triggers = []
    for stachan, channel_peaks in zip(stachans, peaks):
        seed_id = '.'.join(stachan)
        for peak in channel_peaks:
            all_triggers.append((peak[1], peak[0], seed_id))
    max_separation = moveout * samp_rate
    candidates = []
    for i, master in enumerate(all_triggers):
        coincidence = 1
        trig_time = master[0]
        trig_val = master[1]
        # Only look forward; earlier pairings were handled by earlier
        # masters.
        for slave in all_triggers[i + 1:]:
            if (abs(slave[0] - master[0]) <= max_separation and
                    slave[2] != master[2]):
                coincidence += 1
                if slave[0] < master[0]:
                    trig_time = slave[0]
                trig_val += slave[1]
        if coincidence >= min_trig:
            candidates.append((trig_val / coincidence, trig_time))
    if not candidates:
        return []
    # Keep the strongest candidate inside each trig_int window.
    candidates.sort(key=lambda tup: tup[0], reverse=True)
    output = [candidates[0]]
    min_gap = trig_int * samp_rate
    for candidate in candidates[1:]:
        if all(abs(candidate[1] - kept[1]) >= min_gap for kept in output):
            output.append((candidate[0], candidate[1]))
    output.sort(key=lambda tup: tup[1])
    return output
Find network coincidence triggers within peaks of detection statistics .
14,511
def _finalise_figure ( fig , ** kwargs ) : title = kwargs . get ( "title" ) or None show = kwargs . get ( "show" ) or False save = kwargs . get ( "save" ) or False savefile = kwargs . get ( "savefile" ) or "EQcorrscan_figure.png" return_fig = kwargs . get ( "return_figure" ) or False if title : fig . suptitle ( title ) if show : fig . show ( ) if save : fig . savefig ( savefile ) print ( "Saved figure to {0}" . format ( savefile ) ) if return_fig : return fig return None
Internal function to wrap up a figure .
14,512
def chunk_data(tr, samp_rate, state='mean'):
    """
    Downsample data for plotting.

    Reduces the trace to one value per chunk of
    ``tr.stats.sampling_rate / samp_rate`` samples, using the requested
    statistic.

    :param tr: Trace to downsample (not modified; a copy is returned).
    :param samp_rate: Output (plotting) sampling rate in Hz.
    :param state: One of 'max', 'min', 'mean' or 'maxabs'
        (case-insensitive). 'maxabs' keeps whichever of the chunk
        max/min has the larger absolute value.
    :returns: Downsampled copy of the trace.
    :raises ValueError: If state is not a recognised statistic.
    """
    trout = tr.copy()
    x = np.arange(len(tr.data))
    y = tr.data
    chunksize = int(round(tr.stats.sampling_rate / samp_rate))
    numchunks = int(y.size // chunksize)
    # Drop trailing samples that do not fill a whole chunk.
    ychunks = y[:chunksize * numchunks].reshape((-1, chunksize))
    xchunks = x[:chunksize * numchunks].reshape((-1, chunksize))
    # Bug-fix: the comparisons were case-sensitive ('Max', 'Mean', ...),
    # so the documented default 'mean' matched no branch and the data
    # were silently returned un-chunked. Accept any casing and reject
    # unknown states explicitly.
    state = state.lower()
    if state == 'max':
        trout.data = ychunks.max(axis=1)
    elif state == 'min':
        trout.data = ychunks.min(axis=1)
    elif state == 'mean':
        trout.data = ychunks.mean(axis=1)
    elif state == 'maxabs':
        max_env = ychunks.max(axis=1)
        min_env = ychunks.min(axis=1)
        indeces = np.argmax(
            np.vstack([np.abs(max_env), np.abs(min_env)]), axis=0)
        stack = np.vstack([max_env, min_env]).T
        trout.data = np.array(
            [stack[i][indeces[i]] for i in range(len(stack))])
    else:
        raise ValueError('Unknown state: {0}'.format(state))
    # Time-stamp the output at the centre of the first chunk.
    xcenters = xchunks.mean(axis=1)
    trout.stats.starttime = (
        tr.stats.starttime + xcenters[0] / tr.stats.sampling_rate)
    trout.stats.sampling_rate = samp_rate
    return trout
Downsample data for plotting .
14,513
def xcorr_plot(template, image, shift=None, cc=None, cc_vec=None, **kwargs):
    """
    Plot a template overlying an image aligned by correlation.

    :param template: Template waveform (array-like).
    :param image: Image waveform (array-like) the template aligns to.
    :param shift: Shift of the template relative to the image, in samples.
    :param cc: Correlation coefficient at that shift.
    :param cc_vec: Full cross-correlation vector; used to derive shift
        and cc when they are not given.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    :raises IOError: If neither cc_vec nor both cc and shift are given.
    """
    import matplotlib.pyplot as plt
    if cc is None or shift is None:
        if not isinstance(cc_vec, np.ndarray):
            print('Given cc: %s and shift: %s' % (cc, shift))
            raise IOError('Must provide either cc_vec, or cc and shift')
        # Best alignment is the largest absolute correlation.
        shift = np.abs(cc_vec).argmax()
        cc = cc_vec[shift]
    # Both traces are normalised by their own peak amplitude.
    x = np.arange(len(image))
    plt.plot(x, image / abs(image).max(), 'k', lw=1.3, label='Image')
    x = np.arange(len(template)) + shift
    plt.plot(x, template / abs(template).max(), 'r', lw=1.1,
             label='Template')
    plt.title('Shift=%s, Correlation=%s' % (shift, cc))
    fig = plt.gcf()
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot a template overlying an image aligned by correlation .
14,514
def triple_plot(cccsum, cccsum_hist, trace, threshold, **kwargs):
    """
    Plot a seismogram, correlogram and histogram.

    :param cccsum: Cross-correlation sum; must match trace.data in length.
    :param cccsum_hist: cccsum values for the histogram panel (may be a
        decimated copy of cccsum).
    :param trace: Trace drawn in the top panel.
    :param threshold: Detection threshold drawn on the correlogram.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    :raises ValueError: If cccsum and trace differ in length.
    """
    import matplotlib.pyplot as plt
    if len(cccsum) != len(trace.data):
        print('cccsum is: ' + str(len(cccsum)) +
              ' trace is: ' + str(len(trace.data)))
        msg = ' '.join(['cccsum and trace must have the',
                        'same number of data points'])
        raise ValueError(msg)
    df = trace.stats.sampling_rate
    npts = trace.stats.npts
    # Time axis in hours after the trace start.
    t = np.arange(npts, dtype=np.float32) / (df * 3600)
    ax1 = plt.subplot2grid((2, 5), (0, 0), colspan=4)
    ax1.plot(t, trace.data, 'k')
    ax1.axis('tight')
    # Clip the seismogram at 15x its mean absolute amplitude.
    ax1.set_ylim([-15 * np.mean(np.abs(trace.data)),
                  15 * np.mean(np.abs(trace.data))])
    ax2 = plt.subplot2grid((2, 5), (1, 0), colspan=4, sharex=ax1)
    ax2.plot([min(t), max(t)], [threshold, threshold], color='r', lw=1,
             label="Threshold")
    ax2.plot([min(t), max(t)], [-threshold, -threshold], color='r', lw=1)
    ax2.plot(t, cccsum, 'k')
    ax2.axis('tight')
    ax2.set_ylim([-1.7 * threshold, 1.7 * threshold])
    ax2.set_xlabel("Time after %s [hr]" % trace.stats.starttime.isoformat())
    ax3 = plt.subplot2grid((2, 5), (1, 4), sharey=ax2)
    # NOTE(review): 'normed' was removed in matplotlib 3.x in favour of
    # 'density' -- confirm the pinned matplotlib version supports it.
    ax3.hist(cccsum_hist, 200, normed=1, histtype='stepfilled',
             orientation='horizontal', color='black')
    ax3.set_ylim([-5, 5])
    fig = plt.gcf()
    fig.suptitle(trace.id)
    fig.canvas.draw()
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot a seismogram correlogram and histogram .
14,515
def peaks_plot(data, starttime, samp_rate, peaks=None, **kwargs):
    """
    Plot peaks to check that the peak finding routine is running correctly.

    :param data: Detection statistic (array-like).
    :param starttime: Start time of the data (UTCDateTime-like).
    :param samp_rate: Sampling rate of the data in Hz.
    :param peaks: list of (value, sample) tuples; defaults to [(0, 0)].
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    import matplotlib.pyplot as plt
    # Bug-fix: avoid a mutable default argument; None stands in for the
    # original default of [(0, 0)].
    if peaks is None:
        peaks = [(0, 0)]
    npts = len(data)
    # Time axis in hours after starttime.
    t = np.arange(npts, dtype=np.float32) / (samp_rate * 3600)
    fig = plt.figure()
    ax1 = fig.add_subplot(111)
    ax1.plot(t, data, 'k')
    # The first peak is plotted separately so the legend gets one entry.
    ax1.scatter(peaks[0][1] / (samp_rate * 3600), abs(peaks[0][0]),
                color='r', label='Peaks')
    for peak in peaks:
        ax1.scatter(peak[1] / (samp_rate * 3600), abs(peak[0]), color='r')
    ax1.legend()
    ax1.set_xlabel("Time after %s [hr]" % starttime.isoformat())
    ax1.axis('tight')
    fig.suptitle('Peaks')
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot peaks to check that the peak finding routine is running correctly .
14,516
def threeD_gridplot(nodes, **kwargs):
    """
    Plot a series of grid points in 3D.

    :param nodes: Iterable of (latitude, longitude, depth) tuples.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    lats = [float(node[0]) for node in nodes]
    longs = [float(node[1]) for node in nodes]
    depths = [float(node[2]) for node in nodes]
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(lats, longs, depths)
    ax.set_ylabel("Latitude (deg)")
    ax.set_xlabel("Longitude (deg)")
    ax.set_zlabel("Depth(km)")
    # Plain (non-scientific) tick labels for geographic coordinates.
    ax.get_xaxis().get_major_formatter().set_scientific(False)
    ax.get_yaxis().get_major_formatter().set_scientific(False)
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot in a series of grid points in 3D .
14,517
def interev_mag(times, mags, size=(10.5, 7.5), **kwargs):
    """
    Plot inter-event times against magnitude.

    Left panel: time since the previous event; right panel: time until
    the next event.

    :param times: list of event times (sortable; differences give
        seconds).
    :param mags: list of magnitudes, parallel to times.
    :param size: Figure size in inches.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    import matplotlib.pyplot as plt
    # Sort events chronologically, keeping magnitudes paired with times.
    info = sorted(zip(times, mags), key=lambda tup: tup[0])
    times = [x[0] for x in info]
    mags = [x[1] for x in info]
    fig, axes = plt.subplots(1, 2, sharey=True, figsize=size)
    axes = axes.ravel()
    pre_times = []
    post_times = []
    for i in range(len(times)):
        if i > 0:
            pre_times.append((times[i] - times[i - 1]) / 60)
        if i < len(times) - 1:
            post_times.append((times[i + 1] - times[i]) / 60)
    axes[0].scatter(pre_times, mags[1:])
    axes[0].set_title('Pre-event times')
    axes[0].set_ylabel('Magnitude')
    axes[0].set_xlabel('Time (Minutes)')
    plt.setp(axes[0].xaxis.get_majorticklabels(), rotation=30)
    # Bug-fix: the second panel previously re-plotted pre_times; it is
    # labelled 'Post-event times' and should show post_times.
    axes[1].scatter(post_times, mags[:-1])
    axes[1].set_title('Post-event times')
    axes[1].set_xlabel('Time (Minutes)')
    axes[0].autoscale(enable=True, tight=True)
    axes[1].autoscale(enable=True, tight=True)
    plt.setp(axes[1].xaxis.get_majorticklabels(), rotation=30)
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot inter-event times against magnitude.
14,518
def obspy_3d_plot(inventory, catalog, size=(10.5, 7.5), **kwargs):
    """
    Plot obspy Inventory and obspy Catalog classes in three dimensions.

    :param inventory: obspy Inventory of stations to plot.
    :param catalog: obspy Catalog of events to plot.
    :param size: Figure size in inches.
    :returns: Figure from threeD_seismplot.
    """
    nodes = [(ev.preferred_origin().latitude,
              ev.preferred_origin().longitude,
              ev.preferred_origin().depth / 1000) for ev in catalog]
    all_stas = []
    for net in inventory:
        for sta in net:
            if len(sta.channels) > 0:
                # Station elevation corrected by the first channel depth.
                all_stas.append(
                    (sta.latitude, sta.longitude,
                     sta.elevation / 1000 - sta.channels[0].depth / 1000))
            else:
                warnings.warn('No channel information attached, '
                              'setting elevation without depth')
                all_stas.append(
                    (sta.latitude, sta.longitude, sta.elevation / 1000))
    return threeD_seismplot(
        stations=all_stas, nodes=nodes, size=size, **kwargs)
Plot obspy Inventory and obspy Catalog classes in three dimensions .
14,519
def threeD_seismplot(stations, nodes, size=(10.5, 7.5), **kwargs):
    """
    Plot seismicity and stations in a 3D movable zoomable space.

    :param stations: list of (lat, lon, elevation-km) tuples.
    :param nodes: list of (lat, lon, depth-km) tuples (depth positive
        down).
    :param size: Figure size in inches.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D
    stalats, stalongs, staelevs = zip(*stations)
    evlats, evlongs, evdepths = zip(*nodes)

    def _wrap(longitudes):
        # Map negative longitudes into [0, 360) so data either side of
        # the dateline plot on one continuous axis.
        wrapped = []
        for lon in longitudes:
            if lon < 0:
                lon = float(lon) + 360
            wrapped.append(lon)
        return wrapped

    evlongs = _wrap(evlongs)
    stalongs = _wrap(stalongs)
    # Depths are positive-down; negate so the z-axis reads as elevation.
    evdepths = [-1 * depth for depth in evdepths]
    fig = plt.figure(figsize=size)
    ax = Axes3D(fig)
    ax.scatter(evlats, evlongs, evdepths, marker="x", c="k",
               label='Hypocenters')
    ax.scatter(stalats, stalongs, staelevs, marker="v", c="r",
               label='Stations')
    ax.set_ylabel("Longitude (deg)")
    ax.set_xlabel("Latitude (deg)")
    ax.set_zlabel("Elevation (km)")
    ax.get_xaxis().get_major_formatter().set_scientific(False)
    ax.get_yaxis().get_major_formatter().set_scientific(False)
    plt.legend()
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot seismicity and stations in a 3D movable zoomable space .
14,520
def noise_plot(signal, noise, normalise=False, **kwargs):
    """
    Plot signal and noise fourier transforms and the difference.

    One row per signal trace that has a matching noise trace (matched by
    trace id): left panel shows both spectra, right panel shows their
    difference.

    :param signal: Stream of signal-window traces.
    :param noise: Stream of noise-window traces.
    :param normalise: Normalise each trace by its maximum before the FFT.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    import matplotlib.pyplot as plt
    # Count only signal traces with a matching noise trace.
    n_traces = 0
    for tr in signal:
        try:
            noise.select(id=tr.id)[0]
        except IndexError:
            continue
        n_traces += 1
    fig, axes = plt.subplots(n_traces, 2, sharex=True)
    if len(signal) > 1:
        axes = axes.ravel()
    i = 0
    lines = []
    labels = []
    for tr in signal:
        try:
            noise_tr = noise.select(id=tr.id)[0]
        except IndexError:
            continue
        ax1 = axes[i]
        ax2 = axes[i + 1]
        # Pad both traces to a common FFT-friendly length.
        fft_len = fftpack.next_fast_len(
            max(noise_tr.stats.npts, tr.stats.npts))
        if not normalise:
            signal_fft = fftpack.rfft(tr.data, fft_len)
            noise_fft = fftpack.rfft(noise_tr.data, fft_len)
        else:
            signal_fft = fftpack.rfft(tr.data / max(tr.data), fft_len)
            noise_fft = fftpack.rfft(
                noise_tr.data / max(noise_tr.data), fft_len)
        frequencies = np.linspace(
            0, 1 / (2 * tr.stats.delta), fft_len // 2)
        noise_line, = ax1.semilogy(
            frequencies, 2.0 / fft_len * np.abs(noise_fft[0: fft_len // 2]),
            'k', label="noise")
        signal_line, = ax1.semilogy(
            frequencies, 2.0 / fft_len * np.abs(signal_fft[0: fft_len // 2]),
            'r', label="signal")
        # Collect one legend entry per label.
        if "signal" not in labels:
            labels.append("signal")
            lines.append(signal_line)
        if "noise" not in labels:
            labels.append("noise")
            lines.append(noise_line)
        ax1.set_ylabel(tr.id, rotation=0, horizontalalignment='right')
        ax2.plot(
            frequencies,
            (2.0 / fft_len * np.abs(signal_fft[0: fft_len // 2])) -
            (2.0 / fft_len * np.abs(noise_fft[0: fft_len // 2])), 'k')
        ax2.yaxis.tick_right()
        ax2.set_ylim(bottom=0)
        i += 2
    axes[-1].set_xlabel("Frequency (Hz)")
    axes[-2].set_xlabel("Frequency (Hz)")
    axes[0].set_title("Spectra")
    axes[1].set_title("Signal - noise")
    plt.figlegend(lines, labels, 'upper left')
    plt.tight_layout()
    plt.subplots_adjust(hspace=0)
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot signal and noise fourier transforms and the difference .
14,521
def spec_trace(traces, cmap=None, wlen=0.4, log=False, trc='k',
               tralpha=0.9, size=(10, 13), fig=None, **kwargs):
    """
    Plots seismic data with spectrogram behind.

    :param traces: Stream or list of traces to plot, one subplot each.
    :param cmap: Colormap for the spectrograms.
    :param wlen: Spectrogram window length in seconds.
    :param log: Use a logarithmic frequency axis.
    :param trc: Colour of the trace overlay.
    :param tralpha: Opacity of the trace overlay.
    :param size: Figure size in inches.
    :param fig: Existing figure to plot into; a new one is made if None.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    import matplotlib.pyplot as plt
    if isinstance(traces, Stream):
        traces.sort(['station', 'channel'])
    if not fig:
        fig = plt.figure()
    for i, tr in enumerate(traces):
        if i == 0:
            ax = fig.add_subplot(len(traces), 1, i + 1)
        else:
            # Share the time axis with the first subplot.
            ax = fig.add_subplot(len(traces), 1, i + 1, sharex=ax)
        ax1, ax2 = _spec_trace(tr, cmap=cmap, wlen=wlen, log=log, trc=trc,
                               tralpha=tralpha, axes=ax)
        ax.set_yticks([])
        if i < len(traces) - 1:
            # Hide x tick labels on all but the bottom subplot.
            plt.setp(ax1.get_xticklabels(), visible=False)
        if isinstance(traces, list):
            # A plain list may mix start times, so include them.
            ax.text(0.005, 0.85,
                    "{0}::{1}".format(tr.id, tr.stats.starttime),
                    bbox=dict(facecolor='white', alpha=0.8),
                    transform=ax2.transAxes)
        elif isinstance(traces, Stream):
            ax.text(0.005, 0.85, tr.id,
                    bbox=dict(facecolor='white', alpha=0.8),
                    transform=ax2.transAxes)
        # Annotate the peak amplitude of the trace.
        ax.text(0.005, 0.02, str(np.max(tr.data).round(1)),
                bbox=dict(facecolor='white', alpha=0.95),
                transform=ax2.transAxes)
    ax.set_xlabel('Time (s)')
    fig.subplots_adjust(hspace=0)
    fig.set_size_inches(w=size[0], h=size[1], forward=True)
    fig.text(0.04, 0.5, 'Frequency (Hz)', va='center', rotation='vertical')
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plots seismic data with spectrogram behind .
14,522
def _spec_trace(trace, cmap=None, wlen=0.4, log=False, trc='k',
                tralpha=0.9, size=(10, 2.5), axes=None, title=None):
    """
    Function to plot a trace over that trace's spectrogram.

    :param trace: Trace to plot.
    :param cmap: Colormap for the spectrogram.
    :param wlen: Spectrogram window length in seconds.
    :param log: Use a logarithmic frequency axis.
    :param trc: Colour of the trace overlay.
    :param tralpha: Opacity of the trace overlay.
    :param size: Figure size in inches (used only when axes is None).
    :param axes: Axes to plot into; when None a new figure is shown
        instead of returned.
    :param title: Whether to add a station/channel/time title.
    :returns: (spectrogram axes, trace axes) when axes was given, else
        None (the figure is shown).
    """
    import matplotlib.pyplot as plt
    if not axes:
        fig = plt.figure(figsize=size)
        ax1 = fig.add_subplot(111)
    else:
        ax1 = axes
    trace.spectrogram(wlen=wlen, log=log, show=False, cmap=cmap, axes=ax1)
    fig = plt.gcf()
    # Overlay the waveform on a twinned amplitude axis.
    ax2 = ax1.twinx()
    y = trace.data
    x = np.linspace(0, len(y) / trace.stats.sampling_rate, len(y))
    ax2.plot(x, y, color=trc, linewidth=2.0, alpha=tralpha)
    ax2.set_xlim(min(x), max(x))
    # Double the amplitude limits so the waveform sits inside the panel.
    ax2.set_ylim(min(y) * 2, max(y) * 2)
    if title:
        ax1.set_title(' '.join(
            [trace.stats.station, trace.stats.channel,
             trace.stats.starttime.datetime.strftime('%Y/%m/%d %H:%M:%S')]))
    if not axes:
        fig.set_size_inches(size)
        fig.show()
    else:
        return ax1, ax2
Function to plot a trace over that traces spectrogram .
14,523
def subspace_detector_plot(detector, stachans, size, **kwargs):
    """
    Plotting for the subspace detector class.

    Draws the detector's basis vectors: one column per station-channel
    (a single column when the detector is multiplexed), one row per
    subspace dimension.

    :param detector: Subspace detector to plot.
    :param stachans: 'all' or a list of (station, channel) tuples.
    :param size: Figure size in inches.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    import matplotlib.pyplot as plt
    if stachans == 'all' and not detector.multiplex:
        stachans = detector.stachans
    elif detector.multiplex:
        stachans = [('multi', ' ')]
    if np.isinf(detector.dimension):
        # Unbounded dimension: plot one row per design-set event instead.
        msg = ' '.join(
            ['Infinite subspace dimension. Only plotting as many',
             'dimensions as events in design set'])
        warnings.warn(msg)
        nrows = detector.v[0].shape[1]
    else:
        nrows = detector.dimension
    fig, axes = plt.subplots(nrows=nrows, ncols=len(stachans),
                             sharex=True, sharey=True, figsize=size)
    x = np.arange(len(detector.u[0]), dtype=np.float32)
    if detector.multiplex:
        # Multiplexed data interleave all channels, so the effective
        # sampling rate scales with the channel count.
        x /= len(detector.stachans) * detector.sampling_rate
    else:
        x /= detector.sampling_rate
    for column, stachan in enumerate(stachans):
        channel = detector.u[column]
        for row, vector in enumerate(channel.T[0:nrows]):
            # plt.subplots squeezes the axes array, so indexing depends
            # on the grid shape.
            if len(stachans) == 1:
                if nrows == 1:
                    axis = axes
                else:
                    axis = axes[row]
            else:
                axis = axes[row, column]
            if row == 0:
                axis.set_title('.'.join(stachan))
            axis.plot(x, vector, 'k', linewidth=1.1)
            if column == 0:
                axis.set_ylabel('Basis %s' % (row + 1), rotation=0)
            if row == nrows - 1:
                axis.set_xlabel('Time (s)')
            axis.set_yticks([])
    plt.subplots_adjust(hspace=0.05)
    plt.subplots_adjust(wspace=0.05)
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plotting for the subspace detector class .
14,524
def subspace_fc_plot(detector, stachans, size, **kwargs):
    """
    Plot the fractional energy capture of the detector for all events in
    the design set.

    One subplot per station-channel (single subplot when multiplexed);
    grey lines are individual design-set events, the red line is the
    average.

    :param detector: Subspace detector to plot.
    :param stachans: 'all' or a list of (station, channel) tuples.
    :param size: Figure size in inches.
    :returns: Figure or None, as decided by _finalise_figure kwargs.
    """
    import matplotlib.pyplot as plt
    if stachans == 'all' and not detector.multiplex:
        stachans = detector.stachans
    elif detector.multiplex:
        stachans = [('multi', ' ')]
    # Factors of the channel count, used to choose a near-square grid.
    pfs = []
    for x in range(1, len(stachans)):
        if len(stachans) % x == 0:
            pfs.append(x)
    if stachans == [('multi', ' ')]:
        ncols = 1
    else:
        ncols = min(pfs, key=lambda x: abs(
            (np.floor(np.sqrt(len(stachans))) - x)))
    nrows = len(stachans) // ncols
    fig, axes = plt.subplots(nrows=nrows, ncols=ncols, sharex=True,
                             sharey=True, figsize=size, squeeze=False)
    for column, axis in enumerate(axes.reshape(-1)):
        axis.set_title('.'.join(stachans[column]))
        # Rebuild sigma * V to recover per-event projection coefficients.
        sig = diagsvd(detector.sigma[column], detector.u[column].shape[0],
                      detector.v[column].shape[0])
        A = np.dot(sig, detector.v[column])
        if detector.dimension > max(detector.v[column].shape) or \
                detector.dimension == np.inf:
            dim = max(detector.v[column].shape) + 1
        else:
            dim = detector.dimension + 1
        av_fc_dict = {i: [] for i in range(dim)}
        for ai in A.T:
            fcs = []
            for j in range(dim):
                # Energy captured by the first j basis vectors.
                av_fc_dict[j].append(float(np.dot(ai[:j].T, ai[:j])))
                fcs.append(float(np.dot(ai[:j].T, ai[:j])))
            axis.plot(fcs, color='grey')
        avg = [np.average(_dim[1]) for _dim in av_fc_dict.items()]
        axis.plot(avg, color='red', linewidth=3.)
        if column % ncols == 0 or column == 0:
            axis.set_ylabel('Frac. E Capture (Fc)')
        if column + 1 > len(stachans) - ncols:
            axis.set_xlabel('Subspace Dimension')
    plt.subplots_adjust(hspace=0.2)
    plt.subplots_adjust(wspace=0.2)
    fig = _finalise_figure(fig=fig, **kwargs)
    return fig
Plot the fractional energy capture of the detector for all events in the design set.
14,525
def _match_filter_plot(stream, cccsum, template_names, rawthresh, plotdir,
                       plot_format, i):
    """
    Plotting function for match_filter.

    Decimates the data and the correlation sum for plotting speed, then
    saves a triple_plot (seismogram, correlogram, histogram) to plotdir.

    :param stream: Stream that was scanned; only the first trace is
        drawn.
    :param cccsum: Cross-correlation sum array.
    :param template_names: list of template names.
    :param rawthresh: Detection threshold to draw.
    :param plotdir: Directory to save the figure into.
    :param plot_format: File extension for the saved figure.
    :param i: Index of the template that produced cccsum.
    """
    import matplotlib.pyplot as plt
    plt.ioff()
    stream_plot = copy.deepcopy(stream[0])
    # Downsample data for plotting.
    stream_plot = _plotting_decimation(stream_plot, 10e5, 4)
    cccsum_plot = Trace(cccsum)
    cccsum_plot.stats.sampling_rate = stream[0].stats.sampling_rate
    # Resample the correlation sum to 10 Hz for the histogram panel...
    cccsum_hist = cccsum_plot.copy()
    cccsum_hist = cccsum_hist.decimate(
        int(stream[0].stats.sampling_rate / 10)).data
    # ...and keep the extreme value per chunk for the correlogram panel.
    cccsum_plot = chunk_data(cccsum_plot, 10, 'Maxabs').data
    # Enforce a common length across the three panels.
    stream_plot.data = stream_plot.data[0:len(cccsum_plot)]
    cccsum_plot = cccsum_plot[0:len(stream_plot.data)]
    cccsum_hist = cccsum_hist[0:len(stream_plot.data)]
    plot_name = (plotdir + os.sep + 'cccsum_plot_' + template_names[i] +
                 '_' + stream[0].stats.starttime.datetime.strftime(
                     '%Y-%m-%d') + '.' + plot_format)
    triple_plot(cccsum=cccsum_plot, cccsum_hist=cccsum_hist,
                trace=stream_plot, threshold=rawthresh, save=True,
                savefile=plot_name)
Plotting function for match_filter .
14,526
def _plotting_decimation ( trace , max_len = 10e5 , decimation_step = 4 ) : trace_len = trace . stats . npts while trace_len > max_len : trace . decimate ( decimation_step ) trace_len = trace . stats . npts return trace
Decimate data until required length reached .
14,527
def make_images_responsive(app, doctree):
    """Add the Bootstrap ``img-responsive`` class to images inside figures.

    Sphinx event handler; figures already carrying the 'thumbnail' class
    are left untouched.
    """
    for fig in doctree.traverse(condition=nodes.figure):
        if 'thumbnail' in fig['classes']:
            continue
        for img in fig.traverse(condition=nodes.image):
            img['classes'].append('img-responsive')
Add Bootstrap img - responsive class to images .
14,528
def _manual_overrides(_cache_date=None):
    """Read the overrides file of projects known to support Python 3.

    Tries GitHub first; falls back to the copy bundled with the package.

    :param _cache_date: unused here; presumably lets callers bust a cache
        keyed on the arguments -- TODO confirm against the caller.
    :return: frozenset of canonicalized project names.
    """
    log = logging.getLogger('ciu')
    request = requests.get("https://raw.githubusercontent.com/brettcannon/"
                           "caniusepython3/master/caniusepython3/overrides.json")
    if request.status_code == 200:
        log.info("Overrides loaded from GitHub and cached")
        overrides = request.json()
    else:
        # Network failed or file moved: use the packaged copy.
        log.info("Overrides loaded from included package data and cached")
        raw_bytes = pkgutil.get_data(__name__, 'overrides.json')
        overrides = json.loads(raw_bytes.decode('utf-8'))
    return frozenset(map(packaging.utils.canonicalize_name, overrides.keys()))
Read the overrides file .
14,529
def supports_py3(project_name):
    """Check with PyPI whether a project supports Python 3.

    HTTP errors (status >= 400) are treated optimistically: the project is
    assumed to be ported.

    :param project_name: name of the project on PyPI.
    :return: True if any trove classifier declares Python 3 support.
    """
    log = logging.getLogger("ciu")
    log.info("Checking {} ...".format(project_name))
    request = requests.get("https://pypi.org/pypi/{}/json".format(project_name))
    if request.status_code >= 400:
        log = logging.getLogger("ciu")
        log.warning("problem fetching {}, assuming ported ({})".format(
            project_name, request.status_code))
        return True
    response = request.json()
    return any(c.startswith("Programming Language :: Python :: 3")
               for c in response["info"]["classifiers"])
Check with PyPI if a project supports Python 3 .
14,530
def check(requirements_paths=[], metadata=[], projects=[]):
    """Return True if all of the specified dependencies support Python 3.

    NOTE(review): mutable default arguments; harmless here because they
    are only iterated, never mutated.

    :param requirements_paths: paths to pip requirements files.
    :param metadata: PEP 426 metadata file contents (strings).
    :param projects: explicit project names.
    """
    dependencies = []
    dependencies.extend(projects_.projects_from_requirements(requirements_paths))
    dependencies.extend(projects_.projects_from_metadata(metadata))
    dependencies.extend(projects)
    manual_overrides = pypi.manual_overrides()
    for dependency in dependencies:
        if dependency in manual_overrides:
            # Known-good regardless of its PyPI classifiers.
            continue
        elif not pypi.supports_py3(dependency):
            return False
    return True
Return True if all of the specified dependencies have been ported to Python 3 .
14,531
def projects_from_cli(args):
    """Take CLI arguments and create a list of specified project names.

    :param args: argv-style argument list (excluding the program name).
    :return: list of canonicalized project names gathered from requirements
        files, metadata files and explicit --projects flags.
    """
    description = ('Determine if a set of project dependencies will work with '
                   'Python 3')
    parser = argparse.ArgumentParser(description=description)
    req_help = 'path(s) to a pip requirements file (e.g. requirements.txt)'
    parser.add_argument('--requirements', '-r', nargs='+', default=(),
                        help=req_help)
    meta_help = 'path(s) to a PEP 426 metadata file (e.g. PKG-INFO, pydist.json)'
    parser.add_argument('--metadata', '-m', nargs='+', default=(),
                        help=meta_help)
    parser.add_argument('--projects', '-p', nargs='+', default=(),
                        help='name(s) of projects to test for Python 3 support')
    parser.add_argument('--verbose', '-v', action='store_true',
                        help='verbose output (e.g. list compatibility overrides)')
    parsed = parser.parse_args(args)
    # At least one source of projects is required.
    if not (parsed.requirements or parsed.metadata or parsed.projects):
        parser.error("Missing 'requirements', 'metadata', or 'projects'")
    projects = []
    if parsed.verbose:
        logging.getLogger('ciu').setLevel(logging.INFO)
    projects.extend(projects_.projects_from_requirements(parsed.requirements))
    metadata = []
    for metadata_path in parsed.metadata:
        with io.open(metadata_path) as file:
            metadata.append(file.read())
    projects.extend(projects_.projects_from_metadata(metadata))
    projects.extend(map(packaging.utils.canonicalize_name, parsed.projects))
    return projects
Take arguments through the CLI can create a list of specified projects .
14,532
def message(blockers):
    """Create a sequence of key messages based on what is blocking.

    :param blockers: iterable of blocker chains (each an iterable of
        project names); empty means nothing blocks the transition.
    :return: a celebratory single-message list when nothing blocks, else a
        (need, can_port) pair of summary strings.
    """
    if not blockers:
        encoding = getattr(sys.stdout, 'encoding', '')
        # Only emit the party-popper when stdout can render it.
        flair = ("\U0001F389 "
                 if encoding and encoding.lower() == 'utf-8' else '')
        return [flair + 'You have 0 projects blocking you from using Python 3!']
    flattened_blockers = {blocker for chain in blockers for blocker in chain}
    blocker_count = len(flattened_blockers)
    blocker_plural = 's' if blocker_count != 1 else ''
    formatted_need = ('You need {0} project{1} to transition to Python 3.'
                      .format(blocker_count, blocker_plural))
    top_count = len(blockers)
    formatted_can_port = (
        'Of {0} {1} project{2}, {3} {4} no direct dependencies blocking '
        '{5} transition:'.format(
            'those' if blocker_count != 1 else 'that',
            blocker_count,
            blocker_plural,
            top_count,
            'have' if top_count != 1 else 'has',
            'their' if top_count != 1 else 'its'))
    return formatted_need, formatted_can_port
Create a sequence of key messages based on what is blocking .
14,533
def pprint_blockers(blockers):
    """Pretty print blockers into a sequence of strings.

    Chains are ordered by their reversed tuples so related roots group
    together; chains longer than one element are rendered as
    "leaf (which is blocking a, which is blocking b)".
    """
    formatted = []
    for chain in sorted(blockers, key=lambda chain: tuple(reversed(chain))):
        line = chain[0]
        if len(chain) > 1:
            line += ' (which is blocking {})'.format(
                ', which is blocking '.join(chain[1:]))
        formatted.append(line)
    return formatted
Pretty print blockers into a sequence of strings .
14,534
def check(projects):
    """Check the specified projects for Python 3 compatibility, printing a report.

    :param projects: iterable of top-level project names.
    :return: True when nothing blocks the transition to Python 3.
    """
    log = logging.getLogger('ciu')
    log.info('{0} top-level projects to check'.format(len(projects)))
    print('Finding and checking dependencies ...')
    blockers = dependencies.blockers(projects)
    print('')
    for line in message(blockers):
        print(line)
    print('')
    for line in pprint_blockers(blockers):
        print(' ', line)
    return len(blockers) == 0
Check the specified projects for Python 3 compatibility .
14,535
def reasons_to_paths(reasons):
    """Calculate the dependency paths to the reasons of the blockers.

    :param reasons: mapping of blocked project -> the project blocking it.
    :return: set of tuples, each a chain from a leaf blocker up to its root.
    :raises CircularDependencyError: when a dependency cycle is detected.
    """
    # Leaf blockers: blocked projects that are not themselves a reason.
    leaves = set(reasons.keys()) - set(reasons.values())
    paths = set()
    for leaf in leaves:
        chain = [leaf]
        parent = reasons[leaf]
        while parent:
            if parent in chain:
                raise CircularDependencyError(
                    dict(parent=parent, blocker=leaf, path=chain))
            chain.append(parent)
            parent = reasons.get(parent)
        paths.add(tuple(chain))
    return paths
Calculate the dependency paths to the reasons of the blockers .
14,536
def dependencies(project_name):
    """Get the canonicalized dependency names for a project.

    :return: set of names, or None when the project cannot be located.
    """
    log = logging.getLogger('ciu')
    log.info('Locating dependencies for {}'.format(project_name))
    located = distlib.locators.locate(project_name, prereleases=True)
    if not located:
        log.warning('{0} not found'.format(project_name))
        return None
    return {packaging.utils.canonicalize_name(pypi.just_name(dep))
            for dep in located.run_requires}
Get the dependencies for a project .
14,537
def projects_from_requirements(requirements):
    """Extract project names from pip requirements files.

    Comment text is stripped; unparsable, unnamed and URL-based
    requirements are skipped with a warning.

    :param requirements: iterable of requirements-file paths.
    :return: frozenset of canonicalized project names.
    """
    log = logging.getLogger('ciu')
    valid_reqs = []
    for requirements_path in requirements:
        with io.open(requirements_path) as file:
            requirements_text = file.read()
        # NOTE(review): as written this pattern matches a literal backslash
        # followed by zero or more 's' characters -- presumably intended to
        # join line continuations (r"\\\s*"); confirm against upstream
        # before changing.
        requirements_text = re.sub(r"\\s*", "", requirements_text)
        # Strip '#' comments.
        requirements_text = re.sub(r"#.*", "", requirements_text)
        reqs = []
        for line in requirements_text.splitlines():
            if not line:
                continue
            try:
                reqs.append(packaging.requirements.Requirement(line))
            except packaging.requirements.InvalidRequirement:
                log.warning('Skipping {0!r}: could not parse requirement'.format(line))
        for req in reqs:
            if not req.name:
                log.warning('A requirement lacks a name '
                            '(e.g. no `#egg` on a `file:` path)')
            elif req.url:
                log.warning('Skipping {0}: URL-specified projects unsupported'.format(req.name))
            else:
                valid_reqs.append(req.name)
    return frozenset(map(packaging.utils.canonicalize_name, valid_reqs))
Extract the project dependencies from a Requirements specification .
14,538
def projects_from_metadata(metadata):
    """Extract canonicalized project dependencies from metadata specs.

    :param metadata: iterable of metadata file contents (strings).
    :return: frozenset of canonicalized project names.
    """
    projects = []
    for data in metadata:
        meta = distlib.metadata.Metadata(fileobj=io.StringIO(data))
        projects.extend(pypi.just_name(project) for project in meta.run_requires)
    return frozenset(map(packaging.utils.canonicalize_name, projects))
Extract the project dependencies from a metadata spec .
14,539
def locateOnScreen(image, minSearchTime=0, **kwargs):
    """Repeatedly take screenshots and try to locate ``image`` on screen.

    :param image: the image (filename or Image object) to locate.
    :param minSearchTime: seconds to keep retrying; the default of 0
        performs a single search.
    :return: the match region, or None when not found -- unless
        USE_IMAGE_NOT_FOUND_EXCEPTION is set, in which case the
        ImageNotFoundException propagates.
    """
    start = time.time()
    while True:
        try:
            screenshotIm = screenshot(region=None)
            retVal = locate(image, screenshotIm, **kwargs)
            try:
                # Release the screenshot's underlying file handle, if any.
                screenshotIm.fp.close()
            except AttributeError:
                # Some platforms' screenshots have no fp attribute --
                # presumably in-memory grabs; TODO confirm.
                pass
            if retVal or time.time() - start > minSearchTime:
                return retVal
        except ImageNotFoundException:
            if time.time() - start > minSearchTime:
                if USE_IMAGE_NOT_FOUND_EXCEPTION:
                    raise
                else:
                    return None
minSearchTime - amount of time in seconds to repeat taking screenshots and trying to locate a match . The default of 0 performs a single search .
14,540
def getDetails(self, ip_address=None):
    """Get details for the given IP address (or the caller's own) as a Details object."""
    raw_details = self._requestDetails(ip_address)
    # Enrich the raw API payload with derived fields.
    raw_details['country_name'] = self.countries.get(raw_details.get('country'))
    raw_details['ip_address'] = ipaddress.ip_address(raw_details.get('ip'))
    raw_details['latitude'], raw_details['longitude'] = self._read_coords(
        raw_details.get('loc'))
    return Details(raw_details)
Get details for specified IP address as a Details object .
14,541
def _requestDetails(self, ip_address=None):
    """Fetch (and memoize in ``self.cache``) IP data from the IPinfo API.

    :raises RequestQuotaExceededError: on HTTP 429.
    :raises requests.HTTPError: on any other error status.
    """
    if ip_address not in self.cache:
        url = self.API_URL
        if ip_address:
            url += '/' + ip_address
        response = requests.get(url, headers=self._get_headers(),
                                **self.request_options)
        if response.status_code == 429:
            raise RequestQuotaExceededError()
        response.raise_for_status()
        self.cache[ip_address] = response.json()
    return self.cache[ip_address]
Get IP address data by sending request to IPinfo API .
14,542
def _get_headers ( self ) : headers = { 'user-agent' : 'IPinfoClient/Python{version}/1.0' . format ( version = sys . version_info [ 0 ] ) , 'accept' : 'application/json' } if self . access_token : headers [ 'authorization' ] = 'Bearer {}' . format ( self . access_token ) return headers
Built headers for request to IPinfo API .
14,543
def _read_country_names ( self , countries_file = None ) : if not countries_file : countries_file = os . path . join ( os . path . dirname ( __file__ ) , self . COUNTRY_FILE_DEFAULT ) with open ( countries_file ) as f : countries_json = f . read ( ) return json . loads ( countries_json )
Read list of countries from specified country file or default file .
14,544
def is_secure_option(self, section, option):
    """Return True when (section, option) is stored as a secure option.

    An option is secure when the value in the config file is the
    placeholder, meaning the real value lives in the keyring.
    """
    if not self.has_section(section):
        return False
    if not self.has_option(section, option):
        return False
    # Read the raw stored value via the base class to bypass our own
    # keyring-aware get().
    if ConfigParser.get(self, section, option) == self._secure_placeholder:
        return True
    return False
Test an option to see if it is secured or not .
14,545
def items(self, section):
    """Get all (option, value) pairs for a section, decrypting secure options.

    The sentinel string '!!False!!' round-trips back to boolean False.
    """
    items = []
    for k, v in ConfigParser.items(self, section):
        if self.is_secure_option(section, k):
            # Replace the placeholder with the keyring-backed value.
            v = self.get(section, k)
        if v == '!!False!!':
            v = False
        items.append((k, v))
    return items
Get all items for a section . Subclassed to ensure secure items come back with the unencrypted data .
14,546
def set(self, section, option, value):
    """Set an option value, routing secure options to the keyring backend.

    Falsy values are stored as the sentinel '!!False!!'.
    """
    if not value:
        value = '!!False!!'
    if self.is_secure_option(section, option):
        self.set_secure(section, option, value)
    else:
        ConfigParser.set(self, section, option, value)
Set an option value . Knows how to set options properly marked as secure .
14,547
def set_secure(self, section, option, value):
    """Set an option and mark it as secure.

    With keyring support the real value is queued for the secure backend
    (written on save) and only the placeholder is stored in the config
    file; without it the plain value is stored.
    """
    if self.keyring_available:
        s_option = "%s%s" % (section, option)
        # Defer the keyring write until save; remember the pending value.
        self._unsaved[s_option] = ('set', value)
        value = self._secure_placeholder
    ConfigParser.set(self, section, option, value)
Set an option and mark it as secure .
14,548
def get(self, section, option, *args):
    """Get an option value, resolving secure options to their plain text.

    Pending (unsaved) secure values take precedence over the keyring.
    The sentinel '!!False!!' is returned as boolean False.
    """
    if self.is_secure_option(section, option) and self.keyring_available:
        s_option = "%s%s" % (section, option)
        if self._unsaved.get(s_option, [''])[0] == 'set':
            res = self._unsaved[s_option][1]
        else:
            res = keyring.get_password(self.keyring_name, s_option)
    else:
        res = ConfigParser.get(self, section, option, *args)
    if res == '!!False!!':
        return False
    return res
Get option value from section . If an option is secure populates the plain text .
14,549
def remove_option(self, section, option):
    """Remove the option from the parser and queue its deletion from the
    secure storage backend (applied on save)."""
    if self.is_secure_option(section, option) and self.keyring_available:
        s_option = "%s%s" % (section, option)
        self._unsaved[s_option] = ('delete', None)
    ConfigParser.remove_option(self, section, option)
Removes the option from ConfigParser as well as the secure storage backend
14,550
def encrypt_account(self, id):
    """Ensure all secured fields of account ``id`` are stored encrypted.

    :return: self, for chaining.
    """
    for key in self.secured_field_names:
        value = self.parser.get(id, key)
        self.parser.set_secure(id, key, value)
    return self
Make sure that certain fields are encrypted .
14,551
def is_encrypted_account(self, id):
    """Return True when every secured field of account ``id`` is encrypted."""
    for key in self.secured_field_names:
        if not self.parser.is_secure_option(id, key):
            return False
    return True
Are all fields for the account id encrypted?
14,552
def save(self):
    """Write pending changes to the config file.

    :return: self, for chaining.
    """
    with open(self.file_name, 'w') as fp:
        self.parser.write(fp)
    return self
Save changes to config file
14,553
def authenticate(self, username=None, password=None):
    """Test authentication credentials against the institution's OFX endpoint.

    Uses the stored credentials unless both ``username`` and ``password``
    are supplied.

    :return: 1 on success (server status code 0).
    :raises ValueError: with the server's status message on failure.
    """
    u = self.username
    p = self.password
    if username and password:
        u = username
        p = password
    client = self.client()
    query = client.authenticated_query(username=u, password=p)
    res = client.post(query)
    ofx = BeautifulSoup(res, 'lxml')
    sonrs = ofx.find('sonrs')
    code = int(sonrs.find('code').contents[0].strip())
    try:
        status = sonrs.find('message').contents[0].strip()
    except Exception:
        # Some institutions omit the <message> element entirely.
        status = ''
    if code == 0:
        return 1
    raise ValueError(status)
Test the authentication credentials
14,554
def download(self, days=60):
    """Download the OFX statement covering the last ``days`` days.

    :return: StringIO containing the raw OFX response.
    """
    days_ago = datetime.datetime.now() - datetime.timedelta(days=days)
    as_of = time.strftime("%Y%m%d", days_ago.timetuple())
    query = self._download_query(as_of=as_of)
    response = self.institution.client().post(query)
    return StringIO(response)
Download the OFX response for the given time range .
14,555
def combined_download(accounts, days=60):
    """Download OFX statements for several accounts and combine them into one.

    Each account's payload (the part between <OFX> and </OFX>) is spliced
    under a single OFX envelope.

    :return: StringIO positioned at the start of the combined document.
    """
    client = Client(institution=None)
    out_file = StringIO()
    out_file.write(client.header())
    out_file.write('<OFX>')
    for a in accounts:
        ofx = a.download(days=days).read()
        # Keep only the body between the <OFX> tags.
        stripped = ofx.partition('<OFX>')[2].partition('</OFX>')[0]
        out_file.write(stripped)
    out_file.write("</OFX>")
    out_file.seek(0)
    return out_file
Download OFX files and combine them into one
14,556
def bank_account_query(self, number, date, account_type, bank_id):
    """Build an authenticated bank account statement request."""
    return self.authenticated_query(self._bareq(number, date, account_type,
                                                bank_id))
Bank account statement request
14,557
def credit_card_account_query(self, number, date):
    """Build an authenticated credit-card statement request."""
    return self.authenticated_query(self._ccreq(number, date))
CC Statement request
14,558
def _do_post(self, query, extra_headers=[]):
    """POST an OFX query to the institution over HTTPS.

    The request is assembled manually (putrequest/putheader) so the header
    set is fully under our control.

    NOTE(review): mutable default for ``extra_headers``; harmless here as
    it is only iterated.

    :param query: OFX request body (str).
    :param extra_headers: extra (name, value) header pairs to send.
    :return: (HTTPResponse, decoded-body) tuple.
    """
    i = self.institution
    logging.debug('posting data to %s' % i.url)
    garbage, path = splittype(i.url)
    host, selector = splithost(path)
    h = HTTPSConnection(host, timeout=60)
    h.putrequest('POST', selector, skip_host=True,
                 skip_accept_encoding=True)
    headers = [('Content-Type', 'application/x-ofx'),
               ('Host', host),
               ('Content-Length', len(query)),
               ('Connection', 'Keep-Alive')]
    if self.accept:
        headers.append(('Accept', self.accept))
    if self.user_agent:
        headers.append(('User-Agent', self.user_agent))
    for ehname, ehval in extra_headers:
        headers.append((ehname, ehval))
    logging.debug('---- request headers ----')
    for hname, hval in headers:
        logging.debug('%s: %s', hname, hval)
        h.putheader(hname, hval)
    logging.debug('---- request body (query) ----')
    logging.debug(query)
    h.endheaders(query.encode())
    res = h.getresponse()
    # Decode leniently; OFX servers are not reliably well-behaved.
    response = res.read().decode('ascii', 'ignore')
    logging.debug('---- response ----')
    logging.debug(res.__dict__)
    logging.debug('Headers: %s', res.getheaders())
    logging.debug(response)
    res.close()
    return res, response
Do a POST to the Institution .
14,559
def _build_raw_headers ( self , headers : Dict ) -> Tuple : raw_headers = [ ] for k , v in headers . items ( ) : raw_headers . append ( ( k . encode ( 'utf8' ) , v . encode ( 'utf8' ) ) ) return tuple ( raw_headers )
Convert a dict of headers to a tuple of tuples
14,560
async def _request_mock(self, orig_self: ClientSession, method: str,
                        url: 'Union[URL, str]', *args: Tuple,
                        **kwargs: Dict) -> 'ClientResponse':
    """Return a mocked response object or raise ClientConnectionError.

    URLs matching a registered pass-through prefix are forwarded to the
    real (unpatched) ClientSession method. Every mocked call is recorded
    in ``self.requests`` for later assertions.
    """
    url = normalize_url(merge_params(url, kwargs.get('params')))
    url_str = str(url)
    for prefix in self._passthrough:
        if url_str.startswith(prefix):
            # Not mocked: delegate to the original implementation.
            return (await self.patcher.temp_original(orig_self, method, url,
                                                     *args, **kwargs))
    response = await self.match(method, url, **kwargs)
    if response is None:
        raise ClientConnectionError('Connection refused: {} {}'.format(
            method, url))
    self._responses.append(response)
    # Record the call keyed by (method, normalized url).
    key = (method, url)
    self.requests.setdefault(key, [])
    self.requests[key].append(RequestCall(args, kwargs))
    return response
Return mocked response object or raise connection error .
14,561
def normalize_url(url: 'Union[URL, str]') -> 'URL':
    """Normalize a url to make comparisons deterministic (query sorted)."""
    url = URL(url)
    return url.with_query(urlencode(sorted(parse_qsl(url.query_string))))
Normalize url to make comparisons .
14,562
def promote_alert_to_case(self, alert_id):
    """Promote a TheHive alert to a case via the TheHive API.

    :param alert_id: identifier of the alert to promote.
    :return: the ``requests`` response from the API call.
    :raises AlertException: when the HTTP request fails.
    """
    req = self.url + "/api/alert/{}/createCase".format(alert_id)
    try:
        return requests.post(req,
                             headers={'Content-Type': 'application/json'},
                             proxies=self.proxies,
                             auth=self.auth,
                             verify=self.cert,
                             data=json.dumps({}))
    except requests.exceptions.RequestException as the_exception:
        raise AlertException(
            "Couldn't promote alert to case: {}".format(the_exception))
    # Fix: the original trailing `return None` was unreachable (both paths
    # above return or raise) and has been removed.
This uses the TheHive API to promote an alert to a case
14,563
def prepare(*, operation='CREATE', signers=None, recipients=None, asset=None,
            metadata=None, inputs=None):
    """Prepare a transaction payload ready to be fulfilled.

    Thin keyword-only wrapper around :func:`prepare_transaction`.
    """
    return prepare_transaction(
        operation=operation,
        signers=signers,
        recipients=recipients,
        asset=asset,
        metadata=metadata,
        inputs=inputs,
    )
Prepares a transaction payload ready to be fulfilled .
14,564
def send_async(self, transaction, headers=None):
    """Submit a transaction to the Federation in 'async' mode.

    The node acknowledges receipt without waiting for the transaction to
    be committed.
    """
    return self.transport.forward_request(
        method='POST', path=self.path, json=transaction,
        params={'mode': 'async'}, headers=headers)
Submit a transaction to the Federation with the mode async .
14,565
def retrieve(self, txid, headers=None):
    """Retrieve the transaction with the given id.

    :param txid: id of the transaction to fetch (string).
    :param headers: optional HTTP headers to send with the request.
    :return: whatever the transport returns for the node's response.
    """
    path = self.path + txid
    # Bug fix: the caller-supplied ``headers`` were previously discarded
    # (``headers=None`` was hard-coded in the forwarded request).
    return self.transport.forward_request(method='GET', path=path,
                                          headers=headers)
Retrieves the transaction with the given id .
14,566
def get(self, public_key, spent=None, headers=None):
    """Get transaction outputs filtered by owner public key.

    :param public_key: base58-encoded ed25519 public key associated with
        transaction output ownership.
    :param spent: optional filter on whether outputs are spent.
    :param headers: optional HTTP headers for the request.
    """
    query = {'public_key': public_key, 'spent': spent}
    return self.transport.forward_request(method='GET', path=self.path,
                                          params=query, headers=headers)
Get transaction outputs by public key . The public_key parameter must be a base58 encoded ed25519 public key associated with transaction output ownership .
14,567
def retrieve(self, block_height, headers=None):
    """Retrieve the block with the given ``block_height``.

    :param block_height: height of the block to fetch (string, since it is
        concatenated onto the endpoint path).
    :param headers: optional HTTP headers to send with the request.
    :return: whatever the transport returns for the node's response.
    """
    path = self.path + block_height
    # Bug fix: the caller-supplied ``headers`` were previously discarded
    # (``headers=None`` was hard-coded in the forwarded request).
    return self.transport.forward_request(method='GET', path=path,
                                          headers=headers)
Retrieves the block with the given block_height .
14,568
def get(self, *, search, limit=0, headers=None):
    """Retrieve the assets matching a given text search string.

    :param search: text search string.
    :param limit: maximum number of results (0 means no limit).
    :param headers: optional HTTP headers for the request.
    """
    query = {'search': search, 'limit': limit}
    return self.transport.forward_request(method='GET', path=self.path,
                                          params=query, headers=headers)
Retrieves the assets that match a given text search string .
14,569
def prepare_create_transaction(*, signers, recipients=None, asset=None,
                               metadata=None):
    """Prepare a CREATE transaction payload ready to be fulfilled.

    ``signers`` and ``recipients`` given as single values or tuples are
    normalized to lists; by default the signers themselves receive the
    asset (one share).
    """
    if not isinstance(signers, (list, tuple)):
        signers = [signers]
    elif isinstance(signers, tuple):
        signers = list(signers)
    if not recipients:
        # Default: the signers receive one share.
        recipients = [(signers, 1)]
    elif not isinstance(recipients, (list, tuple)):
        recipients = [([recipients], 1)]
    elif isinstance(recipients, tuple):
        recipients = [(list(recipients), 1)]
    transaction = Transaction.create(
        signers,
        recipients,
        metadata=metadata,
        asset=asset['data'] if asset else None,
    )
    return transaction.to_dict()
Prepares a CREATE transaction payload ready to be fulfilled .
14,570
def prepare_transfer_transaction(*, inputs, recipients, asset, metadata=None):
    """Prepare a TRANSFER transaction payload ready to be fulfilled.

    :param inputs: one or more unspent output dicts to transfer.
    :param recipients: recipient public key(s), or (keys, amount) tuples.
    :param asset: dict carrying the 'id' of the asset being transferred.
    :param metadata: optional metadata dict.
    """
    if not isinstance(inputs, (list, tuple)):
        inputs = (inputs,)
    if not isinstance(recipients, (list, tuple)):
        recipients = [([recipients], 1)]
    if isinstance(recipients, tuple):
        recipients = [(list(recipients), 1)]
    # Rebuild Input objects from their dict representation.
    fulfillments = [
        Input(_fulfillment_from_details(input_['fulfillment']),
              input_['owners_before'],
              fulfills=TransactionLink(
                  txid=input_['fulfills']['transaction_id'],
                  output=input_['fulfills']['output_index']))
        for input_ in inputs]
    transaction = Transaction.transfer(
        fulfillments,
        recipients,
        asset_id=asset['id'],
        metadata=metadata,
    )
    return transaction.to_dict()
Prepares a TRANSFER transaction payload ready to be fulfilled .
14,571
def fulfill_transaction(transaction, *, private_keys):
    """Fulfill (sign) the given transaction with the given private key(s).

    :raises MissingPrivateKeyError: when a required private key is absent.
    :return: the signed transaction as a dict.
    """
    if not isinstance(private_keys, (list, tuple)):
        private_keys = [private_keys]
    if isinstance(private_keys, tuple):
        private_keys = list(private_keys)
    transaction_obj = Transaction.from_dict(transaction)
    try:
        signed_transaction = transaction_obj.sign(private_keys)
    except KeypairMismatchException as exc:
        raise MissingPrivateKeyError('A private key is missing!') from exc
    return signed_transaction.to_dict()
Fulfills the given transaction .
14,572
def normalize_url(node):
    """Normalize the given node url to '<scheme>://<host>:<port><path>' form.

    Falls back to DEFAULT_NODE when no node is given; bare 'host[:port]'
    strings get an http scheme, and the scheme's default port is filled in
    when missing.
    """
    if not node:
        node = DEFAULT_NODE
    elif '://' not in node:
        # No scheme: prefix '//' so urlparse treats the string as a netloc.
        node = '//{}'.format(node)
    parts = urlparse(node, scheme='http', allow_fragments=False)
    port = parts.port or _get_default_port(parts.scheme)
    host_port = '{}:{}'.format(parts.hostname, port)
    return urlunparse((parts.scheme, host_port, parts.path, '', '', ''))
Normalizes the given node url
14,573
def normalize_node(node, headers=None):
    """Normalize a node, given as a str or dict, into {'endpoint', 'headers'}.

    Per-node headers (in a dict node) override the shared ``headers``.
    """
    base_headers = {} if headers is None else headers
    if isinstance(node, str):
        return {'endpoint': normalize_url(node), 'headers': base_headers}
    merged = dict(base_headers)
    merged.update(node.get('headers', {}))
    return {'endpoint': normalize_url(node['endpoint']), 'headers': merged}
Normalizes given node as str or dict with headers
14,574
def normalize_nodes(*nodes, headers=None):
    """Normalize the given node strings/dicts into a tuple of node dicts.

    With no nodes given, a single normalized DEFAULT_NODE is returned.
    """
    if not nodes:
        return (normalize_node(DEFAULT_NODE, headers),)
    return tuple(normalize_node(node, headers) for node in nodes)
Normalizes given dict or array of driver nodes
14,575
def request(self, method, *, path=None, json=None, params=None, headers=None,
            timeout=None, backoff_cap=None, **kwargs):
    """Perform an HTTP request, honouring this connection's backoff window.

    Sleeps until the backoff delay has elapsed (raising TimeoutError when
    the caller's budget would expire first), performs the request, then
    updates the backoff state based on success/failure.

    :raises TimeoutError: when the pending backoff exceeds ``timeout``.
    :raises ConnectionError: re-raised after recording the failure.
    """
    backoff_timedelta = self.get_backoff_timedelta()
    if timeout is not None and timeout < backoff_timedelta:
        raise TimeoutError
    if backoff_timedelta > 0:
        time.sleep(backoff_timedelta)
    connExc = None
    # Deduct the time spent backing off from the caller's budget.
    timeout = timeout if timeout is None else timeout - backoff_timedelta
    try:
        response = self._request(
            method=method,
            timeout=timeout,
            url=self.node_url + path if path else self.node_url,
            json=json,
            params=params,
            headers=headers,
            **kwargs,
        )
    except ConnectionError as err:
        connExc = err
        raise err
    finally:
        # Success resets the backoff; failure grows it (up to backoff_cap).
        self.update_backoff_time(success=connExc is None,
                                 backoff_cap=backoff_cap)
    return response
Performs an HTTP request with the given parameters .
14,576
def pick(self, connections):
    """Pick the connection with the earliest backoff time.

    A connection with no backoff time sorts first (treated as
    ``datetime.min``).
    """
    if len(connections) == 1:
        return connections[0]

    def earliest(conn):
        backoff = conn.backoff_time
        return datetime.min if backoff is None else backoff

    return min(*connections, key=earliest)
Picks a connection with the earliest backoff time .
14,577
def forward_request(self, method, path=None, json=None, params=None,
                    headers=None):
    """Forward an HTTP request to the pool's nodes until one succeeds.

    Retries on ConnectionError, deducting each attempt's elapsed time from
    the remaining ``timeout``. When the budget is exhausted, a
    TimeoutError carrying the collected connection errors is raised.
    """
    error_trace = []
    timeout = self.timeout
    backoff_cap = NO_TIMEOUT_BACKOFF_CAP if timeout is None else timeout / 2
    while timeout is None or timeout > 0:
        connection = self.connection_pool.get_connection()
        start = time()
        try:
            response = connection.request(
                method=method,
                path=path,
                params=params,
                json=json,
                headers=headers,
                timeout=timeout,
                backoff_cap=backoff_cap,
            )
        except ConnectionError as err:
            # Remember the failure and try the next connection.
            error_trace.append(err)
            continue
        else:
            return response.data
        finally:
            # Charge the elapsed time against the budget even on failure.
            elapsed = time() - start
            if timeout is not None:
                timeout -= elapsed
    raise TimeoutError(error_trace)
Makes HTTP requests to the configured nodes .
14,578
def inputs_valid(self, outputs=None):
    """Validate the transaction's Inputs against the given Outputs.

    CREATE transactions spend no real outputs, so placeholder values are
    supplied; TRANSFER inputs are checked against the condition URIs of
    the outputs they spend.

    :raises TypeError: for an unknown operation.
    """
    if self.operation == Transaction.CREATE:
        # One dummy entry per input; _inputs_valid only needs the count.
        return self._inputs_valid(['dummyvalue' for _ in self.inputs])
    elif self.operation == Transaction.TRANSFER:
        return self._inputs_valid([output.fulfillment.condition_uri
                                   for output in outputs])
    else:
        allowed_ops = ', '.join(self.__class__.ALLOWED_OPERATIONS)
        raise TypeError('`operation` must be one of {}'.format(allowed_ops))
Validates the Inputs in the Transaction against given Outputs .
14,579
def _input_valid(input_, operation, message, output_condition_uri=None):
    """Validate a single Input against a single Output.

    Checks that the fulfillment parses and validates against the signed
    message, and (for non-CREATE operations) that it matches the spent
    output's condition URI.
    """
    ccffill = input_.fulfillment
    try:
        parsed_ffill = Fulfillment.from_uri(ccffill.serialize_uri())
    except (TypeError, ValueError, ParsingError, ASN1DecodeError,
            ASN1EncodeError):
        # Malformed fulfillment: cannot be valid.
        return False
    if operation == Transaction.CREATE:
        # CREATE inputs don't spend an output; nothing to match.
        output_valid = True
    else:
        output_valid = output_condition_uri == ccffill.condition_uri
    # The signed message also covers the spent output's identity, if any.
    message = sha3_256(message.encode())
    if input_.fulfills:
        message.update('{}{}'.format(
            input_.fulfills.txid, input_.fulfills.output).encode())
    ffill_valid = parsed_ffill.validate(message=message.digest())
    return output_valid and ffill_valid
Validates a single Input against a single Output .
14,580
def read(self):
    """Split ``self.text`` into fragments, labelling quotes/signatures/hidden.

    Lines are scanned bottom-up; fragments are accumulated reversed and
    flipped back into original order at the end.

    :return: self, with ``fragments`` populated.
    """
    self.found_visible = False
    # Collapse multi-line quote headers onto one line so the quote
    # detection regexes can match them.
    is_multi_quote_header = self.MULTI_QUOTE_HDR_REGEX_MULTILINE.search(self.text)
    if is_multi_quote_header:
        self.text = self.MULTI_QUOTE_HDR_REGEX.sub(
            is_multi_quote_header.groups()[0].replace('\n', ''), self.text)
    # Ensure long underscore/dash rules start on their own line.
    # Bug fix: ``re.MULTILINE`` was previously passed positionally as the
    # ``count`` argument of re.sub (silently limiting substitutions to 8);
    # it is now passed as ``flags``.
    self.text = re.sub('([^\n])(?=\n ?[_-]{7,})', '\\1\n', self.text,
                       flags=re.MULTILINE)
    self.lines = self.text.split('\n')
    self.lines.reverse()
    for line in self.lines:
        self._scan_line(line)
    self._finish_fragment()
    self.fragments.reverse()
    return self
Creates a new fragment for each line and labels it as a signature , quote , or hidden .
14,581
def reply(self):
    """Capture the reply message within the email.

    Joins the content of all fragments that are neither hidden nor quoted.
    """
    visible = [frag.content for frag in self.fragments
               if not (frag.hidden or frag.quoted)]
    return '\n'.join(visible)
Captures reply message within email
14,582
def _scan_line(self, line):
    """Classify one line (scanned bottom-up) and grow or finish fragments.

    A blank line following a signature-looking line closes the current
    fragment as a signature. Lines matching the current fragment's
    quoted/header state are appended to it; otherwise a new fragment is
    started.
    """
    is_quote_header = self.QUOTE_HDR_REGEX.match(line) is not None
    is_quoted = self.QUOTED_REGEX.match(line) is not None
    is_header = is_quote_header or self.HEADER_REGEX.match(line) is not None
    if self.fragment and len(line.strip()) == 0:
        # Blank line: check whether the fragment so far looks like a signature.
        if self.SIG_REGEX.match(self.fragment.lines[-1].strip()):
            self.fragment.signature = True
            self._finish_fragment()
    if self.fragment and (
            (self.fragment.headers == is_header and
             self.fragment.quoted == is_quoted) or
            (self.fragment.quoted and
             (is_quote_header or len(line.strip()) == 0))):
        self.fragment.lines.append(line)
    else:
        self._finish_fragment()
        self.fragment = Fragment(is_quoted, line, headers=is_header)
Reviews each line in email message and determines fragment type
14,583
def finish(self):
    """Finalize the fragment: join its (bottom-up) lines into content.

    The line buffer is released once the content string is built.
    """
    self._content = '\n'.join(reversed(self.lines))
    self.lines = None
Creates block of content with lines belonging to fragment .
14,584
def check_in_out_dates(self):
    """Validate reservation dates.

    Check-in must not precede the order date, and check-out must not
    precede check-in.

    :raises ValidationError: when either ordering constraint is violated.
    """
    if self.checkout and self.checkin:
        if self.checkin < self.date_order:
            raise ValidationError(_('Check-in date should be greater than \
the current date.'))
        if self.checkout < self.checkin:
            raise ValidationError(_('Check-out date should be greater \
than Check-in date.'))
When date_order is less then check - in date or Checkout date should be greater than the check - in date .
14,585
def send_reservation_maill(self):
    """Open a mail-compose window preloaded with the hotel reservation
    email template for this (single) reservation.

    NOTE(review): the trailing 'maill' is a typo, but renaming would break
    existing callers/XML actions.

    :return: an ir.actions.act_window descriptor for the compose wizard.
    """
    assert len(self._ids) == 1, 'This is for a single id at a time.'
    ir_model_data = self.env['ir.model.data']
    try:
        template_id = (ir_model_data.get_object_reference(
            'hotel_reservation', 'mail_template_hotel_reservation')[1])
    except ValueError:
        template_id = False
    try:
        compose_form_id = (ir_model_data.get_object_reference(
            'mail', 'email_compose_message_wizard_form')[1])
    except ValueError:
        compose_form_id = False
    ctx = dict()
    ctx.update({'default_model': 'hotel.reservation',
                'default_res_id': self._ids[0],
                'default_use_template': bool(template_id),
                'default_template_id': template_id,
                'default_composition_mode': 'comment',
                'force_send': True,
                'mark_so_as_sent': True})
    return {'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'mail.compose.message',
            'views': [(compose_form_id, 'form')],
            'view_id': compose_form_id,
            'target': 'new',
            'context': ctx,
            'force_send': True}
This function opens a window to compose an email template message loaded by default .
14,586
def walk(filesystem, top, topdown=True, onerror=None, followlinks=False):
    """Perform an os.walk-style traversal over the fake filesystem.

    Yields (dirpath, dirnames, filenames) tuples, honouring ``topdown``,
    ``onerror`` and ``followlinks`` like ``os.walk``.
    """
    def do_walk(top_dir, top_most=False):
        top_dir = filesystem.normpath(top_dir)
        # Symlinked directories below the start point are skipped unless
        # followlinks is set; the very top is always entered.
        if not top_most and not followlinks and filesystem.islink(top_dir):
            return
        try:
            top_contents = _classify_directory_contents(filesystem, top_dir)
        except OSError as exc:
            # Mirror os.walk: report unreadable directories via onerror.
            top_contents = None
            if onerror is not None:
                onerror(exc)
        if top_contents is not None:
            if topdown:
                yield top_contents
            for directory in top_contents[1]:
                if not followlinks and filesystem.islink(directory):
                    continue
                for contents in do_walk(filesystem.joinpaths(top_dir,
                                                             directory)):
                    yield contents
            if not topdown:
                yield top_contents
    return do_walk(top, top_most=True)
Perform an os . walk operation over the fake filesystem .
14,587
def inode(self):
    """Return the inode number of the entry, stat-ing lazily on first use."""
    if self._inode is not None:
        return self._inode
    # stat() fills in self._inode as a side effect.
    self.stat(follow_symlinks=False)
    return self._inode
Return the inode number of the entry .
14,588
def walk(self, top, topdown=True, onerror=None, followlinks=False):
    """Perform a walk operation over the fake filesystem.

    Delegates to the module-level walk() with this wrapper's filesystem.
    """
    return walk(self.filesystem, top, topdown, onerror, followlinks)
Perform a walk operation over the fake filesystem .
14,589
def add(clss, func, deprecated_name):
    """Attach a deprecated alias for ``func`` to the class ``clss``.

    Calling the alias emits a deprecation warning (via Deprecator) and
    forwards all arguments to ``func``.
    """
    @Deprecator(func.__name__, deprecated_name)
    def _old_function(*args, **kwargs):
        return func(*args, **kwargs)
    setattr(clss, deprecated_name, _old_function)
Add the deprecated version of a member function to the given class . Gives a deprecation warning on usage .
14,590
def init_module(filesystem):
    """Initialize the fake pathlib module with the fake file system.

    Wires the filesystem into FakePath and both pure-path flavours.
    """
    FakePath.filesystem = filesystem
    FakePathlibModule.PureWindowsPath._flavour = _FakeWindowsFlavour(filesystem)
    FakePathlibModule.PurePosixPath._flavour = _FakePosixFlavour(filesystem)
Initializes the fake module with the fake file system .
14,591
def splitroot(self, path, sep=None):
    """Split *path* into drive root and rest.

    Delegates to the Windows or POSIX implementation depending on the
    filesystem type; *sep* defaults to the filesystem's path separator.
    """
    actual_sep = self.filesystem.path_separator if sep is None else sep
    if self.filesystem.is_windows_fs:
        return self._splitroot_with_drive(path, actual_sep)
    return self._splitroot_posix(path, actual_sep)
Split path into drive root and rest .
14,592
def casefold_parts(self, parts):
    """Return lower-cased copies of *parts* on a Windows filesystem;
    on any other filesystem return *parts* unchanged."""
    if not self.filesystem.is_windows_fs:
        return parts
    return [part.lower() for part in parts]
Return the lower-case version of parts on a Windows filesystem; otherwise return parts unchanged.
14,593
def resolve(self, path, strict):
    """Make *path* absolute, resolving any symlinks.

    Dispatches to the OS-specific implementation based on the
    filesystem type.
    """
    if not self.filesystem.is_windows_fs:
        return self._resolve_posix(path, strict)
    return self._resolve_windows(path, strict)
Make the path absolute resolving any symlinks .
14,594
def open(self, mode='r', buffering=-1, encoding=None,
         errors=None, newline=None):
    """Open the file pointed to by this path and return a fake file object.

    Delegates to ``_raise_closed()`` if the path object has been closed.
    """
    if self._closed:
        self._raise_closed()
    opener = FakeFileOpen(self.filesystem, use_io=True)
    return opener(self._path(), mode, buffering, encoding, errors, newline)
Open the file pointed by this path and return a fake file object .
14,595
def touch(self, mode=0o666, exist_ok=True):
    """Create a fake file for the path if it does not exist.

    If the file already exists and *exist_ok* is True, its timestamps
    are refreshed; otherwise an EEXIST error is raised.  Delegates to
    ``_raise_closed()`` if the path object has been closed.
    """
    if self._closed:
        self._raise_closed()
    if not self.exists():
        # Create an empty file, then apply the requested access mode.
        self.open('w').close()
        self.chmod(mode)
    elif exist_ok:
        self.filesystem.utime(self._path(), None)
    else:
        self.filesystem.raise_os_error(errno.EEXIST, self._path())
Create a fake file for the path with the given access mode if it doesn't exist.
14,596
def _copy_module(old):
    """Re-import *old* and return the fresh module object.

    The cached entry is temporarily removed from ``sys.modules`` so that
    ``__import__`` builds a new module, then the original cache entry is
    restored (even if it was absent, in which case None is stored back,
    as in the original implementation).
    """
    cached = sys.modules.pop(old.__name__, None)
    fresh = __import__(old.__name__)
    sys.modules[old.__name__] = cached
    return fresh
Re-imports the module and returns the new module object.
14,597
def contents(self):
    """Return the contents as a string, decoded with the original
    encoding (falling back to the locale's preferred encoding)."""
    data = self.byte_contents
    # On Python 2, or when contents are not bytes, return them as-is.
    if IS_PY2 or not isinstance(data, bytes):
        return data
    encoding = self.encoding or locale.getpreferredencoding(False)
    return data.decode(encoding, errors=self.errors)
Return the contents as string with the original encoding .
14,598
def set_large_file_size(self, st_size):
    """Set ``self.st_size`` and drop the contents (``_byte_contents``).

    Disk usage is re-accounted for the new size; *st_size* must be a
    positive integer (validated by ``_check_positive_int``).
    """
    self._check_positive_int(st_size)
    if self.st_size:
        # Release the previously accounted space first.
        self.size = 0
    filesystem = self.filesystem
    if filesystem:
        filesystem.change_disk_usage(st_size, self.name, self.st_dev)
    self._byte_contents = None
    self.st_size = st_size
Sets the self.st_size attribute and replaces self._byte_contents with None.
14,599
def _set_initial_contents(self, contents):
    """Set the file contents and size after initial file creation.

    Returns True if the stored byte contents actually changed.
    """
    encoded = self._encode_contents(contents)
    changed = encoded != self._byte_contents
    new_size = len(encoded)
    if self._byte_contents:
        # Zero out the old contents so disk usage accounting starts clean;
        # this must happen before st_size is read below.
        self.size = 0
    old_size = self.st_size or 0
    self.filesystem.change_disk_usage(
        new_size - old_size, self.name, self.st_dev)
    self._byte_contents = encoded
    self.st_size = new_size
    self.epoch += 1
    return changed
Sets the file contents and size . Called internally after initial file creation .