idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
800
def router_elements ( self ) : elements = devicetools . Elements ( ) for router in self . _router_numbers : element = self . _get_routername ( router ) inlet = self . _get_nodename ( router ) try : outlet = self . _get_nodename ( self . _up2down [ router ] ) except TypeError : outlet = self . last_node elements += devi...
A |Elements| collection of all routing basins .
801
def nodes ( self ) : return ( devicetools . Nodes ( self . node_prefix + routers for routers in self . _router_numbers ) + devicetools . Node ( self . last_node ) )
A |Nodes| collection of all required nodes .
802
def selection ( self ) : return selectiontools . Selection ( self . selection_name , self . nodes , self . elements )
A complete |Selection| object of all supplying and routing elements and required nodes .
803
def chars2str ( chars ) -> List [ str ] : strings = collections . deque ( ) for subchars in chars : substrings = collections . deque ( ) for char in subchars : if char : substrings . append ( char . decode ( 'utf-8' ) ) else : substrings . append ( '' ) strings . append ( '' . join ( substrings ) ) return list ( string...
Inversion function of function |str2chars| .
804
def create_dimension ( ncfile , name , length ) -> None : try : ncfile . createDimension ( name , length ) except BaseException : objecttools . augment_excmessage ( 'While trying to add dimension `%s` with length `%d` ' 'to the NetCDF file `%s`' % ( name , length , get_filepath ( ncfile ) ) )
Add a new dimension with the given name and length to the given NetCDF file .
805
def create_variable ( ncfile , name , datatype , dimensions ) -> None : default = fillvalue if ( datatype == 'f8' ) else None try : ncfile . createVariable ( name , datatype , dimensions = dimensions , fill_value = default ) ncfile [ name ] . long_name = name except BaseException : objecttools . augment_excmessage ( 'W...
Add a new variable with the given name datatype and dimensions to the given NetCDF file .
806
def query_variable ( ncfile , name ) -> netcdf4 . Variable : try : return ncfile [ name ] except ( IndexError , KeyError ) : raise OSError ( 'NetCDF file `%s` does not contain variable `%s`.' % ( get_filepath ( ncfile ) , name ) )
Return the variable with the given name from the given NetCDF file .
807
def query_timegrid ( ncfile ) -> timetools . Timegrid : timepoints = ncfile [ varmapping [ 'timepoints' ] ] refdate = timetools . Date . from_cfunits ( timepoints . units ) return timetools . Timegrid . from_timepoints ( timepoints = timepoints [ : ] , refdate = refdate , unit = timepoints . units . strip ( ) . split (...
Return the |Timegrid| defined by the given NetCDF file .
808
def query_array ( ncfile , name ) -> numpy . ndarray : variable = query_variable ( ncfile , name ) maskedarray = variable [ : ] fillvalue_ = getattr ( variable , '_FillValue' , numpy . nan ) if not numpy . isnan ( fillvalue_ ) : maskedarray [ maskedarray . mask ] = numpy . nan return maskedarray . data
Return the data of the variable with the given name from the given NetCDF file .
809
def log ( self , sequence , infoarray ) -> None : if isinstance ( sequence , sequencetools . ModelSequence ) : descr = sequence . descr_model else : descr = 'node' if self . _isolate : descr = '%s_%s' % ( descr , sequence . descr_sequence ) if ( ( infoarray is not None ) and ( infoarray . info [ 'type' ] != 'unmodified...
Prepare a |NetCDFFile| object suitable for the given |IOSequence| object when necessary and pass the given arguments to its |NetCDFFile . log| method .
810
def read ( self ) -> None : for folder in self . folders . values ( ) : for file_ in folder . values ( ) : file_ . read ( )
Call method |NetCDFFile . read| of all handled |NetCDFFile| objects .
811
def write ( self ) -> None : if self . folders : init = hydpy . pub . timegrids . init timeunits = init . firstdate . to_cfunits ( 'hours' ) timepoints = init . to_timepoints ( 'hours' ) for folder in self . folders . values ( ) : for file_ in folder . values ( ) : file_ . write ( timeunits , timepoints )
Call method |NetCDFFile . write| of all handled |NetCDFFile| objects .
812
def filenames ( self ) -> Tuple [ str , ... ] : return tuple ( sorted ( set ( itertools . chain ( * ( _ . keys ( ) for _ in self . folders . values ( ) ) ) ) ) )
A |tuple| of names of all handled |NetCDFFile| objects .
813
def log ( self , sequence , infoarray ) -> None : aggregated = ( ( infoarray is not None ) and ( infoarray . info [ 'type' ] != 'unmodified' ) ) descr = sequence . descr_sequence if aggregated : descr = '_' . join ( [ descr , infoarray . info [ 'type' ] ] ) if descr in self . variables : var_ = self . variables [ descr...
Pass the given |IoSequence| to a suitable instance of a |NetCDFVariableBase| subclass .
814
def filepath ( self ) -> str : return os . path . join ( self . _dirpath , self . name + '.nc' )
The NetCDF file path .
815
def read ( self ) -> None : try : with netcdf4 . Dataset ( self . filepath , "r" ) as ncfile : timegrid = query_timegrid ( ncfile ) for variable in self . variables . values ( ) : variable . read ( ncfile , timegrid ) except BaseException : objecttools . augment_excmessage ( f'While trying to read data from NetCDF file...
Open an existing NetCDF file temporarily and call method |NetCDFVariableDeep . read| of all handled |NetCDFVariableBase| objects .
816
def write ( self , timeunit , timepoints ) -> None : with netcdf4 . Dataset ( self . filepath , "w" ) as ncfile : ncfile . Conventions = 'CF-1.6' self . _insert_timepoints ( ncfile , timepoints , timeunit ) for variable in self . variables . values ( ) : variable . write ( ncfile )
Open a new NetCDF file temporarily and call method |NetCDFVariableBase . write| of all handled |NetCDFVariableBase| objects .
817
def get_index ( self , name_subdevice ) -> int : try : return self . dict_ [ name_subdevice ] except KeyError : raise OSError ( 'No data for sequence `%s` and (sub)device `%s` ' 'in NetCDF file `%s` available.' % ( self . name_sequence , name_subdevice , self . name_ncfile ) )
Item access to the wrapped |dict| object with a specialized error message .
818
def log ( self , sequence , infoarray ) -> None : descr_device = sequence . descr_device self . sequences [ descr_device ] = sequence self . arrays [ descr_device ] = infoarray
Log the given |IOSequence| object either for reading or writing data .
819
def sort_timeplaceentries ( self , timeentry , placeentry ) -> Tuple [ Any , Any ] : if self . _timeaxis : return placeentry , timeentry return timeentry , placeentry
Return a |tuple| containing the given timeentry and placeentry sorted in agreement with the currently selected timeaxis .
820
def get_timeplaceslice ( self , placeindex ) -> Union [ Tuple [ slice , int ] , Tuple [ int , slice ] ] : return self . sort_timeplaceentries ( slice ( None ) , int ( placeindex ) )
Return a |tuple| for indexing a complete time series of a certain location available in |NetCDFVariableBase . array| .
821
def subdevicenames ( self ) -> Tuple [ str , ... ] : self : NetCDFVariableBase return tuple ( self . sequences . keys ( ) )
A |tuple| containing the device names .
822
def get_slices ( self , idx , shape ) -> Tuple [ IntOrSlice , ... ] : slices = list ( self . get_timeplaceslice ( idx ) ) for length in shape : slices . append ( slice ( 0 , length ) ) return tuple ( slices )
Return a |tuple| of one |int| and some |slice| objects to accesses all values of a certain device within |NetCDFVariableDeep . array| .
823
def shape ( self ) -> Tuple [ int , ... ] : nmb_place = len ( self . sequences ) nmb_time = len ( hydpy . pub . timegrids . init ) nmb_others = collections . deque ( ) for sequence in self . sequences . values ( ) : nmb_others . append ( sequence . shape ) nmb_others_max = tuple ( numpy . max ( nmb_others , axis = 0 ) ...
Required shape of |NetCDFVariableDeep . array| .
824
def array ( self ) -> numpy . ndarray : array = numpy . full ( self . shape , fillvalue , dtype = float ) for idx , ( descr , subarray ) in enumerate ( self . arrays . items ( ) ) : sequence = self . sequences [ descr ] array [ self . get_slices ( idx , sequence . shape ) ] = subarray return array
The series data of all logged |IOSequence| objects contained in one single |numpy . ndarray| .
825
def shape ( self ) -> Tuple [ int , int ] : return self . sort_timeplaceentries ( len ( hydpy . pub . timegrids . init ) , len ( self . sequences ) )
Required shape of |NetCDFVariableAgg . array| .
826
def array ( self ) -> numpy . ndarray : array = numpy . full ( self . shape , fillvalue , dtype = float ) for idx , subarray in enumerate ( self . arrays . values ( ) ) : array [ self . get_timeplaceslice ( idx ) ] = subarray return array
The aggregated data of all logged |IOSequence| objects contained in one single |numpy . ndarray| object .
827
def shape ( self ) -> Tuple [ int , int ] : return self . sort_timeplaceentries ( len ( hydpy . pub . timegrids . init ) , sum ( len ( seq ) for seq in self . sequences . values ( ) ) )
Required shape of |NetCDFVariableFlat . array| .
828
def array ( self ) -> numpy . ndarray : array = numpy . full ( self . shape , fillvalue , dtype = float ) idx0 = 0 idxs : List [ Any ] = [ slice ( None ) ] for seq , subarray in zip ( self . sequences . values ( ) , self . arrays . values ( ) ) : for prod in self . _product ( seq . shape ) : subsubarray = subarray [ tu...
The series data of all logged |IOSequence| objects contained in one single |numpy . ndarray| object .
829
def update ( self ) : maxdt = self . subpars . pars . control . maxdt seconds = self . simulationstep . seconds self . value = numpy . ceil ( seconds / maxdt )
Determine the number of substeps .
830
def update ( self ) : con = self . subpars . pars . control der = self . subpars for ( toy , qs ) in con . q : setattr ( self , str ( toy ) , 2. * con . v + der . seconds / der . nmbsubsteps * qs ) self . refresh ( )
Calulate the auxilary term .
831
def prepare_io_example_1 ( ) -> Tuple [ devicetools . Nodes , devicetools . Elements ] : from hydpy import TestIO TestIO . clear ( ) from hydpy . core . filetools import SequenceManager hydpy . pub . sequencemanager = SequenceManager ( ) with TestIO ( ) : hydpy . pub . sequencemanager . inputdirpath = 'inputpath' hydpy...
Prepare an IO example configuration .
832
def get_postalcodes_around_radius ( self , pc , radius ) : postalcodes = self . get ( pc ) if postalcodes is None : raise PostalCodeNotFoundException ( "Could not find postal code you're searching for." ) else : pc = postalcodes [ 0 ] radius = float ( radius ) earth_radius = 6371 dlat = radius / earth_radius dlon = asi...
Bounding box calculations updated from pyzipcode
833
def get_all_team_ids ( ) : df = get_all_player_ids ( "all_data" ) df = pd . DataFrame ( { "TEAM_NAME" : df . TEAM_NAME . unique ( ) , "TEAM_ID" : df . TEAM_ID . unique ( ) } ) return df
Returns a pandas DataFrame with all Team IDs
834
def get_team_id ( team_name ) : df = get_all_team_ids ( ) df = df [ df . TEAM_NAME == team_name ] if len ( df ) == 0 : er = "Invalid team name or there is no team with that name." raise ValueError ( er ) team_id = df . TEAM_ID . iloc [ 0 ] return team_id
Returns the team ID associated with the team name that is passed in .
835
def get_game_logs ( self ) : logs = self . response . json ( ) [ 'resultSets' ] [ 0 ] [ 'rowSet' ] headers = self . response . json ( ) [ 'resultSets' ] [ 0 ] [ 'headers' ] df = pd . DataFrame ( logs , columns = headers ) df . GAME_DATE = pd . to_datetime ( df . GAME_DATE ) return df
Returns team game logs as a pandas DataFrame
836
def get_game_id ( self , date ) : df = self . get_game_logs ( ) game_id = df [ df . GAME_DATE == date ] . Game_ID . values [ 0 ] return game_id
Returns the Game ID associated with the date that is passed in .
837
def update_params ( self , parameters ) : self . url_paramaters . update ( parameters ) self . response = requests . get ( self . base_url , params = self . url_paramaters , headers = HEADERS ) self . response . raise_for_status ( ) return self
Pass in a dictionary to update url parameters for NBA stats API
838
def get_shots ( self ) : shots = self . response . json ( ) [ 'resultSets' ] [ 0 ] [ 'rowSet' ] headers = self . response . json ( ) [ 'resultSets' ] [ 0 ] [ 'headers' ] return pd . DataFrame ( shots , columns = headers )
Returns the shot chart data as a pandas DataFrame .
839
def unsubscribe ( self , subscription , max = None ) : if max is None : self . _send ( 'UNSUB %d' % subscription . sid ) self . _subscriptions . pop ( subscription . sid ) else : subscription . max = max self . _send ( 'UNSUB %d %s' % ( subscription . sid , max ) )
Unsubscribe will remove interest in the given subject . If max is provided an automatic Unsubscribe that is processed by the server when max messages have been received
840
def request ( self , subject , callback , msg = None ) : inbox = self . _build_inbox ( ) s = self . subscribe ( inbox , callback ) self . unsubscribe ( s , 1 ) self . publish ( subject , msg , inbox ) return s
ublish a message with an implicit inbox listener as the reply . Message is optional .
841
def draw_court ( ax = None , color = 'gray' , lw = 1 , outer_lines = False ) : if ax is None : ax = plt . gca ( ) hoop = Circle ( ( 0 , 0 ) , radius = 7.5 , linewidth = lw , color = color , fill = False ) backboard = Rectangle ( ( - 30 , - 12.5 ) , 60 , 0 , linewidth = lw , color = color ) outer_box = Rectangle ( ( - 8...
Returns an axes with a basketball court drawn onto to it .
842
def shot_chart ( x , y , kind = "scatter" , title = "" , color = "b" , cmap = None , xlim = ( - 250 , 250 ) , ylim = ( 422.5 , - 47.5 ) , court_color = "gray" , court_lw = 1 , outer_lines = False , flip_court = False , kde_shade = True , gridsize = None , ax = None , despine = False , ** kwargs ) : if ax is None : ax =...
Returns an Axes object with player shots plotted .
843
def shot_chart_jointplot ( x , y , data = None , kind = "scatter" , title = "" , color = "b" , cmap = None , xlim = ( - 250 , 250 ) , ylim = ( 422.5 , - 47.5 ) , court_color = "gray" , court_lw = 1 , outer_lines = False , flip_court = False , size = ( 12 , 11 ) , space = 0 , despine = False , joint_kws = None , margina...
Returns a seaborn JointGrid using sns . jointplot
844
def heatmap ( x , y , z , title = "" , cmap = plt . cm . YlOrRd , bins = 20 , xlim = ( - 250 , 250 ) , ylim = ( 422.5 , - 47.5 ) , facecolor = 'lightgray' , facecolor_alpha = 0.4 , court_color = "black" , court_lw = 0.5 , outer_lines = False , flip_court = False , ax = None , ** kwargs ) : mean , xedges , yedges , binn...
Returns an AxesImage object that contains a heatmap .
845
def bokeh_draw_court ( figure , line_color = 'gray' , line_width = 1 ) : figure . circle ( x = 0 , y = 0 , radius = 7.5 , fill_alpha = 0 , line_color = line_color , line_width = line_width ) figure . line ( x = range ( - 30 , 31 ) , y = - 12.5 , line_color = line_color ) figure . rect ( x = 0 , y = 47.5 , width = 160 ,...
Returns a figure with the basketball court lines drawn onto it
846
def bokeh_shot_chart ( data , x = "LOC_X" , y = "LOC_Y" , fill_color = "#1f77b4" , scatter_size = 10 , fill_alpha = 0.4 , line_alpha = 0.4 , court_line_color = 'gray' , court_line_width = 1 , hover_tool = False , tooltips = None , ** kwargs ) : source = ColumnDataSource ( data ) fig = figure ( width = 700 , height = 65...
Returns a figure with both FGA and basketball court lines drawn onto it .
847
def _kmedoids_run ( X , n_clusters , distance , max_iter , tol , rng ) : membs = np . empty ( shape = X . shape [ 0 ] , dtype = int ) centers = kmeans . _kmeans_init ( X , n_clusters , method = '' , rng = rng ) sse_last = 9999.9 n_iter = 0 for it in range ( 1 , max_iter ) : membs = kmeans . _assign_clusters ( X , cente...
Run a single trial of k - medoids clustering on dataset X and given number of clusters
848
def _init_mixture_params ( X , n_mixtures , init_method ) : init_priors = np . ones ( shape = n_mixtures , dtype = float ) / n_mixtures if init_method == 'kmeans' : km = _kmeans . KMeans ( n_clusters = n_mixtures , n_trials = 20 ) km . fit ( X ) init_means = km . centers_ else : inx_rand = np . random . choice ( X . sh...
Initialize mixture density parameters with equal priors random means identity covariance matrices
849
def __log_density_single ( x , mean , covar ) : n_dim = mean . shape [ 0 ] dx = x - mean covar_inv = scipy . linalg . inv ( covar ) covar_det = scipy . linalg . det ( covar ) den = np . dot ( np . dot ( dx . T , covar_inv ) , dx ) + n_dim * np . log ( 2 * np . pi ) + np . log ( covar_det ) return ( - 1 / 2 * den )
This is just a test function to calculate the normal density at x given mean and covariance matrix .
850
def _validate_params ( priors , means , covars ) : for i , ( p , m , cv ) in enumerate ( zip ( priors , means , covars ) ) : if np . any ( np . isinf ( p ) ) or np . any ( np . isnan ( p ) ) : raise ValueError ( "Component %d of priors is not valid " % i ) if np . any ( np . isinf ( m ) ) or np . any ( np . isnan ( m )...
Validation Check for M . L . paramateres
851
def fit ( self , X ) : params_dict = _fit_gmm_params ( X = X , n_mixtures = self . n_clusters , n_init = self . n_trials , init_method = self . init_method , n_iter = self . max_iter , tol = self . tol ) self . priors_ = params_dict [ 'priors' ] self . means_ = params_dict [ 'means' ] self . covars_ = params_dict [ 'co...
Fit mixture - density parameters with EM algorithm
852
def _kmeans_init ( X , n_clusters , method = 'balanced' , rng = None ) : n_samples = X . shape [ 0 ] if rng is None : cent_idx = np . random . choice ( n_samples , replace = False , size = n_clusters ) else : cent_idx = rng . choice ( n_samples , replace = False , size = n_clusters ) centers = X [ cent_idx , : ] mean_X...
Initialize k = n_clusters centroids randomly
853
def _cal_dist2center ( X , center ) : dmemb2cen = scipy . spatial . distance . cdist ( X , center . reshape ( 1 , X . shape [ 1 ] ) , metric = 'seuclidean' ) return ( np . sum ( dmemb2cen ) )
Calculate the SSE to the cluster center
854
def _kmeans_run ( X , n_clusters , max_iter , tol ) : membs = np . empty ( shape = X . shape [ 0 ] , dtype = int ) centers = _kmeans_init ( X , n_clusters ) sse_last = 9999.9 n_iter = 0 for it in range ( 1 , max_iter ) : membs = _assign_clusters ( X , centers ) centers , sse_arr = _update_centers ( X , membs , n_cluste...
Run a single trial of k - means clustering on dataset X and given number of clusters
855
def _kmeans ( X , n_clusters , max_iter , n_trials , tol ) : n_samples , n_features = X . shape [ 0 ] , X . shape [ 1 ] centers_best = np . empty ( shape = ( n_clusters , n_features ) , dtype = float ) labels_best = np . empty ( shape = n_samples , dtype = int ) for i in range ( n_trials ) : centers , labels , sse_tot ...
Run multiple trials of k - means clustering and outputt he best centers and cluster labels
856
def _cut_tree ( tree , n_clusters , membs ) : assert ( n_clusters >= 2 ) assert ( n_clusters <= len ( tree . leaves ( ) ) ) cut_centers = dict ( ) for i in range ( n_clusters - 1 ) : if i == 0 : search_set = set ( tree . children ( 0 ) ) node_set , cut_set = set ( ) , set ( ) else : search_set = node_set . union ( cut_...
Cut the tree to get desired number of clusters as n_clusters 2 < = n_desired < = n_clusters
857
def _add_tree_node ( tree , label , ilev , X = None , size = None , center = None , sse = None , parent = None ) : if size is None : size = X . shape [ 0 ] if ( center is None ) : center = np . mean ( X , axis = 0 ) if ( sse is None ) : sse = _kmeans . _cal_dist2center ( X , center ) center = list ( center ) datadict =...
Add a node to the tree if parent is not known the node is a root
858
def _bisect_kmeans ( X , n_clusters , n_trials , max_iter , tol ) : membs = np . empty ( shape = X . shape [ 0 ] , dtype = int ) centers = dict ( ) sse_arr = dict ( ) tree = treelib . Tree ( ) tree = _add_tree_node ( tree , 0 , ilev = 0 , X = X ) km = _kmeans . KMeans ( n_clusters = 2 , n_trials = n_trials , max_iter =...
Apply Bisecting Kmeans clustering to reach n_clusters number of clusters
859
def comparison_table ( self , caption = None , label = "tab:model_comp" , hlines = True , aic = True , bic = True , dic = True , sort = "bic" , descending = True ) : if sort == "bic" : assert bic , "You cannot sort by BIC if you turn it off" if sort == "aic" : assert aic , "You cannot sort by AIC if you turn it off" if...
Return a LaTeX ready table of model comparisons .
860
def plot_walks ( self , parameters = None , truth = None , extents = None , display = False , filename = None , chains = None , convolve = None , figsize = None , plot_weights = True , plot_posterior = True , log_weight = None ) : chains , parameters , truth , extents , _ = self . _sanitise ( chains , parameters , trut...
Plots the chain walk ; the parameter values as a function of step index .
861
def gelman_rubin ( self , chain = None , threshold = 0.05 ) : r if chain is None : return np . all ( [ self . gelman_rubin ( k , threshold = threshold ) for k in range ( len ( self . parent . chains ) ) ] ) index = self . parent . _get_chain ( chain ) assert len ( index ) == 1 , "Please specify only one chain, have %d ...
r Runs the Gelman Rubin diagnostic on the supplied chains .
862
def geweke ( self , chain = None , first = 0.1 , last = 0.5 , threshold = 0.05 ) : if chain is None : return np . all ( [ self . geweke ( k , threshold = threshold ) for k in range ( len ( self . parent . chains ) ) ] ) index = self . parent . _get_chain ( chain ) assert len ( index ) == 1 , "Please specify only one ch...
Runs the Geweke diagnostic on the supplied chains .
863
def get_latex_table ( self , parameters = None , transpose = False , caption = None , label = "tab:model_params" , hlines = True , blank_fill = "--" ) : if parameters is None : parameters = self . parent . _all_parameters for p in parameters : assert isinstance ( p , str ) , "Generating a LaTeX table requires all param...
Generates a LaTeX table from parameter summaries .
864
def get_summary ( self , squeeze = True , parameters = None , chains = None ) : results = [ ] if chains is None : chains = self . parent . chains else : if isinstance ( chains , ( int , str ) ) : chains = [ chains ] chains = [ self . parent . chains [ i ] for c in chains for i in self . parent . _get_chain ( c ) ] for ...
Gets a summary of the marginalised parameter distributions .
865
def get_max_posteriors ( self , parameters = None , squeeze = True , chains = None ) : results = [ ] if chains is None : chains = self . parent . chains else : if isinstance ( chains , ( int , str ) ) : chains = [ chains ] chains = [ self . parent . chains [ i ] for c in chains for i in self . parent . _get_chain ( c )...
Gets the maximum posterior point in parameter space from the passed parameters . Requires the chains to have set posterior values .
866
def get_correlations ( self , chain = 0 , parameters = None ) : parameters , cov = self . get_covariance ( chain = chain , parameters = parameters ) diag = np . sqrt ( np . diag ( cov ) ) divisor = diag [ None , : ] * diag [ : , None ] correlations = cov / divisor return parameters , correlations
Takes a chain and returns the correlation between chain parameters .
867
def get_covariance ( self , chain = 0 , parameters = None ) : index = self . parent . _get_chain ( chain ) assert len ( index ) == 1 , "Please specify only one chain, have %d chains" % len ( index ) chain = self . parent . chains [ index [ 0 ] ] if parameters is None : parameters = chain . parameters data = chain . get...
Takes a chain and returns the covariance between chain parameters .
868
def get_correlation_table ( self , chain = 0 , parameters = None , caption = "Parameter Correlations" , label = "tab:parameter_correlations" ) : parameters , cor = self . get_correlations ( chain = chain , parameters = parameters ) return self . _get_2d_latex_table ( parameters , cor , caption , label )
Gets a LaTeX table of parameter correlations .
869
def get_covariance_table ( self , chain = 0 , parameters = None , caption = "Parameter Covariance" , label = "tab:parameter_covariance" ) : parameters , cov = self . get_covariance ( chain = chain , parameters = parameters ) return self . _get_2d_latex_table ( parameters , cov , caption , label )
Gets a LaTeX table of parameter covariance .
870
def get_parameter_text ( self , lower , maximum , upper , wrap = False ) : if lower is None or upper is None : return "" upper_error = upper - maximum lower_error = maximum - lower if upper_error != 0 and lower_error != 0 : resolution = min ( np . floor ( np . log10 ( np . abs ( upper_error ) ) ) , np . floor ( np . lo...
Generates LaTeX appropriate text from marginalised parameter bounds .
871
def remove_chain ( self , chain = - 1 ) : if isinstance ( chain , str ) or isinstance ( chain , int ) : chain = [ chain ] chain = sorted ( [ i for c in chain for i in self . _get_chain ( c ) ] ) [ : : - 1 ] assert len ( chain ) == len ( list ( set ( chain ) ) ) , "Error, you are trying to remove a chain more than once....
Removes a chain from ChainConsumer . Calling this will require any configurations set to be redone!
872
def configure_truth ( self , ** kwargs ) : if kwargs . get ( "ls" ) is None and kwargs . get ( "linestyle" ) is None : kwargs [ "ls" ] = "--" kwargs [ "dashes" ] = ( 3 , 3 ) if kwargs . get ( "color" ) is None : kwargs [ "color" ] = "#000000" self . config_truth = kwargs self . _configured_truth = True return self
Configure the arguments passed to the axvline and axhline methods when plotting truth values .
873
def divide_chain ( self , chain = 0 ) : indexes = self . _get_chain ( chain ) con = ChainConsumer ( ) for index in indexes : chain = self . chains [ index ] assert chain . walkers is not None , "The chain you have selected was not added with any walkers!" num_walkers = chain . walkers data = np . split ( chain . chain ...
Returns a ChainConsumer instance containing all the walks of a given chain as individual chains themselves .
874
def threshold ( args ) : if args . fpr < 0 or args . fpr > 1 : print ( "Please specify a FPR between 0 and 1" ) sys . exit ( 1 ) motifs = read_motifs ( args . pwmfile ) s = Scanner ( ) s . set_motifs ( args . pwmfile ) s . set_threshold ( args . fpr , filename = args . inputfile ) print ( "Motif\tScore\tCutoff" ) for m...
Calculate motif score threshold for a given FPR .
875
def values_to_labels ( fg_vals , bg_vals ) : y_true = np . hstack ( ( np . ones ( len ( fg_vals ) ) , np . zeros ( len ( bg_vals ) ) ) ) y_score = np . hstack ( ( fg_vals , bg_vals ) ) return y_true , y_score
Convert two arrays of values to an array of labels and an array of scores .
876
def max_enrichment ( fg_vals , bg_vals , minbg = 2 ) : scores = np . hstack ( ( fg_vals , bg_vals ) ) idx = np . argsort ( scores ) x = np . hstack ( ( np . ones ( len ( fg_vals ) ) , np . zeros ( len ( bg_vals ) ) ) ) xsort = x [ idx ] l_fg = len ( fg_vals ) l_bg = len ( bg_vals ) m = 0 s = 0 for i in range ( len ( sc...
Computes the maximum enrichment .
877
def roc_auc_xlim ( x_bla , y_bla , xlim = 0.1 ) : x = x_bla [ : ] y = y_bla [ : ] x . sort ( ) y . sort ( ) u = { } for i in x + y : u [ i ] = 1 vals = sorted ( u . keys ( ) ) len_x = float ( len ( x ) ) len_y = float ( len ( y ) ) new_x = [ ] new_y = [ ] x_p = 0 y_p = 0 for val in vals [ : : - 1 ] : while len ( x ) > ...
Computes the ROC Area Under Curve until a certain FPR value .
878
def max_fmeasure ( fg_vals , bg_vals ) : x , y = roc_values ( fg_vals , bg_vals ) x , y = x [ 1 : ] , y [ 1 : ] p = y / ( y + x ) filt = np . logical_and ( ( p * y ) > 0 , ( p + y ) > 0 ) p = p [ filt ] y = y [ filt ] f = ( 2 * p * y ) / ( p + y ) if len ( f ) > 0 : return np . nanmax ( f ) else : return None
Computes the maximum F - measure .
879
def ks_pvalue ( fg_pos , bg_pos = None ) : if len ( fg_pos ) == 0 : return 1.0 a = np . array ( fg_pos , dtype = "float" ) / max ( fg_pos ) p = kstest ( a , "uniform" ) [ 1 ] return p
Computes the Kolmogorov - Smirnov p - value of position distribution .
880
def ks_significance ( fg_pos , bg_pos = None ) : p = ks_pvalue ( fg_pos , max ( fg_pos ) ) if p > 0 : return - np . log10 ( p ) else : return np . inf
Computes the - log10 of Kolmogorov - Smirnov p - value of position distribution .
881
def setup_data ( ) : ( x_train , y_train ) , ( x_test , y_test ) = mnist . load_data ( ) if K . image_data_format ( ) == 'channels_first' : x_train = x_train . reshape ( x_train . shape [ 0 ] , 1 , img_rows , img_cols ) x_test = x_test . reshape ( x_test . shape [ 0 ] , 1 , img_rows , img_cols ) input_shape = ( 1 , img...
Load and shape data for training with Keras + Pescador .
882
def build_model ( input_shape ) : model = Sequential ( ) model . add ( Conv2D ( 32 , kernel_size = ( 3 , 3 ) , activation = 'relu' , input_shape = input_shape ) ) model . add ( Conv2D ( 64 , kernel_size = ( 3 , 3 ) , activation = 'relu' ) ) model . add ( MaxPooling2D ( pool_size = ( 2 , 2 ) ) ) model . add ( Dropout ( ...
Create a compiled Keras model .
883
def sampler ( X , y ) : X = np . atleast_2d ( X ) y = np . atleast_1d ( y ) n = X . shape [ 0 ] while True : i = np . random . randint ( 0 , n ) yield { 'X' : X [ i ] , 'y' : y [ i ] }
A basic generator for sampling data .
884
def additive_noise ( stream , key = 'X' , scale = 1e-1 ) : for data in stream : noise_shape = data [ key ] . shape noise = scale * np . random . randn ( * noise_shape ) data [ key ] = data [ key ] + noise yield data
Add noise to a data stream .
885
def parse_denovo_params ( user_params = None ) : config = MotifConfig ( ) if user_params is None : user_params = { } params = config . get_default_params ( ) params . update ( user_params ) if params . get ( "torque" ) : logger . debug ( "Using torque" ) else : logger . debug ( "Using multiprocessing" ) params [ "backg...
Return default GimmeMotifs parameters .
886
def rankagg ( df , method = "stuart" ) : rmat = pd . DataFrame ( index = df . iloc [ : , 0 ] ) step = 1 / rmat . shape [ 0 ] for col in df . columns : rmat [ col ] = pd . DataFrame ( { col : np . arange ( step , 1 + step , step ) } , index = df [ col ] ) . loc [ rmat . index ] rmat = rmat . apply ( sorted , 1 , result_...
Return aggregated ranks .
887
def data_gen ( n_ops = 100 ) : while True : X = np . random . uniform ( size = ( 64 , 64 ) ) yield dict ( X = costly_function ( X , n_ops ) , y = np . random . randint ( 10 , size = ( 1 , ) ) )
Yield data while optionally burning compute cycles .
888
def mp_calc_stats ( motifs , fg_fa , bg_fa , bg_name = None ) : try : stats = calc_stats ( motifs , fg_fa , bg_fa , ncpus = 1 ) except Exception as e : raise sys . stderr . write ( "ERROR: {}\n" . format ( str ( e ) ) ) stats = { } if not bg_name : bg_name = "default" return bg_name , stats
Parallel calculation of motif statistics .
889
def _run_tool ( job_name , t , fastafile , params ) : try : result = t . run ( fastafile , params , mytmpdir ( ) ) except Exception as e : result = ( [ ] , "" , "{} failed to run: {}" . format ( job_name , e ) ) return job_name , result
Parallel motif prediction .
890
def predict_motifs ( infile , bgfile , outfile , params = None , stats_fg = None , stats_bg = None ) : required_params = [ "tools" , "available_tools" , "analysis" , "genome" , "use_strand" , "max_time" ] if params is None : params = parse_denovo_params ( ) else : for p in required_params : if p not in params : params ...
Predict motifs input is a FASTA - file
891
def add_motifs ( self , args ) : self . lock . acquire ( ) if args is None or len ( args ) != 2 or len ( args [ 1 ] ) != 3 : try : job = args [ 0 ] logger . warn ( "job %s failed" , job ) self . finished . append ( job ) except Exception : logger . warn ( "job failed" ) return job , ( motifs , stdout , stderr ) = args ...
Add motifs to the result object .
892
def wait_for_stats ( self ) : logging . debug ( "waiting for statistics to finish" ) for job in self . stat_jobs : job . get ( ) sleep ( 2 )
Make sure all jobs are finished .
893
def add_stats ( self , args ) : bg_name , stats = args logger . debug ( "Stats: %s %s" , bg_name , stats ) for motif_id in stats . keys ( ) : if motif_id not in self . stats : self . stats [ motif_id ] = { } self . stats [ motif_id ] [ bg_name ] = stats [ motif_id ]
Callback to add motif statistics .
894
def prepare_denovo_input_narrowpeak ( inputfile , params , outdir ) : bedfile = os . path . join ( outdir , "input.from.narrowpeak.bed" ) p = re . compile ( r'^(#|track|browser)' ) width = int ( params [ "width" ] ) logger . info ( "preparing input (narrowPeak to BED, width %s)" , width ) warn_no_summit = True with ope...
Prepare a narrowPeak file for de novo motif prediction .
895
def prepare_denovo_input_bed ( inputfile , params , outdir ) : logger . info ( "preparing input (BED)" ) width = int ( params [ "width" ] ) bedfile = os . path . join ( outdir , "input.bed" ) write_equalwidth_bedfile ( inputfile , width , bedfile ) abs_max = int ( params [ "abs_max" ] ) fraction = float ( params [ "fra...
Prepare a BED file for de novo motif prediction .
896
def create_background ( bg_type , fafile , outfile , genome = "hg18" , width = 200 , nr_times = 10 , custom_background = None ) : width = int ( width ) config = MotifConfig ( ) fg = Fasta ( fafile ) if bg_type in [ "genomic" , "gc" ] : if not genome : logger . error ( "Need a genome to create background" ) sys . exit (...
Create background of a specific type .
897
def create_backgrounds ( outdir , background = None , genome = "hg38" , width = 200 , custom_background = None ) : if background is None : background = [ "random" ] nr_sequences = { } if "gc" in background : pred_bg = "gc" else : pred_bg = background [ 0 ] create_background ( pred_bg , os . path . join ( outdir , "pred...
Create different backgrounds for motif prediction and validation .
898
def filter_significant_motifs ( fname , result , bg , metrics = None ) : sig_motifs = [ ] with open ( fname , "w" ) as f : for motif in result . motifs : stats = result . stats . get ( "%s_%s" % ( motif . id , motif . to_consensus ( ) ) , { } ) . get ( bg , { } ) if _is_significant ( stats , metrics ) : f . write ( "%s...
Filter significant motifs based on several statistics .
899
def best_motif_in_cluster ( single_pwm , clus_pwm , clusters , fg_fa , background , stats = None , metrics = ( "roc_auc" , "recall_at_fdr" ) ) : motifs = read_motifs ( single_pwm ) + read_motifs ( clus_pwm ) motifs = dict ( [ ( str ( m ) , m ) for m in motifs ] ) clustered_motifs = [ ] for clus , singles in clusters : ...
Return the best motif per cluster for a clustering results .