idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
def _get_cmap_data(data, kwargs) -> Tuple[colors.Normalize, np.ndarray]:
    """Get normalized values to be used with a colormap.

    Pops ``cmap_normalize``, ``cmap_min`` and ``cmap_max`` from *kwargs*.
    """
    norm = kwargs.pop("cmap_normalize", None)
    if norm == "log":
        # Logarithmic scale - the lower bound must be a positive value.
        upper = kwargs.pop("cmap_max", data.max())
        lower = kwargs.pop("cmap_min", data[data > 0].min())
        norm = colors.LogNorm(lower, upper)
    elif not norm:
        upper = kwargs.pop("cmap_max", data.max())
        lower = kwargs.pop("cmap_min", 0)
        if lower == "min":
            lower = data.min()
        norm = colors.Normalize(lower, upper, clip=True)
    return norm, norm(data)
60,401 | def _get_alpha_data ( data : np . ndarray , kwargs ) -> Union [ float , np . ndarray ] : alpha = kwargs . pop ( "alpha" , 1 ) if hasattr ( alpha , "__call__" ) : return np . vectorize ( alpha ) ( data ) return alpha | Get alpha values for all data points . |
def _add_values(ax: Axes, h1: Histogram1D, data, *, value_format=lambda x: x, **kwargs):
    """Show values next to each bin in a 1D plot."""
    from .common import get_value_format

    value_format = get_value_format(value_format)
    # Defaults place the text just above the bin, clipped to the axes.
    text_kwargs = {"ha": "center", "va": "bottom", "clip_on": True}
    text_kwargs.update(kwargs)
    for center, value in zip(h1.bin_centers, data):
        ax.text(center, value, str(value_format(value)), **text_kwargs)
def _add_colorbar(ax: Axes, cmap: colors.Colormap, cmap_data: np.ndarray, norm: colors.Normalize):
    """Show a colorbar right of the plot."""
    # A standalone mappable carries the colormap + data range for the bar.
    mappable = cm.ScalarMappable(cmap=cmap, norm=norm)
    mappable.set_array(cmap_data)
    ax.get_figure().colorbar(mappable, ax=ax)
60,404 | def _add_stats_box ( h1 : Histogram1D , ax : Axes , stats : Union [ str , bool ] = "all" ) : if stats in [ "all" , True ] : text = "Total: {0}\nMean: {1:.2f}\nStd.dev: {2:.2f}" . format ( h1 . total , h1 . mean ( ) , h1 . std ( ) ) elif stats == "total" : text = "Total: {0}" . format ( h1 . total ) else : raise ValueError ( "Invalid stats specification" ) ax . text ( 0.05 , 0.95 , text , transform = ax . transAxes , verticalalignment = 'top' , horizontalalignment = 'left' ) | Insert a small legend - like box with statistical information . |
def normal_h1(size: int = 10000, mean: float = 0, sigma: float = 1) -> Histogram1D:
    """A simple 1D histogram with normal distribution."""
    sample = np.random.normal(mean, sigma, (size,))
    return h1(sample, name="normal", axis_name="x", title="1D normal distribution")
def normal_h2(size: int = 10000) -> Histogram2D:
    """A simple 2D histogram with normal distribution."""
    xs = np.random.normal(0, 1, (size,))
    ys = np.random.normal(0, 1, (size,))
    return h2(xs, ys, name="normal", axis_names=tuple("xy"), title="2D normal distribution")
def normal_h3(size: int = 10000) -> HistogramND:
    """A simple 3D histogram with normal distribution."""
    samples = [np.random.normal(0, 1, (size,)) for _ in range(3)]
    return h3(samples, name="normal", axis_names=tuple("xyz"), title="3D normal distribution")
def fist() -> Histogram1D:
    """A simple histogram in the shape of a fist."""
    import numpy as np
    from ..histogram1d import Histogram1D

    # Hand-tuned bin widths and heights that draw the "logo" shape.
    widths = [0, 1.2, 0.2, 1, 0.1, 1, 0.1, 0.9, 0.1, 0.8]
    edges = np.cumsum(widths)
    heights = np.asarray([4, 1, 7.5, 6, 7.6, 6, 7.5, 6, 7.2]) + 5
    return Histogram1D(edges, heights, axis_name="Is this a fist?", title="Physt \"logo\"")
def require_compatible_version(compatible_version, word="File"):
    """Check that compatible version of input data is not too new.

    :param compatible_version: str or Version the data requires
    :param word: noun used in the error message
    :raises VersionError: when this physt is older than required
    """
    if isinstance(compatible_version, str):
        required = parse_version(compatible_version)
    elif isinstance(compatible_version, Version):
        required = compatible_version
    else:
        raise ValueError("Type of `compatible_version` not understood.")
    if parse_version(CURRENT_VERSION) < required:
        raise VersionError("{0} written for version >= {1}, this is {2}.".format(word, str(required), CURRENT_VERSION))
def save_json(histogram: Union[HistogramBase, HistogramCollection], path: Optional[str] = None, **kwargs) -> str:
    """Save histogram to JSON format.

    Returns the JSON text; when *path* is given, also writes it there.
    """
    data = histogram.to_dict()
    data["physt_version"] = CURRENT_VERSION
    # Collections declare a different compatibility floor than histograms.
    if isinstance(histogram, HistogramBase):
        data["physt_compatible"] = COMPATIBLE_VERSION
    elif isinstance(histogram, HistogramCollection):
        data["physt_compatible"] = COLLECTION_COMPATIBLE_VERSION
    else:
        raise TypeError("Cannot save unknown type: {0}".format(type(histogram)))
    text = json.dumps(data, **kwargs)
    if path:
        with open(path, "w", encoding="utf-8") as fp:
            fp.write(text)
    return text
def load_json(path: str, encoding: str = "utf-8") -> HistogramBase:
    """Load histogram from a JSON file."""
    with open(path, "r", encoding=encoding) as fp:
        return parse_json(fp.read())
def parse_json(text: str, encoding: str = "utf-8") -> HistogramBase:
    """Create histogram from a JSON string.

    :param text: already-decoded JSON text
    :param encoding: kept only for backward compatibility; unused.
        (`json.loads` had its `encoding` argument deprecated in 3.1 and
        removed in Python 3.9, so forwarding it raised TypeError.)
    """
    data = json.loads(text)
    return create_from_dict(data, format_name="JSON")
def histogram(data, bins=None, *args, **kwargs):
    """Facade function to create 1D histograms.

    :param data: array-like values, a (name, data) tuple, or None
    :param bins: binning specification forwarded to `calculate_bins`

    Note: the kwargs pops happen in a deliberate order - everything left
    in `kwargs` after the pops is forwarded to `calculate_bins`.
    """
    import numpy as np
    from .histogram1d import Histogram1D, calculate_frequencies
    from .binnings import calculate_bins

    adaptive = kwargs.pop("adaptive", False)
    dtype = kwargs.pop("dtype", None)
    # A (name, data) tuple: recurse with the name set explicitly.
    if isinstance(data, tuple) and isinstance(data[0], str):
        return histogram(data[1], bins, *args, name=data[0], **kwargs)
    elif type(data).__name__ == "DataFrame":
        raise RuntimeError("Cannot create histogram from a pandas DataFrame. Use Series.")
    dropna = kwargs.pop("dropna", True)
    weights = kwargs.pop("weights", None)
    keep_missed = kwargs.pop("keep_missed", True)
    name = kwargs.pop("name", None)
    axis_name = kwargs.pop("axis_name", None)
    title = kwargs.pop("title", None)
    if data is not None:
        array = np.asarray(data)
        if dropna:
            array = array[~np.isnan(array)]
    else:
        array = None
    # NaN checking is only needed when NaNs were not dropped above.
    binning = calculate_bins(array, bins, *args, check_nan=not dropna and array is not None, adaptive=adaptive, **kwargs)
    if array is not None:
        (frequencies, errors2, underflow, overflow, stats) = calculate_frequencies(array, binning=binning, weights=weights, dtype=dtype)
    else:
        # Empty histogram: zeroed contents with the computed binning.
        frequencies = None
        errors2 = None
        underflow = 0
        overflow = 0
        stats = {"sum": 0.0, "sum2": 0.0}
    if not keep_missed:
        underflow = 0
        overflow = 0
    # Try to infer the axis name from pandas Series / structured arrays.
    if not axis_name:
        if hasattr(data, "name"):
            axis_name = data.name
        elif hasattr(data, "fields") and len(data.fields) == 1 and isinstance(data.fields[0], str):
            axis_name = data.fields[0]
    return Histogram1D(binning=binning, frequencies=frequencies, errors2=errors2, overflow=overflow, underflow=underflow, stats=stats, dtype=dtype, keep_missed=keep_missed, name=name, axis_name=axis_name, title=title)
def histogram2d(data1, data2, bins=10, *args, **kwargs):
    """Facade function to create 2D histograms."""
    import numpy as np
    # Inherit axis names from pandas Series when not given explicitly.
    if "axis_names" not in kwargs and hasattr(data1, "name") and hasattr(data2, "name"):
        kwargs["axis_names"] = [data1.name, data2.name]
    if data1 is None or data2 is None:
        data = None
    else:
        columns = [np.asarray(data1)[:, np.newaxis], np.asarray(data2)[:, np.newaxis]]
        data = np.concatenate(columns, axis=1)
    return histogramdd(data, bins, *args, dim=2, **kwargs)
def histogramdd(data, bins=10, *args, **kwargs):
    """Facade function to create n-dimensional histograms.

    :param data: (n, d) array-like or None (then `dim` must be given)
    :param bins: binning specification forwarded to `calculate_bins_nd`
    """
    import numpy as np
    from . import histogram_nd
    from .binnings import calculate_bins_nd

    adaptive = kwargs.pop("adaptive", False)
    dropna = kwargs.pop("dropna", True)
    name = kwargs.pop("name", None)
    title = kwargs.pop("title", None)
    dim = kwargs.pop("dim", None)
    axis_names = kwargs.pop("axis_names", None)

    # `axis_names` was popped just above, so this condition always holds;
    # kept for the side effect of forwarding DataFrame column names.
    if "axis_names" not in kwargs:  # FIX: was `not "axis_names" in kwargs`
        if hasattr(data, "columns"):
            try:
                kwargs["axis_names"] = tuple(data.columns)
            except Exception:  # FIX: was a bare `except:` (masked SystemExit etc.)
                pass
    if data is not None:
        data = np.asarray(data)
        if data.ndim != 2:
            raise RuntimeError("Array must have shape (n, d)")
        if dim is not None and dim != data.shape[1]:
            raise RuntimeError("Dimension mismatch: {0}!={1}".format(dim, data.shape[1]))
        _, dim = data.shape
        if dropna:
            # Drop rows containing any NaN.
            data = data[~np.isnan(data).any(axis=1)]
        check_nan = not dropna
    else:
        if dim is None:
            raise RuntimeError("You have to specify either data or its dimension.")
        data = np.zeros((0, dim))
        check_nan = False
    bin_schemas = calculate_bins_nd(data, bins, *args, check_nan=check_nan, adaptive=adaptive, **kwargs)
    # NOTE(review): weights is popped *after* calculate_bins_nd, so the
    # binning step still sees it in kwargs - preserved as-is.
    weights = kwargs.pop("weights", None)
    frequencies, errors2, missed = histogram_nd.calculate_frequencies(data, ndim=dim, binnings=bin_schemas, weights=weights)
    kwargs["name"] = name
    if title:
        kwargs["title"] = title
    if axis_names:
        kwargs["axis_names"] = axis_names
    if dim == 2:
        return histogram_nd.Histogram2D(binnings=bin_schemas, frequencies=frequencies, errors2=errors2, **kwargs)
    else:
        return histogram_nd.HistogramND(dimension=dim, binnings=bin_schemas, frequencies=frequencies, errors2=errors2, **kwargs)
def h3(data, *args, **kwargs):
    """Facade function to create 3D histograms."""
    import numpy as np
    if data is not None and isinstance(data, (list, tuple)) and not np.isscalar(data[0]):
        # A sequence of columns: gather names and stack into an (n, 3) array.
        if "axis_names" not in kwargs:
            kwargs["axis_names"] = [getattr(column, "name", None) for column in data]
        data = np.concatenate([item[:, np.newaxis] for item in data], axis=1)
    else:
        kwargs["dim"] = 3
    return histogramdd(data, *args, **kwargs)
def collection(data, bins=10, *args, **kwargs):
    """Create histogram collection with shared binnning.

    NOTE(review): *args is accepted but never forwarded - confirm callers
    do not rely on positional extras.
    """
    from physt.histogram_collection import HistogramCollection

    # DataFrames are unpacked into a {column name: column data} mapping.
    if hasattr(data, "columns"):
        data = {column: data[column] for column in data.columns}
    return HistogramCollection.multi_h1(data, bins, **kwargs)
def write_root(histogram: HistogramBase, hfile: uproot.write.TFile.TFileUpdate, name: str):
    """Write histogram to an open ROOT file.

    :param hfile: updateable uproot file object
    :param name: key under which the histogram is stored
    """
    # uproot turns the item assignment into a ROOT object write.
    hfile[name] = histogram
def write(histogram):
    """Convert a histogram to a protobuf message."""
    histogram_dict = histogram.to_dict()
    message = Histogram()
    # Scalar fields map 1:1 between the dict and the message.
    for field in SIMPLE_CONVERSION_FIELDS:
        setattr(message, field, histogram_dict[field])
    # N-dimensional contents are flattened into repeated scalar fields.
    message.frequencies.extend(histogram.frequencies.flatten())
    message.errors2.extend(histogram.errors2.flatten())
    for binning in histogram._binnings:
        binning_message = message.binnings.add()
        for edges in binning.bins:
            limits = binning_message.bins.add()
            limits.lower = edges[0]
            limits.upper = edges[1]
    meta_message = message.meta
    for key in SIMPLE_META_KEYS:
        if key in histogram.meta_data:
            # Meta values are serialized as strings.
            setattr(meta_message, key, str(histogram.meta_data[key]))
    if "axis_names" in histogram.meta_data:
        meta_message.axis_names.extend(histogram.meta_data["axis_names"])
    message.physt_version = CURRENT_VERSION
    message.physt_compatible = COMPATIBLE_VERSION
    return message
def read(message):
    """Convert a parsed protobuf message into a histogram.

    :raises VersionError: when the message requires a newer physt
    """
    require_compatible_version(message.physt_compatible)
    a_dict = _dict_from_v0342(message)
    return create_from_dict(a_dict, "Message")
def make_bin_array(bins) -> np.ndarray:
    """Turn bin data into array understood by HistogramXX classes.

    1D edge arrays become an (n-1, 2) array of [lower, upper] rows;
    (n, 2) arrays pass through unchanged.
    """
    bins = np.asarray(bins)
    if bins.ndim == 1:
        # Pair consecutive edges into rows.
        return np.stack((bins[:-1], bins[1:]), axis=-1)
    if bins.ndim == 2:
        if bins.shape[1] != 2:
            raise RuntimeError("Binning schema with ndim==2 must have 2 columns")
        return bins
    raise RuntimeError("Binning schema must have ndim==1 or ndim==2")
def to_numpy_bins(bins) -> np.ndarray:
    """Convert physt bin format to numpy edges."""
    bins = np.asarray(bins)
    if bins.ndim == 1:
        # Already a plain edge array.
        return bins
    if not is_consecutive(bins):
        raise RuntimeError("Cannot create numpy bins from inconsecutive edges")
    # First lower edge followed by every upper edge.
    return np.concatenate([bins[:1, 0], bins[:, 1]])
def to_numpy_bins_with_mask(bins) -> Tuple[np.ndarray, np.ndarray]:
    """Numpy binning edges including gaps.

    Returns (edges, mask): *mask* holds, for each original bin, the index
    of that bin's lower edge inside *edges*.
    """
    bins = np.asarray(bins)
    if bins.ndim == 1:
        # Consecutive edges already: bins map one-to-one.
        edges = bins
        if bins.shape[0] > 1:
            mask = np.arange(bins.shape[0] - 1)
        else:
            mask = []
    elif bins.ndim == 2:
        edges = []
        mask = []
        j = 0  # index into the edges list being built
        if bins.shape[0] > 0:
            edges.append(bins[0, 0])
        for i in range(bins.shape[0] - 1):
            mask.append(j)
            edges.append(bins[i, 1])
            if bins[i, 1] != bins[i + 1, 0]:
                # Gap between consecutive bins: an extra edge is inserted.
                edges.append(bins[i + 1, 0])
                j += 1
            j += 1
        mask.append(j)
        edges.append(bins[-1, 1])
    else:
        raise RuntimeError("to_numpy_bins_with_mask: array with dim=1 or 2 expected")
    if not np.all(np.diff(edges) > 0):
        raise RuntimeError("to_numpy_bins_with_mask: edges array not monotone.")
    return edges, mask
def is_rising(bins) -> bool:
    """Check whether the bins are in raising order.

    Each bin must have positive width and must not start before the
    previous bin ends.
    """
    bins = make_bin_array(bins)
    degenerate = np.any(bins[:, 0] >= bins[:, 1])
    overlapping = np.any(bins[1:, 0] < bins[:-1, 1])
    return not (degenerate or overlapping)
def get_data(histogram: HistogramBase, density: bool = False, cumulative: bool = False, flatten: bool = False) -> np.ndarray:
    """Get histogram data based on plotting parameters.

    :param density: frequencies normalized to bin sizes / total
    :param cumulative: running sums instead of individual frequencies
    :param flatten: return a 1D array
    """
    if density and cumulative:
        data = (histogram / histogram.total).cumulative_frequencies
    elif density:
        data = histogram.densities
    elif cumulative:
        data = histogram.cumulative_frequencies
    else:
        data = histogram.frequencies
    return data.flatten() if flatten else data
def get_err_data(histogram: HistogramBase, density: bool = False, cumulative: bool = False, flatten: bool = False) -> np.ndarray:
    """Get histogram error data based on plotting parameters.

    :raises RuntimeError: errors are undefined for cumulative plots
    """
    if cumulative:
        raise RuntimeError("Error bars not supported for cumulative plots.")
    data = histogram.errors / histogram.bin_sizes if density else histogram.errors
    return data.flatten() if flatten else data
def get_value_format(value_format: Union[Callable, str] = str) -> Callable[[float], str]:
    """Create a formatting function from a generic value_format argument.

    Strings are interpreted as format specifiers; callables are returned
    unchanged; None means "no formatting".
    """
    if value_format is None:
        value_format = ""
    if isinstance(value_format, str):
        # A bound `str.format` acts as the formatting callable.
        return ("{0:" + value_format + "}").format
    return value_format
def pop_kwargs_with_prefix(prefix: str, kwargs: dict) -> dict:
    """Pop all items from a dictionary that have keys beginning with a prefix.

    Returns a new dict whose keys have the prefix stripped; *kwargs* is
    modified in place.
    """
    result = {}
    for key in list(kwargs):  # snapshot - we mutate while iterating
        if key.startswith(prefix):
            result[key[len(prefix):]] = kwargs.pop(key)
    return result
def bins(self) -> List[np.ndarray]:
    """List of bin matrices (one per dimension)."""
    return [binning.bins for binning in self._binnings]
def select(self, axis: AxisIdentifier, index, force_copy: bool = False) -> HistogramBase:
    """Select in an axis.

    :param index: int (the axis is dropped) or slice (the axis is kept)
    :param force_copy: copy even when the selection changes nothing
    """
    if index == slice(None) and not force_copy:
        # Whole-axis no-op selection: return self unchanged.
        return self
    axis_id = self._get_axis(axis)
    array_index = [slice(None, None, None) for i in range(self.ndim)]
    array_index[axis_id] = index
    frequencies = self._frequencies[tuple(array_index)].copy()
    errors2 = self._errors2[tuple(array_index)].copy()
    if isinstance(index, int):
        # An integer index removes the axis entirely.
        return self._reduce_dimension([ax for ax in range(self.ndim) if ax != axis_id], frequencies, errors2)
    elif isinstance(index, slice):
        if index.step is not None and index.step < 0:
            raise IndexError("Cannot change the order of bins")
        copy = self.copy()
        copy._frequencies = frequencies
        copy._errors2 = errors2
        copy._binnings[axis_id] = self._binnings[axis_id][index]
        return copy
    else:
        raise ValueError("Invalid index.")
def accumulate(self, axis: AxisIdentifier) -> HistogramBase:
    """Calculate cumulative frequencies along a certain axis.

    Returns a copy; self is not modified.
    """
    new_one = self.copy()
    axis_id = self._get_axis(axis)
    # BUG FIX: `_get_axis` returns a plain int; the original passed
    # `axis_id[0]`, which raised TypeError on every call.
    new_one._frequencies = np.cumsum(new_one.frequencies, axis=axis_id)
    return new_one
def T(self) -> "Histogram2D":
    """Histogram with swapped axes (a transposed copy)."""
    transposed = self.copy()
    transposed._binnings = list(reversed(transposed._binnings))
    transposed.axis_names = list(reversed(transposed.axis_names))
    # Transpose contents to match the swapped binnings.
    transposed._frequencies = transposed._frequencies.T
    transposed._errors2 = transposed._errors2.T
    return transposed
def partial_normalize(self, axis: AxisIdentifier = 0, inplace: bool = False):
    """Normalize in rows or columns.

    :param axis: axis along which each slice sums to one
    :param inplace: modify self instead of returning a copy
    """
    axis = self._get_axis(axis)
    if not inplace:
        copy = self.copy()
        copy.partial_normalize(axis, inplace=True)
        return copy
    else:
        # Fractions require float storage.
        self._coerce_dtype(float)
        if axis == 0:
            divisor = self._frequencies.sum(axis=0)
        else:
            divisor = self._frequencies.sum(axis=1)[:, np.newaxis]
        divisor[divisor == 0] = 1  # avoid division by zero in empty slices
        self._frequencies /= divisor
        self._errors2 /= (divisor * divisor)  # errors scale quadratically
        return self
def numpy_binning(data, bins=10, range=None, *args, **kwargs) -> NumpyBinning:
    """Construct binning schema compatible with numpy.histogram

    Note: `range` intentionally shadows the builtin to mirror numpy's API.
    """
    if isinstance(bins, int):
        # Evenly spaced edges over the requested (or data-derived) range.
        start, stop = range if range else (data.min(), data.max())
        bins = np.linspace(start, stop, bins + 1)
    elif np.iterable(bins):
        bins = np.asarray(bins)
    else:
        # Delegate to numpy (e.g. string method names).
        _, bins = np.histogram(data, bins, **kwargs)
    return NumpyBinning(bins)
def human_binning(data=None, bin_count: Optional[int] = None, *, range=None, **kwargs) -> FixedWidthBinning:
    """Construct fixed-width binning schema with bins automatically optimized to human-friendly widths."""
    subscales = np.array([0.5, 1, 2, 2.5, 5, 10])
    if data is None and range is None:
        raise RuntimeError("Cannot guess optimum bin width without data.")
    if bin_count is None:
        bin_count = ideal_bin_count(data)
    min_ = range[0] if range else data.min()
    max_ = range[1] if range else data.max()
    raw_width = (max_ - min_) / bin_count
    power = np.floor(np.log10(raw_width)).astype(int)
    # Pick the human-friendly width closest to the raw width in log space.
    candidates = subscales * (10.0 ** power)
    bin_width = candidates[np.argmin(np.abs(np.log(candidates / raw_width)))]
    return fixed_width_binning(bin_width=bin_width, data=data, range=range, **kwargs)
def quantile_binning(data=None, bins=10, *, qrange=(0.0, 1.0), **kwargs) -> StaticBinning:
    """Binning schema based on quantile ranges.

    :param bins: bin count (evenly spaced percentiles) or explicit percentiles
    :param qrange: quantile interval the binning should cover
    """
    if np.isscalar(bins):
        bins = np.linspace(qrange[0] * 100, qrange[1] * 100, bins + 1)
    edges = np.percentile(data, bins)
    return static_binning(bins=make_bin_array(edges), includes_right_edge=True)
def static_binning(data=None, bins=None, **kwargs) -> StaticBinning:
    """Construct static binning with whatever bins.

    *data* is accepted (and ignored) for API symmetry with the other
    binning constructors.
    """
    return StaticBinning(bins=make_bin_array(bins), **kwargs)
def integer_binning(data=None, **kwargs) -> StaticBinning:
    """Construct fixed-width binning schema with bins centered around integers."""
    if "range" in kwargs:
        # Shift the range half a bin so integers sit at bin centers.
        kwargs["range"] = tuple(r - 0.5 for r in kwargs["range"])
    bin_width = kwargs.pop("bin_width", 1)
    return fixed_width_binning(data=data, bin_width=bin_width, align=True, bin_shift=0.5, **kwargs)
def fixed_width_binning(data=None, bin_width: Union[float, int] = 1, *, range=None, includes_right_edge=False, **kwargs) -> FixedWidthBinning:
    """Construct fixed-width binning schema."""
    result = FixedWidthBinning(bin_width=bin_width, includes_right_edge=includes_right_edge, **kwargs)
    if range:
        result._force_bin_existence(range[0])
        result._force_bin_existence(range[1], includes_right_edge=True)
        if not kwargs.get("adaptive"):
            # Fixed, non-adaptive range: the data cannot widen it.
            return result
    if data is not None and data.shape[0]:
        # Grow the binning to cover the full data range.
        result._force_bin_existence([np.min(data), np.max(data)], includes_right_edge=includes_right_edge)
    return result
def exponential_binning(data=None, bin_count: Optional[int] = None, *, range=None, **kwargs) -> ExponentialBinning:
    """Construct exponential binning schema.

    :param range: (min, max) in linear scale; converted to log10 internally
    """
    if bin_count is None:
        bin_count = ideal_bin_count(data)
    if range:
        log_min, log_max = np.log10(range[0]), np.log10(range[1])
    else:
        log_min, log_max = np.log10(data.min()), np.log10(data.max())
    log_width = (log_max - log_min) / bin_count
    return ExponentialBinning(log_min=log_min, log_width=log_width, bin_count=bin_count, **kwargs)
def calculate_bins(array, _=None, *args, **kwargs) -> BinningBase:
    """Find optimal binning from arguments.

    :param array: data (may be None when `_` fully specifies the binning)
    :param _: binning specification - None, BinningBase, int, method-name
        string, callable, or an iterable of edges
    """
    if array is not None:
        if kwargs.pop("check_nan", True):
            if np.any(np.isnan(array)):
                raise RuntimeError("Cannot calculate bins in presence of NaN's.")
        if kwargs.get("range", None):
            # Restrict data to the requested range.
            array = array[(array >= kwargs["range"][0]) & (array <= kwargs["range"][1])]
    if _ is None:
        bin_count = 10
        binning = numpy_binning(array, bin_count, *args, **kwargs)
    elif isinstance(_, BinningBase):
        # Ready-made binning - used as is.
        binning = _
    elif isinstance(_, int):
        binning = numpy_binning(array, _, *args, **kwargs)
    elif isinstance(_, str):
        # Either a bin-count estimator or a named binning method.
        if _ in bincount_methods:
            bin_count = ideal_bin_count(array, method=_)
            binning = numpy_binning(array, bin_count, *args, **kwargs)
        elif _ in binning_methods:
            method = binning_methods[_]
            binning = method(array, *args, **kwargs)
        else:
            raise RuntimeError("No binning method {0} available.".format(_))
    elif callable(_):
        binning = _(array, *args, **kwargs)
    elif np.iterable(_):
        binning = static_binning(array, _, *args, **kwargs)
    else:
        raise RuntimeError("Binning {0} not understood.".format(_))
    return binning
def ideal_bin_count(data, method: str = "default") -> int:
    """A theoretically ideal bin count.

    :param method: "default", "sqrt", "sturges", "doane" or "rice"
    :raises ValueError: for an unknown method (the original silently
        returned None, which crashed later binning code).
    """
    n = data.size
    if n < 1:
        return 1
    if method == "default":
        # Small samples get a fixed count; larger ones use Sturges' rule.
        if n <= 32:
            return 7
        return ideal_bin_count(data, "sturges")
    if method == "sqrt":
        return int(np.ceil(np.sqrt(n)))
    if method == "sturges":
        return int(np.ceil(np.log2(n)) + 1)
    if method == "doane":
        if n < 3:
            return 1
        from scipy.stats import skew
        # NOTE(review): Doane's sigma is usually
        # sqrt(6*(n-2) / ((n+1)*(n+3))); the expression below lacks the
        # parentheses around the denominator - preserved pending confirmation.
        sigma = np.sqrt(6 * (n - 2) / (n + 1) * (n + 3))
        return int(np.ceil(1 + np.log2(n) + np.log2(1 + np.abs(skew(data)) / sigma)))
    if method == "rice":
        return int(np.ceil(2 * np.power(n, 1 / 3)))
    raise ValueError("Unknown bin count method: {0}".format(method))
def as_binning(obj, copy: bool = False) -> BinningBase:
    """Ensure that an object is a binning

    :param obj: a BinningBase instance or anything `make_bin_array` accepts
    :param copy: force a copy even for BinningBase input
    """
    if isinstance(obj, BinningBase):
        return obj.copy() if copy else obj
    return StaticBinning(make_bin_array(obj))
def to_dict(self) -> OrderedDict:
    """Dictionary representation of the binning schema."""
    result = OrderedDict((
        ("adaptive", self._adaptive),
        ("binning_type", type(self).__name__),
    ))
    # Subclasses append their specific fields.
    self._update_dict(result)
    return result
def is_regular(self, rtol: float = 1.e-5, atol: float = 1.e-8) -> bool:
    """Whether all bins have the same width.

    :param rtol, atol: tolerances forwarded to np.allclose
    """
    # BUG FIX: the original computed `np.diff(self.bins[1] - self.bins[0])`,
    # which only compares the first two bin rows; regularity requires
    # the width (upper - lower edge) of *every* bin to agree.
    widths = self.bins[:, 1] - self.bins[:, 0]
    return np.allclose(np.diff(widths), 0.0, rtol=rtol, atol=atol)
def is_consecutive(self, rtol: float = 1.e-5, atol: float = 1.e-8) -> bool:
    """Whether all bins are in a growing order.

    The result is cached in ``self._consecutive``.
    """
    if not self.inconsecutive_allowed:
        return True
    if self._consecutive is None:
        # FIX: the original also set `self._consecutive = True` when
        # `_numpy_bins` was cached, but immediately overwrote it with the
        # computed value - dead code, removed (behavior unchanged).
        self._consecutive = is_consecutive(self.bins, rtol, atol)
    return self._consecutive
def adapt(self, other: 'BinningBase'):
    """Adapt this binning so that it contains all bins of another binning.

    Returns (None, None) when the binnings already match.
    :raises RuntimeError: when self is not adaptive
    """
    if np.array_equal(self.bins, other.bins):
        return None, None
    if not self.is_adaptive():
        raise RuntimeError("Cannot adapt non-adaptive binning.")
    return self._adapt(other)
def numpy_bins(self) -> np.ndarray:
    """Bins in the numpy format

    Lazily computed and cached in ``self._numpy_bins``.
    """
    cached = self._numpy_bins
    if cached is None:
        cached = self._numpy_bins = to_numpy_bins(self.bins)
    return cached
def numpy_bins_with_mask(self) -> Tuple[np.ndarray, np.ndarray]:
    """Bins in the numpy format including the gaps in inconsecutive binnings."""
    bwm = to_numpy_bins_with_mask(self.bins)
    if not self.includes_right_edge:
        # An open right edge is represented by an extra +inf edge.
        # NOTE(review): `to_numpy_bins_with_mask` returns a plain ndarray
        # (no `.append`) for 1D input - confirm this path only ever sees
        # the list-based 2D result.
        bwm[0].append(np.inf)
    return bwm
def as_static(self, copy: bool = True) -> 'StaticBinning':
    """Convert binning to a static form.

    :param copy: when False, self may be returned unchanged
    """
    if not copy:
        return self
    return StaticBinning(bins=self.bins.copy(), includes_right_edge=self.includes_right_edge)
def histogram1d(data, bins=None, *args, **kwargs):
    """Facade function to create one-dimensional histogram using dask."""
    import dask

    if not hasattr(data, "dask"):
        # Chunk plain arrays so blocks can be histogrammed independently.
        data = dask.array.from_array(data, chunks=int(data.shape[0] / options["chunk_split"]))
    if not kwargs.get("adaptive", True):
        raise RuntimeError("Only adaptive histograms supported for dask (currently).")
    kwargs["adaptive"] = True

    def block_hist(array):
        return original_h1(array, bins, *args, **kwargs)

    return _run_dask(
        name="dask_adaptive1d",
        data=data,
        compute=kwargs.pop("compute", True),
        method=kwargs.pop("dask_method", "threaded"),
        func=block_hist,
    )
def histogram2d(data1, data2, bins=None, *args, **kwargs):
    """Facade function to create 2D histogram using dask."""
    import dask
    if "axis_names" not in kwargs:
        if hasattr(data1, "name") and hasattr(data2, "name"):
            kwargs["axis_names"] = [data1.name, data2.name]
    # BUG FIX: `ndarray.size` is an attribute, not a method - the original
    # `data1.size()` raised TypeError for any non-dask array input; the
    # chunk size must also be an int, matching histogram1d's style.
    if not hasattr(data1, "dask"):
        data1 = dask.array.from_array(data1, chunks=int(data1.size / 100))
    if not hasattr(data2, "dask"):
        data2 = dask.array.from_array(data2, chunks=int(data2.size / 100))
    data = dask.array.stack([data1, data2], axis=1)
    kwargs["dim"] = 2
    return histogramdd(data, bins, *args, **kwargs)
def all_subclasses(cls: type) -> Tuple[type, ...]:
    """All subclasses of a class, recursively (depth-first pre-order)."""
    found = []
    pending = list(cls.__subclasses__())
    while pending:
        subclass = pending.pop(0)
        found.append(subclass)
        # Visit this subclass's own subclasses before its siblings.
        pending = subclass.__subclasses__() + pending
    return tuple(found)
def find_subclass(base: type, name: str) -> type:
    """Find a named subclass of a base class.

    :raises RuntimeError: when there is no match or the name is ambiguous
    """
    candidates = [klass for klass in all_subclasses(base) if klass.__name__ == name]
    if not candidates:
        # FIX: the original formatted the message backwards, producing
        # e.g. 'No "HistogramBase" subclass of "Histogram1D"'.
        raise RuntimeError("No \"{0}\" subclass of \"{1}\".".format(name, base.__name__))
    if len(candidates) > 1:
        raise RuntimeError("Multiple \"{0}\" subclasses of \"{1}\".".format(name, base.__name__))
    return candidates[0]
def add(self, histogram: Histogram1D):
    """Add a histogram to the collection.

    :raises ValueError: when the histogram's binning differs from the
        collection's shared binning
    """
    # `not a == b` (rather than `!=`) is kept: binning classes may only
    # define __eq__.
    if self.binning and not self.binning == histogram.binning:
        raise ValueError("Cannot add histogram with different binning.")
    self.histograms.append(histogram)
def normalize_bins(self, inplace: bool = False) -> "HistogramCollection":
    """Normalize each bin in the collection so that the sum is 1.0 for each bin."""
    target = self if inplace else self.copy()
    # Per-bin totals across all histograms in the collection.
    sums = self.sum().frequencies
    for histogram in target.histograms:
        histogram.set_dtype(float)
        histogram._frequencies /= sums
        histogram._errors2 /= sums ** 2
    return target
def multi_h1(cls, a_dict: Dict[str, Any], bins=None, **kwargs) -> "HistogramCollection":
    """Create a collection from multiple datasets.

    All datasets share one binning computed from their concatenation.
    """
    from physt.binnings import calculate_bins
    # FIX: pop collection-level kwargs *before* computing the binning;
    # the original popped them afterwards, leaking `title`/`name` into
    # `calculate_bins` (and from there into binning constructors).
    title = kwargs.pop("title", None)
    name = kwargs.pop("name", None)
    mega_values = np.concatenate(list(a_dict.values()))
    binning = calculate_bins(mega_values, bins, **kwargs)
    collection = HistogramCollection(binning=binning, title=title, name=name)
    for key, value in a_dict.items():
        collection.create(key, value)
    return collection
def to_json(self, path: Optional[str] = None, **kwargs) -> str:
    """Convert to JSON representation.

    :param path: when given, the JSON is also written to this file
    """
    from .io import save_json
    return save_json(self, path, **kwargs)
60,459 | def _get_axis ( self , name_or_index : AxisIdentifier ) -> int : if isinstance ( name_or_index , int ) : if name_or_index < 0 or name_or_index >= self . ndim : raise ValueError ( "No such axis, must be from 0 to {0}" . format ( self . ndim - 1 ) ) return name_or_index elif isinstance ( name_or_index , str ) : if name_or_index not in self . axis_names : named_axes = [ name for name in self . axis_names if name ] raise ValueError ( "No axis with such name: {0}, available names: {1}. In most places, you can also use numbers." . format ( name_or_index , ", " . join ( named_axes ) ) ) return self . axis_names . index ( name_or_index ) else : raise TypeError ( "Argument of type {0} not understood, int or str expected." . format ( type ( name_or_index ) ) ) | Get a zero - based index of an axis and check its existence . |
def shape(self) -> Tuple[int, ...]:
    """Shape of histogram's data: the bin count along each axis."""
    return tuple(binning.bin_count for binning in self._binnings)
def set_dtype(self, value, check: bool = True):
    """Change data type of the bin contents.

    :param value: new dtype (anything `_eval_dtype` accepts)
    :param check: verify that existing values fit the target type
    :raises RuntimeError: when values cannot be represented in the new type
    """
    value, type_info = self._eval_dtype(value)
    if value == self._dtype:
        return
    if self.dtype is None or np.can_cast(self.dtype, value):
        pass  # Widening cast is always safe - no checks needed.
    elif check:
        if np.issubdtype(value, np.integer):
            if self.dtype.kind == "f":
                # Float -> int is only allowed when no value has a
                # fractional part.
                for array in (self._frequencies, self._errors2):
                    if np.any(array % 1.0):
                        raise RuntimeError("Data contain non-integer values.")
        for array in (self._frequencies, self._errors2):
            if np.any((array > type_info.max) | (array < type_info.min)):
                raise RuntimeError("Data contain values outside the specified range.")
    self._dtype = value
    self._frequencies = self._frequencies.astype(value)
    self._errors2 = self._errors2.astype(value)
    self._missed = self._missed.astype(value)
60,462 | def _coerce_dtype ( self , other_dtype ) : if self . _dtype is None : new_dtype = np . dtype ( other_dtype ) else : new_dtype = np . find_common_type ( [ self . _dtype , np . dtype ( other_dtype ) ] , [ ] ) if new_dtype != self . dtype : self . set_dtype ( new_dtype ) | Possibly change the bin content type to allow correct operations with other operand . |
def normalize(self, inplace: bool = False, percent: bool = False) -> "HistogramBase":
    """Normalize the histogram so that the total weight is equal to 1.

    :param percent: normalize to 100 instead of 1
    """
    if not inplace:
        return self / self.total * (100 if percent else 1)
    # Dividing by total/100 is the in-place equivalent of *100.
    self /= self.total * (.01 if percent else 1)
    return self
60,464 | def _change_binning ( self , new_binning , bin_map : Iterable [ Tuple [ int , int ] ] , axis : int = 0 ) : axis = int ( axis ) if axis < 0 or axis >= self . ndim : raise RuntimeError ( "Axis must be in range 0..(ndim-1)" ) self . _reshape_data ( new_binning . bin_count , bin_map , axis ) self . _binnings [ axis ] = new_binning | Set new binnning and update the bin contents according to a map . |
60,465 | def _reshape_data ( self , new_size , bin_map , axis = 0 ) : if bin_map is None : return else : new_shape = list ( self . shape ) new_shape [ axis ] = new_size new_frequencies = np . zeros ( new_shape , dtype = self . _frequencies . dtype ) new_errors2 = np . zeros ( new_shape , dtype = self . _frequencies . dtype ) self . _apply_bin_map ( old_frequencies = self . _frequencies , new_frequencies = new_frequencies , old_errors2 = self . _errors2 , new_errors2 = new_errors2 , bin_map = bin_map , axis = axis ) self . _frequencies = new_frequencies self . _errors2 = new_errors2 | Reshape data to match new binning schema . |
60,466 | def _apply_bin_map ( self , old_frequencies , new_frequencies , old_errors2 , new_errors2 , bin_map , axis = 0 ) : if old_frequencies is not None and old_frequencies . shape [ axis ] > 0 : if isinstance ( bin_map , int ) : new_index = [ slice ( None ) for i in range ( self . ndim ) ] new_index [ axis ] = slice ( bin_map , bin_map + old_frequencies . shape [ axis ] ) new_frequencies [ tuple ( new_index ) ] += old_frequencies new_errors2 [ tuple ( new_index ) ] += old_errors2 else : for ( old , new ) in bin_map : new_index = [ slice ( None ) for i in range ( self . ndim ) ] new_index [ axis ] = new old_index = [ slice ( None ) for i in range ( self . ndim ) ] old_index [ axis ] = old new_frequencies [ tuple ( new_index ) ] += old_frequencies [ tuple ( old_index ) ] new_errors2 [ tuple ( new_index ) ] += old_errors2 [ tuple ( old_index ) ] | Fill new data arrays using a map . |
60,467 | def has_same_bins ( self , other : "HistogramBase" ) -> bool : if self . shape != other . shape : return False elif self . ndim == 1 : return np . allclose ( self . bins , other . bins ) elif self . ndim > 1 : for i in range ( self . ndim ) : if not np . allclose ( self . bins [ i ] , other . bins [ i ] ) : return False return True | Whether two histograms share the same binning . |
60,468 | def copy ( self , include_frequencies : bool = True ) -> "HistogramBase" : if include_frequencies : frequencies = np . copy ( self . frequencies ) missed = self . _missed . copy ( ) errors2 = np . copy ( self . errors2 ) stats = self . _stats or None else : frequencies = np . zeros_like ( self . _frequencies ) errors2 = np . zeros_like ( self . _errors2 ) missed = np . zeros_like ( self . _missed ) stats = None a_copy = self . __class__ . __new__ ( self . __class__ ) a_copy . _binnings = [ binning . copy ( ) for binning in self . _binnings ] a_copy . _dtype = self . dtype a_copy . _frequencies = frequencies a_copy . _errors2 = errors2 a_copy . _meta_data = self . _meta_data . copy ( ) a_copy . keep_missed = self . keep_missed a_copy . _missed = missed a_copy . _stats = stats return a_copy | Copy the histogram . |
60,469 | def to_dict ( self ) -> OrderedDict : result = OrderedDict ( ) result [ "histogram_type" ] = type ( self ) . __name__ result [ "binnings" ] = [ binning . to_dict ( ) for binning in self . _binnings ] result [ "frequencies" ] = self . frequencies . tolist ( ) result [ "dtype" ] = str ( np . dtype ( self . dtype ) ) result [ "errors2" ] = self . errors2 . tolist ( ) result [ "meta_data" ] = self . _meta_data result [ "missed" ] = self . _missed . tolist ( ) result [ "missed_keep" ] = self . keep_missed self . _update_dict ( result ) return result | Dictionary with all data in the histogram . |
60,470 | def _merge_meta_data ( cls , first : "HistogramBase" , second : "HistogramBase" ) -> dict : keys = set ( first . _meta_data . keys ( ) ) keys = keys . union ( set ( second . _meta_data . keys ( ) ) ) return { key : ( first . _meta_data . get ( key , None ) if first . _meta_data . get ( key , None ) == second . _meta_data . get ( key , None ) else None ) for key in keys } | Merge meta data of two histograms leaving only the equal values . |
60,471 | def mean ( self ) -> Optional [ float ] : if self . _stats : if self . total > 0 : return self . _stats [ "sum" ] / self . total else : return np . nan else : return None | Statistical mean of all values entered into histogram . |
60,472 | def std ( self ) -> Optional [ float ] : if self . _stats : return np . sqrt ( self . variance ( ) ) else : return None | Standard deviation of all values entered into histogram . |
60,473 | def variance ( self ) -> Optional [ float ] : if self . _stats : if self . total > 0 : return ( self . _stats [ "sum2" ] - self . _stats [ "sum" ] ** 2 / self . total ) / self . total else : return np . nan else : return None | Statistical variance of all values entered into histogram . |
60,474 | def find_bin ( self , value ) : ixbin = np . searchsorted ( self . bin_left_edges , value , side = "right" ) if ixbin == 0 : return - 1 elif ixbin == self . bin_count : if value <= self . bin_right_edges [ - 1 ] : return ixbin - 1 else : return self . bin_count elif value < self . bin_right_edges [ ixbin - 1 ] : return ixbin - 1 elif ixbin == self . bin_count : return self . bin_count else : return None | Index of bin corresponding to a value . |
60,475 | def fill ( self , value , weight = 1 ) : self . _coerce_dtype ( type ( weight ) ) if self . _binning . is_adaptive ( ) : map = self . _binning . force_bin_existence ( value ) self . _reshape_data ( self . _binning . bin_count , map ) ixbin = self . find_bin ( value ) if ixbin is None : self . overflow = np . nan self . underflow = np . nan elif ixbin == - 1 and self . keep_missed : self . underflow += weight elif ixbin == self . bin_count and self . keep_missed : self . overflow += weight else : self . _frequencies [ ixbin ] += weight self . _errors2 [ ixbin ] += weight ** 2 if self . _stats : self . _stats [ "sum" ] += weight * value self . _stats [ "sum2" ] += weight * value ** 2 return ixbin | Update histogram with a new value . |
60,476 | def fill_n ( self , values , weights = None , dropna : bool = True ) : values = np . asarray ( values ) if dropna : values = values [ ~ np . isnan ( values ) ] if self . _binning . is_adaptive ( ) : map = self . _binning . force_bin_existence ( values ) self . _reshape_data ( self . _binning . bin_count , map ) if weights : weights = np . asarray ( weights ) self . _coerce_dtype ( weights . dtype ) ( frequencies , errors2 , underflow , overflow , stats ) = calculate_frequencies ( values , self . _binning , dtype = self . dtype , weights = weights , validate_bins = False ) self . _frequencies += frequencies self . _errors2 += errors2 if self . keep_missed : self . underflow += underflow self . overflow += overflow if self . _stats : for key in self . _stats : self . _stats [ key ] += stats . get ( key , 0.0 ) | Update histograms with a set of values . |
60,477 | def to_xarray ( self ) -> "xarray.Dataset" : import xarray as xr data_vars = { "frequencies" : xr . DataArray ( self . frequencies , dims = "bin" ) , "errors2" : xr . DataArray ( self . errors2 , dims = "bin" ) , "bins" : xr . DataArray ( self . bins , dims = ( "bin" , "x01" ) ) } coords = { } attrs = { "underflow" : self . underflow , "overflow" : self . overflow , "inner_missed" : self . inner_missed , "keep_missed" : self . keep_missed } attrs . update ( self . _meta_data ) return xr . Dataset ( data_vars , coords , attrs ) | Convert to xarray . Dataset |
60,478 | def from_xarray ( cls , arr : "xarray.Dataset" ) -> "Histogram1D" : kwargs = { 'frequencies' : arr [ "frequencies" ] , 'binning' : arr [ "bins" ] , 'errors2' : arr [ "errors2" ] , 'overflow' : arr . attrs [ "overflow" ] , 'underflow' : arr . attrs [ "underflow" ] , 'keep_missed' : arr . attrs [ "keep_missed" ] } return cls ( ** kwargs ) | Convert from xarray . Dataset
60,479 | def set_default_backend ( name : str ) : global _default_backend if name == "bokeh" : raise RuntimeError ( "Support for bokeh has been discontinued. At some point, we may return to support holoviews." ) if not name in backends : raise RuntimeError ( "Backend {0} is not supported and cannot be set as default." . format ( name ) ) _default_backend = name | Choose a default backend . |
60,480 | def _get_backend ( name : str = None ) : if not backends : raise RuntimeError ( "No plotting backend available. Please, install matplotlib (preferred) or bokeh (limited)." ) if not name : name = _default_backend if name == "bokeh" : raise RuntimeError ( "Support for bokeh has been discontinued. At some point, we may return to support holoviews." ) backend = backends . get ( name ) if not backend : raise RuntimeError ( "Backend {0} does not exist. Use one of the following: {1}" . format ( name , ", " . join ( backends . keys ( ) ) ) ) return name , backends [ name ] | Get a plotting backend . |
60,481 | def plot ( histogram : HistogramBase , kind : Optional [ str ] = None , backend : Optional [ str ] = None , ** kwargs ) : backend_name , backend = _get_backend ( backend ) if kind is None : kinds = [ t for t in backend . types if histogram . ndim in backend . dims [ t ] ] if not kinds : raise RuntimeError ( "No plot type is supported for {0}" . format ( histogram . __class__ . __name__ ) ) kind = kinds [ 0 ] if kind in backend . types : method = getattr ( backend , kind ) return method ( histogram , ** kwargs ) else : raise RuntimeError ( "Histogram type error: {0} missing in backend {1}" . format ( kind , backend_name ) ) | Universal plotting function . |
60,482 | def enable_inline_view ( f ) : @ wraps ( f ) def wrapper ( hist , write_to = None , write_format = "auto" , display = "auto" , indent = 2 , ** kwargs ) : vega_data = f ( hist , ** kwargs ) if display is True and not VEGA_IPYTHON_PLUGIN_ENABLED : raise RuntimeError ( "Cannot display vega plot: {0}" . format ( VEGA_ERROR ) ) if display == "auto" : display = write_to is None if write_to : write_vega ( vega_data , hist . title , write_to , write_format , indent ) return display_vega ( vega_data , display ) return wrapper | Decorator to enable in - line viewing in Python and saving to external file . |
60,483 | def write_vega ( vega_data , * , title : Optional [ str ] , write_to : str , write_format : str = "auto" , indent : int = 2 ) : spec = json . dumps ( vega_data , indent = indent ) if write_format == "html" or write_format is "auto" and write_to . endswith ( ".html" ) : output = HTML_TEMPLATE . replace ( "{{ title }}" , title or "Histogram" ) . replace ( "{{ spec }}" , spec ) elif write_format == "json" or write_format is "auto" and write_to . endswith ( ".json" ) : output = spec else : raise RuntimeError ( "Format not understood." ) with codecs . open ( write_to , "w" , encoding = "utf-8" ) as out : out . write ( output ) | Write vega dictionary to an external file . |
60,484 | def display_vega ( vega_data : dict , display : bool = True ) -> Union [ 'Vega' , dict ] : if VEGA_IPYTHON_PLUGIN_ENABLED and display : from vega3 import Vega return Vega ( vega_data ) else : return vega_data | Optionally display vega dictionary . |
60,485 | def bar ( h1 : Histogram1D , ** kwargs ) -> dict : vega = _create_figure ( kwargs ) _add_title ( h1 , vega , kwargs ) _create_scales ( h1 , vega , kwargs ) _create_axes ( h1 , vega , kwargs ) data = get_data ( h1 , kwargs . pop ( "density" , None ) , kwargs . pop ( "cumulative" , None ) ) . tolist ( ) lefts = h1 . bin_left_edges . astype ( float ) . tolist ( ) rights = h1 . bin_right_edges . astype ( float ) . tolist ( ) vega [ "data" ] = [ { "name" : "table" , "values" : [ { "x" : lefts [ i ] , "x2" : rights [ i ] , "y" : data [ i ] , } for i in range ( h1 . bin_count ) ] } ] alpha = kwargs . pop ( "alpha" , 1 ) vega [ "marks" ] = [ { "type" : "rect" , "from" : { "data" : "table" } , "encode" : { "enter" : { "x" : { "scale" : "xscale" , "field" : "x" } , "x2" : { "scale" : "xscale" , "field" : "x2" } , "y" : { "scale" : "yscale" , "value" : 0 } , "y2" : { "scale" : "yscale" , "field" : "y" } , "strokeWidth" : { "value" : kwargs . pop ( "lw" , 2 ) } } , "update" : { "fillOpacity" : [ { "value" : alpha } ] } , } } ] _create_tooltips ( h1 , vega , kwargs ) return vega | Bar plot of 1D histogram . |
60,486 | def scatter ( h1 : Histogram1D , ** kwargs ) -> dict : shape = kwargs . pop ( "shape" , DEFAULT_SCATTER_SHAPE ) mark_template = [ { "type" : "symbol" , "from" : { "data" : "series" } , "encode" : { "enter" : { "x" : { "scale" : "xscale" , "field" : "x" } , "y" : { "scale" : "yscale" , "field" : "y" } , "shape" : { "value" : shape } , "fill" : { "scale" : "series" , "field" : "c" } , } , } } ] vega = _scatter_or_line ( h1 , mark_template = mark_template , kwargs = kwargs ) return vega | Scatter plot of 1D histogram values . |
60,487 | def line ( h1 : Histogram1D , ** kwargs ) -> dict : lw = kwargs . pop ( "lw" , DEFAULT_STROKE_WIDTH ) mark_template = [ { "type" : "line" , "encode" : { "enter" : { "x" : { "scale" : "xscale" , "field" : "x" } , "y" : { "scale" : "yscale" , "field" : "y" } , "stroke" : { "scale" : "series" , "field" : "c" } , "strokeWidth" : { "value" : lw } } } , "from" : { "data" : "series" } , } ] vega = _scatter_or_line ( h1 , mark_template = mark_template , kwargs = kwargs ) return vega | Line plot of 1D histogram values . |
60,488 | def _create_figure ( kwargs : Mapping [ str , Any ] ) -> dict : return { "$schema" : "https://vega.github.io/schema/vega/v3.json" , "width" : kwargs . pop ( "width" , DEFAULT_WIDTH ) , "height" : kwargs . pop ( "height" , DEFAULT_HEIGHT ) , "padding" : kwargs . pop ( "padding" , DEFAULT_PADDING ) } | Create basic dictionary object with figure properties . |
60,489 | def _create_scales ( hist : HistogramBase , vega : dict , kwargs : dict ) : if hist . ndim == 1 : bins0 = hist . bins . astype ( float ) else : bins0 = hist . bins [ 0 ] . astype ( float ) xlim = kwargs . pop ( "xlim" , "auto" ) ylim = kwargs . pop ( "ylim" , "auto" ) if xlim is "auto" : nice_x = True else : nice_x = False if ylim is "auto" : nice_y = True else : nice_y = False vega [ "scales" ] = [ { "name" : "xscale" , "type" : "linear" , "range" : "width" , "nice" : nice_x , "zero" : None , "domain" : [ bins0 [ 0 , 0 ] , bins0 [ - 1 , 1 ] ] if xlim == "auto" else [ float ( xlim [ 0 ] ) , float ( xlim [ 1 ] ) ] , } , { "name" : "yscale" , "type" : "linear" , "range" : "height" , "nice" : nice_y , "zero" : True if hist . ndim == 1 else None , "domain" : { "data" : "table" , "field" : "y" } if ylim == "auto" else [ float ( ylim [ 0 ] ) , float ( ylim [ 1 ] ) ] } ] if hist . ndim >= 2 : bins1 = hist . bins [ 1 ] . astype ( float ) vega [ "scales" ] [ 1 ] [ "domain" ] = [ bins1 [ 0 , 0 ] , bins1 [ - 1 , 1 ] ] | Find proper scales for axes . |
60,490 | def _create_axes ( hist : HistogramBase , vega : dict , kwargs : dict ) : xlabel = kwargs . pop ( "xlabel" , hist . axis_names [ 0 ] ) ylabel = kwargs . pop ( "ylabel" , hist . axis_names [ 1 ] if len ( hist . axis_names ) >= 2 else None ) vega [ "axes" ] = [ { "orient" : "bottom" , "scale" : "xscale" , "title" : xlabel } , { "orient" : "left" , "scale" : "yscale" , "title" : ylabel } ] | Create axes in the figure . |
60,491 | def _create_tooltips ( hist : Histogram1D , vega : dict , kwargs : dict ) : if kwargs . pop ( "tooltips" , False ) : vega [ "signals" ] = vega . get ( "signals" , [ ] ) vega [ "signals" ] . append ( { "name" : "tooltip" , "value" : { } , "on" : [ { "events" : "rect:mouseover" , "update" : "datum" } , { "events" : "rect:mouseout" , "update" : "{}" } ] } ) font_size = kwargs . get ( "fontsize" , DEFAULT_FONTSIZE ) vega [ "marks" ] = vega . get ( "marks" , [ ] ) vega [ "marks" ] . append ( { "type" : "text" , "encode" : { "enter" : { "align" : { "value" : "center" } , "baseline" : { "value" : "bottom" } , "fill" : { "value" : "#333" } , "fontSize" : { "value" : font_size } } , "update" : { "x" : { "scale" : "xscale" , "signal" : "(tooltip.x + tooltip.x2) / 2" , "band" : 0.5 } , "y" : { "scale" : "yscale" , "signal" : "tooltip.y" , "offset" : - 2 } , "text" : { "signal" : "tooltip.y" } , "fillOpacity" : [ { "test" : "datum === tooltip" , "value" : 0 } , { "value" : 1 } ] } } } ) | In one - dimensional plots show values above the value on hover . |
60,492 | def _add_title ( hist : HistogramBase , vega : dict , kwargs : dict ) : title = kwargs . pop ( "title" , hist . title ) if title : vega [ "title" ] = { "text" : title } | Display plot title if available . |
60,493 | def _prepare_data ( data , transformed , klass , * args , ** kwargs ) : data = np . asarray ( data ) if not transformed : data = klass . transform ( data ) dropna = kwargs . get ( "dropna" , False ) if dropna : data = data [ ~ np . isnan ( data ) . any ( axis = 1 ) ] return data | Transform data for binning . |
60,494 | def polar_histogram ( xdata , ydata , radial_bins = "numpy" , phi_bins = 16 , transformed = False , * args , ** kwargs ) : dropna = kwargs . pop ( "dropna" , True ) data = np . concatenate ( [ xdata [ : , np . newaxis ] , ydata [ : , np . newaxis ] ] , axis = 1 ) data = _prepare_data ( data , transformed = transformed , klass = PolarHistogram , dropna = dropna ) if isinstance ( phi_bins , int ) : phi_range = ( 0 , 2 * np . pi ) if "phi_range" in "kwargs" : phi_range = kwargs [ "phi_range" ] elif "range" in "kwargs" : phi_range = kwargs [ "range" ] [ 1 ] phi_range = list ( phi_range ) + [ phi_bins + 1 ] phi_bins = np . linspace ( * phi_range ) bin_schemas = binnings . calculate_bins_nd ( data , [ radial_bins , phi_bins ] , * args , check_nan = not dropna , ** kwargs ) weights = kwargs . pop ( "weights" , None ) frequencies , errors2 , missed = histogram_nd . calculate_frequencies ( data , ndim = 2 , binnings = bin_schemas , weights = weights ) return PolarHistogram ( binnings = bin_schemas , frequencies = frequencies , errors2 = errors2 , missed = missed ) | Facade construction function for the PolarHistogram . |
60,495 | def spherical_histogram ( data = None , radial_bins = "numpy" , theta_bins = 16 , phi_bins = 16 , transformed = False , * args , ** kwargs ) : dropna = kwargs . pop ( "dropna" , True ) data = _prepare_data ( data , transformed = transformed , klass = SphericalHistogram , dropna = dropna ) if isinstance ( theta_bins , int ) : theta_range = ( 0 , np . pi ) if "theta_range" in "kwargs" : theta_range = kwargs [ "theta_range" ] elif "range" in "kwargs" : theta_range = kwargs [ "range" ] [ 1 ] theta_range = list ( theta_range ) + [ theta_bins + 1 ] theta_bins = np . linspace ( * theta_range ) if isinstance ( phi_bins , int ) : phi_range = ( 0 , 2 * np . pi ) if "phi_range" in "kwargs" : phi_range = kwargs [ "phi_range" ] elif "range" in "kwargs" : phi_range = kwargs [ "range" ] [ 2 ] phi_range = list ( phi_range ) + [ phi_bins + 1 ] phi_bins = np . linspace ( * phi_range ) bin_schemas = binnings . calculate_bins_nd ( data , [ radial_bins , theta_bins , phi_bins ] , * args , check_nan = not dropna , ** kwargs ) weights = kwargs . pop ( "weights" , None ) frequencies , errors2 , missed = histogram_nd . calculate_frequencies ( data , ndim = 3 , binnings = bin_schemas , weights = weights ) return SphericalHistogram ( binnings = bin_schemas , frequencies = frequencies , errors2 = errors2 , missed = missed ) | Facade construction function for the SphericalHistogram . |
60,496 | def cylindrical_histogram ( data = None , rho_bins = "numpy" , phi_bins = 16 , z_bins = "numpy" , transformed = False , * args , ** kwargs ) : dropna = kwargs . pop ( "dropna" , True ) data = _prepare_data ( data , transformed = transformed , klass = CylindricalHistogram , dropna = dropna ) if isinstance ( phi_bins , int ) : phi_range = ( 0 , 2 * np . pi ) if "phi_range" in "kwargs" : phi_range = kwargs [ "phi_range" ] elif "range" in "kwargs" : phi_range = kwargs [ "range" ] [ 1 ] phi_range = list ( phi_range ) + [ phi_bins + 1 ] phi_bins = np . linspace ( * phi_range ) bin_schemas = binnings . calculate_bins_nd ( data , [ rho_bins , phi_bins , z_bins ] , * args , check_nan = not dropna , ** kwargs ) weights = kwargs . pop ( "weights" , None ) frequencies , errors2 , missed = histogram_nd . calculate_frequencies ( data , ndim = 3 , binnings = bin_schemas , weights = weights ) return CylindricalHistogram ( binnings = bin_schemas , frequencies = frequencies , errors2 = errors2 , missed = missed ) | Facade construction function for the CylindricalHistogram . |
60,497 | def projection ( self , * axes , ** kwargs ) : axes , _ = self . _get_projection_axes ( * axes ) axes = tuple ( sorted ( axes ) ) if axes in self . _projection_class_map : klass = self . _projection_class_map [ axes ] return HistogramND . projection ( self , * axes , type = klass , ** kwargs ) else : return HistogramND . projection ( self , * axes , ** kwargs ) | Projection to lower - dimensional histogram . The inheriting class should implement the _projection_class_map class attribute to suggest class for the projection . If the arguments don t match any of the map keys HistogramND is used . |
60,498 | def enable_collection ( f ) : @ wraps ( f ) def new_f ( h : AbstractHistogram1D , ** kwargs ) : from physt . histogram_collection import HistogramCollection if isinstance ( h , HistogramCollection ) : return f ( h , ** kwargs ) else : return f ( HistogramCollection ( h ) , ** kwargs ) return new_f | Call the wrapped function with a HistogramCollection as argument . |
60,499 | def bar ( h : Histogram2D , * , barmode : str = DEFAULT_BARMODE , alpha : float = DEFAULT_ALPHA , ** kwargs ) : get_data_kwargs = pop_many ( kwargs , "density" , "cumulative" , "flatten" ) data = [ go . Bar ( x = histogram . bin_centers , y = get_data ( histogram , ** get_data_kwargs ) , width = histogram . bin_widths , name = histogram . name , opacity = alpha , ** kwargs ) for histogram in h ] layout = go . Layout ( barmode = barmode ) _add_ticks ( layout . xaxis , h [ 0 ] , kwargs ) figure = go . Figure ( data = data , layout = layout ) return figure | Bar plot . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.