idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
58,000
def iter_ROOT_classes():
    """Iterate over all ROOT classes listed in the online class index.

    Yields the corresponding ``QROOT`` attribute for each class name found;
    classes present in the index but unavailable in this ROOT build are
    silently skipped.
    """
    index_url = "http://root.cern.ch/root/html/ClassIndex.html"
    document = minidom.parse(urlopen(index_url))
    for span in document.getElementsByTagName("span"):
        if ("class", "typename") not in span.attributes.items():
            continue
        name = span.childNodes[0].nodeValue
        try:
            yield getattr(QROOT, name)
        except AttributeError:
            # listed in the index but not provided by this ROOT build
            pass
Iterator over all available ROOT classes
58,001
def CMS_label(text="Preliminary 2012", sqrts=8, pad=None):
    """Add a CMS label (and optional sqrt(s) energy) to a pad.

    Returns the ``(label, energy)`` pair of TLatex objects; ``energy`` is
    ``None`` when ``sqrts`` is falsy. The previously current canvas is
    restored on exit.
    """
    if pad is None:
        pad = ROOT.gPad
    with preserve_current_canvas():
        pad.cd()
        margin_left = pad.GetLeftMargin()
        margin_top = pad.GetTopMargin()
        y = 1 - margin_top / 2.
        label = ROOT.TLatex(margin_left, y, "CMS " + text)
        label.SetTextAlign(12)
        label.SetNDC()
        label.SetTextSize(0.90 * margin_top)
        label.Draw()
        keepalive(pad, label)
        energy = None
        if sqrts:
            margin_right = pad.GetRightMargin()
            # doubled braces produce a literal #sqrt{s} for TLatex
            energy = ROOT.TLatex(1 - margin_right, y,
                                 "#sqrt{{s}}={0:d}TeV".format(sqrts))
            energy.SetTextAlign(32)
            energy.SetNDC()
            energy.SetTextSize(0.90 * margin_top)
            energy.Draw()
            keepalive(pad, energy)
        pad.Modified()
        pad.Update()
    return label, energy
Add a CMS Preliminary style label to the current Pad .
58,002
def make_channel(name, samples, data=None, verbose=False):
    """Create a HistFactory Channel named ``channel_<name>``.

    ``data`` (if given) becomes the channel data, and every entry of
    ``samples`` is added to the channel.
    """
    if verbose:
        llog = log['make_channel']
        llog.info("creating channel {0}".format(name))
    channel = Channel('channel_{0}'.format(name))
    # 5% statistical-error threshold with Poisson constraint terms
    channel.SetStatErrorConfig(0.05, "Poisson")
    if data is not None:
        if verbose:
            llog.info("setting data")
        channel.SetData(data)
    for entry in samples:
        if verbose:
            llog.info("adding sample {0}".format(entry.GetName()))
        channel.AddSample(entry)
    return channel
Create a Channel from a list of Samples
58,003
def make_measurement(name, channels, lumi=1.0, lumi_rel_error=0.1,
                     output_prefix='./histfactory', POI=None,
                     const_params=None, verbose=False):
    """Create a HistFactory Measurement from one or more Channels.

    ``POI`` may be a single parameter name or an iterable of names;
    ``const_params`` lists parameters to hold constant.
    """
    if verbose:
        llog = log['make_measurement']
        llog.info("creating measurement {0}".format(name))
    if not isinstance(channels, (list, tuple)):
        channels = [channels]
    measurement = Measurement('measurement_{0}'.format(name), '')
    measurement.SetOutputFilePrefix(output_prefix)
    if POI is not None:
        if isinstance(POI, string_types):
            if verbose:
                llog.info("setting POI {0}".format(POI))
            measurement.SetPOI(POI)
        else:
            if verbose:
                llog.info("adding POIs {0}".format(', '.join(POI)))
            for poi in POI:
                measurement.AddPOI(poi)
    if verbose:
        llog.info("setting lumi={0:f} +/- {1:f}".format(lumi, lumi_rel_error))
    measurement.lumi = lumi
    measurement.lumi_rel_error = lumi_rel_error
    for channel in channels:
        if verbose:
            llog.info("adding channel {0}".format(channel.GetName()))
        measurement.AddChannel(channel)
    if const_params is not None:
        if verbose:
            llog.info("adding constant parameters {0}".format(
                ', '.join(const_params)))
        for param in const_params:
            measurement.AddConstantParam(param)
    return measurement
Create a Measurement from a list of Channels
58,004
def make_workspace(measurement, channel=None, name=None, silence=False):
    """Build a RooWorkspace for ``measurement``.

    If ``channel`` is given, only that channel's model is built; otherwise
    the combined model over all channels is produced. ROOT's chatter can be
    suppressed with ``silence=True``.
    """
    context = silence_sout_serr if silence else do_nothing
    with context():
        factory = ROOT.RooStats.HistFactory.HistoToWorkspaceFactoryFast(
            measurement)
        if channel is not None:
            workspace = factory.MakeSingleChannelModel(measurement, channel)
        else:
            workspace = factory.MakeCombinedModel(measurement)
        workspace = asrootpy(workspace)
        # tie the measurement's lifetime to the workspace
        keepalive(workspace, measurement)
        if name is not None:
            workspace.SetName('workspace_{0}'.format(name))
        return workspace
Create a workspace containing the model for a measurement
58,005
def measurements_from_xml(filename, collect_histograms=True,
                          cd_parent=False, silence=False):
    """Read a list of Measurements from a HistFactory XML file.

    With ``cd_parent`` the parsing happens from the XML directory's parent
    (useful when the XML contains relative paths). Histograms are collected
    unless ``collect_histograms`` is False.
    """
    if not os.path.isfile(filename):
        raise OSError("the file {0} does not exist".format(filename))
    quiet = silence_sout_serr if silence else do_nothing
    filename = os.path.abspath(os.path.normpath(filename))
    if cd_parent:
        xml_directory = os.path.dirname(filename)
        parent = os.path.abspath(os.path.join(xml_directory, os.pardir))
        chdir_context = working_directory
    else:
        parent = None
        chdir_context = do_nothing
    log.info("parsing XML in {0} ...".format(filename))
    with chdir_context(parent):
        parser = ROOT.RooStats.HistFactory.ConfigParser()
        with quiet():
            measurements_vect = parser.GetMeasurementsFromXML(filename)
        # ROOT must not delete the vector out from under us
        ROOT.SetOwnership(measurements_vect, False)
        measurements = []
        for measurement in measurements_vect:
            if collect_histograms:
                with quiet():
                    measurement.CollectHistograms()
            measurements.append(asrootpy(measurement))
    return measurements
Read in a list of Measurements from XML
58,006
def write_measurement(measurement, root_file=None, xml_path=None,
                      output_path=None, output_suffix=None,
                      write_workspaces=False, apply_xml_patches=True,
                      silence=False):
    """Write a measurement (histograms, XML, and optionally RooWorkspaces).

    The measurement is written into ``root_file`` (created as
    ``ws_<name>.root`` if not given) and its XML into ``xml_path``
    (``xml_<name>`` by default, under ``output_path`` if given).
    With ``write_workspaces``, the combined model and one workspace per
    channel are also written. ``apply_xml_patches`` runs ``patch_xml``
    over the emitted XML files. ``silence`` suppresses ROOT's output.
    """
    context = silence_sout_serr if silence else do_nothing
    output_name = measurement.name
    if output_suffix is not None:
        output_name += '_{0}'.format(output_suffix)
    output_name = output_name.replace(' ', '_')
    if xml_path is None:
        xml_path = 'xml_{0}'.format(output_name)
        if output_path is not None:
            xml_path = os.path.join(output_path, xml_path)
    if not os.path.exists(xml_path):
        mkdir_p(xml_path)
    if root_file is None:
        root_file = 'ws_{0}.root'.format(output_name)
        if output_path is not None:
            root_file = os.path.join(output_path, root_file)
    # If given a path rather than an open file, open (and later close) it here
    own_file = False
    if isinstance(root_file, string_types):
        root_file = root_open(root_file, 'recreate')
        own_file = True
    with preserve_current_directory():
        root_file.cd()
        log.info("writing histograms and measurement in {0} ...".format(
            root_file.GetName()))
        with context():
            measurement.writeToFile(root_file)
        # the file's copy of the measurement is the one that prints XML
        out_m = root_file.Get(measurement.name)
        log.info("writing XML in {0} ...".format(xml_path))
        with context():
            out_m.PrintXML(xml_path)
        if write_workspaces:
            log.info("writing combined model in {0} ...".format(
                root_file.GetName()))
            workspace = make_workspace(measurement, silence=silence)
            workspace.Write()
            for channel in measurement.channels:
                log.info("writing model for channel `{0}` in {1} ...".format(
                    channel.name, root_file.GetName()))
                workspace = make_workspace(
                    measurement, channel=channel, silence=silence)
                workspace.Write()
        if apply_xml_patches:
            # make the XML point at the basename so the output is relocatable
            patch_xml(glob(os.path.join(xml_path, '*.xml')),
                      root_file=os.path.basename(root_file.GetName()))
        if own_file:
            root_file.Close()
Write a measurement and RooWorkspaces for all contained channels into a ROOT file and write the XML files into a directory .
58,007
def patch_xml(files, root_file=None, float_precision=3):
    """Apply cleanup patches to HistFactory XML produced by ``PrintXML``.

    Parameters
    ----------
    files : list of str
        XML file paths to patch in place.
    root_file : str, optional
        If given, rewrite every ``InputFile`` attribute to this value.
    float_precision : int
        Round floating-point attribute values to this many decimal places.

    Raises
    ------
    ValueError
        If ``float_precision`` is negative.
    """
    if float_precision < 0:
        # BUGFIX: message previously claimed "greater than 0" although the
        # check accepts zero.
        raise ValueError("precision must not be negative")

    def fix_path(match):
        # Make <Input> paths relative: keep only "<parent dir>/<basename>"
        path = match.group(1)
        if path:
            head, tail = os.path.split(path)
            new_path = os.path.join(os.path.basename(head), tail)
        else:
            new_path = ''
        return '<Input>{0}</Input>'.format(new_path)

    for xmlfilename in files:
        xmlfilename = os.path.abspath(os.path.normpath(xmlfilename))
        patched_xmlfilename = '{0}.tmp'.format(xmlfilename)
        log.info("patching {0} ...".format(xmlfilename))
        # with-blocks close both handles even if a substitution raises
        with open(xmlfilename, 'r') as fin, \
                open(patched_xmlfilename, 'w') as fout:
            for line in fin:
                if root_file is not None:
                    line = re.sub('InputFile="[^"]*"',
                                  'InputFile="{0}"'.format(root_file), line)
                line = line.replace(
                    '<StatError Activate="True" InputFile="" '
                    'HistoName="" HistoPath="" />',
                    '<StatError Activate="True" />')
                line = re.sub(r'<Combination OutputFilePrefix="(\S*)" >',
                              '<Combination OutputFilePrefix='
                              '"hist2workspace" >',
                              line)
                # drop empty attributes and normalize whitespace
                line = re.sub(r'\w+=""', '', line)
                line = re.sub(r'\s+/>', ' />', line)
                line = re.sub(r'(\S)\s+</', r'\1</', line)
                line = re.sub(r'InputFileHigh="\S+"', '', line)
                line = re.sub(r'InputFileLow="\S+"', '', line)
                line = line.replace(
                    '<ParamSetting Const="True"></ParamSetting>', '')
                # round overly-precise float attributes
                line = re.sub(
                    r'"(\d*\.\d{{{0:d},}})"'.format(float_precision + 1),
                    lambda x: '"{0}"'.format(
                        str(round(float(x.group(1)), float_precision))),
                    line)
                line = re.sub(r'"\s\s+(\S)', r'" \1', line)
                line = re.sub('<Input>(.*)</Input>', fix_path, line)
                fout.write(line)
        shutil.move(patched_xmlfilename, xmlfilename)
        # ensure the DTD is available next to the XML files
        if not os.path.isfile(os.path.join(os.path.dirname(xmlfilename),
                                           'HistFactorySchema.dtd')):
            rootsys = os.getenv('ROOTSYS', None)
            if rootsys is not None:
                dtdfile = os.path.join(rootsys, 'etc/HistFactorySchema.dtd')
                target = os.path.dirname(xmlfilename)
                if os.path.isfile(dtdfile):
                    log.info("copying {0} to {1} ...".format(dtdfile, target))
                    shutil.copy(dtdfile, target)
                else:
                    log.warning("{0} does not exist".format(dtdfile))
            else:
                log.warning("$ROOTSYS is not set so cannot find "
                            "HistFactorySchema.dtd")
Apply patches to HistFactory XML output from PrintXML
58,008
def path(self):
    """Return the path of the wrapped folder as a string."""
    folder = self.dir
    if isinstance(folder, Directory):
        return folder._path
    if isinstance(folder, ROOT.TDirectory):
        return folder.GetPath()
    if isinstance(folder, _FolderView):
        # delegate to the nested view
        return folder.path()
    return str(folder)
Get the path of the wrapped folder
58,009
def Get(self, path):
    """Fetch the object at ``path`` from every subdirectory and merge them."""
    per_dir_objects = (subdir.Get(path) for subdir in self.dirs)
    return self.merge_views(per_dir_objects)
Merge the objects at path in all subdirectories
58,010
def python_logging_error_handler(level, root_says_abort, location, msg):
    """Map ROOT's errors and warnings onto Python's logging machinery.

    Installed as ROOT's error handler; translates ROOT severity levels to
    ``logging`` levels, logs via a child of ``ROOT_log`` named after the
    message location, and optionally aborts or re-raises inside the
    calling frame.
    """
    from ..utils import quickroot as QROOT
    if not Initialized.value:
        try:
            QROOT.kTRUE
        except AttributeError:
            # QROOT not ready yet; nothing we can do for this message
            return
        # touch the severity constants so they are loaded before use
        QROOT.kInfo, QROOT.kWarning, QROOT.kError, QROOT.kFatal, QROOT.kSysError
        QROOT.gErrorIgnoreLevel
        Initialized.value = True
    try:
        QROOT.kTRUE
    except RuntimeError:
        # accessing ROOT itself failed: re-raise inside the caller's frame
        _, exc, traceback = sys.exc_info()
        caller = sys._getframe(2)
        re_execute_with_exception(caller, exc, traceback)
    if level < QROOT.gErrorIgnoreLevel:
        return
    if sys.version_info[0] >= 3:
        # ROOT hands us bytes under Python 3
        location = location.decode('utf-8')
        msg = msg.decode('utf-8')
    log = ROOT_log.getChild(location.replace("::", "."))
    # map ROOT severity onto logging levels
    if level >= QROOT.kSysError or level >= QROOT.kFatal:
        lvl = logging.CRITICAL
    elif level >= QROOT.kError:
        lvl = logging.ERROR
    elif level >= QROOT.kWarning:
        lvl = logging.WARNING
    elif level >= QROOT.kInfo:
        lvl = logging.INFO
    else:
        lvl = logging.DEBUG
    if not SANE_REGEX.match(msg):
        # escape unprintable characters
        msg = repr(msg)[1:-1]
    lvl, msg = fixup_msg(lvl, msg)
    log.log(lvl, msg)
    abort = lvl >= ABORT_LEVEL or "rootpy.ALWAYSABORT" in msg or root_says_abort
    if abort and not "rootpy.NEVERABORT" in msg:
        caller = sys._getframe(1)
        try:
            from .. import ROOTError
            raise ROOTError(level, location, msg)
        except RuntimeError:
            # capture the exception so it can be re-raised in the caller
            _, exc, traceback = sys.exc_info()
        if SHOWTRACE.enabled:
            from traceback import print_stack
            print_stack(caller)
        if DANGER.enabled:
            # re-raise the ROOTError inside the frame that triggered it
            re_execute_with_exception(caller, exc, traceback)
        if root_says_abort:
            log.critical("abort().. expect a stack trace")
            ctypes.CDLL(None).abort()
A Python error handler for ROOT which maps ROOT's errors and warnings onto Python's logging levels.
58,011
def preserve_current_canvas():
    """Context manager that restores ROOT's current canvas on exit."""
    previous = ROOT.gPad
    try:
        yield
    finally:
        if previous:
            previous.cd()
        elif ROOT.gPad:
            # there was no canvas before; neutralize gPad by briefly
            # making an invisible canvas current
            with invisible_canvas():
                pass
Context manager which ensures that the current canvas remains the current canvas when the context is left .
58,012
def preserve_batch_state():
    """Context manager that restores ROOT's batch-mode flag on exit."""
    with LOCK:
        saved = ROOT.gROOT.IsBatch()
        try:
            yield
        finally:
            ROOT.gROOT.SetBatch(saved)
Context manager which ensures the batch state is the same on exit as it was on entry .
58,013
def invisible_canvas():
    """Yield a temporary canvas created in batch mode (invisible).

    The canvas is destroyed and the previous canvas and batch state are
    restored when the context exits.
    """
    with preserve_current_canvas():
        # create the canvas while batch mode is forced on, then restore
        # the batch flag immediately
        with preserve_batch_state():
            ROOT.gROOT.SetBatch()
            canvas = ROOT.TCanvas()
        try:
            canvas.cd()
            yield canvas
        finally:
            canvas.Close()
            canvas.IsA().Destructor(canvas)
Context manager yielding a temporary canvas drawn in batch mode invisible to the user . Original state is restored on exit .
58,014
def thread_specific_tmprootdir():
    """Yield a per-thread temporary gDirectory.

    Keeps concurrent threads from clobbering each other's (or the current
    file's) gDirectory; the previous directory is restored on exit.
    """
    with preserve_current_directory():
        name = "rootpy-tmp/thread/{0}".format(
            threading.current_thread().ident)
        tmpdir = ROOT.gROOT.mkdir(name)
        if not tmpdir:
            # already exists: fetch it instead
            tmpdir = ROOT.gROOT.GetDirectory(name)
            assert tmpdir, "Unexpected failure, can't cd to tmpdir."
        tmpdir.cd()
        yield tmpdir
Context manager which makes a thread specific gDirectory to avoid interfering with the current file .
58,015
def working_directory(path):
    """Temporarily change the working directory to ``path``.

    Generator-based context manager: the previous working directory is
    restored even if the body raises.
    """
    original = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(original)
A context manager that changes the working directory to the given path and then changes it back to its previous value on exit .
58,016
def autobinning(data, method="freedman_diaconis"):
    """Determine ``(nbins, min, max)`` for histogramming ``data``.

    ``method`` names a static function on ``BinningMethods`` (dashes are
    accepted in place of underscores). Raises ValueError for an unknown
    method. Fewer than four data points always yield a single bin.
    """
    attr = method.replace("-", "_")
    try:
        binning_func = getattr(BinningMethods, attr)
        if not isinstance(binning_func, types.FunctionType):
            # attribute exists but is not a binning function
            raise AttributeError
    except AttributeError:
        raise ValueError(
            "`{0}` is not a valid binning method".format(attr))
    low = np.min(data)
    high = np.max(data)
    if len(data) < 4:
        # too few points for any formula to be meaningful
        return 1, low, high
    return int(np.ceil(binning_func(data))), low, high
This method determines the optimal binning for histogramming .
58,017
def all_methods(cls):
    """Return the dash-separated names of all static methods on ``cls``."""
    names = []
    for member in cls.__dict__.values():
        if isinstance(member, staticmethod):
            # __get__ unwraps the staticmethod to the underlying function
            names.append(member.__get__(cls).__name__.replace("_", "-"))
    return sorted(names)
Return the names of all available binning methods
58,018
def doane(data):
    """Doane's formula for the number of histogram bins.

    The modification accounts for the skewness of ``data`` via the
    standard error of the sample skewness estimate.
    """
    from scipy.stats import skew
    count = len(data)
    # standard error of the skewness estimator for `count` samples
    sigma_g1 = np.sqrt(6. * (count - 2.) / (count + 1.) / (count + 3.))
    return 1 + np.log2(count) + np.log2(1 + np.abs(skew(data)) / sigma_g1)
Doane's formula, modified to account for the skewness of the data.
58,019
def lock(path, poll_interval=5, max_age=60):
    """Acquire a file lock, reaping stale locks left by crashed processes.

    Generator context manager yielding the held ``LockFile``. Locks older
    than ``max_age`` seconds are broken; acquisition is retried every
    ``poll_interval`` seconds.

    Raises
    ------
    ValueError
        If ``max_age`` < 30, ``poll_interval`` < 1, or
        ``poll_interval`` >= ``max_age``.
    """
    if max_age < 30:
        raise ValueError("`max_age` must be at least 30 seconds")
    if poll_interval < 1:
        raise ValueError("`poll_interval` must be at least 1 second")
    if poll_interval >= max_age:
        raise ValueError("`poll_interval` must be less than `max_age`")
    proc = '{0:d}@{1}'.format(os.getpid(), platform.node())
    lock = LockFile(path)
    log.debug("{0} attempting to lock {1}".format(proc, path))
    while not lock.i_am_locking():
        if lock.is_locked():
            try:
                age = time.time() - os.stat(lock.lock_file)[stat.ST_MTIME]
                if age > max_age:
                    # holder presumed dead; steal the lock
                    lock.break_lock()
                    log.warning(
                        "{0} broke lock on {1} "
                        "that is {2:d} seconds old".format(
                            proc, path, int(age)))
            except OSError:
                # the lock file vanished between is_locked() and stat()
                pass
            time.sleep(0.5)
        try:
            log.debug(
                "{0} waiting for {1:d} seconds "
                "for lock on {2} to be released".format(
                    proc, poll_interval, path))
            lock.acquire(timeout=float(poll_interval))
        except LockTimeout:
            pass
    log.debug("{0} locked {1}".format(proc, path))
    try:
        yield lock
    finally:
        # BUGFIX: release in a finally block so the lock is never left
        # held when the caller's with-body raises
        lock.release()
        log.debug("{0} released lock on {1}".format(proc, path))
Acquire a file lock in a thread-safe manner that also reaps stale locks possibly left behind by processes that crashed hard.
58,020
def proxy_global(name, no_expand_macro=False, fname='func', args=()):
    """Build a property that wraps a ROOT thread-local global with asrootpy.

    With ``no_expand_macro`` the global is wrapped directly and given a
    ``func()`` accessor returning itself; otherwise the global's ``fname``
    attribute is called with ``args`` to fetch the underlying object.
    """
    if no_expand_macro:
        @property
        def gSomething_no_func(self):
            wrapped = self(getattr(ROOT, name))

            def func():
                return wrapped

            # mimic the func() accessor that callers expect
            wrapped.func = func
            return wrapped

        return gSomething_no_func

    @property
    def gSomething(self):
        getter = getattr(getattr(ROOT, name), fname)
        try:
            raw = getter(*args)
        except ReferenceError:
            # the global no longer exists
            return None
        return self(raw)

    return gSomething
Used to automatically asrootpy ROOT's thread-local variables.
58,021
def AddEntry(self, thing, label=None, style=None):
    """Add ``thing`` (or every histogram in a HistStack) to the legend.

    ``label`` and ``style`` default to each entry's title and
    ``legendstyle`` attribute; entries with ``inlegend`` set to a falsy
    value are skipped.
    """
    entries = thing if isinstance(thing, HistStack) else [thing]
    for entry in entries:
        if not getattr(entry, 'inlegend', True):
            continue
        entry_label = entry.GetTitle() if label is None else label
        entry_style = (getattr(entry, 'legendstyle', 'P')
                       if style is None else style)
        super(Legend, self).AddEntry(entry, entry_label, entry_style)
        # the legend must keep its entries alive
        keepalive(self, entry)
Add an entry to the legend .
58,022
def get_seh():
    """Return a callable that installs a Python function as ROOT's error handler.

    Returns a no-op stand-in on ReadTheDocs or when the mangled
    ``SetErrorHandler`` symbol cannot be found in libCore.
    """
    if ON_RTD:
        return lambda x: x
    handler_sig = ctypes.CFUNCTYPE(
        None, ctypes.c_int, ctypes.c_bool, ctypes.c_char_p, ctypes.c_char_p)
    import ROOT
    dll = get_dll("libCore")
    set_handler = None
    try:
        if dll:
            # mangled C++ symbol for SetErrorHandler(ErrorHandlerFunc_t)
            set_handler = dll._Z15SetErrorHandlerPFvibPKcS0_E
    except AttributeError:
        pass
    if not set_handler:
        log.warning("Couldn't find SetErrorHandler. "
                    "Please submit a rootpy bug report.")
        return lambda x: None
    set_handler.restype = handler_sig
    set_handler.argtypes = handler_sig,

    def _SetErrorHandler(fn):
        log.debug("called SetErrorHandler()")
        callback = handler_sig(fn)
        # keep a reference so the ctypes callback isn't garbage collected
        _keep_alive.append(callback)
        return set_handler(callback)

    return _SetErrorHandler
Makes a function which can be used to set the ROOT error handler with a python function and returns the existing error handler .
58,023
def get_f_code_idx():
    """Return how many pointer-widths into PyFrameObject ``f_code`` lives.

    Scans the first few pointer-sized slots of a live frame for the
    address of its code object.
    """
    frame = sys._getframe()
    base = id(frame)
    SCAN_SLOTS = 20  # enough slots to cover the start of the frame struct
    slot_values = [
        ctypes.c_voidp.from_address(base + i * svp).value
        for i in range(SCAN_SLOTS)]
    code_address = id(frame.f_code)
    try:
        # renamed from the misleading `threadstate_idx`: this is the
        # f_code slot index
        code_idx = slot_values.index(code_address)
    except ValueError:
        log.critical("rootpy bug! Please report this.")
        raise
    return code_idx
How many pointers into PyFrame is the f_code variable?
58,024
def get_frame_pointers(frame=None):
    """Obtain writable ctypes pointers to a frame's trace/lineno/lasti fields.

    Defaults to the caller's caller's frame. Returns
    ``(trace, f_lineno, f_lasti)`` ctypes objects aliasing the
    PyFrameObject's internals.
    """
    if frame is None:
        frame = sys._getframe(2)
    frame = id(frame)
    # f_trace sits F_TRACE_OFFSET pointer-widths past f_code
    F_TRACE_OFFSET = 6
    Ppy_object = ctypes.POINTER(ctypes.py_object)
    trace = Ppy_object.from_address(
        frame + (F_CODE_IDX + F_TRACE_OFFSET) * svp)
    LASTI_OFFSET = F_TRACE_OFFSET + 4
    # NOTE(review): lasti_addr/lineno_addr appear to lack the
    # `frame + (F_CODE_IDX + ...) * svp` base offset used for `trace`;
    # as written these would alias absolute addresses ~10-14 — TODO
    # confirm against the upstream rootpy source.
    lasti_addr = LASTI_OFFSET
    lineno_addr = LASTI_OFFSET + ctypes.sizeof(ctypes.c_int)
    f_lineno = ctypes.c_int.from_address(lineno_addr)
    f_lasti = ctypes.c_int.from_address(lasti_addr)
    return trace, f_lineno, f_lasti
Obtain writable pointers to frame . f_trace and frame . f_lineno .
58,025
def set_linetrace_on_frame(f, localtrace=None):
    """Install (or clear) ``f_trace`` on frame ``f`` by direct memory write.

    Non-portable: pokes the PyFrameObject's trace pointer via ctypes.
    Passing ``localtrace=None`` clears the trace function.
    """
    trace_ptr, _, _ = get_frame_pointers(f)
    if localtrace is None:
        address = 0
    else:
        # manual incref: we write the pointer behind the interpreter's back
        ctypes.pythonapi.Py_IncRef(localtrace)
        address = id(localtrace)
    trace_ptr.contents = ctypes.py_object.from_address(address)
Non - portable function to modify linetracing .
58,026
def re_execute_with_exception(frame, exception, traceback):
    """Dark magic: make ``frame`` raise ``exception`` at its current location.

    Patches the frame's bytecode to jump back to the current statement and
    installs a line-trace hook that raises ``exception`` (with
    ``traceback`` attached) when that statement is re-entered.
    """
    if sys.gettrace() == globaltrace:
        # already re-executing; avoid recursing
        return
    call_lineno = frame.f_lineno

    def intercept_next_line(f, why, *args):
        if f is not frame:
            return
        # undo our hooks before raising
        set_linetrace_on_frame(f)
        back_like_nothing_happened()
        if sys.version_info[0] < 3:
            raise exception
        raise exception.with_traceback(traceback)

    set_linetrace_on_frame(frame, intercept_next_line)
    # first bytecode offset at or after the current line
    linestarts = list(dis.findlinestarts(frame.f_code))
    linestarts = [a for a, l in linestarts if l >= call_lineno]
    dest = linestarts[0]
    oc = frame.f_code.co_code[frame.f_lasti]
    if sys.version_info[0] < 3:
        oc = ord(oc)
    # skip the current instruction (plus its argument, if any)
    opcode_size = 2 if oc >= opcode.HAVE_ARGUMENT else 0
    where = frame.f_lasti + 1 + opcode_size
    pc = PyCodeObject.from_address(id(frame.f_code))
    # inject_jump returns an undo function used by the trace hook above
    back_like_nothing_happened = pc.co_code.contents.inject_jump(where, dest)
    sys.settrace(globaltrace)
Dark magic . Causes frame to raise an exception at the current location with traceback appended to it .
58,027
def _inject_jump(self, where, dest):
    """Monkeypatch bytecode at ``where`` to jump to ``dest``.

    Overwrites three bytes of the code string (via the ``ob_sval`` field
    of this string-struct wrapper) with a JUMP_ABSOLUTE instruction.
    Returns a ``tidy_up`` callable that restores both the original bytes
    and the interpreter's check/switch interval.
    """
    # crank up the check/switch interval so no other thread runs while
    # the bytecode is inconsistent
    if sys.version_info[0] < 3:
        old_check_interval = sys.getcheckinterval()
        sys.setcheckinterval(2 ** 20)
    else:
        old_check_interval = sys.getswitchinterval()
        sys.setswitchinterval(1000)
    pb = ctypes.pointer(self.ob_sval)
    # remember the three bytes we are about to clobber
    orig_bytes = [pb[where + i][0] for i in range(3)]
    # opcode byte + little-endian 16-bit jump target
    v = struct.pack("<BH", opcode.opmap["JUMP_ABSOLUTE"], dest)
    if sys.version_info[0] < 3:
        for i in range(3):
            pb[where + i][0] = ord(v[i])
    else:
        for i in range(3):
            pb[where + i][0] = v[i]

    def tidy_up():
        # restore interpreter settings and the original bytecode
        if sys.version_info[0] < 3:
            sys.setcheckinterval(old_check_interval)
        else:
            sys.setswitchinterval(old_check_interval)
        for i in range(3):
            pb[where + i][0] = orig_bytes[i]

    return tidy_up
Monkeypatch bytecode at where to force it to jump to dest .
58,028
def Draw(self, *args, **kwargs):
    """Draw each subfile's tree in turn and combine the outputs.

    Histogram outputs are summed; graph outputs are appended. Returns the
    combined object (or None if no draw produced output).
    """
    self.reset()
    total = None
    while self._rollover():
        if total is None:
            total = self._tree.Draw(*args, **kwargs)
            if total is not None:
                # detach the clone from any file so it survives rollovers
                total = total.Clone()
                if hasattr(total, 'SetDirectory'):
                    total.SetDirectory(0)
        else:
            partial = self._tree.Draw(*args, **kwargs)
            if partial is not None:
                if isinstance(total, _GraphBase):
                    total.Append(partial)
                else:
                    total += partial
    return total
Loop over subfiles draw each and sum the output into a single histogram .
58,029
def interact_plain(header=UP_LINE, local_ns=None, module=None, dummy=None,
                   stack_depth=1, global_ns=None):
    """Drop into a plain interactive Python console.

    The console namespace is built from ``local_ns``/``global_ns`` when
    given, otherwise from the locals/globals of the frame ``stack_depth``
    levels up. ``module`` and ``dummy`` are accepted for signature
    compatibility and unused.
    """
    frame = sys._getframe(stack_depth)
    variables = {}
    if local_ns is not None:
        variables.update(local_ns)
    else:
        variables.update(frame.f_locals)
    if global_ns is not None:
        # BUGFIX: previously updated from local_ns here, so an explicit
        # global_ns argument was silently ignored
        variables.update(global_ns)
    else:
        variables.update(frame.f_globals)
    shell = code.InteractiveConsole(variables)
    return shell.interact(banner=header)
Create an interactive python console
58,030
def hist(hists, stacked=True, reverse=False, xpadding=0, ypadding=.1,
         yerror_in_padding=True, logy=None, snap=True, axes=None, **kwargs):
    """Make a matplotlib hist plot from a ROOT histogram, stack, or list.

    A single histogram is drawn directly; a collection is drawn either
    stacked (each histogram on top of the sum of those below) or
    overlaid. ``reverse`` flips the draw order. Axis bounds are updated
    via ``_set_bounds`` with the given padding options. Returns the
    matplotlib artists (a list for collections).
    """
    if axes is None:
        axes = plt.gca()
    if logy is None:
        logy = axes.get_yscale() == 'log'
    curr_xlim = axes.get_xlim()
    curr_ylim = axes.get_ylim()
    was_empty = not axes.has_data()
    returns = []
    if isinstance(hists, _Hist):
        # single histogram
        returns = _hist(hists, axes=axes, logy=logy, **kwargs)
        _set_bounds(hists, axes=axes, was_empty=was_empty,
                    prev_xlim=curr_xlim, prev_ylim=curr_ylim,
                    xpadding=xpadding, ypadding=ypadding,
                    yerror_in_padding=yerror_in_padding,
                    snap=snap, logy=logy)
    elif stacked:
        # draw top-down so each histogram sits on the sum of those below
        if not reverse:
            hists = list(hists)[::-1]
        for i, h in enumerate(hists):
            # FIX: removed unused local `kwargs_local = kwargs.copy()`
            if i == len(hists) - 1:
                # bottom of the stack: an empty clone as the baseline
                low = h.Clone()
                low.Reset()
            else:
                low = sum(hists[i + 1:])
            high = h + low
            high.alpha = getattr(h, 'alpha', None)
            proxy = _hist(high, bottom=low, axes=axes, logy=logy, **kwargs)
            returns.append(proxy)
        if not reverse:
            returns = returns[::-1]
        _set_bounds(sum(hists), axes=axes, was_empty=was_empty,
                    prev_xlim=curr_xlim, prev_ylim=curr_ylim,
                    xpadding=xpadding, ypadding=ypadding,
                    yerror_in_padding=yerror_in_padding,
                    snap=snap, logy=logy)
    else:
        for h in _maybe_reversed(hists, reverse):
            returns.append(_hist(h, axes=axes, logy=logy, **kwargs))
        if reverse:
            returns = returns[::-1]
        # bound the axes by the histogram with the largest maximum
        _set_bounds(hists[max(range(len(hists)),
                              key=lambda idx: hists[idx].max())],
                    axes=axes, was_empty=was_empty,
                    prev_xlim=curr_xlim, prev_ylim=curr_ylim,
                    xpadding=xpadding, ypadding=ypadding,
                    yerror_in_padding=yerror_in_padding,
                    snap=snap, logy=logy)
    return returns
Make a matplotlib hist plot from a ROOT histogram stack or list of histograms .
58,031
def errorbar(hists, xerr=True, yerr=True, xpadding=0, ypadding=.1,
             xerror_in_padding=True, yerror_in_padding=True,
             emptybins=True, snap=True, axes=None, **kwargs):
    """Make a matplotlib errorbar plot from a histogram/graph or a list.

    A single histogram or graph is drawn via ``_errorbar`` and the axis
    bounds updated; a collection recurses on each item and returns the
    list of results.
    """
    if axes is None:
        axes = plt.gca()
    prev_xlim = axes.get_xlim()
    prev_ylim = axes.get_ylim()
    was_empty = not axes.has_data()
    if not isinstance(hists, (_Hist, _Graph1DBase)):
        # a collection: recurse on each member with the same options
        return [errorbar(h, xerr=xerr, yerr=yerr, axes=axes,
                         xpadding=xpadding, ypadding=ypadding,
                         xerror_in_padding=xerror_in_padding,
                         yerror_in_padding=yerror_in_padding,
                         snap=snap, emptybins=emptybins, **kwargs)
                for h in hists]
    result = _errorbar(hists, xerr, yerr, axes=axes,
                       emptybins=emptybins, **kwargs)
    _set_bounds(hists, axes=axes, was_empty=was_empty,
                prev_ylim=prev_ylim,
                xpadding=xpadding, ypadding=ypadding,
                xerror_in_padding=xerror_in_padding,
                yerror_in_padding=yerror_in_padding,
                snap=snap)
    return result
Make a matplotlib errorbar plot from a ROOT histogram or graph or list of histograms and graphs .
58,032
def step(h, logy=None, axes=None, **kwargs):
    """Make a matplotlib step plot from a 1D ROOT histogram."""
    if axes is None:
        axes = plt.gca()
    if logy is None:
        logy = axes.get_yscale() == 'log'
    _set_defaults(h, kwargs, ['common', 'line'])
    if kwargs.get('color') is None:
        kwargs['color'] = h.GetLineColor('mpl')
    # append a trailing zero so the final bin edge closes the outline
    heights = np.array(list(h.y()) + [0.])
    if logy:
        # clamp so log-scale axes don't choke on zeros
        np.clip(heights, 1E-300, 1E300, out=heights)
    return axes.step(list(h.xedges()), heights, where='post', **kwargs)
Make a matplotlib step plot from a ROOT histogram .
58,033
def fill_between(a, b, logy=None, axes=None, **kwargs):
    """Fill the region between two compatible 1D histograms.

    Raises TypeError unless both arguments are 1D histograms with
    matching binning.
    """
    if axes is None:
        axes = plt.gca()
    if logy is None:
        logy = axes.get_yscale() == 'log'
    if not isinstance(a, _Hist) or not isinstance(b, _Hist):
        raise TypeError("fill_between only operates on 1D histograms")
    a.check_compatibility(b, check_edges=True)
    xs, highs, lows = [], [], []
    for bin_a, bin_b in zip(a.bins(overflow=False), b.bins(overflow=False)):
        hi = max(bin_a.value, bin_b.value)
        lo = min(bin_a.value, bin_b.value)
        # duplicate each value at both bin edges to get flat segments
        xs += [bin_a.x.low, bin_a.x.high]
        highs += [hi, hi]
        lows += [lo, lo]
    xs = np.array(xs)
    highs = np.array(highs)
    lows = np.array(lows)
    if logy:
        # clamp so log-scale axes don't choke on zeros
        np.clip(highs, 1E-300, 1E300, out=highs)
        np.clip(lows, 1E-300, 1E300, out=lows)
    return axes.fill_between(xs, highs, lows, **kwargs)
Fill the region between two histograms or graphs .
58,034
def hist2d(h, axes=None, colorbar=False, **kwargs):
    """Draw a 2D matplotlib histogram from a 2D ROOT histogram."""
    if axes is None:
        axes = plt.gca()
    # one weighted sample per bin center reproduces the bin contents
    X, Y = np.meshgrid(list(h.x()), list(h.y()))
    weights = np.array(h.z()).T.ravel()
    result = axes.hist2d(X.ravel(), Y.ravel(), weights=weights,
                         bins=(list(h.xedges()), list(h.yedges())),
                         **kwargs)
    if colorbar:
        plt.colorbar(result[-1], ax=axes)
    return result
Draw a 2D matplotlib histogram plot from a 2D ROOT histogram .
58,035
def imshow(h, axes=None, colorbar=False, **kwargs):
    """Draw a matplotlib imshow plot from a 2D ROOT histogram."""
    kwargs.setdefault('aspect', 'auto')
    if axes is None:
        axes = plt.gca()
    content = np.array(h.z()).T
    # span the full outer edges of the histogram's axes
    extent = [h.xedges(1), h.xedges(h.nbins(0) + 1),
              h.yedges(1), h.yedges(h.nbins(1) + 1)]
    image = axes.imshow(content, extent=extent,
                        interpolation='nearest', origin='lower', **kwargs)
    if colorbar:
        plt.colorbar(image, ax=axes)
    return image
Draw a matplotlib imshow plot from a 2D ROOT histogram .
58,036
def contour(h, axes=None, zoom=None, label_contour=False, **kwargs):
    """Draw a matplotlib contour plot from a 2D ROOT histogram.

    ``zoom`` (scalar or per-axis pair) smooths/upsamples the data with
    ``scipy.ndimage.zoom``; ``label_contour`` adds inline labels.
    """
    if axes is None:
        axes = plt.gca()
    xs = np.array(list(h.x()))
    ys = np.array(list(h.y()))
    zs = np.array(h.z()).T
    if zoom is not None:
        from scipy import ndimage
        if hasattr(zoom, '__iter__'):
            zoom = list(zoom)
            xs = ndimage.zoom(xs, zoom[0])
            ys = ndimage.zoom(ys, zoom[1])
        else:
            xs = ndimage.zoom(xs, zoom)
            ys = ndimage.zoom(ys, zoom)
        zs = ndimage.zoom(zs, zoom)
    contours = axes.contour(xs, ys, zs, **kwargs)
    if label_contour:
        plt.clabel(contours)
    return contours
Draw a matplotlib contour plot from a 2D ROOT histogram .
58,037
def _post_init(self):
    """Initialize rootpy state on both new and file-loaded Trees."""
    if not hasattr(self, '_buffer'):
        # only create a buffer if one wasn't provided up front
        self._buffer = TreeBuffer()
    self.read_branches_on_demand = False
    self._branch_cache = {}
    self._current_entry = 0
    self._always_read = []
    self.userdata = UserData()
    # mark initialization complete
    self._inited = True
The standard rootpy _post_init method that is used to initialize both new Trees and Trees retrieved from a File .
58,038
def always_read(self, branches):
    """Always read these branches, even when branch caching is enabled.

    Parameters
    ----------
    branches : list or tuple of str
        Branch names to read on every entry regardless of access.

    Raises
    ------
    TypeError
        If ``branches`` is not a list or tuple.
    """
    # isinstance (rather than exact type comparison) also accepts
    # list/tuple subclasses
    if not isinstance(branches, (list, tuple)):
        raise TypeError("branches must be a list or tuple")
    self._always_read = branches
Always read these branches even when in caching mode . Maybe you have caching enabled and there are branches you want to be updated for each entry even though you never access them directly . This is useful if you are iterating over an input tree and writing to an output tree sharing the same TreeBuffer and you want a direct copy of certain branches . If you have caching enabled but these branches are not specified here and never accessed then they will never be read from disk so the values of branches in memory will remain unchanged .
58,039
def branch_type(cls, branch):
    """Return the type string for a branch (class name or 'Type[len]')."""
    typename = branch.GetClassName()
    if typename:
        # object-valued branch: the class name is the type
        return typename
    leaf = branch.GetListOfLeaves()[0]
    typename = leaf.GetTypeName()
    counter = leaf.GetLeafCount()
    # variable-length arrays report their maximum length
    length = counter.GetMaximum() if counter else leaf.GetLen()
    if length > 1:
        typename = '{0}[{1:d}]'.format(typename, length)
    return typename
Return the string representation for the type of a branch
58,040
def create_buffer(self, ignore_unsupported=False):
    """Build this tree's TreeBuffer from its active, supported branches."""
    types_by_name = OrderedDict()
    for branch in self.iterbranches():
        name = branch.GetName()
        if not self.GetBranchStatus(name):
            # deactivated branch: skip silently
            continue
        if not BaseTree.branch_is_supported(branch):
            log.warning("ignore unsupported branch `{0}`".format(name))
            continue
        types_by_name[name] = Tree.branch_type(branch)
    self.set_buffer(
        TreeBuffer(types_by_name, ignore_unsupported=ignore_unsupported))
Create this tree s TreeBuffer
58,041
def create_branches(self, branches):
    """Create branches from a TreeBuffer or a name -> typename mapping."""
    if isinstance(branches, TreeBuffer):
        buf = branches
    else:
        buf = TreeBuffer(branches)
    self.set_buffer(buf, create_branches=True)
Create branches from a TreeBuffer or dict mapping names to type names
58,042
def update_buffer(self, treebuffer, transfer_objects=False):
    """Merge entries from ``treebuffer`` into this tree's buffer.

    With ``transfer_objects`` the buffer's attached objects are carried
    over as well.
    """
    self._buffer.update(treebuffer)
    if transfer_objects:
        self._buffer.set_objects(treebuffer)
Merge items from a TreeBuffer into this Tree s TreeBuffer
58,043
def set_buffer(self, treebuffer, branches=None, ignore_branches=None,
               create_branches=False, visible=True, ignore_missing=False,
               ignore_duplicates=False, transfer_objects=False):
    """Set the tree buffer, optionally creating the branches."""
    if branches is None:
        branches = treebuffer.keys()
    if ignore_branches is not None:
        branches = [b for b in branches if b not in ignore_branches]
    if create_branches:
        for name in branches:
            value = treebuffer[name]
            if self.has_branch(name):
                if ignore_duplicates:
                    log.warning(
                        "Skipping entry in buffer with the same name "
                        "as an existing branch: `{0}`".format(name))
                    continue
                raise ValueError(
                    "Attempting to create two branches "
                    "with the same name: `{0}`".format(name))
            if isinstance(value, Scalar):
                self.Branch(name, value,
                            '{0}/{1}'.format(name, value.type))
            elif isinstance(value, Array):
                length = value.length_name or len(value)
                self.Branch(name, value,
                            '{0}[{2}]/{1}'.format(name, value.type, length))
            else:
                self.Branch(name, value)
    else:
        for name in branches:
            value = treebuffer[name]
            if self.has_branch(name):
                self.SetBranchAddress(name, value)
            elif not ignore_missing:
                raise ValueError(
                    "Attempting to set address for "
                    "branch `{0}` which does not exist".format(name))
            else:
                log.warning(
                    "Skipping entry in buffer for which no "
                    "corresponding branch in the "
                    "tree exists: `{0}`".format(name))
    if visible:
        # Expose only the requested entries through this tree's buffer.
        newbuffer = TreeBuffer()
        for branch in branches:
            if branch in treebuffer:
                newbuffer[branch] = treebuffer[branch]
        newbuffer.set_objects(treebuffer)
        self.update_buffer(newbuffer, transfer_objects=transfer_objects)
Set the Tree buffer
58,044
def glob(self, patterns, exclude=None):
    """Return branch names matching ``patterns``, excluding any name
    that also matches a pattern in ``exclude``.

    Both arguments may be a string or a list of strings.
    """
    if isinstance(patterns, string_types):
        patterns = [patterns]
    if isinstance(exclude, string_types):
        exclude = [exclude]
    names = list(self.iterbranchnames())
    matches = []
    for pattern in patterns:
        matches.extend(fnmatch.filter(names, pattern))
    if exclude is not None:
        for veto in exclude:
            matches = [m for m in matches
                       if not fnmatch.fnmatch(m, veto)]
    return matches
Return a list of branch names that match pattern . Exclude all matched branch names which also match a pattern in exclude . exclude may be a string or list of strings .
58,045
def CopyTree(self, selection, *args, **kwargs):
    """Copy the tree, accepting a rootpy Cut or a plain string selection."""
    # Cut objects stringify to valid TTree selection expressions.
    return super(BaseTree, self).CopyTree(str(selection), *args, **kwargs)
Copy the tree while supporting a rootpy . tree . cut . Cut selection in addition to a simple string .
58,046
def to_array(self, *args, **kwargs):
    """Convert this tree into a NumPy structured array via root_numpy."""
    # Imported lazily so root_numpy stays an optional dependency.
    from root_numpy import tree2array
    return tree2array(self, *args, **kwargs)
Convert this tree into a NumPy structured array
58,047
def color_key(tkey):
    """Return the TKey name, colorized according to its class."""
    name = tkey.GetName()
    classname = tkey.GetClassName()
    for pattern, color in _COLOR_MATCHER:
        if pattern.match(classname):
            return colored(name, color=color)
    # Unrecognized class: return the name uncolored.
    return name
Function which returns a colorized TKey name given its type
58,048
def cov(m, y=None, rowvar=1, bias=0, ddof=None, weights=None,
        repeat_weights=0):
    """Estimate a covariance matrix given data, optionally weighted.

    ``weights`` are treated as frequency weights when ``repeat_weights``
    is true, otherwise as normalized "reliability" weights.
    """
    import numpy as np
    if ddof is not None and ddof != int(ddof):
        raise ValueError("ddof must be integer")
    X = np.array(m, ndmin=2, dtype=float)
    if X.size == 0:
        # Handle empty input: mirror the (empty) input back.
        return np.array(m)
    if X.shape[0] == 1:
        rowvar = 1
    if rowvar:
        axis = 0
        tup = (slice(None), np.newaxis)
    else:
        axis = 1
        tup = (np.newaxis, slice(None))
    if y is not None:
        # FIX: `copy=False` raises under NumPy >= 2 when a copy is
        # required (dtype conversion); an unconditional copy is safe.
        y = np.array(y, ndmin=2, dtype=float)
        X = np.concatenate((X, y), axis)
    if ddof is None:
        ddof = 1 if bias == 0 else 0
    if weights is not None:
        weights = np.array(weights, dtype=float)
        weights_sum = weights.sum()
        if weights_sum <= 0:
            raise ValueError("sum of weights is non-positive")
        X -= np.average(X, axis=1 - axis, weights=weights)[tup]
        if repeat_weights:
            # Each weight counts as a repeat of its observation.
            fact = weights_sum - ddof
        else:
            # Normalized ("reliability") weights.
            weights /= weights_sum
            fact = (1. - np.power(weights, 2).sum())
    else:
        weights = 1
        X -= X.mean(axis=1 - axis)[tup]
        if rowvar:
            N = X.shape[1]
        else:
            N = X.shape[0]
        fact = float(N - ddof)
    if not rowvar:
        return (np.dot(weights * X.T, X.conj()) / fact).squeeze()
    else:
        return (np.dot(weights * X, X.T.conj()) / fact).squeeze()
Estimate a covariance matrix given data .
58,049
def corrcoef(x, y=None, rowvar=1, bias=0, ddof=None, weights=None,
             repeat_weights=0):
    """Return correlation coefficients from the (weighted) covariance."""
    import numpy as np
    c = cov(x, y, rowvar, bias, ddof, weights, repeat_weights)
    if c.size == 0:
        return c
    try:
        d = np.diag(c)
    except ValueError:
        # 0-d covariance: a single variable is fully self-correlated.
        return 1
    return c / np.sqrt(np.multiply.outer(d, d))
Return correlation coefficients .
58,050
def safe(self, parentheses=True):
    """Return a representation with special characters replaced by
    safer ones for use in file names."""
    if not self:
        return ""
    string = str(self)
    # Order matters: multi-character operators must be replaced before
    # their single-character substrings.
    for token, repl in (("**", "_pow_"), ("*", "_mul_"), ("/", "_div_"),
                        ("==", "_eq_"), ("<=", "_leq_"), (">=", "_geq_"),
                        ("<", "_lt_"), (">", "_gt_"), ("&&", "_and_"),
                        ("||", "_or_"), ("!", "not_")):
        string = string.replace(token, repl)
    if parentheses:
        string = string.replace("(", "L").replace(")", "R")
    else:
        string = string.replace("(", "").replace(")", "")
    return string.replace(" ", "")
Returns a string representation with special characters replaced by safer characters for use in file names .
58,051
def latex(self):
    """Return a string representation for use in LaTeX."""
    if not self:
        return ""
    s = str(self)
    s = s.replace("==", " = ")
    # FIX: use raw strings so the backslashes are literal; "\l" and
    # "\g" are invalid escape sequences and warn on modern Python
    # (the last two replacements were already raw in the original).
    s = s.replace("<=", r" \leq ")
    s = s.replace(">=", r" \geq ")
    s = s.replace("&&", r" \text{ and } ")
    s = s.replace("||", r" \text{ or } ")
    return s
Returns a string representation for use in LaTeX
58,052
def replace(self, name, newname):
    """Replace all occurrences of ``name`` with ``newname``.

    Returns None if either name does not look like an identifier.
    """
    # FIX: raw strings -- "\w" / "\W" are invalid string escapes and
    # warn (eventually error) on modern Python.
    # NOTE(review): these only check that the names *start* like
    # identifiers; a fullmatch would be stricter -- preserved as-is.
    if not re.match(r"[a-zA-Z]\w*", name):
        return None
    if not re.match(r"[a-zA-Z]\w*", newname):
        return None

    def _substitute(match):
        # Replace only the named group, keeping surrounding delimiters.
        return match.group(0).replace(match.group('name'), newname)

    pattern = re.compile(r"(\W|^)(?P<name>" + name + r")(\W|$)")
    cut = re.sub(pattern, _substitute, str(self))
    return Cut(cut)
Replace all occurrences of name with newname
58,053
def save_image(self, image_file):
    """Save the current matplotlib figure to ``image_file``."""
    self.ensure_pyplot()
    command = 'plt.gcf().savefig("%s")' % image_file
    # Jump to the save directory, save, then return -- using IPython
    # bookmarks so the user's working directory is restored afterwards.
    self.process_input_line('bookmark ipy_thisdir', store_history=False)
    self.process_input_line('cd -b ipy_savedir', store_history=False)
    self.process_input_line(command, store_history=False)
    self.process_input_line('cd -b ipy_thisdir', store_history=False)
    self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
    self.clear_cout()
Saves the image file to disk .
58,054
def decorate(self, other=None, **kwargs):
    """Apply style options to a Plottable object and return it."""
    if 'color' in kwargs:
        # `color` is mutually exclusive with the individual colors.
        incompatible = [c for c in ('linecolor', 'fillcolor', 'markercolor')
                        if c in kwargs]
        if incompatible:
            raise ValueError(
                "Setting both the `color` and the `{0}` attribute{1} "
                "is ambiguous. Please set only one.".format(
                    ', '.join(incompatible),
                    's' if len(incompatible) != 1 else ''))
    if other is not None:
        # Start from the other object's decorators, overridden by kwargs.
        decor = other.decorators
        if 'color' in kwargs:
            decor.pop('linecolor', None)
            decor.pop('fillcolor', None)
            decor.pop('markercolor', None)
        decor.update(kwargs)
        kwargs = decor
    for key, value in kwargs.items():
        if key in Plottable.EXTRA_ATTRS_DEPRECATED:
            newkey = Plottable.EXTRA_ATTRS_DEPRECATED[key]
            warnings.warn(
                "`{0}` is deprecated and will be removed in "
                "future versions. Use `{1}` instead".format(
                    key, newkey),
                DeprecationWarning)
            key = newkey
        if key in Plottable.EXTRA_ATTRS:
            setattr(self, key, value)
        elif key == 'markerstyle':
            self.SetMarkerStyle(value)
        elif key == 'markercolor':
            self.SetMarkerColor(value)
        elif key == 'markersize':
            self.SetMarkerSize(value)
        elif key == 'fillcolor':
            self.SetFillColor(value)
        elif key == 'fillstyle':
            self.SetFillStyle(value)
        elif key == 'linecolor':
            self.SetLineColor(value)
        elif key == 'linestyle':
            self.SetLineStyle(value)
        elif key == 'linewidth':
            self.SetLineWidth(value)
        elif key == 'color':
            self.SetColor(value)
        else:
            raise AttributeError(
                "unknown decoration attribute: `{0}`".format(key))
    return self
Apply style options to a Plottable object .
58,055
def getitem(self, index):
    """Direct access by index, bypassing ``self.selection``."""
    if index >= getattr(self.tree, self.size):
        raise IndexError(index)
    if self.__cache_objects and index in self.__cache:
        return self.__cache[index]
    obj = self.tree_object_cls(self.tree, self.name, self.prefix, index)
    if self.__cache_objects:
        self.__cache[index] = obj
    return obj
direct access without going through self . selection
58,056
def configure_defaults():
    """Executed immediately after ROOT's finalSetup."""
    log.debug("configure_defaults()")
    global initialized
    initialized = True
    if use_rootpy_handler:
        set_error_handler(python_logging_error_handler)
    if os.environ.get('ROOTPY_BATCH', False) or IN_NOSETESTS:
        ROOT.gROOT.SetBatch(True)
        log.debug('ROOT is running in batch mode')
    ROOT.gErrorIgnoreLevel = 0
    # Disable CINT's automatic dictionary generation when available.
    this_dll = C.CDLL(None)
    try:
        EnableAutoDictionary = C.c_int.in_dll(
            this_dll, "G__EnableAutoDictionary")
    except ValueError:
        pass
    else:
        EnableAutoDictionary.value = 0
    for init in _initializations:
        init()
This function is executed immediately after ROOT s finalSetup
58,057
def rp_module_level_in_stack():
    """Return True if we're currently inside a rootpy module-level import."""
    from traceback import extract_stack
    from rootpy import _ROOTPY_SOURCE_PATH
    # Files currently executing at module level anywhere in the stack.
    modlevel_files = [filename for filename, _, func, _ in extract_stack()
                      if func == "<module>"]
    return any(path.startswith(_ROOTPY_SOURCE_PATH)
               for path in modlevel_files)
Returns true if we re during a rootpy import
58,058
def monitor_deletion():
    """Return ``(monitor, is_alive)`` helpers for checking correct
    deletion of weakref-able objects."""
    monitors = {}

    def make_callback(key):
        # Drop the entry as soon as the referent is garbage collected.
        def callback(weakref):
            del monitors[key]
        return callback

    def monitor(item, name):
        monitors[name] = ref(item, make_callback(name))

    def is_alive(name):
        return monitors.get(name, None) is not None

    return monitor, is_alive
Function for checking for correct deletion of weakref - able objects .
58,059
def canvases_with(drawable):
    """Return all canvases on which ``drawable`` has been painted."""
    return [canvas for canvas in ROOT.gROOT.GetListOfCanvases()
            if drawable in find_all_primitives(canvas)]
Return a list of all canvases where drawable has been painted .
58,060
def tick_length_pixels(pad, xaxis, yaxis, xlength, ylength=None):
    """Set the axes' tick lengths in pixels."""
    if ylength is None:
        ylength = xlength
    # ROOT tick lengths are fractions of the pad size along the
    # perpendicular direction.
    xaxis.SetTickLength(xlength / float(pad.height_pixels))
    yaxis.SetTickLength(ylength / float(pad.width_pixels))
Set the axes tick lengths in pixels
58,061
def reset(self):
    """Reset every element to the default value, if resetable."""
    if self.resetable:
        for index in range(len(self)):
            self[index] = self.default
Reset the value to the default
58,062
def minimize(func, minimizer_type=None, minimizer_algo=None, strategy=None,
             retry=0, scan=False, print_level=None):
    """Minimize a RooAbsReal function.

    On failure, retries up to ``retry`` times, bumping the strategy up
    to 2.  Returns the Minimizer.
    """
    llog = log['minimize']
    min_opts = ROOT.Math.MinimizerOptions
    if minimizer_type is None:
        minimizer_type = min_opts.DefaultMinimizerType()
    if minimizer_algo is None:
        minimizer_algo = min_opts.DefaultMinimizerAlgo()
    if strategy is None:
        strategy = min_opts.DefaultStrategy()
    if print_level is None:
        print_level = min_opts.DefaultPrintLevel()
    if print_level < 0:
        # Silence RooFit for the duration of the minimization.
        msg_service = ROOT.RooMsgService.instance()
        msg_level = msg_service.globalKillBelow()
        msg_service.setGlobalKillBelow(ROOT.RooFit.FATAL)
    minim = Minimizer(func)
    minim.setPrintLevel(print_level)
    minim.setStrategy(strategy)
    if scan:
        llog.info("running scan algorithm ...")
        minim.minimize('Minuit2', 'Scan')
    llog.info("minimizing with {0} {1} using strategy {2}".format(
        minimizer_type, minimizer_algo, strategy))
    status = minim.minimize(minimizer_type, minimizer_algo)
    iretry = 0
    while iretry < retry and status not in (0, 1):
        # FIX: count the retry -- the original never incremented
        # `iretry`, looping forever when convergence kept failing.
        iretry += 1
        if strategy < 2:
            strategy += 1
            minim.setStrategy(strategy)
        llog.warning("minimization failed with status {0:d}".format(status))
        llog.info("retrying minimization with strategy {0:d}".format(
            strategy))
        status = minim.minimize(minimizer_type, minimizer_algo)
    if status in (0, 1):
        llog.info("found minimum")
    else:
        llog.warning("minimization failed with status {0:d}".format(status))
    if print_level < 0:
        msg_service.setGlobalKillBelow(msg_level)
    return minim
Minimize a RooAbsReal function
58,063
def make_string(obj):
    """Return ``obj`` if it is a string, otherwise the name of the type."""
    if inspect.isclass(obj):
        if issubclass(obj, Object):
            # rootpy wrappers report their underlying ROOT class name.
            return obj._ROOT.__name__
        if issubclass(obj, string_types):
            return 'string'
        return obj.__name__
    if not isinstance(obj, string_types):
        raise TypeError("expected string or class")
    return obj
If obj is a string return that otherwise attempt to figure out the name of a type .
58,064
def generate(declaration, headers=None, has_iterators=False):
    """Compile and load the reflection dictionary for a type."""
    global NEW_DICTS
    log.debug("requesting dictionary for {0}".format(declaration))
    if headers:
        if isinstance(headers, string_types):
            headers = sorted(headers.split(';'))
        log.debug("using the headers {0}".format(', '.join(headers)))
        unique_name = ';'.join([declaration] + headers)
    else:
        unique_name = declaration
    unique_name = unique_name.replace(' ', '')
    if unique_name in LOADED_DICTS:
        log.debug("dictionary for {0} is already loaded".format(declaration))
        return
    # The library name is a hash of the declaration plus headers.
    if sys.version_info[0] < 3:
        libname = hashlib.sha512(unique_name).hexdigest()[:16]
    else:
        libname = hashlib.sha512(
            unique_name.encode('utf-8')).hexdigest()[:16]
    libnameso = libname + ".so"
    if ROOT.gROOT.GetVersionInt() < 53403:
        # Workaround for older ROOT: drop any stale, unloaded class.
        cls = ROOT.gROOT.GetClass(declaration)
        if cls and not cls.IsLoaded():
            log.debug("removing {0} from gROOT.GetListOfClasses()".format(
                declaration))
            ROOT.gROOT.GetListOfClasses().Remove(cls)
    if exists(pjoin(DICTS_PATH, libnameso)):
        # Reuse a dictionary generated by a previous session.
        log.debug("loading previously generated dictionary for {0}".format(
            declaration))
        if (ROOT.gInterpreter.Load(pjoin(DICTS_PATH, libnameso))
                not in (0, 1)):
            raise RuntimeError(
                "failed to load the library for '{0}' @ {1}".format(
                    declaration, libname))
        LOADED_DICTS[unique_name] = None
        return
    # Serialize generation across processes sharing DICTS_PATH.
    with lock(pjoin(DICTS_PATH, "lock"), poll_interval=5, max_age=60):
        log.info("generating dictionary for {0} ...".format(declaration))
        includes = ''
        if headers is not None:
            for header in headers:
                if re.match('^<.+>$', header):
                    includes += '#include {0}\n'.format(header)
                else:
                    includes += '#include "{0}"\n'.format(header)
        source = LINKDEF % locals()
        sourcepath = os.path.join(DICTS_PATH, '{0}.C'.format(libname))
        log.debug("source path: {0}".format(sourcepath))
        with open(sourcepath, 'w') as sourcefile:
            sourcefile.write(source)
        log.debug("include path: {0}".format(
            ROOT.gSystem.GetIncludePath()))
        if (ROOT.gSystem.CompileMacro(
                sourcepath, 'k-', libname, DICTS_PATH) != 1):
            raise RuntimeError(
                "failed to compile the library for '{0}'".format(sourcepath))
        LOADED_DICTS[unique_name] = None
        NEW_DICTS = True
Compile and load the reflection dictionary for a type .
58,065
def ensure_built(self, headers=None):
    """Make sure that a dictionary exists for this (template) type."""
    if not self.params:
        # Non-template types need no generated dictionary.
        return
    # Build dictionaries for the template parameters first.
    for child in self.params:
        child.ensure_built(headers=headers)
    if headers is None:
        headers = self.guess_headers
    generate(str(self), headers,
             has_iterators=self.name in HAS_ITERATORS)
Make sure that a dictionary exists for this type .
58,066
def guess_headers(self):
    """Guess what headers may be required to use this type, including
    the guesses of all children, recursively."""
    name = self.name.replace("*", "")
    headers = []
    if name in KNOWN_TYPES:
        headers.append(KNOWN_TYPES[name])
    elif name in STL:
        headers.append('<{0}>'.format(name))
    elif hasattr(ROOT, name) and name.startswith("T"):
        headers.append('<{0}.h>'.format(name))
    elif '::' in name:
        headers.append('<{0}.h>'.format(name.replace('::', '/')))
    elif name == 'allocator':
        headers.append('<memory>')
    else:
        try:
            # Basic C++ types need no header at all.
            CPPGrammar.BASIC_TYPE.parseString(name, parseAll=True)
        except ParseException:
            log.warning(
                "unable to guess headers required for {0}".format(name))
    if self.params:
        for child in self.params:
            headers.extend(child.guess_headers)
    # De-duplicate while discarding order.
    return list(set(headers))
Attempt to guess what headers may be required in order to use this type . Returns guess_headers of all children recursively .
58,067
def cls(self):
    """Return the class definition for this type."""
    params = ", ".join(str(p) for p in self.params)
    return SmartTemplate(self.name)(params)
Return the class definition for this type
58,068
def from_string(cls, string):
    """Parse ``string`` into a CPPType instance."""
    cls.TYPE.setParseAction(cls.make)
    try:
        return cls.TYPE.parseString(string, parseAll=True)[0]
    except ParseException:
        # Log before re-raising so the offending input is recorded.
        log.error("Failed to parse '{0}'".format(string))
        raise
Parse string into a CPPType instance
58,069
def callback(cfunc):
    """Turn a ctypes CFUNCTYPE instance into a value which can be
    passed into PyROOT."""
    # Reinterpret the function pointer as a plain void pointer value.
    address = C.cast(cfunc, C.c_voidp).value
    return C.c_voidp.from_address(address)
Turn a ctypes CFUNCTYPE instance into a value which can be passed into PyROOT
58,070
def objectproxy_realaddress(obj):
    """Obtain the real address of an ObjectProxy as an integer."""
    voidp = QROOT.TPython.ObjectProxy_AsVoidPtr(obj)
    return C.addressof(C.c_char.from_buffer(voidp))
Obtain a real address as an integer from an objectproxy .
58,071
def set_style(style, mpl=False, **kwargs):
    """Set the current plotting style.

    If ``mpl`` is False, accept a style name or a TStyle instance.
    If ``mpl`` is True, accept a style name or a matplotlib
    rcParams-like dictionary.
    """
    if mpl:
        import matplotlib as mpl
        if isinstance(style, string_types):
            style_dictionary = get_style(style, mpl=True, **kwargs)
            log.info("using matplotlib style '{0}'".format(style))
        elif isinstance(style, dict):
            style_dictionary = style
            log.info("using user-defined matplotlib style")
        else:
            raise TypeError("style must be a matplotlib style name or dict")
        for key, value in style_dictionary.items():
            mpl.rcParams[key] = value
    else:
        if isinstance(style, string_types):
            style = get_style(style, **kwargs)
        log.info("using ROOT style '{0}'".format(style.GetName()))
        style.cd()
If mpl is False accept either style name or a TStyle instance . If mpl is True accept either style name or a matplotlib . rcParams - like dictionary
58,072
def cd_previous(self):
    """cd to the gDirectory that was current before this file was
    opened.  Returns True if the cd succeeded."""
    if self._prev_dir is None or isinstance(self._prev_dir, ROOT.TROOT):
        return False
    if isinstance(self._prev_dir, ROOT.TFile):
        if self._prev_dir.IsOpen() and self._prev_dir.IsWritable():
            self._prev_dir.cd()
            return True
        return False
    if not self._prev_dir.IsWritable():
        return False
    # A plain TDirectory: its owning file must still be open.
    prev_file = self._prev_dir.GetFile()
    if prev_file and prev_file.IsOpen():
        self._prev_dir.cd()
        return True
    return False
cd to the gDirectory before this file was open .
58,073
def Close(self, *args):
    """Like ROOT's Close, but revert to the gDirectory that was
    current before this file was opened."""
    super(_DirectoryBase, self).Close(*args)
    return self.cd_previous()
Like ROOT s Close but reverts to the gDirectory before this file was opened .
58,074
def keys(self, latest=False):
    """Return the keys in this directory.

    With ``latest=True`` only the highest cycle of each name is kept.
    """
    if latest:
        newest = {}
        for key in self.keys():
            name = key.GetName()
            # Keep only the key with the highest cycle for each name.
            if (name not in newest
                    or key.GetCycle() > newest[name].GetCycle()):
                newest[name] = key
        return newest.values()
    return [asrootpy(key) for key in self.GetListOfKeys()]
Return a list of the keys in this directory .
58,075
def Get(self, path, rootpy=True, **kwargs):
    """Return the object at ``path``, cast to its corresponding rootpy
    subclass when ``rootpy=True``.

    Raises DoesNotExist if nothing is found at ``path``.
    """
    thing = super(_DirectoryBase, self).Get(path)
    if not thing:
        # FIX: include the path so the error is actionable (the
        # original raised a bare DoesNotExist with no context).
        raise DoesNotExist(path)
    # Tie the object's lifetime to this directory's.
    keepalive(thing, self)
    if rootpy:
        return asrootpy(thing, **kwargs)
    return thing
Return the requested object cast as its corresponding subclass in rootpy if one exists and rootpy = True otherwise return the unadulterated TObject .
58,076
def GetKey(self, path, cycle=9999, rootpy=True, **kwargs):
    """Return the TKey at ``path`` (at ``cycle``), cast via rootpy
    when requested.

    Raises DoesNotExist if there is no such key.
    """
    key = super(_DirectoryBase, self).GetKey(path, cycle)
    if not key:
        # FIX: include the path so the error is actionable (the
        # original raised a bare DoesNotExist with no context).
        raise DoesNotExist(path)
    if rootpy:
        return asrootpy(key, **kwargs)
    return key
Override TDirectory s GetKey and also handle accessing keys nested arbitrarily deep in subdirectories .
58,077
def mkdir(self, path, title="", recurse=False):
    """Make a new directory; with ``recurse=True`` create parent
    directories as required.  Returns the new TDirectory."""
    head, tail = os.path.split(os.path.normpath(path))
    if tail == "":
        raise ValueError("invalid directory name: {0}".format(path))
    with preserve_current_directory():
        dest = self
        if recurse:
            # Walk/create each parent component in turn.
            for parent in head.split(os.path.sep):
                try:
                    dest = dest.GetDirectory(parent)
                except DoesNotExist:
                    dest = dest.mkdir(parent)
        elif head != "":
            dest = dest.GetDirectory(head)
        if tail in dest:
            raise ValueError("{0} already exists".format(path))
        return asrootpy(super(_DirectoryBase, dest).mkdir(tail, title))
Make a new directory . If recurse is True create parent directories as required . Return the newly created TDirectory .
58,078
def rm(self, path, cycle=';*'):
    """Delete the object at ``path`` relative to this directory."""
    with preserve_current_directory():
        dirname, objname = os.path.split(os.path.normpath(path))
        target = self.Get(dirname) if dirname else self
        target.Delete(objname + cycle)
Delete an object at path relative to this directory
58,079
def copytree(self, dest_dir, src=None, newname=None, exclude=None,
             overwrite=False):
    """Copy this directory (or one contained object ``src``) into
    ``dest_dir``, optionally renaming it to ``newname``."""

    def copy_object(obj, dest, name=None):
        # Write a single object into `dest` under `name`.
        if name is None:
            name = obj.GetName()
        if not overwrite and name in dest:
            raise ValueError(
                "{0} already exists in {1} and `overwrite=False`".format(
                    name, dest._path))
        dest.cd()
        if isinstance(obj, ROOT.R.TTree):
            # Trees must be cloned so their baskets are copied.
            new_obj = obj.CloneTree(-1, "fast")
            new_obj.Write(name, ROOT.R.TObject.kOverwrite)
        else:
            obj.Write(name, ROOT.R.TObject.kOverwrite)

    with preserve_current_directory():
        if isinstance(src, string_types):
            src = asrootpy(self.Get(src))
        else:
            src = self
        if isinstance(dest_dir, string_types):
            try:
                dest_dir = asrootpy(self.GetDirectory(dest_dir))
            except DoesNotExist:
                dest_dir = self.mkdir(dest_dir)
        if isinstance(src, ROOT.R.TDirectory):
            cp_name = newname if newname is not None else src.GetName()
            if cp_name not in dest_dir:
                new_dir = dest_dir.mkdir(cp_name)
            else:
                new_dir = dest_dir.get(cp_name)
            # Copy the direct contents, then recurse into subdirectories.
            for (path, dirnames, objects) in src.walk(maxdepth=0):
                for object_name in objects:
                    if exclude and exclude(path, object_name):
                        continue
                    copy_object(src.Get(object_name), new_dir)
                for dirname in dirnames:
                    if exclude and exclude(path, dirname):
                        continue
                    src.GetDirectory(dirname).copytree(
                        new_dir, exclude=exclude, overwrite=overwrite)
        else:
            copy_object(src, dest_dir, name=newname)
Copy this directory or just one contained object into another directory .
58,080
def find(self, regexp, negate_regexp=False, class_pattern=None,
         find_fnc=re.search, refresh_cache=False):
    """Yield ``(full path, match)`` pairs for entries matching
    ``regexp`` (or not matching, when ``negate_regexp`` is True)."""
    if refresh_cache or not hasattr(self, 'cache'):
        self._populate_cache()
    node = self.cache
    parts = regexp.split('/')
    if parts[0] == '':
        # Anchored pattern: descend while the literal components exist.
        for part in parts:
            if part in node:
                node = node[part]
            else:
                break
    else:
        node = node['']
    for path, (obj, classname) in node['obj']:
        if class_pattern and not fnmatch(classname, class_pattern):
            continue
        joined_path = os.path.join(*['/', path, obj])
        result = find_fnc(regexp, joined_path)
        # XOR with negate_regexp to optionally invert the match test.
        if (result is not None) ^ negate_regexp:
            yield joined_path, result
yield the full path of the matching regular expression and the match itself
58,081
def start_new_gui_thread():
    """Attempt to start a new GUI thread, if possible."""
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    if PyGUIThread is not None:
        assert not PyGUIThread.isAlive(), "GUI thread already running!"
    assert _processRootEvents, (
        "GUI thread wasn't started when rootwait was imported, "
        "so it can't be restarted")
    ROOT.keeppolling = 1
    ROOT.PyGUIThread = threading.Thread(
        None, _processRootEvents, None, (ROOT,))
    ROOT.PyGUIThread.finishSchedule = _finishSchedule
    ROOT.PyGUIThread.setDaemon(1)
    ROOT.PyGUIThread.start()
    log.debug("successfully started a new GUI thread")
Attempt to start a new GUI thread if possible .
58,082
def stop_gui_thread():
    """Try to stop the GUI thread.  Return True if one was running,
    otherwise False."""
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    if PyGUIThread is None or not PyGUIThread.isAlive():
        log.debug("no existing GUI thread is runnng")
        return False
    ROOT.keeppolling = 0
    try:
        PyGUIThread.finishSchedule()
    except AttributeError:
        # finishSchedule is only present on threads we started ourselves.
        log.debug("unable to call finishSchedule() on PyGUIThread")
    PyGUIThread.join()
    log.debug("successfully stopped the existing GUI thread")
    return True
Try to stop the GUI thread . If it was running returns True otherwise False .
58,083
def wait_for_zero_canvases(middle_mouse_close=False):
    """Wait until all canvases are closed, or CTRL-c."""
    if not __ACTIVE:
        wait_failover(wait_for_zero_canvases)
        return

    @dispatcher
    def count_canvases():
        # Terminate the event loop once no visible canvases remain.
        if not get_visible_canvases():
            try:
                ROOT.gSystem.ExitLoop()
            except AttributeError:
                pass

    @dispatcher
    def exit_application_loop():
        ROOT.gSystem.ExitLoop()

    # Allow CTRL-c to break out of the application loop.
    sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
    sh.Add()
    sh.Connect("Notified()", "TPyDispatcher",
               exit_application_loop, "Dispatch()")

    visible_canvases = get_visible_canvases()
    for canvas in visible_canvases:
        log.debug("waiting for canvas {0} to close".format(
            canvas.GetName()))
        canvas.Update()
        if middle_mouse_close:
            attach_event_handler(canvas)
        if not getattr(canvas, "_py_close_dispatcher_attached", False):
            canvas._py_close_dispatcher_attached = True
            canvas.Connect("Closed()", "TPyDispatcher",
                           count_canvases, "Dispatch()")
        keepalive(canvas, count_canvases)

    if visible_canvases and not ROOT.gROOT.IsBatch():
        run_application_until_done()
        # Disconnect the handlers so canvases can be reused afterwards.
        for canvas in visible_canvases:
            if getattr(canvas, "_py_close_dispatcher_attached", False):
                canvas._py_close_dispatcher_attached = False
                canvas.Disconnect("Closed()", count_canvases,
                                  "Dispatch()")
Wait for all canvases to be closed or CTRL - c .
58,084
def wait_for_frame(frame):
    """Wait until the given TGMainFrame is closed, or CTRL-c."""
    if not frame:
        return

    @dispatcher
    def close():
        ROOT.gSystem.ExitLoop()

    if not getattr(frame, "_py_close_dispatcher_attached", False):
        frame._py_close_dispatcher_attached = True
        frame.Connect("CloseWindow()", "TPyDispatcher",
                      close, "Dispatch()")

    @dispatcher
    def exit_application_loop():
        ROOT.gSystem.ExitLoop()

    # Allow CTRL-c to break out of the application loop.
    sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
    sh.Add()
    sh.Connect("Notified()", "TPyDispatcher",
               exit_application_loop, "Dispatch()")

    if not ROOT.gROOT.IsBatch():
        run_application_until_done()
        # Disconnect so a stale handler doesn't fire later.
        frame.Disconnect("CloseWindow()", close, "Dispatch()")
wait until a TGMainFrame is closed or ctrl - c
58,085
def wait_for_browser_close(b):
    """Wait until the TBrowser ``b`` is closed, or CTRL-c."""
    if not b:
        return
    if not __ACTIVE:
        wait_failover(wait_for_browser_close)
        return
    wait_for_frame(b.GetBrowserImp().GetMainFrame())
Can be used to wait until a TBrowser is closed
58,086
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True):
    """Decorator factory: log a statement on function entry and exit."""
    def wrap(function):
        emit = logger.getChild(function.__name__).log

        @wraps(function)
        def thunk(*args, **kwargs):
            global trace_depth
            trace_depth.value += 1
            try:
                start = time()
                if show_enter:
                    emit(level, "{0}> {1} {2}".format(
                        " " * trace_depth.value, args, kwargs))
                try:
                    result = function(*args, **kwargs)
                except:
                    # Log the exception as the "result" on the way out.
                    _, result, _ = sys.exc_info()
                    raise
                finally:
                    if show_exit:
                        emit(level, "{0}< return {1} [{2:.2f} sec]".format(
                            " " * trace_depth.value, result,
                            time() - start))
            finally:
                trace_depth.value -= 1
            return result
        return thunk
    return wrap
log a statement on function entry and exit
58,087
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
    """Display the current stack on ``logger``."""
    if showing_stack.inside:
        # Guard against recursion if a log handler itself logs.
        return
    showing_stack.inside = True
    try:
        if frame is None:
            frame = sys._getframe(1)
        stack = "".join(traceback.format_stack(frame, limit))
        # Strip the two-space indent that format_stack produces.
        for line in (l[2:] for l in stack.split("\n") if l.strip()):
            logger.log(level, line)
    finally:
        showing_stack.inside = False
Display the current stack on logger .
58,088
def showdeletion(self, *objects):
    """Record a stack trace at the point when each given ROOT TObject
    is deleted."""
    from ..memory import showdeletion as S
    for obj in objects:
        S.monitor_object_cleanup(obj)
Record a stack trace at the point when an ROOT TObject is deleted
58,089
def trace(self, level=logging.DEBUG, show_enter=True, show_exit=True):
    """Return a decorator that logs function entry and exit at
    ``level`` (debug by default)."""
    from . import log_trace
    return log_trace(self, level, show_enter, show_exit)
Functions decorated with this function show function entry and exit with values defaults to debug log level .
58,090
def frame_unique(f):
    """Return a tuple uniquely identifying a frame's line of execution."""
    code = f.f_code
    return code.co_filename, code.co_name, f.f_lineno
A tuple representing a value which is unique to a given frame s line of execution
58,091
def show_stack_depth(self, record, frame):
    """Compute the maximum stack depth to show as requested by any
    hooks, returning -1 if none match or if a trace was already
    emitted for this line of code.
    """
    # FIX: removed the dead local `msg = record.getMessage()` -- the
    # matching below uses `record.msg`, and `msg` was never read.
    logger = self
    depths = [-1]
    # Walk up the logger hierarchy collecting matching hook depths.
    while logger:
        to_match = getattr(logger, "show_stack_regexes", ())
        for regex, depth, once, min_level in to_match:
            if record.levelno < min_level:
                continue
            if not regex.match(record.msg):
                continue
            unique = regex, self.frame_unique(frame), record.name
            if once:
                # Emit at most once per (regex, code line, logger name).
                if unique in logger.shown_stack_frames:
                    continue
                logger.shown_stack_frames.add(unique)
            depths.append(depth)
        logger = logger.parent
    return max(depths)
Compute the maximum stack depth to show requested by any hooks returning - 1 if there are none matching or if we ve already emitted one for the line of code referred to .
58,092
def getChild(self, suffix):
    """Return the child logger for ``suffix``.

    Taken from CPython 2.7, modified to remove a duplicated prefix
    and a duplicated trailing suffix component.
    """
    if suffix is None:
        return self
    if self.root is not self:
        # Strip a redundant copy of our own name from the front.
        prefix = self.name + "."
        if suffix.startswith(prefix):
            suffix = suffix[len(prefix):]
    parts = suffix.split(".")
    # Collapse a duplicated trailing component ("a.b.b" -> "a.b").
    if len(parts) > 1 and parts[-1] == parts[-2]:
        suffix = ".".join(parts[:-1])
    suffix = '.'.join((self.name, suffix))
    return self.manager.getLogger(suffix)
Taken from CPython 2 . 7 modified to remove duplicate prefix and suffixes
58,093
def method_file_check(f):
    """Decorator checking that a writable File exists before the
    wrapped method ``f`` is called."""
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        curr_dir = ROOT.gDirectory
        if isinstance(curr_dir, ROOT.TROOT) or not curr_dir:
            raise RuntimeError(
                "You must first create a File before calling {0}.{1}".format(
                    self.__class__.__name__, _get_qualified_name(f)))
        if not curr_dir.IsWritable():
            raise RuntimeError(
                "Calling {0}.{1} requires that the "
                "current File is writable".format(
                    self.__class__.__name__, _get_qualified_name(f)))
        return f(self, *args, **kwargs)
    return wrapper
A decorator to check that a TFile as been created before f is called . This function can decorate methods .
58,094
def chainable(f):
    """Decorator which causes a void method to return ``self`` so
    calls can be chained."""
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        # Discard f's (None) return value and return self instead.
        f(self, *args, **kwargs)
        return self
    return wrapper
Decorator which causes a void function to return self
58,095
def snake_case_methods(cls, debug=False):
    """
    A class decorator adding snake_case methods that alias capitalized
    ROOT methods.  cls must subclass a ROOT class and define the _ROOT
    class variable.

    Returns cls itself (mutated in place with the new aliases).
    """
    # note: the ``debug`` parameter is currently unused
    # Global switch: snake_case aliasing can be disabled entirely.
    if not CONVERT_SNAKE_CASE:
        return cls
    # The ROOT base class whose capitalized methods we alias.
    root_base = cls._ROOT
    members = inspect.getmembers(root_base)
    # First pass: keep only names whose lowercased form is unambiguous.
    # Each occurrence toggles membership, so a name whose lowercase form
    # appears an even number of times is dropped as ambiguous.
    names = {}
    for name, member in members:
        lower_name = name.lower()
        if lower_name in names:
            del names[lower_name]
        else:
            names[lower_name] = None
    # Second pass: install the snake_case aliases.
    for name, member in members:
        if name.lower() not in names:
            # ambiguous under case-folding; skip
            continue
        if name[0] == '_' or name.islower():
            # private or already lowercase; nothing to alias
            continue
        if not inspect.ismethod(member) and not inspect.isfunction(member):
            continue
        new_name = camel_to_snake(name)
        value = None
        skip = False
        for c in cls.mro():
            if new_name in c.__dict__:
                # a class in the MRO already defines the snake_case
                # name; do not clobber it
                skip = True
                break
            if name in c.__dict__:
                # take the implementation from the most derived class
                # that defines the original name
                value = c.__dict__[name]
                break
        else:
            # original name not found in any MRO __dict__ (presumably
            # provided dynamically); fall back to attribute lookup
            value = getattr(cls, name)
        if skip:
            continue
        setattr(cls, new_name, value)
    return cls
A class decorator adding snake_case methods that alias capitalized ROOT methods . cls must subclass a ROOT class and define the _ROOT class variable .
58,096
def sync(lock):
    """
    A synchronization decorator: wrap a function so that ``lock`` is
    held for the duration of every call.

    Parameters
    ----------
    lock : a lock object usable as a context manager
        (e.g. ``threading.Lock``)
    """
    # The inner function previously shadowed the outer name ``sync``;
    # renamed for clarity.
    def decorator(f):
        @wraps(f)
        def new_function(*args, **kwargs):
            # ``with`` releases the lock even if f raises, replacing
            # the manual acquire/try/finally/release sequence.
            with lock:
                return f(*args, **kwargs)
        return new_function
    return decorator
A synchronization decorator
58,097
def as_ufloat(roorealvar):
    """
    Cast a RooRealVar to an uncertainties.ufloat.

    Values that are already uncertainties objects are passed through
    unchanged.
    """
    already_ufloat = isinstance(
        roorealvar, (U.AffineScalarFunc, U.Variable))
    if already_ufloat:
        return roorealvar
    # ufloat is given a single (value, error) tuple here.
    pair = (roorealvar.getVal(), roorealvar.getError())
    return U.ufloat(pair)
Cast a RooRealVar to an uncertainties . ufloat
58,098
def correlated_values(param_names, roofitresult):
    """
    Return symbolic values from a RooFitResult taking into account
    covariance.

    Parameters
    ----------
    param_names : list of strings
        names of the floating fit parameters to extract
    roofitresult : RooFitResult
        the fit result providing values, errors and the correlation
        matrix

    Returns
    -------
    tuple of correlated uncertainties values, one per requested name,
    in the order of ``param_names``.
    """
    pars = roofitresult.floatParsFinal()
    pars = [pars[i] for i in range(pars.getSize())]
    parnames = [p.GetName() for p in pars]
    values = [(p.getVal(), p.getError()) for p in pars]
    matrix = asrootpy(roofitresult.correlationMatrix()).to_numpy()
    uvalues = U.correlated_values_norm(values, matrix.tolist())
    uvalues = dict((n, v) for n, v in zip(parnames, uvalues))
    # Validate the *requested* names (the previous assert checked
    # ``parnames`` — trivially true by construction — and its message
    # referenced a name that was out of scope when it would fire).
    missing = [n for n in param_names if n not in uvalues]
    assert not missing, (
        "names {0} aren't in parameter list {1}".format(missing, parnames))
    return tuple(uvalues[n] for n in param_names)
Return symbolic values from a RooFitResult taking into account covariance
58,099
def checkattr(metacls, attr, value):
    """
    Only allow class attributes that are instances of
    rootpy.types.Column, ROOT.TObject, or ROOT.ObjectProxy
    """
    # Methods, descriptors and properties are always permitted.
    if isinstance(value, (types.MethodType, types.FunctionType,
                          classmethod, staticmethod, property)):
        return
    # Attributes inherited from a plain object subclass (plus the
    # metaclass plumbing names) are ignored.
    if attr in dir(type('dummy', (object,), {})) + ['__metaclass__',
                                                    '__qualname__']:
        return
    if attr.startswith('_'):
        raise SyntaxError(
            "TreeModel attribute `{0}` "
            "must not start with `_`".format(attr))
    if not inspect.isclass(value):
        # Non-class values must be Column instances.
        if not isinstance(value, Column):
            raise TypeError(
                "TreeModel attribute `{0}` "
                "must be an instance of "
                "`rootpy.tree.treetypes.Column`".format(attr))
        return
    # Class values must derive from a ROOT-wrappable base.
    if not issubclass(value, (ROOT.TObject, ROOT.ObjectProxy)):
        raise TypeError(
            "TreeModel attribute `{0}` must inherit "
            "from `ROOT.TObject` or `ROOT.ObjectProxy`".format(attr))
Only allow class attributes that are instances of rootpy . types . Column ROOT . TObject or ROOT . ObjectProxy