idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
57,900
def precmd(self, line):
    """Pre-process a command line: lazily load q's help.q the first time
    a ``help`` command is seen.

    Returns the (possibly rewritten) line; if help.q cannot be loaded,
    returns a q expression that reports the problem to the user.
    """
    if line.startswith('help'):
        # Only load help.q once (`help in key`.q tests for its presence).
        if not q("`help in key`.q"):
            try:
                q("\\l help.q")
            except kerr:
                # Evaluating this in q prints the message to the console.
                return '-1"no help available - install help.q"'
        if line == 'help':
            # Bare "help" becomes "help`", q's help entry point.
            line += "`"
    return line
Support for help
57,901
def onecmd(self, line):
    """Interpret a single line.

    Returns True to stop the command loop (on ``\\`` or EOF), False
    otherwise.  q errors are printed in q's ``'error`` style rather
    than propagated.
    """
    if line == '\\':
        return True
    elif line == 'EOF':
        # Carriage return so the shell prompt lines up after Ctrl-D.
        print('\r', end='')
        return True
    else:
        try:
            v = q(line)
        except kerr as e:
            print("'%s" % e.args[0])
        else:
            # q's :: means "no value"; only display real results.
            if v != q('::'):
                v.show()
        return False
Interpret the line
57,902
def run(q_prompt=False):
    """Run a prompt_toolkit based REPL.

    Sizes the q console to the terminal, loads the script named by the
    first command-line argument (if any), then starts the REPL.

    Parameters
    ----------
    q_prompt : bool
        If true, enter q's own console before starting the Python REPL.
    """
    lines, columns = console_size()
    # Tell q the console geometry (\c rows cols).
    q(r'\c %d %d' % (lines, columns))
    if len(sys.argv) > 1:
        try:
            q(r'\l %s' % sys.argv[1])
        except kerr as e:
            print(e)
            raise SystemExit(1)
        else:
            # Script loaded fine; hide it from later argv processing.
            del sys.argv[1]
    if q_prompt:
        q()
    ptp.run()
Run a prompt - toolkit based REPL
57,903
def get_unit(a):
    """Extract the time unit from a datetime64/timedelta64 array's dtype.

    Parameters
    ----------
    a : numpy.ndarray
        Array whose dtype is expected to be datetime64 or timedelta64.

    Returns
    -------
    str
        The unit between the brackets, e.g. ``'ns'`` for ``datetime64[ns]``.

    Raises
    ------
    TypeError
        If the dtype carries no bracketed unit (not a datetime64 array).
    """
    typestr = a.dtype.str
    i = typestr.find('[')
    if i == -1:
        # BUG FIX: the original passed a tuple to TypeError instead of
        # %-formatting the dtype into the message.
        raise TypeError("Expected a datetime64 array, not %s" % a.dtype)
    return typestr[i + 1:-1]
Extract the time unit from the array's dtype
57,904
def k2a(a, x):
    """Rescale data from a K object ``x`` into the numpy array ``a``.

    Temporal K types (type numbers 12-19) are shifted and/or rescaled
    into numpy datetime64/timedelta64 representations; everything else
    is copied element-wise.  ``a`` is modified in place.
    """
    func, scale = None, 1
    t = abs(x._t)  # K type number; sign distinguishes atom vs. vector
    if 12 <= t <= 15:
        # Date-like types: per-unit attribute/shift/conversion lookup.
        unit = get_unit(a)
        attr, shift, func, scale = _UNIT[unit]
        a[:] = getattr(x, attr).data
        a += shift
    elif 16 <= t <= 19:
        # Time-like types: only a scale conversion is required.
        unit = get_unit(a)
        func, scale = _SCALE[unit]
        a[:] = x.timespan.data
    else:
        a[:] = list(x)
    if func is not None:
        # Apply the named numpy conversion on the raw int64 view.
        func = getattr(numpy, func)
        a[:] = func(a.view(dtype='i8'), scale)
    if a.dtype.char in 'mM':
        n = x.null
        # NOTE(review): ``n.any`` is not called - if ``null`` exposes
        # ``any`` as a method this condition is always truthy; confirm
        # whether ``n.any()`` was intended.
        if n.any:
            a[n] = None
Rescale data from a K object x to array a .
57,905
def show(self, start=0, geometry=None, output=None):
    """Pretty-print data to the console (or another destination).

    Parameters
    ----------
    start : int
        First row to display; negative values count from the end.
    geometry : pair of ints, optional
        Console (rows, columns); defaults to q's ``\\c`` setting.
    output : file-like or type, optional
        Destination stream (default ``sys.stdout``).  If a *type* is
        given, the formatted text is converted to that type and returned.
    """
    if output is None:
        output = sys.stdout
    if geometry is None:
        geometry = q.value(kp("\\c"))
    else:
        geometry = self._I(geometry)
    if start < 0:
        # Negative start indexes from the end, like Python slicing.
        start += q.count(self)
    if self._id() != nil._id():
        r = self._show(geometry, start)
    else:
        r = '::\n'
    if isinstance(output, type):
        return output(r)
    try:
        output.write(r)
    except TypeError:
        # Some streams reject K strings; retry with a plain str.
        output.write(str(r))
Pretty-print data to the console
57,906
def select(self, columns=(), by=(), where=(), **kwds):
    """Select from self: delegate to the shared select/exec/update helper."""
    return self._seu('select', columns, by, where, kwds)
select from self
57,907
def exec_(self, columns=(), by=(), where=(), **kwds):
    """Exec from self: delegate to the shared select/exec/update helper."""
    return self._seu('exec', columns, by, where, kwds)
exec from self
57,908
def update(self, columns=(), by=(), where=(), **kwds):
    """Update self: delegate to the shared select/exec/update helper."""
    return self._seu('update', columns, by, where, kwds)
update from self
57,909
def dict(cls, *args, **kwds):
    """Construct a q dictionary.

    Accepts either a single positional argument (a mapping, or an
    iterable of (key, value) pairs) or keyword arguments.  With no
    input at all an empty q dictionary is returned.

    Raises
    ------
    TypeError
        If more than one positional argument is given.
    """
    if args:
        if len(args) > 1:
            raise TypeError("Too many positional arguments")
        x = args[0]
        keys = []
        vals = []
        try:
            x_keys = x.keys
        except AttributeError:
            # Not a mapping: treat as an iterable of (key, value) pairs.
            for k, v in x:
                keys.append(k)
                vals.append(v)
        else:
            keys = x_keys()
            vals = [x[k] for k in keys]
        return q('!', keys, vals)
    else:
        if kwds:
            keys = []
            vals = []
            for k, v in kwds.items():
                keys.append(k)
                vals.append(v)
            return q('!', keys, vals)
        else:
            # q's empty dictionary literal.
            return q('()!()')
Construct a q dictionary
57,910
def logical_lines(lines):
    """Merge physical lines into logical chunks according to q rules.

    A line starting with a space continues the previous chunk; any
    non-indented line begins a new one.  Accepts a string or an iterable
    of lines and yields the stripped, non-empty chunks.
    """
    if isinstance(lines, string_types):
        lines = StringIO(lines)
    pending = []
    for raw in lines:
        if pending and not raw.startswith(' '):
            merged = ''.join(pending).strip()
            if merged:
                yield merged
            del pending[:]
        pending.append(raw)
    tail = ''.join(pending).strip()
    if tail:
        yield tail
Merge lines into chunks according to q rules
57,911
def q(line, cell=None, _ns=None):
    """Run q code as an IPython %q line magic or %%q cell magic.

    The line-magic form evaluates ``line`` directly.  The cell-magic
    form parses options from ``line``:

        -l file    preload a q script
        -h handle  evaluate over a connection handle (``:host:port``)
        -o name    store the result under ``name`` (or ``q.name``)
        -i names   bind comma-separated Python variables as q parameters
        -1 / -2    capture q's stdout / stderr and forward them

    Intermediate chunk results are displayed; the final result is
    returned unless redirected with -o.
    """
    if cell is None:
        return pyq.q(line)
    if _ns is None:
        _ns = vars(sys.modules['__main__'])
    input = output = None
    preload = []
    outs = {}
    try:
        h = pyq.q('0i')  # handle 0: evaluate locally
        if line:
            for opt, value in getopt(line.split(), "h:l:o:i:12")[0]:
                if opt == '-l':
                    preload.append(value)
                elif opt == '-h':
                    h = pyq.K(str(':' + value))
                elif opt == '-o':
                    output = str(value)
                elif opt == '-i':
                    input = str(value).split(',')
                elif opt in ('-1', '-2'):
                    outs[int(opt[1])] = None
        if outs:
            # Std stream redirection only makes sense for the local handle.
            if int(h) != 0:
                raise ValueError("Cannot redirect remote std stream")
            for fd in outs:
                # Point q's std stream at a temp file and unlink it
                # immediately so it vanishes once q closes it.
                tmpfd, tmpfile = mkstemp()
                try:
                    pyq.q(r'\%d %s' % (fd, tmpfile))
                finally:
                    os.unlink(tmpfile)
                    os.close(tmpfd)
        r = None
        for script in preload:
            h(pyq.kp(r"\l " + script))
        if input is not None:
            # Wrap each chunk in a q lambda taking the bound variables.
            for chunk in logical_lines(cell):
                func = "{[%s]%s}" % (';'.join(input), chunk)
                args = tuple(_ns[i] for i in input)
                # NOTE(review): on the first chunk r is None; this relies
                # on None != Q_NONE being false-y (or on K comparison
                # semantics) to avoid None.show() - confirm.
                if r != Q_NONE:
                    r.show()
                r = h((pyq.kp(func),) + args)
                if outs:
                    _forward_outputs(outs)
        else:
            for chunk in logical_lines(cell):
                if r != Q_NONE:
                    r.show()
                r = h(pyq.kp(chunk))
                if outs:
                    _forward_outputs(outs)
    except pyq.kerr as e:
        print("'%s" % e)
    else:
        if output is not None:
            if output.startswith('q.'):
                # q.name -> assign into q's global namespace.
                pyq.q('@[`.;;:;]', output[2:], r)
            else:
                _ns[output] = r
        else:
            if r != Q_NONE:
                return r
Run q code .
57,912
def load_ipython_extension(ipython):
    """Register the %q and %%q magics and plain-text display for K objects."""
    ipython.register_magic_function(q, 'line_cell')
    fmr = ipython.display_formatter.formatters['text/plain']
    fmr.for_type(pyq.K, _q_formatter)
Register %q and %%q magics and pretty display for K objects
57,913
def get_prompt_tokens(_):
    """Return prompt_toolkit tokens for the q prompt: ``q)`` in the root
    namespace, ``q.ns)`` when another namespace is active."""
    namespace = q(r'\d')
    if namespace == '.':
        namespace = ''
    return [(Token.Generic.Prompt, 'q%s)' % namespace)]
Return a list of tokens for the prompt
57,914
def cmdloop(self, intro=None):
    """A Cmd.cmdloop implementation built on prompt_toolkit.

    Repeatedly prompts for a line, runs the standard precmd/onecmd/
    postcmd cycle, and exits on ``\\\\`` or end-of-input.
    """
    style = style_from_pygments(BasicStyle, style_dict)
    self.preloop()
    stop = None
    while not stop:
        line = prompt(get_prompt_tokens=get_prompt_tokens,
                      lexer=lexer,
                      get_bottom_toolbar_tokens=get_bottom_toolbar_tokens,
                      history=history,
                      style=style,
                      true_color=True,
                      on_exit='return-none',
                      on_abort='return-none',
                      completer=QCompleter())
        # prompt() returns None on EOF/abort (on_exit/on_abort above).
        if line is None or line.strip() == r'\\':
            raise SystemExit
        else:
            line = self.precmd(line)
            stop = self.onecmd(line)
            stop = self.postcmd(stop, line)
    self.postloop()
A Cmd . cmdloop implementation
57,915
def eval(source, kwd_dict=None, **kwds):
    """Evaluate a snuggs expression.

    Variable bindings may be supplied either as a dict or as keyword
    arguments; when both are given the dict wins.
    """
    kwd_dict = kwd_dict or kwds
    with ctx(kwd_dict):
        return handleLine(source)
Evaluate a snuggs expression .
57,916
def _make_matchers(self, crontab):
    """Construct the full matcher struct from a crontab expression.

    Named aliases (e.g. ``@hourly``) expand first.  5-field entries gain
    a leading second ``0`` and a trailing year ``*``; 6-field entries
    gain the leading second; the result always has 7 fields:
    second minute hour day month weekday year.
    """
    crontab = _aliases.get(crontab, crontab)
    ct = crontab.split()
    if len(ct) == 5:
        ct.insert(0, '0')
        ct.append('*')
    elif len(ct) == 6:
        ct.insert(0, '0')
    _assert(len(ct) == 7,
            "improper number of cron entries specified; got %i need 5 to 7" % (len(ct,)))
    # One matcher per field, indexed by field position.
    matchers = [_Matcher(which, entry) for which, entry in enumerate(ct)]
    return Matcher(*matchers)
This constructs the full matcher struct .
57,917
def next(self, now=None, increments=_increments, delta=True, default_utc=WARN_CHANGE):
    """How long to wait, in seconds, before this crontab entry next fires.

    Parameters
    ----------
    now : datetime, numeric timestamp, or None
        Reference time; defaults to the current time.
    increments : sequence of callables
        Per-field increment functions (internal).
    delta : bool
        If false, return an absolute epoch-based value instead of a delay.
    default_utc : bool or WARN_CHANGE
        Whether naive/numeric times are interpreted as UTC; the sentinel
        WARN_CHANGE emits a FutureWarning and behaves as False.
    """
    # Warn callers who rely on the (changing) naive-time default.
    if default_utc is WARN_CHANGE and (isinstance(now, _number_types) or
                                       (now and not now.tzinfo) or now is None):
        warnings.warn(WARNING_CHANGE_MESSAGE, FutureWarning, 2)
        default_utc = False
    now = now or (datetime.utcnow() if default_utc and default_utc is not WARN_CHANGE else datetime.now())
    if isinstance(now, _number_types):
        now = (datetime.utcfromtimestamp(now) if default_utc else datetime.fromtimestamp(now))
    # Work with a naive copy; keep the original tz for the final delay.
    onow, now = now, now.replace(tzinfo=None)
    tz = onow.tzinfo
    # Start the search one base increment past "now" (microseconds zeroed).
    future = now.replace(microsecond=0) + increments[0]()
    if future < now:
        # Searching backwards (negative base increment).
        _test = lambda: future.year < self.matchers.year
        if now.microsecond:
            future = now.replace(microsecond=0)
    else:
        _test = lambda: self.matchers.year < future.year
    to_test = ENTRIES - 1
    while to_test >= 0:
        if not self._test_match(to_test, future):
            # Bump the failing field, then reset all lower-order fields.
            inc = increments[to_test](future, self.matchers)
            future += inc
            for i in xrange(0, to_test):
                future = increments[ENTRIES + i](future, inc)
            try:
                if _test():
                    # Ran past the matcher's year range: no next time.
                    return None
            except:
                print(future, type(future), type(inc))
                raise
            # Restart field checks from the highest-order field.
            to_test = ENTRIES - 1
            continue
        to_test -= 1
    # Sanity check: every field of the candidate must match.
    match = [self._test_match(i, future) for i in xrange(ENTRIES)]
    _assert(all(match),
            "\nYou have discovered a bug with crontab, please notify the\n"
            "author with the following information:\n"
            "crontab: %r\n"
            "now: %r", ' '.join(m.input for m in self.matchers), now)
    if not delta:
        # Caller wants an absolute value: measure from the epoch.
        onow = now = datetime(1970, 1, 1)
    delay = future - now
    if tz:
        # Convert the naive arithmetic back into the caller's timezone.
        delay += _fix_none(onow.utcoffset())
        if hasattr(tz, 'localize'):
            delay -= _fix_none(tz.localize(future).utcoffset())
        else:
            delay -= _fix_none(future.replace(tzinfo=tz).utcoffset())
    return delay.days * 86400 + delay.seconds + delay.microseconds / 1000000.
How long to wait in seconds before this crontab entry can next be executed .
57,918
def _tostring(value):
    """Convert a value to an XML-compatible string.

    Booleans map to 'true'/'false' (by identity, so 1/0 are not
    affected), None maps to the empty string, and anything else is
    passed through unicode().
    """
    if value is True:
        return unicode('true')
    if value is False:
        return unicode('false')
    if value is None:
        return unicode('')
    return unicode(value)
Convert value to XML compatible string
57,919
def _fromstring ( value ) : if value is None : return None if value . lower ( ) == 'true' : return True elif value . lower ( ) == 'false' : return False try : return int ( value ) except ValueError : pass try : if float ( '-inf' ) < float ( value ) < float ( 'inf' ) : return float ( value ) except ValueError : pass return value
Convert XML string value to None boolean int or float
57,920
def by_col(cls, df, cols, w=None, inplace=False, pvalue='sim', outvals=None, **stat_kws):
    """Compute a Join_Count statistic over columns of a dataframe.

    Join counts yield several p-values at once, so the relevant names
    (bb, p_sim_bw, p_sim_bb) are folded into ``outvals`` and the
    single-pvalue mechanism is disabled by clearing ``pvalue``.
    Delegates to the shared univariate handler with a 'bw' rename.
    """
    if outvals is None:
        outvals = []
    # NOTE(review): this mutates a caller-supplied outvals list in place.
    outvals.extend(['bb', 'p_sim_bw', 'p_sim_bb'])
    pvalue = ''
    return _univariate_handler(df, cols, w=w, inplace=inplace, pvalue=pvalue,
                               outvals=outvals, stat=cls, swapname='bw', **stat_kws)
Function to compute a Join_Count statistic on a dataframe
57,921
def Moran_BV_matrix(variables, w, permutations=0, varnames=None):
    """Bivariate Moran matrix: pairwise Moran_BV statistics among a set
    of variables.

    Accepts either a sequence of arrays or a pandas DataFrame (column
    names then become the variable names).  pandas is optional; without
    it, ``variables`` is used as-is.
    """
    try:
        import pandas
        if isinstance(variables, pandas.DataFrame):
            varnames = pandas.Index.tolist(variables.columns)
            variables_n = []
            for var in varnames:
                variables_n.append(variables[str(var)].values)
        else:
            variables_n = variables
    except ImportError:
        variables_n = variables
    results = _Moran_BV_Matrix_array(variables=variables_n, w=w,
                                     permutations=permutations,
                                     varnames=varnames)
    return results
Bivariate Moran Matrix
57,922
def _Moran_BV_Matrix_array(variables, w, permutations=0, varnames=None):
    """Base calculation for Moran_BV_matrix: a Moran_BV statistic for
    every ordered pair of distinct variables.

    Returns a dict keyed by (i, j) index pairs; each result also records
    the x/y variable names used.
    """
    k = len(variables)
    # BUG FIX: k was referenced to build the default varnames before it
    # was assigned, raising UnboundLocalError whenever varnames was None.
    if varnames is None:
        varnames = ['x{}'.format(i) for i in range(k)]
    rk = list(range(0, k - 1))
    results = {}
    for i in rk:
        for j in range(i + 1, k):
            y1 = variables[i]
            y2 = variables[j]
            results[i, j] = Moran_BV(y1, y2, w, permutations=permutations)
            results[j, i] = Moran_BV(y2, y1, w, permutations=permutations)
            results[i, j].varnames = {'x': varnames[i], 'y': varnames[j]}
            results[j, i].varnames = {'x': varnames[j], 'y': varnames[i]}
    return results
Base calculation for MORAN_BV_Matrix
57,923
def by_col(cls, df, x, y=None, w=None, inplace=False, pvalue='sim', outvals=None, **stat_kws):
    """Compute a Moran_BV statistic on dataframe columns by delegating to
    the shared bivariate handler; result columns are renamed with the
    statistic's lowercased class name."""
    return _bivariate_handler(df, x, y=y, w=w, inplace=inplace, pvalue=pvalue,
                              outvals=outvals, swapname=cls.__name__.lower(),
                              stat=cls, **stat_kws)
Function to compute a Moran_BV statistic on a dataframe
57,924
def by_col(cls, df, events, populations, w=None, inplace=False, pvalue='sim',
           outvals=None, swapname='', **stat_kws):
    """Compute a Moran statistic on event/population rate columns of a
    dataframe.

    Event and population column names are paired one-to-one (a single
    population column is recycled across events).  Rates are
    Assuncao-adjusted by default (``adjusted=True`` in stat_kws), then a
    Moran statistic is computed per rate column and its outputs are
    written back into ``df``.

    Raises
    ------
    ValueError
        When events and populations cannot be matched one-to-one.
    """
    if not inplace:
        new = df.copy()
        cls.by_col(new, events, populations, w=w, inplace=True,
                   pvalue=pvalue, outvals=outvals, swapname=swapname,
                   **stat_kws)
        return new
    if isinstance(events, str):
        events = [events]
    if isinstance(populations, str):
        populations = [populations]
    if len(populations) < len(events):
        # Recycle the population column(s) across all events.
        populations = populations * len(events)
    if len(events) != len(populations):
        raise ValueError('There is not a one-to-one matching between events and '
                         'populations!\nEvents: {}\n\nPopulations:'
                         ' {}'.format(events, populations))
    adjusted = stat_kws.pop('adjusted', True)
    if isinstance(adjusted, bool):
        adjusted = [adjusted] * len(events)
    # BUG FIX: identity comparison ``swapname is ''`` against a str
    # literal is implementation dependent (SyntaxWarning on CPython 3.8+);
    # compare by value instead.
    if swapname == '':
        swapname = cls.__name__.lower()
    rates = [assuncao_rate(df[e], df[pop]) if adj
             else df[e].astype(float) / df[pop]
             for e, pop, adj in zip(events, populations, adjusted)]
    names = ['-'.join((e, p)) for e, p in zip(events, populations)]
    out_df = df.copy()
    # NOTE(review): DataFrame.from_items was removed in pandas 0.25;
    # this call only works on older pandas - confirm target version.
    rate_df = out_df.from_items(list(zip(names, rates)))
    stat_df = _univariate_handler(rate_df, names, w=w, inplace=False,
                                  pvalue=pvalue, outvals=outvals,
                                  swapname=swapname, stat=Moran, **stat_kws)
    for col in stat_df.columns:
        df[col] = stat_df[col]
Function to compute a Moran_Rate statistic on a dataframe
57,925
def flatten(l, unique=True):
    """Flatten a list of lists into a single list.

    With unique=True (the default) duplicates are removed and order is
    not preserved; otherwise all elements are kept in order.
    """
    merged = reduce(lambda left, right: left + right, l)
    if unique:
        return list(set(merged))
    return list(merged)
flatten a list of lists
57,926
def weighted_median(d, w):
    """Find the weighted median of values d with weights w.

    Values are sorted with their weights carried along; the median is
    the first value whose cumulative weight reaches half the total.
    When the cumulative weight hits that threshold exactly (and a next
    value exists) the two straddling values are averaged.
    """
    pair_dtype = [('w', '%s' % w.dtype), ('v', '%s' % d.dtype)]
    pairs = np.array(list(zip(w, d)), dtype=pair_dtype)
    pairs.sort(order='v')
    cum_w = pairs['w'].cumsum()
    threshold = cum_w[-1] * 1.0 / 2
    idx = (cum_w >= threshold).nonzero()[0][0]
    ordered = np.sort(d)
    if cum_w[idx] == threshold and len(d) - 1 > idx:
        return ordered[idx:idx + 2].mean()
    return ordered[idx]
A utility function to find the weighted median of d based on the weights w
57,927
def sum_by_n(d, w, n):
    """Summarize array d into n values after weighting it by w.

    The weighted array is split into n consecutive, equal-length groups
    and each group is summed.
    """
    total = len(d)
    size = total // n
    weighted = d * w
    return np.array([sum(weighted[start:start + size])
                     for start in range(0, total, size)])
A utility function to summarize a data array into n values after weighting the array with another weight array w
57,928
def crude_age_standardization(e, b, n):
    """Compute rates through crude age standardization.

    e and b are event and population counts stacked as n regions of
    equal-length age groups; each region's age-specific rates are
    combined using that region's population shares as weights.
    """
    rates = e * 1.0 / b
    region_pop = sum_by_n(b, 1.0, n)
    weights = b * 1.0 / region_pop.repeat(len(e) // n)
    return sum_by_n(rates, weights, n)
A utility function to compute rate through crude age standardization
57,929
def direct_age_standardization(e, b, s, n, alpha=0.05):
    """Compute rates through direct age standardization, with
    gamma-distribution confidence intervals.

    Parameters: e/b are event and population counts per age group, s the
    standard population, n the number of regions, alpha the significance
    level.  Returns a list of (rate, lower, upper) tuples per region.
    """
    # Weight each age group by its share of the standard population,
    # normalized by the local population.
    age_weight = (1.0 / b) * (s * 1.0 / sum_by_n(s, 1.0, n).repeat(len(s) // n))
    adjusted_r = sum_by_n(e, age_weight, n)
    var_estimate = sum_by_n(e, np.square(age_weight), n)
    g_a = np.square(adjusted_r) / var_estimate
    g_b = var_estimate / adjusted_r
    # Per-region maximum age weight, used for the upper-bound gamma.
    k = [age_weight[i:i + len(b) // n].max() for i in range(0, len(b), len(b) // n)]
    g_a_k = np.square(adjusted_r + k) / (var_estimate + np.square(k))
    g_b_k = (var_estimate + np.square(k)) / (adjusted_r + k)
    summed_b = sum_by_n(b, 1.0, n)  # NOTE(review): appears unused here
    res = []
    for i in range(len(adjusted_r)):
        if adjusted_r[i] == 0:
            # NOTE(review): scipy's chi2.ppf requires a df argument; as
            # written this call would raise - confirm the intended df.
            upper = 0.5 * chi2.ppf(1 - 0.5 * alpha)
            lower = 0.0
        else:
            lower = gamma.ppf(0.5 * alpha, g_a[i], scale=g_b[i])
            upper = gamma.ppf(1 - 0.5 * alpha, g_a_k[i], scale=g_b_k[i])
        res.append((adjusted_r[i], lower, upper))
    return res
A utility function to compute rate through direct age standardization
57,930
def indirect_age_standardization(e, b, s_e, s_b, n, alpha=0.05):
    """Compute rates through indirect age standardization (SMR-based),
    with log-normal confidence intervals.

    Returns a list of (adjusted rate, lower, upper) tuples per region.
    """
    smr = standardized_mortality_ratio(e, b, s_e, s_b, n)
    s_r_all = sum(s_e * 1.0) / sum(s_b * 1.0)  # overall standard rate
    adjusted_r = s_r_all * smr
    e_by_n = sum_by_n(e, 1.0, n)
    # Confidence interval on log(SMR), scaled back to rates.
    log_smr = np.log(smr)
    log_smr_sd = 1.0 / np.sqrt(e_by_n)
    norm_thres = norm.ppf(1 - 0.5 * alpha)
    log_smr_lower = log_smr - norm_thres * log_smr_sd
    log_smr_upper = log_smr + norm_thres * log_smr_sd
    smr_lower = np.exp(log_smr_lower) * s_r_all
    smr_upper = np.exp(log_smr_upper) * s_r_all
    res = list(zip(adjusted_r, smr_lower, smr_upper))
    return res
A utility function to compute rate through indirect age standardization
57,931
def _univariate_handler(df, cols, stat=None, w=None, inplace=True, pvalue='sim',
                        outvals=None, swapname='', **kwargs):
    """Compute a univariate descriptive statistic ``stat`` over columns
    ``cols`` of ``df``.

    For each column, the statistic object is constructed and the
    attributes named in ``outvals`` (plus ``_statistic`` and the chosen
    p-value) are written into new columns of ``df``.  With
    ``inplace=False`` a modified copy is returned instead.

    Raises
    ------
    Exception
        When no weights are provided and none are attached to the frame.
    NotImplementedError
        When a multi-pvalue request ('all'/'both'/'*') is made.
    """
    if not inplace:
        new_df = df.copy()
        _univariate_handler(new_df, cols, stat=stat, w=w, pvalue=pvalue,
                            inplace=True, outvals=outvals, swapname=swapname,
                            **kwargs)
        return new_df
    if w is None:
        # Fall back to a weights object attached to the frame's metadata.
        for name in df._metadata:
            this_obj = df.__dict__.get(name)
            if isinstance(this_obj, W):
                w = this_obj
    if w is None:
        raise Exception('Weights not provided and no weights attached to frame!'
                        ' Please provide a weight or attach a weight to the'
                        ' dataframe')
    # BUG FIX: copy the caller's outvals before modifying it, so repeated
    # calls do not accumulate entries in the caller's list.
    outvals = list(outvals) if outvals is not None else []
    outvals.insert(0, '_statistic')
    if pvalue.lower() in ['all', 'both', '*']:
        raise NotImplementedError("If you want more than one type of PValue,add"
                                  " the targeted pvalue type to outvals. For example:"
                                  " Geary(df, cols=['HOVAL'], w=w, outvals=['p_z_sim', "
                                  "'p_rand']")
    # BUG FIX: compare the string by value, not identity (``is not ''``
    # is implementation dependent and warns on CPython >= 3.8).
    if pvalue != '':
        outvals.append('p_' + pvalue.lower())
    if isinstance(cols, str):
        cols = [cols]

    def column_stat(column):
        # Build the statistic object for a single column's values.
        return stat(column.values, w=w, **kwargs)

    stat_objs = df[cols].apply(column_stat)
    for col in cols:
        stat_obj = stat_objs[col]
        y = kwargs.get('y')
        if y is not None:
            # Bivariate call-through: tag the column pair in the name.
            col += '-' + y.name
        outcols = ['_'.join((col, val)) for val in outvals]
        for colname, attname in zip(outcols, outvals):
            df[colname] = getattr(stat_obj, attname)
    # BUG FIX: same identity-comparison issue as above.
    if swapname != '':
        df.columns = [_swap_ending(col, swapname)
                      if col.endswith('_statistic') else col
                      for col in df.columns]
Compute a univariate descriptive statistic stat over columns cols in df .
57,932
def _bivariate_handler(df, x, y=None, w=None, inplace=True, pvalue='sim',
                       outvals=None, **kwargs):
    """Compute a descriptive bivariate statistic over column sets x and y
    of df.

    Every (xi, yi) pair with xi != yi is dispatched to the univariate
    handler with the y column bound; when y is omitted, x is paired with
    itself.  Result columns ending in '_statistic' are renamed with the
    swapname popped from kwargs.  With inplace=False a modified copy is
    returned.
    """
    real_swapname = kwargs.pop('swapname', '')
    if isinstance(y, str):
        y = [y]
    if isinstance(x, str):
        x = [x]
    if not inplace:
        new_df = df.copy()
        _bivariate_handler(new_df, x, y=y, w=w, inplace=True,
                           swapname=real_swapname, pvalue=pvalue,
                           outvals=outvals, **kwargs)
        return new_df
    if y is None:
        y = x
    for xi, yi in _it.product(x, y):
        if xi == yi:
            continue
        _univariate_handler(df, cols=xi, w=w, y=df[yi], inplace=True,
                            pvalue=pvalue, outvals=outvals, swapname='',
                            **kwargs)
    # BUG FIX: compare string values with != rather than identity
    # (``is not ''`` is implementation dependent, warns on CPython 3.8+).
    if real_swapname != '':
        df.columns = [_swap_ending(col, real_swapname)
                      if col.endswith('_statistic') else col
                      for col in df.columns]
Compute a descriptive bivariate statistic over two sets of columns x and y contained in df .
57,933
def _swap_ending ( s , ending , delim = '_' ) : parts = [ x for x in s . split ( delim ) [ : - 1 ] if x != '' ] parts . append ( ending ) return delim . join ( parts )
Replace the ending of a string delimited into an arbitrary number of chunks by delim with the ending provided
57,934
def is_sequence(i, include=None):
    """Return a boolean indicating whether i is a sequence in the SymPy
    sense: indexable and iterable, or an instance of the extra type(s)
    named in include."""
    if hasattr(i, '__getitem__') and iterable(i):
        return True
    return bool(include) and isinstance(i, include)
Return a boolean indicating whether i is a sequence in the SymPy sense . If anything that fails the test below should be included as being a sequence for your application set include to that object s type ; multiple types should be passed as a tuple of types .
57,935
def as_int(n):
    """Convert the argument to a builtin integer.

    Values that change under int() (e.g. 3.2) are rejected with a
    ValueError rather than silently truncated; exact integral values
    like 3.0 are accepted.
    """
    try:
        coerced = int(n)
        if coerced != n:
            raise TypeError
    except TypeError:
        raise ValueError('%s is not an integer' % n)
    return coerced
Convert the argument to a builtin integer .
57,936
def default_sort_key(item, order=None):
    """Return a key usable for sorting arbitrary objects (SymPy's
    canonical default ordering).

    Basic instances delegate to their own sort_key; iterables are keyed
    recursively (sets and dicts are sorted first since they are
    unordered); other objects are sympified when possible, otherwise
    keyed by their string form.
    """
    from sympy.core import S, Basic
    from sympy.core.sympify import sympify, SympifyError
    from sympy.core.compatibility import iterable
    if isinstance(item, Basic):
        return item.sort_key(order=order)
    if iterable(item, exclude=string_types):
        if isinstance(item, dict):
            args = item.items()
            unordered = True
        elif isinstance(item, set):
            args = item
            unordered = True
        else:
            # Ordered containers (list/tuple/...) keep their order.
            args = list(item)
            unordered = False
        args = [default_sort_key(arg, order=order) for arg in args]
        if unordered:
            # Canonicalize unordered containers by sorting their keys.
            args = sorted(args)
        cls_index, args = 10, (len(args), tuple(args))
    else:
        if not isinstance(item, string_types):
            try:
                item = sympify(item)
            except SympifyError:
                # Not sympifiable: fall through to string keying.
                pass
            else:
                if isinstance(item, Basic):
                    return default_sort_key(item)
        cls_index, args = 0, (1, (str(item),))
    return (cls_index, 0, item.__class__.__name__), args, S.One.sort_key(), S.One
Return a key that can be used for sorting .
57,937
def var(names, **args):
    """Create symbols and inject them into the caller's global namespace.

    Returns the created symbol(s).  Relies on frame inspection, so it is
    intended for interactive use only.
    """
    def traverse(symbols, frame):
        # Recursively inject symbols from (possibly nested) containers.
        for symbol in symbols:
            if isinstance(symbol, Basic):
                frame.f_globals[symbol.__str__()] = symbol
            else:
                traverse(symbol, frame)
    from inspect import currentframe
    frame = currentframe().f_back
    try:
        syms = symbols(names, **args)
        if syms is not None:
            if isinstance(syms, Basic):
                frame.f_globals[syms.__str__()] = syms
            else:
                traverse(syms, frame)
    finally:
        # Break the reference cycle created by currentframe().
        del frame
    return syms
Create symbols and inject them into the global namespace .
57,938
def _combine_attribute_arguments ( self , attr_dict , attr ) : if attr_dict is None : attr_dict = attr else : try : attr_dict . update ( attr ) except AttributeError : raise AttributeError ( "attr_dict argument \ must be a dictionary." ) return attr_dict
Combines attr_dict and attr dictionaries by updating attr_dict with attr .
57,939
def remove_node(self, node):
    """Remove a node and its attributes from the hypergraph, along with
    every hyperedge that contains it.

    Raises ValueError when the node is not present.
    """
    if not self.has_node(node):
        raise ValueError("No such node exists.")
    # Drop every hyperedge incident to this node.
    for hyperedge_id in self._star[node]:
        frozen_nodes = self._hyperedge_attributes[hyperedge_id]["__frozen_nodes"]
        del self._node_set_to_hyperedge[frozen_nodes]
        del self._hyperedge_attributes[hyperedge_id]
    # NOTE(review): the removed hyperedge IDs still appear in the _star
    # sets of their *other* member nodes - confirm whether that is
    # cleaned up elsewhere.
    del self._star[node]
    del self._node_attributes[node]
Removes a node and its attributes from the hypergraph . Removes every hyperedge that contains this node .
57,940
def add_hyperedge(self, nodes, attr_dict=None, **attr):
    """Add a hyperedge (a set of nodes) with optional attributes.

    Member nodes not yet in the hypergraph are added automatically; a
    new hyperedge gets a default weight of 1.  Returns the hyperedge ID
    (the existing ID when the same node set is already present).

    Raises ValueError when the node collection is empty.
    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    if not nodes:
        raise ValueError("nodes argument cannot be empty.")
    # Hyperedges are identified by their (order-free) frozen node set.
    frozen_nodes = frozenset(nodes)
    is_new_hyperedge = not self.has_hyperedge(frozen_nodes)
    if is_new_hyperedge:
        self.add_nodes(frozen_nodes)
        hyperedge_id = self._assign_next_hyperedge_id()
        for node in frozen_nodes:
            self._star[node].add(hyperedge_id)
        self._node_set_to_hyperedge[frozen_nodes] = hyperedge_id
        self._hyperedge_attributes[hyperedge_id] = {"nodes": nodes,
                                                    "__frozen_nodes": frozen_nodes,
                                                    "weight": 1}
    else:
        hyperedge_id = self._node_set_to_hyperedge[frozen_nodes]
    self._hyperedge_attributes[hyperedge_id].update(attr_dict)
    return hyperedge_id
Adds a hyperedge to the hypergraph along with any related attributes of the hyperedge . This method will automatically add any node from the node set that was not in the hypergraph . A hyperedge without a weight attribute specified will be assigned the default value of 1 .
57,941
def add_hyperedges(self, hyperedges, attr_dict=None, **attr):
    """Add multiple hyperedges, all sharing the given attributes.

    Each hyperedge receives its own copy of the combined attribute dict
    so per-edge mutation cannot leak across edges.  Returns the list of
    assigned hyperedge IDs in input order.
    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    hyperedge_ids = []
    for nodes in hyperedges:
        hyperedge_id = self.add_hyperedge(nodes, attr_dict.copy())
        hyperedge_ids.append(hyperedge_id)
    return hyperedge_ids
Adds multiple hyperedges to the graph along with any related attributes of the hyperedges . If any node of a hyperedge has not previously been added to the hypergraph it will automatically be added here . Hyperedges without a weight attribute specified will be assigned the default value of 1 .
57,942
def get_hyperedge_id(self, nodes):
    """Return the ID of the hyperedge comprising exactly this node set.

    Raises ValueError when no such hyperedge exists.
    """
    key = frozenset(nodes)
    if self.has_hyperedge(key):
        return self._node_set_to_hyperedge[key]
    raise ValueError("No such hyperedge exists.")
From a set of nodes returns the ID of the hyperedge that this set comprises .
57,943
def get_hyperedge_attribute(self, hyperedge_id, attribute_name):
    """Return a copy of one attribute of the given hyperedge.

    Raises ValueError when either the hyperedge or the attribute does
    not exist.
    """
    if not self.has_hyperedge_id(hyperedge_id):
        raise ValueError("No such hyperedge exists.")
    attributes = self._hyperedge_attributes[hyperedge_id]
    if attribute_name not in attributes:
        raise ValueError("No such attribute exists.")
    return copy.copy(attributes[attribute_name])
Given a hyperedge ID and the name of an attribute get a copy of that hyperedge s attribute .
57,944
def get_hyperedge_attributes(self, hyperedge_id):
    """Return a dict of copies of the hyperedge's attributes, hiding the
    internal __frozen_nodes bookkeeping entry.

    Raises ValueError when the hyperedge does not exist.
    """
    if not self.has_hyperedge_id(hyperedge_id):
        raise ValueError("No such hyperedge exists.")
    stored = self._hyperedge_attributes[hyperedge_id]
    return {name: copy.copy(value)
            for name, value in stored.items()
            if name != "__frozen_nodes"}
Given a hyperedge ID get a dictionary of copies of that hyperedge s attributes .
57,945
def get_star(self, node):
    """Return a copy of the node's star: the set of hyperedge IDs the
    node belongs to.

    Raises ValueError for unknown nodes.
    """
    if node in self._node_attributes:
        return self._star[node].copy()
    raise ValueError("No such node exists.")
Given a node, get a copy of that node's star, that is, the set of hyperedges that the node belongs to.
57,946
def _F_outdegree(H, F):
    """Apply the reducer F to the list of node outdegrees of a directed
    hypergraph (e.g. F=max yields the maximum outdegree).

    Raises TypeError for non-directed hypergraphs.
    """
    if not isinstance(H, DirectedHypergraph):
        raise TypeError("Algorithm only applicable to directed hypergraphs")
    return F([len(H.get_forward_star(node)) for node in H.get_node_set()])
Returns the result of a function F applied to the set of outdegrees in in the hypergraph .
57,947
def _F_indegree(H, F):
    """Apply the reducer F to the list of node indegrees of a directed
    hypergraph (e.g. F=min yields the minimum indegree).

    Raises TypeError for non-directed hypergraphs.
    """
    if not isinstance(H, DirectedHypergraph):
        raise TypeError("Algorithm only applicable to directed hypergraphs")
    return F([len(H.get_backward_star(node)) for node in H.get_node_set()])
Returns the result of a function F applied to the list of indegrees in in the hypergraph .
57,948
def _F_hyperedge_tail_cardinality(H, F):
    """Apply the reducer F to the list of hyperedge tail cardinalities of
    a directed hypergraph.

    Raises TypeError for non-directed hypergraphs.
    """
    if not isinstance(H, DirectedHypergraph):
        raise TypeError("Algorithm only applicable to directed hypergraphs")
    return F([len(H.get_hyperedge_tail(hyperedge_id))
              for hyperedge_id in H.get_hyperedge_id_set()])
Returns the result of a function F applied to the set of cardinalities of hyperedge tails in the hypergraph .
57,949
def _F_hyperedge_head_cardinality(H, F):
    """Apply the reducer F to the list of hyperedge head cardinalities of
    a directed hypergraph.

    Raises TypeError for non-directed hypergraphs.
    """
    if not isinstance(H, DirectedHypergraph):
        raise TypeError("Algorithm only applicable to directed hypergraphs")
    return F([len(H.get_hyperedge_head(hyperedge_id))
              for hyperedge_id in H.get_hyperedge_id_set()])
Returns the result of a function F applied to the set of cardinalities of hyperedge heads in the hypergraph .
57,950
def get_hyperedge_weight_matrix(H, hyperedge_ids_to_indices):
    """Create the diagonal sparse matrix W of hyperedge weights, ordered
    according to the supplied hyperedge-to-index mapping."""
    weights_by_index = {}
    for hyperedge_id in H.hyperedge_id_iterator():
        index = hyperedge_ids_to_indices[hyperedge_id]
        weights_by_index[index] = H.get_hyperedge_weight(hyperedge_id)
    # Lay the weights out on the diagonal in index order.
    diagonal = [weights_by_index.get(i) for i in range(len(weights_by_index.keys()))]
    return sparse.diags([diagonal], [0])
Creates the diagonal matrix W of hyperedge weights as a sparse matrix .
57,951
def get_hyperedge_degree_matrix(M):
    """Create the diagonal sparse matrix D_e of hyperedge degrees from an
    incidence matrix M; a hyperedge's degree is its cardinality, i.e. its
    column sum in M."""
    column_sums = M.sum(0).transpose()
    degrees = [int(entry) for entry in column_sums]
    return sparse.diags([degrees], [0])
Creates the diagonal matrix of hyperedge degrees D_e as a sparse matrix where a hyperedge degree is the cardinality of the hyperedge .
57,952
def fast_inverse(M):
    """Invert a diagonal matrix by taking the reciprocal of each
    diagonal entry; returns a sparse diagonal matrix."""
    reciprocals = [1.0 / value for value in M.diagonal()]
    return sparse.diags([reciprocals], [0])
Computes the inverse of a diagonal matrix .
57,953
def node_iterator(self):
    """Provide an iterator over the nodes."""
    return iter(self._node_attributes)


def has_hypernode(self, hypernode):
    """Return True when the given hypernode is in the hypergraph."""
    return hypernode in self._hypernode_attributes
Provides an iterator over the nodes .
57,954
def add_hypernode(self, hypernode, composing_nodes=None, attr_dict=None, **attr):
    """Add a hypernode (with the nodes composing it) to the graph, or
    update an existing hypernode's composition and attributes.

    Parameters
    ----------
    hypernode : hashable
        The hypernode to add or update.
    composing_nodes : set, optional
        Nodes composing the hypernode (defaults to an empty set).
    attr_dict, **attr
        Attributes to attach to the hypernode.
    """
    # BUG FIX: the original used a mutable default argument (set()),
    # shared across all calls; create a fresh set per call instead.
    if composing_nodes is None:
        composing_nodes = set()
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    if not self.has_hypernode(hypernode):
        attr_dict["__composing_nodes"] = composing_nodes
        added_nodes = composing_nodes
        removed_nodes = set()
        self._hypernode_attributes[hypernode] = attr_dict
    else:
        previous_nodes = self._hypernode_attributes[hypernode]["__composing_nodes"]
        added_nodes = composing_nodes - previous_nodes
        removed_nodes = previous_nodes - composing_nodes
        self._hypernode_attributes[hypernode].update(attr_dict)
        # BUG FIX: record the new composition; previously the stored
        # "__composing_nodes" set was never refreshed on update.
        self._hypernode_attributes[hypernode]["__composing_nodes"] = composing_nodes
    # NOTE(review): these helpers are called as module-level functions;
    # confirm they were not meant to be methods (self._add_...).
    for node in added_nodes:
        _add_hypernode_membership(node, hypernode)
    # BUG FIX: the original iterated over the undefined name
    # ``remove_nodes`` (NameError); use the computed removed_nodes.
    for node in removed_nodes:
        _remove_hypernode_membership(node, hypernode)
Adds a hypernode to the graph along with any related attributes of the hypernode .
57,955
def _create_random_starter ( node_count ) : pi = np . zeros ( node_count , dtype = float ) for i in range ( node_count ) : pi [ i ] = random . random ( ) summation = np . sum ( pi ) for i in range ( node_count ) : pi [ i ] = pi [ i ] / summation return pi
Creates the random starter for the random walk .
57,956
def _has_converged ( pi_star , pi ) : node_count = pi . shape [ 0 ] EPS = 10e-6 for i in range ( node_count ) : if pi [ i ] - pi_star [ i ] > EPS : return False return True
Checks if the random walk has converged .
57,957
def add_element(self, priority, element, count=None):
    """Add an element with a specific priority.

    count is the tie-breaking insertion counter; when omitted the next
    value from the queue's counter is drawn.  The entry is registered in
    element_finder so it can be reprioritized later.
    """
    tie_breaker = next(self.counter) if count is None else count
    record = [priority, tie_breaker, element]
    self.element_finder[element] = record
    heapq.heappush(self.pq, record)
Adds an element with a specific priority .
57,958
def reprioritize(self, priority, element):
    """Update the priority of an element already in the queue.

    The old heap entry is invalidated (lazy deletion) and a fresh entry
    with the same counter value is pushed.  Raises ValueError when the
    element is unknown.
    """
    if element not in self.element_finder:
        raise ValueError("No such element in the priority queue.")
    stale_entry = self.element_finder[element]
    self.add_element(priority, element, stale_entry[1])
    stale_entry[1] = self.INVALID
Updates the priority of an element .
57,959
def contains_element(self, element):
    """Return True when the element is present and its heap entry has
    not been invalidated by a reprioritization."""
    entry = self.element_finder.get(element)
    return entry is not None and entry[1] != self.INVALID
Determines if an element is contained in the priority queue .
57,960
def is_empty(self):
    """Return True when no valid elements remain.

    Entries invalidated by reprioritize are lazily popped (and removed
    from element_finder) along the way.
    """
    while self.pq:
        if self.pq[0][1] != self.INVALID:
            return False
        _, _, stale = heapq.heappop(self.pq)
        if stale in self.element_finder:
            del self.element_finder[stale]
    return True
Determines if the priority queue has any elements . Performs removal of any elements that were marked - as - invalid .
57,961
def is_connected(H, source_node, target_node):
    """Check whether target_node can be visited from source_node in the
    sense of the Visit algorithm."""
    visited_nodes, Pv, Pe = visit(H, source_node)
    return target_node in visited_nodes
Checks if a target node is connected to a source node . That is this method determines if a target node can be visited from the source node in the sense of the Visit algorithm .
57,962
def is_b_connected(H, source_node, target_node):
    """Check whether target_node is B-connected to source_node (via the
    B-Visit algorithm)."""
    b_visited_nodes, Pv, Pe, v = b_visit(H, source_node)
    return target_node in b_visited_nodes
Checks if a target node is B - connected to a source node .
57,963
def is_f_connected(H, source_node, target_node):
    """Check whether target_node is F-connected to source_node (via the
    F-Visit algorithm)."""
    f_visited_nodes, Pv, Pe, v = f_visit(H, source_node)
    return target_node in f_visited_nodes
Checks if a target node is F - connected to a source node .
57,964
def from_networkx_graph(nx_graph):
    """Return the UndirectedHypergraph equivalent of a NetworkX Graph.

    Node and edge attribute dictionaries are shallow-copied.  Uses the
    NetworkX 1.x iterator API (``nodes_iter``/``edges_iter``).
    """
    import networkx as nx
    if not isinstance(nx_graph, nx.Graph):
        raise TypeError("Transformation only applicable to undirected "
                        "NetworkX graphs")
    G = UndirectedHypergraph()
    for node in nx_graph.nodes_iter():
        G.add_node(node, copy.copy(nx_graph.node[node]))
    for u, v in nx_graph.edges_iter():
        G.add_hyperedge([u, v], copy.copy(nx_graph[u][v]))
    return G
Returns an UndirectedHypergraph object that is the graph equivalent of the given NetworkX Graph object .
57,965
def from_networkx_digraph(nx_digraph):
    """Return the DirectedHypergraph equivalent of a NetworkX DiGraph.

    Each directed edge (u, v) becomes a hyperedge with tail u and head
    v; attribute dictionaries are shallow-copied.  Uses the NetworkX
    1.x iterator API.
    """
    import networkx as nx
    if not isinstance(nx_digraph, nx.DiGraph):
        raise TypeError("Transformation only applicable to directed "
                        "NetworkX graphs")
    G = DirectedHypergraph()
    for node in nx_digraph.nodes_iter():
        G.add_node(node, copy.copy(nx_digraph.node[node]))
    for tail_node, head_node in nx_digraph.edges_iter():
        G.add_hyperedge(tail_node, head_node,
                        copy.copy(nx_digraph[tail_node][head_node]))
    return G
Returns a DirectedHypergraph object that is the graph equivalent of the given NetworkX DiGraph object .
57,966
def get_tail_incidence_matrix(H, nodes_to_indices, hyperedge_ids_to_indices):
    """Return the sparse tail-incidence matrix of a directed hypergraph.

    Entry (i, j) is 1 when the node with row index i belongs to the
    tail of the hyperedge with column index j; the result is a
    scipy.sparse CSC matrix of shape (#nodes, #hyperedges).
    """
    if not isinstance(H, DirectedHypergraph):
        raise TypeError("Algorithm only applicable to directed hypergraphs")
    rows, cols = [], []
    for hyperedge_id, col in hyperedge_ids_to_indices.items():
        for node in H.get_hyperedge_tail(hyperedge_id):
            rows.append(nodes_to_indices.get(node))
            cols.append(col)
    data = np.ones(len(rows), dtype=int)
    shape = (len(H.get_node_set()), len(H.get_hyperedge_id_set()))
    return sparse.csc_matrix((data, (rows, cols)), shape=shape)
Creates the incidence matrix of the tail nodes of the given hypergraph as a sparse matrix .
57,967
def add_node(self, node, attr_dict=None, **attr):
    """Add *node* with optional attributes.

    If the node already exists, its attribute dictionary is merged with
    the supplied attributes instead; otherwise empty forward and
    backward stars are initialized for it.
    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    if self.has_node(node):
        self._node_attributes[node].update(attr_dict)
    else:
        self._node_attributes[node] = attr_dict
        self._forward_star[node] = set()
        self._backward_star[node] = set()
Adds a node to the graph along with any related attributes of the node .
57,968
def add_nodes(self, nodes, attr_dict=None, **attr):
    """Add several nodes at once.

    Each entry of *nodes* is either a node or a ``(node, attr_dict)``
    pair; per-node attributes override the shared ones.
    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    for node in nodes:
        # NOTE: exact type check (not isinstance) preserved from the
        # original -- tuple subclasses are treated as plain nodes.
        if type(node) is tuple:
            name, extra = node
            merged = attr_dict.copy()
            merged.update(extra)
            self.add_node(name, merged)
        else:
            self.add_node(node, attr_dict.copy())
Adds multiple nodes to the graph along with any related attributes of the nodes .
57,969
def remove_node(self, node):
    """Remove *node*, its attributes, and every incident hyperedge.

    Hyperedges containing the node in either tail or head are removed;
    raises ValueError when the node does not exist.
    """
    if not self.has_node(node):
        raise ValueError("No such node exists.")
    forward = self.get_forward_star(node)
    for hyperedge_id in forward:
        self.remove_hyperedge(hyperedge_id)
    # Backward star is re-read after the forward removals; skip edges
    # already removed above.
    for hyperedge_id in self.get_backward_star(node) - forward:
        self.remove_hyperedge(hyperedge_id)
    del self._forward_star[node]
    del self._backward_star[node]
    del self._node_attributes[node]
Removes a node and its attributes from the hypergraph . Removes every hyperedge that contains this node in either the head or the tail .
57,970
def get_node_attribute(self, node, attribute_name):
    """Return a shallow copy of one attribute of *node*.

    Raises ValueError when the node or the attribute is missing.
    """
    if not self.has_node(node):
        raise ValueError("No such node exists.")
    attributes = self._node_attributes[node]
    if attribute_name not in attributes:
        raise ValueError("No such attribute exists.")
    return copy.copy(attributes[attribute_name])
Given a node and the name of an attribute get a copy of that node s attribute .
57,971
def get_node_attributes(self, node):
    """Return a dict of shallow-copied attributes of *node*.

    Raises ValueError when the node does not exist.
    """
    if not self.has_node(node):
        raise ValueError("No such node exists.")
    return {name: copy.copy(value)
            for name, value in self._node_attributes[node].items()}
Given a node get a dictionary with copies of that node s attributes .
57,972
def add_hyperedge(self, tail, head, attr_dict=None, **attr):
    """Add a hyperedge from *tail* to *head* and return its id.

    Nodes missing from the hypergraph are added automatically.  A new
    hyperedge receives a default ``weight`` of 1; adding an existing
    tail/head pair again only merges the supplied attributes.
    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    if not tail and not head:
        raise ValueError("tail and head arguments cannot both be empty.")
    frozen_tail = frozenset(tail)
    frozen_head = frozenset(head)
    self._successors.setdefault(frozen_tail, {})
    self._predecessors.setdefault(frozen_head, {})
    if self.has_hyperedge(frozen_tail, frozen_head):
        hyperedge_id = self._successors[frozen_tail][frozen_head]
    else:
        # New hyperedge: register nodes, stars and both index maps.
        self.add_nodes(frozen_head)
        self.add_nodes(frozen_tail)
        hyperedge_id = self._assign_next_hyperedge_id()
        for node in frozen_tail:
            self._forward_star[node].add(hyperedge_id)
        for node in frozen_head:
            self._backward_star[node].add(hyperedge_id)
        self._successors[frozen_tail][frozen_head] = hyperedge_id
        self._predecessors[frozen_head][frozen_tail] = hyperedge_id
        self._hyperedge_attributes[hyperedge_id] = {
            "tail": tail, "__frozen_tail": frozen_tail,
            "head": head, "__frozen_head": frozen_head,
            "weight": 1,
        }
    self._hyperedge_attributes[hyperedge_id].update(attr_dict)
    return hyperedge_id
Adds a hyperedge to the hypergraph along with any related attributes of the hyperedge . This method will automatically add any node from the tail and head that was not in the hypergraph . A hyperedge without a weight attribute specified will be assigned the default value of 1 .
57,973
def add_hyperedges(self, hyperedges, attr_dict=None, **attr):
    """Add several hyperedges and return their ids in order.

    Each entry is ``(tail, head)`` or ``(tail, head, attr_dict)``;
    per-hyperedge attributes override the shared ones.
    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    hyperedge_ids = []
    for hyperedge in hyperedges:
        if len(hyperedge) == 3:
            tail, head, extra = hyperedge
            merged = attr_dict.copy()
            merged.update(extra)
            hyperedge_ids.append(self.add_hyperedge(tail, head, merged))
        else:
            tail, head = hyperedge
            hyperedge_ids.append(
                self.add_hyperedge(tail, head, attr_dict.copy()))
    return hyperedge_ids
Adds multiple hyperedges to the graph along with any related attributes of the hyperedges . If any node in the tail or head of any hyperedge has not previously been added to the hypergraph it will automatically be added here . Hyperedges without a weight attribute specified will be assigned the default value of 1 .
57,974
def get_hyperedge_id(self, tail, head):
    """Return the id of the hyperedge with exactly this tail and head.

    Raises ValueError when no such hyperedge exists.
    """
    ft = frozenset(tail)
    fh = frozenset(head)
    if not self.has_hyperedge(ft, fh):
        raise ValueError("No such hyperedge exists.")
    return self._successors[ft][fh]
From a tail and head set of nodes returns the ID of the hyperedge that these sets comprise .
57,975
def get_forward_star(self, node):
    """Return a copy of the set of hyperedge ids whose tail contains *node*.

    Raises ValueError when the node does not exist.
    """
    if node not in self._node_attributes:
        raise ValueError("No such node exists.")
    return set(self._forward_star[node])
Given a node get a copy of that node s forward star .
57,976
def get_backward_star(self, node):
    """Return a copy of the set of hyperedge ids whose head contains *node*.

    Raises ValueError when the node does not exist.
    """
    if node not in self._node_attributes:
        raise ValueError("No such node exists.")
    return set(self._backward_star[node])
Given a node get a copy of that node s backward star .
57,977
def get_successors(self, tail):
    """Return the set of hyperedge ids whose tail is exactly *tail*.

    An unknown tail yields an empty set.
    """
    try:
        return set(self._successors[frozenset(tail)].values())
    except KeyError:
        return set()
Given a tail set of nodes get a list of edges of which the node set is the tail of each edge .
57,978
def get_predecessors(self, head):
    """Return the set of hyperedge ids whose head is exactly *head*.

    An unknown head yields an empty set.
    """
    try:
        return set(self._predecessors[frozenset(head)].values())
    except KeyError:
        return set()
Given a head set of nodes get a list of edges of which the node set is the head of each edge .
57,979
def is_BF_hypergraph(self):
    """Return True when every hyperedge is a B- or F-hyperedge.

    A BF-hypergraph has no hyperedge with both |tail| > 1 and
    |head| > 1.
    """
    return not any(
        len(self.get_hyperedge_tail(eid)) > 1 and
        len(self.get_hyperedge_head(eid)) > 1
        for eid in self._hyperedge_attributes)
Indicates whether the hypergraph is a BF - hypergraph . A BF - hypergraph consists of only B - hyperedges and F - hyperedges . See is_B_hypergraph or is_F_hypergraph for more details .
57,980
def get_induced_subhypergraph(self, nodes):
    """Return the subhypergraph induced by *nodes*.

    A copy of this hypergraph is made and every node not in *nodes*
    (along with its incident hyperedges) is removed from the copy.
    """
    sub_H = self.copy()
    surplus = sub_H.get_node_set() - set(nodes)
    sub_H.remove_nodes(surplus)
    return sub_H
Gives a new hypergraph that is the subhypergraph of the current hypergraph induced by the provided set of nodes . That is the induced subhypergraph s node set corresponds precisely to the nodes provided and the coressponding hyperedges in the subhypergraph are only those from the original graph consist of tail and head sets that are subsets of the provided nodes .
57,981
def getall(self, key, default=_marker):
    """Return the list of all values stored under *key*.

    When the key is absent, *default* is returned if given, otherwise
    KeyError is raised.
    """
    identity = self._title(key)
    values = [v for i, k, v in self._impl._items if i == identity]
    if values:
        return values
    if default is not _marker:
        return default
    raise KeyError('Key not found: %r' % key)
Return a list of all values matching the key .
57,982
def extend(self, *args, **kwargs):
    """Extend this MultiDict with items taken from *args*/*kwargs*."""
    self._extend(args, kwargs, 'extend', self._extend_items)
Extend current MultiDict with more values .
57,983
def setdefault(self, key, default=None):
    """Return the first value for *key*, inserting *default* if absent."""
    identity = self._title(key)
    for stored_identity, _, value in self._impl._items:
        if stored_identity == identity:
            return value
    self.add(key, default)
    return default
Return value for key set value to default if key is not present .
57,984
def popall(self, key, default=_marker):
    """Remove every occurrence of *key*, returning its values in order.

    When the key is absent, *default* is returned if given, otherwise
    KeyError is raised.
    """
    identity = self._title(key)
    collected = []
    # Walk backwards so deletions do not shift unvisited indices.
    for idx in range(len(self._impl._items) - 1, -1, -1):
        item = self._impl._items[idx]
        if item[0] == identity:
            collected.append(item[2])
            del self._impl._items[idx]
            self._impl.incr_version()
    if collected:
        collected.reverse()
        return collected
    if default is _marker:
        raise KeyError(key)
    return default
Remove all occurrences of key and return the list of corresponding values .
57,985
def total(self, xbin1=1, xbin2=-2):
    """Return the (yield, statistical uncertainty) pair summed over
    the bin range [xbin1, xbin2]."""
    return self.hist.integral(xbin1=xbin1, xbin2=xbin2, error=True)
Return the total yield and its associated statistical uncertainty .
57,986
def iter_sys(self):
    """Yield (name, overall_sys, histo_sys) for each systematic.

    Either component may be None for a given name.
    """
    for name in self.sys_names():
        yield name, self.GetOverallSys(name), self.GetHistoSys(name)
Iterate over sys_name overall_sys histo_sys . overall_sys or histo_sys may be None for any given sys_name .
57,987
def sys_hist(self, name=None):
    """Return the (low, high) histograms for systematic *name*.

    With no name, two shallow clones of the nominal are returned.  A
    missing overall or histo systematic falls back to unit scaling /
    the nominal histogram, respectively.
    """
    if name is None:
        return (self.hist.Clone(shallow=True),
                self.hist.Clone(shallow=True))
    osys = self.GetOverallSys(name)
    hsys = self.GetHistoSys(name)
    lo_scale, hi_scale = (1., 1.) if osys is None else (osys.low, osys.high)
    if hsys is None:
        lo_hist = self.hist.Clone(shallow=True)
        hi_hist = self.hist.Clone(shallow=True)
    else:
        lo_hist = hsys.low.Clone(shallow=True)
        hi_hist = hsys.high.Clone(shallow=True)
    return lo_hist * lo_scale, hi_hist * hi_scale
Return the effective low and high histogram for a given systematic . If this sample does not contain the named systematic then return the nominal histogram for both low and high variations .
57,988
def sys_hist(self, name=None, where=None):
    """Return the summed (low, high) histograms over this channel's samples.

    *where*, when given, filters which samples contribute.  Samples
    lacking the named systematic contribute their nominal histogram to
    both variations (via Sample.sys_hist).
    """
    total_low = None
    total_high = None
    for sample in self.samples:
        if where is not None and not where(sample):
            continue
        low, high = sample.sys_hist(name)
        if total_low is None:
            total_low = low.Clone(shallow=True)
        else:
            total_low += low
        if total_high is None:
            total_high = high.Clone(shallow=True)
        else:
            total_high += high
    return total_low, total_high
Return the effective total low and high histogram for a given systematic over samples in this channel . If a sample does not contain the named systematic then its nominal histogram is used for both low and high variations .
57,989
def apply_snapshot(self, argset):
    """Return a clone of this Channel with histograms shifted to the
    nuisance-parameter values in the snapshot *argset*.

    Norm-factor parameters are applied as scale factors (and converted
    to OverallSys entries); the remaining shape parameters shift each
    sample's nominal histogram by the interpolated systematic
    variation.  Useful for post-fit distribution plots.
    """
    clone = self.Clone()
    # Ignore bookkeeping parameters that are not real nuisances.
    params = [var for var in argset
              if not (var.name.startswith('binWidth_obs_x_') or
                      var.name.startswith('gamma_stat') or
                      var.name.startswith('nom_'))]
    shape_params = []
    for var in params:
        is_norm = False
        name = var.name.replace('alpha_', '')
        for sample in clone.samples:
            if sample.GetNormFactor(name) is not None:
                log.info("applying snapshot of {0} on sample {1}".format(
                    name, sample.name))
                is_norm = True
                sample *= var.value
                osys = OverallSys(name,
                                  low=1. - var.error / var.value,
                                  high=1. + var.error / var.value)
                sample.AddOverallSys(osys)
                sample.RemoveNormFactor(name)
        if not is_norm:
            shape_params.append(var)
    for sample in clone.samples:
        if sample.hist is None:
            raise RuntimeError(
                "sample {0} does not have a nominal histogram".format(
                    sample.name))
        nominal = sample.hist.Clone(shallow=True)
        for var in shape_params:
            name = var.name.replace('alpha_', '')
            if not sample.has_sys(name):
                continue
            log.info("applying snapshot of {0} on sample {1}".format(
                name, sample.name))
            low, high = sample.sys_hist(name)
            val = var.value
            # Linear interpolation: shift towards high for positive
            # pulls, towards low for negative ones.
            if val > 0:
                sample.hist += (high - nominal) * val
            elif val < 0:
                sample.hist += (nominal - low) * val
    return clone
Create a clone of this Channel where histograms are modified according to the values of the nuisance parameters in the snapshot . This is useful when creating post - fit distribution plots .
57,990
def printcodelist(codelist, to=sys.stdout):
    """Pretty-print a byteplay code list to *to* (Python 2 code)."""
    # First pass: map each Label to the index of the next real opcode.
    labeldict = {}
    pendinglabels = []
    for i, (op, arg) in enumerate(codelist):
        if isinstance(op, Label):
            pendinglabels.append(op)
        elif op is SetLineno:
            pass
        else:
            while pendinglabels:
                labeldict[pendinglabels.pop()] = i
    lineno = None
    islabel = False
    for i, (op, arg) in enumerate(codelist):
        if op is SetLineno:
            lineno = arg
            print >> to
            continue
        if isinstance(op, Label):
            islabel = True
            continue
        if lineno is None:
            linenostr = ''
        else:
            linenostr = str(lineno)
            lineno = None
        if islabel:
            islabelstr = '>>'
            islabel = False
        else:
            islabelstr = ''
        if op in hasconst:
            argstr = repr(arg)
        elif op in hasjump:
            try:
                argstr = 'to ' + str(labeldict[arg])
            except KeyError:
                argstr = repr(arg)
        elif op in hasarg:
            argstr = str(arg)
        else:
            argstr = ''
        print >> to, '%3s %2s %4d %-20s %s' % (
            linenostr, islabelstr, i, op, argstr)
Get a code list . Print it nicely .
57,991
def recompile(filename):
    """Create *filename*c by disassembling and reassembling the file,
    injecting a stderr message printed when the .pyc is imported
    (Python 2 code)."""
    import os
    import imp
    import marshal
    import struct
    f = open(filename, 'U')
    try:
        timestamp = long(os.fstat(f.fileno()).st_mtime)
    except AttributeError:
        timestamp = long(os.stat(filename).st_mtime)
    codestring = f.read()
    f.close()
    # compile() requires a trailing newline.
    if codestring and codestring[-1] != '\n':
        codestring = codestring + '\n'
    try:
        codeobject = compile(codestring, filename, 'exec')
    except SyntaxError:
        print >> sys.stderr, "Skipping %s - syntax error." % filename
        return
    cod = Code.from_code(codeobject)
    # Prepend: __import__('sys').stderr.write(message)
    message = "reassembled %r imported.\n" % filename
    cod.code[:0] = [
        (LOAD_GLOBAL, '__import__'),
        (LOAD_CONST, 'sys'),
        (CALL_FUNCTION, 1),
        (LOAD_ATTR, 'stderr'),
        (LOAD_ATTR, 'write'),
        (LOAD_CONST, message),
        (CALL_FUNCTION, 1),
        (POP_TOP, None),
    ]
    codeobject2 = cod.to_code()
    fc = open(filename + 'c', 'wb')
    # Placeholder magic; the real value is written once marshal
    # has finished successfully.
    fc.write('\0\0\0\0')
    fc.write(struct.pack('<l', timestamp))
    marshal.dump(codeobject2, fc)
    fc.flush()
    fc.seek(0, 0)
    fc.write(imp.get_magic())
    fc.close()
Create a . pyc by disassembling the file and assembling it again printing a message that the reassembled file was loaded .
57,992
def recompile_all(path):
    """Recursively recompile every .py file under *path* (Python 2 code).

    A single file argument is recompiled directly.
    """
    import os
    if os.path.isdir(path):
        for root, dirs, files in os.walk(path):
            for name in files:
                if name.endswith('.py'):
                    filename = os.path.abspath(os.path.join(root, name))
                    print >> sys.stderr, filename
                    recompile(filename)
    else:
        recompile(os.path.abspath(path))
recursively recompile all . py files in the directory
57,993
def from_code(cls, co):
    """Disassemble a Python 2 code object *co* into a Code object."""
    co_code = co.co_code
    labels = dict((addr, Label()) for addr in findlabels(co_code))
    linestarts = dict(cls._findlinestarts(co))
    cellfree = co.co_cellvars + co.co_freevars
    code = CodeList()
    n = len(co_code)
    i = 0
    extended_arg = 0
    while i < n:
        op = Opcode(ord(co_code[i]))
        if i in labels:
            code.append((labels[i], None))
        if i in linestarts:
            code.append((SetLineno, linestarts[i]))
        i += 1
        if op in hascode:
            # Nested code objects are recursively disassembled in place
            # of the preceding LOAD_CONST argument.
            lastop, lastarg = code[-1]
            if lastop != LOAD_CONST:
                raise ValueError(
                    "%s should be preceded by LOAD_CONST code" % op)
            code[-1] = (LOAD_CONST, Code.from_code(lastarg))
        if op not in hasarg:
            code.append((op, None))
        else:
            arg = ord(co_code[i]) + ord(co_code[i + 1]) * 256 + extended_arg
            extended_arg = 0
            i += 2
            if op == opcode.EXTENDED_ARG:
                extended_arg = arg << 16
            elif op in hasconst:
                code.append((op, co.co_consts[arg]))
            elif op in hasname:
                code.append((op, co.co_names[arg]))
            elif op in hasjabs:
                code.append((op, labels[arg]))
            elif op in hasjrel:
                code.append((op, labels[i + arg]))
            elif op in haslocal:
                code.append((op, co.co_varnames[arg]))
            elif op in hascompare:
                code.append((op, cmp_op[arg]))
            elif op in hasfree:
                code.append((op, cellfree[arg]))
            else:
                code.append((op, arg))
    varargs = bool(co.co_flags & CO_VARARGS)
    varkwargs = bool(co.co_flags & CO_VARKEYWORDS)
    newlocals = bool(co.co_flags & CO_NEWLOCALS)
    args = co.co_varnames[:co.co_argcount + varargs + varkwargs]
    if co.co_consts and isinstance(co.co_consts[0], basestring):
        docstring = co.co_consts[0]
    else:
        docstring = None
    return cls(code=code, freevars=co.co_freevars, args=args,
               varargs=varargs, varkwargs=varkwargs, newlocals=newlocals,
               name=co.co_name, filename=co.co_filename,
               firstlineno=co.co_firstlineno, docstring=docstring)
Disassemble a Python code object into a Code object .
57,994
def effective_sample_size(h):
    """Return the effective sample size of histogram *h*.

    Computed as (sum of bin values)^2 / (sum of squared bin errors)
    over the non-overflow bins, the same quantity as ROOT's
    ``TH1::GetEffectiveEntries``.

    Bug fix: the original divided unconditionally and raised
    ZeroDivisionError for an empty histogram (or one with no error
    content); ROOT reports 0 effective entries in that case, and so do
    we now.
    """
    total = 0.
    sumw2 = 0.
    for bin in h.bins(overflow=False):
        total += bin.value
        err = bin.error
        sumw2 += err * err
    if sumw2 <= 0:
        # No statistical information available.
        return 0.
    return total * total / sumw2
Calculate the effective sample size for a histogram the same way as ROOT does .
57,995
def critical_value(n, p):
    """Return the Kolmogorov-Smirnov critical value d_n for sample size
    *n* and tail probability *p* (confidence level 1 - p).

    d_n is located by halving-step search until
    KolmogorovProb(d_n * sqrt(n)) lies within 0.01% of p.
    """
    dn = 1
    delta = 0.5
    prob = ROOT.TMath.KolmogorovProb(dn * sqrt(n))
    while prob > 1.0001 * p or prob < 0.9999 * p:
        if prob > 1.0001 * p:
            dn = dn + delta
        if prob < 0.9999 * p:
            dn = dn - delta
        delta = delta / 2.
        prob = ROOT.TMath.KolmogorovProb(dn * sqrt(n))
    return dn
This function calculates the critical value given n and p and confidence level = 1 - p .
57,996
def dump(obj, root_file, proto=0, key=None):
    """Pickle *obj* into a ROOT TFile.

    *root_file* may be a path (opened in 'recreate' mode and closed
    afterwards) or an already-open file, which is left open.
    """
    own_file = isinstance(root_file, string_types)
    if own_file:
        root_file = root_open(root_file, 'recreate')
    ret = Pickler(root_file, proto).dump(obj, key)
    if own_file:
        root_file.Close()
    return ret
Dump an object into a ROOT TFile .
57,997
def load(root_file, use_proxy=True, key=None):
    """Unpickle and return an object from a ROOT TFile.

    *root_file* may be a path (opened and closed here) or an
    already-open file, which is left open.
    """
    own_file = isinstance(root_file, string_types)
    if own_file:
        root_file = root_open(root_file)
    obj = Unpickler(root_file, use_proxy).load(key)
    if own_file:
        root_file.Close()
    return obj
Load an object from a ROOT TFile .
57,998
def dump(self, obj, key=None):
    """Write a pickled representation of *obj* to the open TFile.

    The pickle stream is stored as a TObjString under *key*
    (default '_pickle').
    """
    if key is None:
        key = '_pickle'
    with preserve_current_directory():
        self.__file.cd()
        if sys.version_info[0] < 3:
            pickle.Pickler.dump(self, obj)
        else:
            super(Pickler, self).dump(obj)
        payload = ROOT.TObjString(self.__io.getvalue())
        self.__io.reopen()
        payload.Write(key)
        self.__file.GetFile().Flush()
        self.__pmap.clear()
Write a pickled representation of obj to the open TFile .
57,999
def load(self, key=None):
    """Read and return a pickled object stored under *key* in the file.

    Each call advances an internal cycle counter so successive loads
    retrieve successive TObjString cycles of the same key.
    """
    if key is None:
        key = '_pickle'
    obj = None
    if _compat_hooks:
        save = _compat_hooks[0]()
    try:
        self.__n += 1
        stored = self.__file.Get(key + ';{0:d}'.format(self.__n))
        self.__io.setvalue(stored.GetName())
        if sys.version_info[0] < 3:
            obj = pickle.Unpickler.load(self)
        else:
            obj = super(Unpickler, self).load()
        self.__io.reopen()
    finally:
        if _compat_hooks:
            save = _compat_hooks[1](save)
    return obj
Read a pickled object representation from the open file .