idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
58,100 | def prefix ( cls , name ) : attrs = dict ( [ ( name + attr , value ) for attr , value in cls . get_attrs ( ) ] ) return TreeModelMeta ( '_' . join ( [ name , cls . __name__ ] ) , ( TreeModel , ) , attrs ) | Create a new TreeModel where class attribute names are prefixed with name |
58,101 | def get_attrs ( cls ) : ignore = dir ( type ( 'dummy' , ( object , ) , { } ) ) + [ '__metaclass__' ] attrs = [ item for item in inspect . getmembers ( cls ) if item [ 0 ] not in ignore and not isinstance ( item [ 1 ] , ( types . FunctionType , types . MethodType , classmethod , staticmethod , property ) ) ] attrs . sort ( key = lambda attr : ( getattr ( attr [ 1 ] , 'idx' , - 1 ) , attr [ 0 ] ) ) return attrs | Get all class attributes ordered by definition |
58,102 | def to_struct ( cls , name = None ) : if name is None : name = cls . __name__ basic_attrs = dict ( [ ( attr_name , value ) for attr_name , value in cls . get_attrs ( ) if isinstance ( value , Column ) ] ) if not basic_attrs : return None src = 'struct {0} {{' . format ( name ) for attr_name , value in basic_attrs . items ( ) : src += '{0} {1};' . format ( value . type . typename , attr_name ) src += '};' if ROOT . gROOT . ProcessLine ( src ) != 0 : return None return getattr ( ROOT , name , None ) | Convert the TreeModel into a compiled C struct |
58,103 | def id_to_name ( id ) : name = pdgid_names . get ( id ) if not name : name = repr ( id ) return name | Convert a PDG ID to a printable string . |
58,104 | def id_to_root_name ( id ) : name = root_names . get ( id ) if not name : name = repr ( id ) return name | Convert a PDG ID to a string with root markup . |
58,105 | def new_closure ( vals ) : args = ',' . join ( 'x%i' % i for i in range ( len ( vals ) ) ) f = eval ( "lambda %s:lambda:(%s)" % ( args , args ) ) if sys . version_info [ 0 ] >= 3 : return f ( * vals ) . __closure__ return f ( * vals ) . func_closure | Build a new closure |
58,106 | def _inject_closure_values_fix_closures ( c , injected , ** kwargs ) : code = c . code orig_len = len ( code ) for iback , ( opcode , value ) in enumerate ( reversed ( code ) ) : i = orig_len - iback - 1 if opcode != MAKE_CLOSURE : continue codeobj = code [ i - 1 - OPCODE_OFFSET ] assert codeobj [ 0 ] == byteplay . LOAD_CONST build_tuple = code [ i - 2 - OPCODE_OFFSET ] assert build_tuple [ 0 ] == byteplay . BUILD_TUPLE n_closed = build_tuple [ 1 ] load_closures = code [ i - 2 - OPCODE_OFFSET - n_closed : i - 2 - OPCODE_OFFSET ] assert all ( o == byteplay . LOAD_CLOSURE for o , _ in load_closures ) newlcs = [ ( byteplay . LOAD_CLOSURE , inj ) for inj in injected ] code [ i - 2 - OPCODE_OFFSET ] = byteplay . BUILD_TUPLE , n_closed + len ( injected ) code [ i - 2 - OPCODE_OFFSET : i - 2 - OPCODE_OFFSET ] = newlcs _inject_closure_values_fix_code ( codeobj [ 1 ] , injected , ** kwargs ) | Recursively fix closures |
58,107 | def _inject_closure_values_fix_code ( c , injected , ** kwargs ) : c . freevars += injected for i , ( opcode , value ) in enumerate ( c . code ) : if opcode == byteplay . LOAD_GLOBAL and value in kwargs : c . code [ i ] = byteplay . LOAD_DEREF , value _inject_closure_values_fix_closures ( c , injected , ** kwargs ) return c | Fix code objects recursively fixing any closures |
58,108 | def inject_closure_values ( func , ** kwargs ) : wrapped_by = None if isinstance ( func , property ) : fget , fset , fdel = func . fget , func . fset , func . fdel if fget : fget = fix_func ( fget , ** kwargs ) if fset : fset = fix_func ( fset , ** kwargs ) if fdel : fdel = fix_func ( fdel , ** kwargs ) wrapped_by = type ( func ) return wrapped_by ( fget , fset , fdel ) elif isinstance ( func , ( staticmethod , classmethod ) ) : func = func . __func__ wrapped_by = type ( func ) newfunc = _inject_closure_values ( func , ** kwargs ) if wrapped_by : newfunc = wrapped_by ( newfunc ) return newfunc | Returns a new function identical to the previous one except that it acts as though global variables named in kwargs have been closed over with the values specified in the kwargs dictionary . |
58,109 | def axes ( self , ndim = 1 , xlimits = None , ylimits = None , zlimits = None , xbins = 1 , ybins = 1 , zbins = 1 ) : if xlimits is None : xlimits = ( 0 , 1 ) if ylimits is None : ylimits = ( 0 , 1 ) if zlimits is None : zlimits = ( 0 , 1 ) if ndim == 1 : from . hist import Hist hist = Hist ( 1 , xlimits [ 0 ] , xlimits [ 1 ] ) elif ndim == 2 : from . hist import Hist2D hist = Hist2D ( 1 , xlimits [ 0 ] , xlimits [ 1 ] , 1 , ylimits [ 0 ] , ylimits [ 1 ] ) elif ndim == 3 : from . hist import Hist3D hist = Hist3D ( 1 , xlimits [ 0 ] , xlimits [ 1 ] , 1 , ylimits [ 0 ] , ylimits [ 1 ] , 1 , zlimits [ 0 ] , zlimits [ 1 ] ) else : raise ValueError ( "ndim must be 1, 2, or 3" ) with self : hist . Draw ( 'AXIS' ) xaxis = hist . xaxis yaxis = hist . yaxis if isinstance ( xbins , ( list , tuple ) ) : xbins = array ( 'd' , xbins ) if hasattr ( xbins , '__iter__' ) : xaxis . Set ( len ( xbins ) - 1 , xbins ) else : xaxis . Set ( xbins , * xlimits ) if ndim > 1 : if isinstance ( ybins , ( list , tuple ) ) : ybins = array ( 'd' , ybins ) if hasattr ( ybins , '__iter__' ) : yaxis . Set ( len ( ybins ) - 1 , ybins ) else : yaxis . Set ( ybins , * ylimits ) else : yaxis . limits = ylimits yaxis . range_user = ylimits if ndim > 1 : zaxis = hist . zaxis if ndim == 3 : if isinstance ( zbins , ( list , tuple ) ) : zbins = array ( 'd' , zbins ) if hasattr ( zbins , '__iter__' ) : zaxis . Set ( len ( zbins ) - 1 , zbins ) else : zaxis . Set ( zbins , * zlimits ) else : zaxis . limits = zlimits zaxis . range_user = zlimits return xaxis , yaxis , zaxis return xaxis , yaxis | Create and return axes on this pad |
58,110 | def root2hdf5 ( rfile , hfile , rpath = '' , entries = - 1 , userfunc = None , show_progress = False , ignore_exception = False , ** kwargs ) : own_rootfile = False if isinstance ( rfile , string_types ) : rfile = root_open ( rfile ) own_rootfile = True own_h5file = False if isinstance ( hfile , string_types ) : hfile = tables_open ( filename = hfile , mode = "w" , title = "Data" ) own_h5file = True for dirpath , dirnames , treenames in rfile . walk ( rpath , class_ref = QROOT . TTree ) : if not treenames : continue treenames . sort ( ) group_where = '/' + os . path . dirname ( dirpath ) group_name = os . path . basename ( dirpath ) if not group_name : group = hfile . root elif TABLES_NEW_API : group = hfile . create_group ( group_where , group_name , createparents = True ) else : group = hfile . createGroup ( group_where , group_name ) ntrees = len ( treenames ) log . info ( "Will convert {0:d} tree{1} in {2}" . format ( ntrees , 's' if ntrees != 1 else '' , os . path . join ( group_where , group_name ) ) ) for treename in treenames : input_tree = rfile . Get ( os . path . join ( dirpath , treename ) ) if userfunc is not None : tmp_file = TemporaryFile ( ) log . info ( "Calling user function on tree '{0}'" . format ( input_tree . GetName ( ) ) ) trees = userfunc ( input_tree ) if not isinstance ( trees , list ) : trees = [ trees ] else : trees = [ input_tree ] tmp_file = None for tree in trees : try : tree2hdf5 ( tree , hfile , group = group , entries = entries , show_progress = show_progress , ** kwargs ) except Exception as e : if ignore_exception : log . error ( "Failed to convert tree '{0}': {1}" . format ( tree . GetName ( ) , str ( e ) ) ) else : raise input_tree . Delete ( ) if userfunc is not None : for tree in trees : tree . Delete ( ) tmp_file . Close ( ) if own_h5file : hfile . close ( ) if own_rootfile : rfile . Close ( ) | Convert all trees in a ROOT file into tables in an HDF5 file . |
58,111 | def Reverse ( self , copy = False ) : numPoints = self . GetN ( ) if copy : revGraph = self . Clone ( ) else : revGraph = self X = self . GetX ( ) EXlow = self . GetEXlow ( ) EXhigh = self . GetEXhigh ( ) Y = self . GetY ( ) EYlow = self . GetEYlow ( ) EYhigh = self . GetEYhigh ( ) for i in range ( numPoints ) : index = numPoints - 1 - i revGraph . SetPoint ( i , X [ index ] , Y [ index ] ) revGraph . SetPointError ( i , EXlow [ index ] , EXhigh [ index ] , EYlow [ index ] , EYhigh [ index ] ) return revGraph | Reverse the order of the points |
58,112 | def Shift ( self , value , copy = False ) : numPoints = self . GetN ( ) if copy : shiftGraph = self . Clone ( ) else : shiftGraph = self X = self . GetX ( ) EXlow = self . GetEXlow ( ) EXhigh = self . GetEXhigh ( ) Y = self . GetY ( ) EYlow = self . GetEYlow ( ) EYhigh = self . GetEYhigh ( ) for i in range ( numPoints ) : shiftGraph . SetPoint ( i , X [ i ] + value , Y [ i ] ) shiftGraph . SetPointError ( i , EXlow [ i ] , EXhigh [ i ] , EYlow [ i ] , EYhigh [ i ] ) return shiftGraph | Shift the graph left or right by value |
58,113 | def Integrate ( self ) : area = 0. X = self . GetX ( ) Y = self . GetY ( ) for i in range ( self . GetN ( ) - 1 ) : area += ( X [ i + 1 ] - X [ i ] ) * ( Y [ i ] + Y [ i + 1 ] ) / 2. return area | Integrate using the trapazoidal method |
58,114 | def Append ( self , other ) : orig_len = len ( self ) self . Set ( orig_len + len ( other ) ) ipoint = orig_len if hasattr ( self , 'SetPointError' ) : for point in other : self . SetPoint ( ipoint , point . x . value , point . y . value ) self . SetPointError ( ipoint , point . x . error_low , point . x . error_hi , point . y . error_low , point . y . error_hi ) ipoint += 1 else : for point in other : self . SetPoint ( ipoint , point . x . value , point . y . value ) ipoint += 1 | Append points from another graph |
58,115 | def keepalive ( nurse , * patients ) : if DISABLED : return if hashable ( nurse ) : hashable_patients = [ ] for p in patients : if hashable ( p ) : log . debug ( "Keeping {0} alive for lifetime of {1}" . format ( p , nurse ) ) hashable_patients . append ( p ) else : log . warning ( "Unable to keep unhashable object {0} " "alive for lifetime of {1}" . format ( p , nurse ) ) KEEPALIVE . setdefault ( nurse , set ( ) ) . update ( hashable_patients ) else : log . warning ( "Unable to keep objects alive for lifetime of " "unhashable object {0}" . format ( nurse ) ) | Keep patients alive at least as long as nurse is around using a WeakKeyDictionary . |
58,116 | def canonify_slice ( s , n ) : if isinstance ( s , ( int , long ) ) : return canonify_slice ( slice ( s , s + 1 , None ) , n ) start = s . start % n if s . start is not None else 0 stop = s . stop % n if s . stop is not None else n step = s . step if s . step is not None else 1 return slice ( start , stop , step ) | Convert a slice object into a canonical form to simplify treatment in histogram bin content and edge slicing . |
58,117 | def bin_to_edge_slice ( s , n ) : s = canonify_slice ( s , n ) start = s . start stop = s . stop if start > stop : _stop = start + 1 start = stop + 1 stop = _stop start = max ( start - 1 , 0 ) step = abs ( s . step ) if stop <= 1 or start >= n - 1 or stop == start + 1 : return slice ( 0 , None , min ( step , n - 2 ) ) s = slice ( start , stop , abs ( s . step ) ) if len ( range ( * s . indices ( n - 1 ) ) ) < 2 : return slice ( start , stop , stop - start - 1 ) return s | Convert a bin slice into a bin edge slice . |
58,118 | def histogram ( data , * args , ** kwargs ) : from . autobinning import autobinning dim = kwargs . pop ( 'dim' , 1 ) if dim != 1 : raise NotImplementedError if 'binning' in kwargs : args = autobinning ( data , kwargs [ 'binning' ] ) del kwargs [ 'binning' ] histo = Hist ( * args , ** kwargs ) for d in data : histo . Fill ( d ) return list ( histo . xedgesl ( ) ) , histo | Create and fill a one - dimensional histogram . |
58,119 | def overflow ( self ) : indices = self . hist . xyz ( self . idx ) for i in range ( self . hist . GetDimension ( ) ) : if indices [ i ] == 0 or indices [ i ] == self . hist . nbins ( i ) + 1 : return True return False | Returns true if this BinProxy is for an overflow bin |
58,120 | def xyz ( self , idx ) : nx = self . GetNbinsX ( ) + 2 ny = self . GetNbinsY ( ) + 2 ndim = self . GetDimension ( ) if ndim < 2 : binx = idx % nx biny = 0 binz = 0 elif ndim < 3 : binx = idx % nx biny = ( ( idx - binx ) // nx ) % ny binz = 0 elif ndim < 4 : binx = idx % nx biny = ( ( idx - binx ) // nx ) % ny binz = ( ( idx - binx ) // nx - biny ) // ny else : raise NotImplementedError return binx , biny , binz | return binx biny binz corresponding to the global bin number |
58,121 | def nbins ( self , axis = 0 , overflow = False ) : if axis == 0 : nbins = self . GetNbinsX ( ) elif axis == 1 : nbins = self . GetNbinsY ( ) elif axis == 2 : nbins = self . GetNbinsZ ( ) else : raise ValueError ( "axis must be 0, 1, or 2" ) if overflow : nbins += 2 return nbins | Get the number of bins along an axis |
58,122 | def bins_range ( self , axis = 0 , overflow = False ) : nbins = self . nbins ( axis = axis , overflow = False ) if overflow : start = 0 end_offset = 2 else : start = 1 end_offset = 1 return range ( start , nbins + end_offset ) | Return a range of bin indices for iterating along an axis |
58,123 | def uniform_binned ( self , name = None ) : if self . GetDimension ( ) == 1 : new_hist = Hist ( self . GetNbinsX ( ) , 0 , self . GetNbinsX ( ) , name = name , type = self . TYPE ) elif self . GetDimension ( ) == 2 : new_hist = Hist2D ( self . GetNbinsX ( ) , 0 , self . GetNbinsX ( ) , self . GetNbinsY ( ) , 0 , self . GetNbinsY ( ) , name = name , type = self . TYPE ) else : new_hist = Hist3D ( self . GetNbinsX ( ) , 0 , self . GetNbinsX ( ) , self . GetNbinsY ( ) , 0 , self . GetNbinsY ( ) , self . GetNbinsZ ( ) , 0 , self . GetNbinsZ ( ) , name = name , type = self . TYPE ) for outbin , inbin in zip ( new_hist . bins ( ) , self . bins ( ) ) : outbin . value = inbin . value outbin . error = inbin . error new_hist . decorate ( self ) new_hist . entries = self . entries return new_hist | Return a new histogram with constant width bins along all axes by using the bin indices as the bin edges of the new histogram . |
58,124 | def underflow ( self , axis = 0 ) : if axis not in range ( 3 ) : raise ValueError ( "axis must be 0, 1, or 2" ) if self . DIM == 1 : return self . GetBinContent ( 0 ) elif self . DIM == 2 : def idx ( i ) : arg = [ i ] arg . insert ( axis , 0 ) return arg return [ self . GetBinContent ( * idx ( i ) ) for i in self . bins_range ( axis = ( axis + 1 ) % 2 , overflow = True ) ] elif self . DIM == 3 : axes = [ 0 , 1 , 2 ] axes . remove ( axis ) axis2 , axis3 = axes def idx ( i , j ) : arg = [ i , j ] arg . insert ( axis , 0 ) return arg return [ [ self . GetBinContent ( * idx ( i , j ) ) for i in self . bins_range ( axis = axis2 , overflow = True ) ] for j in self . bins_range ( axis = axis3 , overflow = True ) ] | Return the underflow for the given axis . |
58,125 | def lowerbound ( self , axis = 0 ) : if not 0 <= axis < self . GetDimension ( ) : raise ValueError ( "axis must be a non-negative integer less than " "the dimensionality of the histogram" ) if axis == 0 : return self . xedges ( 1 ) if axis == 1 : return self . yedges ( 1 ) if axis == 2 : return self . zedges ( 1 ) raise TypeError ( "axis must be an integer" ) | Get the lower bound of the binning along an axis |
58,126 | def bounds ( self , axis = 0 ) : if not 0 <= axis < self . GetDimension ( ) : raise ValueError ( "axis must be a non-negative integer less than " "the dimensionality of the histogram" ) if axis == 0 : return self . xedges ( 1 ) , self . xedges ( - 2 ) if axis == 1 : return self . yedges ( 1 ) , self . yedges ( - 2 ) if axis == 2 : return self . zedges ( 1 ) , self . zedges ( - 2 ) raise TypeError ( "axis must be an integer" ) | Get the lower and upper bounds of the binning along an axis |
58,127 | def check_compatibility ( self , other , check_edges = False , precision = 1E-7 ) : if self . GetDimension ( ) != other . GetDimension ( ) : raise TypeError ( "histogram dimensionalities do not match" ) if len ( self ) != len ( other ) : raise ValueError ( "histogram sizes do not match" ) for axis in range ( self . GetDimension ( ) ) : if self . nbins ( axis = axis ) != other . nbins ( axis = axis ) : raise ValueError ( "numbers of bins along axis {0:d} do not match" . format ( axis ) ) if check_edges : for axis in range ( self . GetDimension ( ) ) : if not all ( [ abs ( l - r ) < precision for l , r in zip ( self . _edges ( axis ) , other . _edges ( axis ) ) ] ) : raise ValueError ( "edges do not match along axis {0:d}" . format ( axis ) ) | Test whether two histograms are considered compatible by the number of dimensions number of bins along each axis and optionally the bin edges . |
58,128 | def fill_array ( self , array , weights = None ) : try : try : from root_numpy import fill_hist as fill_func except ImportError : from root_numpy import fill_array as fill_func except ImportError : log . critical ( "root_numpy is needed for Hist*.fill_array. " "Is it installed and importable?" ) raise fill_func ( self , array , weights = weights ) | Fill this histogram with a NumPy array |
58,129 | def fill_view ( self , view ) : other = view . hist _other_x_center = other . axis ( 0 ) . GetBinCenter _other_y_center = other . axis ( 1 ) . GetBinCenter _other_z_center = other . axis ( 2 ) . GetBinCenter _other_get = other . GetBinContent _other_get_bin = super ( _HistBase , other ) . GetBin other_sum_w2 = other . GetSumw2 ( ) _other_sum_w2_at = other_sum_w2 . At _find = self . FindBin sum_w2 = self . GetSumw2 ( ) _sum_w2_at = sum_w2 . At _sum_w2_setat = sum_w2 . SetAt _set = self . SetBinContent _get = self . GetBinContent for x , y , z in view . points : idx = _find ( _other_x_center ( x ) , _other_y_center ( y ) , _other_z_center ( z ) ) other_idx = _other_get_bin ( x , y , z ) _set ( idx , _get ( idx ) + _other_get ( other_idx ) ) _sum_w2_setat ( _sum_w2_at ( idx ) + _other_sum_w2_at ( other_idx ) , idx ) | Fill this histogram from a view of another histogram |
58,130 | def get_sum_w2 ( self , ix , iy = 0 , iz = 0 ) : if self . GetSumw2N ( ) == 0 : raise RuntimeError ( "Attempting to access Sumw2 in histogram " "where weights were not stored" ) xl = self . nbins ( axis = 0 , overflow = True ) yl = self . nbins ( axis = 1 , overflow = True ) idx = xl * yl * iz + xl * iy + ix if not 0 <= idx < self . GetSumw2N ( ) : raise IndexError ( "bin index out of range" ) return self . GetSumw2 ( ) . At ( idx ) | Obtain the true number of entries in the bin weighted by w^2 |
58,131 | def set_sum_w2 ( self , w , ix , iy = 0 , iz = 0 ) : if self . GetSumw2N ( ) == 0 : raise RuntimeError ( "Attempting to access Sumw2 in histogram " "where weights were not stored" ) xl = self . nbins ( axis = 0 , overflow = True ) yl = self . nbins ( axis = 1 , overflow = True ) idx = xl * yl * iz + xl * iy + ix if not 0 <= idx < self . GetSumw2N ( ) : raise IndexError ( "bin index out of range" ) self . GetSumw2 ( ) . SetAt ( w , idx ) | Sets the true number of entries in the bin weighted by w^2 |
58,132 | def rebinned ( self , bins , axis = 0 ) : ndim = self . GetDimension ( ) if axis >= ndim : raise ValueError ( "axis must be less than the dimensionality of the histogram" ) if isinstance ( bins , int ) : _bins = [ 1 ] * ndim try : _bins [ axis ] = bins except IndexError : raise ValueError ( "axis must be 0, 1, or 2" ) bins = tuple ( _bins ) if isinstance ( bins , tuple ) : if len ( bins ) != ndim : raise ValueError ( "bins must be a tuple with the same " "number of elements as histogram axes" ) newname = '{0}_{1}' . format ( self . __class__ . __name__ , uuid ( ) ) if ndim == 1 : hist = self . Rebin ( bins [ 0 ] , newname ) elif ndim == 2 : hist = self . Rebin2D ( bins [ 0 ] , bins [ 1 ] , newname ) else : hist = self . Rebin3D ( bins [ 0 ] , bins [ 1 ] , bins [ 2 ] , newname ) hist = asrootpy ( hist ) elif hasattr ( bins , '__iter__' ) : hist = self . empty_clone ( bins , axis = axis ) nbinsx = self . nbins ( 0 ) nbinsy = self . nbins ( 1 ) nbinsz = self . nbins ( 2 ) xaxis = self . xaxis yaxis = self . yaxis zaxis = self . zaxis sum_w2 = self . GetSumw2 ( ) _sum_w2_at = sum_w2 . At new_sum_w2 = hist . GetSumw2 ( ) _new_sum_w2_at = new_sum_w2 . At _new_sum_w2_setat = new_sum_w2 . SetAt _x_center = xaxis . GetBinCenter _y_center = yaxis . GetBinCenter _z_center = zaxis . GetBinCenter _find = hist . FindBin _set = hist . SetBinContent _get = hist . GetBinContent _this_get = self . GetBinContent _get_bin = super ( _HistBase , self ) . GetBin for z in range ( 1 , nbinsz + 1 ) : for y in range ( 1 , nbinsy + 1 ) : for x in range ( 1 , nbinsx + 1 ) : newbin = _find ( _x_center ( x ) , _y_center ( y ) , _z_center ( z ) ) idx = _get_bin ( x , y , z ) _set ( newbin , _get ( newbin ) + _this_get ( idx ) ) _new_sum_w2_setat ( _new_sum_w2_at ( newbin ) + _sum_w2_at ( idx ) , newbin ) hist . SetEntries ( self . GetEntries ( ) ) else : raise TypeError ( "bins must either be an integer, a tuple, or an iterable" ) return hist | Return a new rebinned histogram |
58,133 | def smoothed ( self , iterations = 1 ) : copy = self . Clone ( shallow = True ) copy . Smooth ( iterations ) return copy | Return a smoothed copy of this histogram |
58,134 | def empty_clone ( self , binning = None , axis = 0 , type = None , ** kwargs ) : ndim = self . GetDimension ( ) if binning is False and ndim == 1 : raise ValueError ( "cannot remove the x-axis of a 1D histogram" ) args = [ ] for iaxis in range ( ndim ) : if iaxis == axis : if binning is False : continue elif binning is not None : if hasattr ( binning , '__iter__' ) : binning = ( binning , ) args . extend ( binning ) continue args . append ( list ( self . _edges ( axis = iaxis ) ) ) if type is None : type = self . TYPE if binning is False : ndim -= 1 cls = [ Hist , Hist2D , Hist3D ] [ ndim - 1 ] return cls ( * args , type = type , ** kwargs ) | Return a new empty histogram . The binning may be modified along one axis by specifying the binning and axis arguments . If binning is False then the corresponding axis is dropped from the returned histogram . |
58,135 | def poisson_errors ( self ) : graph = Graph ( self . nbins ( axis = 0 ) , type = 'asymm' ) graph . SetLineWidth ( self . GetLineWidth ( ) ) graph . SetMarkerSize ( self . GetMarkerSize ( ) ) chisqr = ROOT . TMath . ChisquareQuantile npoints = 0 for bin in self . bins ( overflow = False ) : entries = bin . effective_entries if entries <= 0 : continue ey_low = entries - 0.5 * chisqr ( 0.1586555 , 2. * entries ) ey_high = 0.5 * chisqr ( 1. - 0.1586555 , 2. * ( entries + 1 ) ) - entries ex = bin . x . width / 2. graph . SetPoint ( npoints , bin . x . center , bin . value ) graph . SetPointEXlow ( npoints , ex ) graph . SetPointEXhigh ( npoints , ex ) graph . SetPointEYlow ( npoints , ey_low ) graph . SetPointEYhigh ( npoints , ey_high ) npoints += 1 graph . Set ( npoints ) return graph | Return a TGraphAsymmErrors representation of this histogram where the point y errors are Poisson . |
58,136 | def attach_event_handler ( canvas , handler = close_on_esc_or_middlemouse ) : if getattr ( canvas , "_py_event_dispatcher_attached" , None ) : return event_dispatcher = C . TPyDispatcherProcessedEvent ( handler ) canvas . Connect ( "ProcessedEvent(int,int,int,TObject*)" , "TPyDispatcherProcessedEvent" , event_dispatcher , "Dispatch(int,int,int,TObject*)" ) canvas . _py_event_dispatcher_attached = event_dispatcher | Attach a handler function to the ProcessedEvent slot defaulting to closing when middle mouse is clicked or escape is pressed |
58,137 | def _num_to_string ( self , number , pad_to_length = None ) : output = "" while number : number , digit = divmod ( number , self . _alpha_len ) output += self . _alphabet [ digit ] if pad_to_length : remainder = max ( pad_to_length - len ( output ) , 0 ) output = output + self . _alphabet [ 0 ] * remainder return output | Convert a number to a string using the given alphabet . |
58,138 | def _string_to_int ( self , string ) : number = 0 for char in string [ : : - 1 ] : number = number * self . _alpha_len + self . _alphabet . index ( char ) return number | Convert a string to a number using the given alphabet .. |
58,139 | def uuid ( self , name = None , pad_length = 22 ) : if name is None : uuid = _uu . uuid4 ( ) elif "http" not in name . lower ( ) : uuid = _uu . uuid5 ( _uu . NAMESPACE_DNS , name ) else : uuid = _uu . uuid5 ( _uu . NAMESPACE_URL , name ) return self . encode ( uuid , pad_length ) | Generate and return a UUID . |
58,140 | def fit ( self , data = 'obsData' , model_config = 'ModelConfig' , param_const = None , param_values = None , param_ranges = None , poi_const = False , poi_value = None , poi_range = None , extended = False , num_cpu = 1 , process_strategy = 0 , offset = False , print_level = None , return_nll = False , ** kwargs ) : if isinstance ( model_config , string_types ) : model_config = self . obj ( model_config , cls = ROOT . RooStats . ModelConfig ) if isinstance ( data , string_types ) : data = self . data ( data ) pdf = model_config . GetPdf ( ) pois = model_config . GetParametersOfInterest ( ) if pois . getSize ( ) > 0 : poi = pois . first ( ) poi . setConstant ( poi_const ) if poi_value is not None : poi . setVal ( poi_value ) if poi_range is not None : poi . setRange ( * poi_range ) if param_const is not None : for param_name , const in param_const . items ( ) : var = self . var ( param_name ) var . setConstant ( const ) if param_values is not None : for param_name , param_value in param_values . items ( ) : var = self . var ( param_name ) var . setVal ( param_value ) if param_ranges is not None : for param_name , param_range in param_ranges . items ( ) : var = self . var ( param_name ) var . setRange ( * param_range ) if print_level < 0 : msg_service = ROOT . RooMsgService . instance ( ) msg_level = msg_service . globalKillBelow ( ) msg_service . setGlobalKillBelow ( ROOT . RooFit . FATAL ) args = [ ROOT . RooFit . Constrain ( model_config . GetNuisanceParameters ( ) ) , ROOT . RooFit . GlobalObservables ( model_config . GetGlobalObservables ( ) ) ] if extended : args . append ( ROOT . RooFit . Extended ( True ) ) if offset : args . append ( ROOT . RooFit . Offset ( True ) ) if num_cpu != 1 : if num_cpu == 0 : raise ValueError ( "num_cpu must be non-zero" ) if num_cpu < 0 : num_cpu = NCPU args . append ( ROOT . RooFit . NumCPU ( num_cpu , process_strategy ) ) func = pdf . createNLL ( data , * args ) if print_level < 0 : msg_service . 
setGlobalKillBelow ( msg_level ) result = minimize ( func , print_level = print_level , ** kwargs ) if return_nll : return result , func return result | Fit a pdf to data in a workspace |
58,141 | def ensure_trafaret ( trafaret ) : if isinstance ( trafaret , Trafaret ) : return trafaret elif isinstance ( trafaret , type ) : if issubclass ( trafaret , Trafaret ) : return trafaret ( ) return Call ( lambda val : trafaret ( val ) ) elif callable ( trafaret ) : return Call ( trafaret ) else : raise RuntimeError ( "%r should be instance or subclass" " of Trafaret" % trafaret ) | Helper for complex trafarets takes trafaret instance or class and returns trafaret instance |
58,142 | def DictKeys ( keys ) : req = [ ( Key ( key ) , Any ) for key in keys ] return Dict ( dict ( req ) ) | Checks if dict has all given keys |
58,143 | def guard ( trafaret = None , ** kwargs ) : if ( trafaret and not isinstance ( trafaret , Dict ) and not isinstance ( trafaret , Forward ) ) : raise RuntimeError ( "trafaret should be instance of Dict or Forward" ) elif trafaret and kwargs : raise RuntimeError ( "choose one way of initialization," " trafaret or kwargs" ) if not trafaret : trafaret = Dict ( ** kwargs ) def wrapper ( fn ) : argspec = getargspec ( fn ) @ functools . wraps ( fn ) def decor ( * args , ** kwargs ) : fnargs = argspec . args if fnargs and fnargs [ 0 ] in [ 'self' , 'cls' ] : obj = args [ 0 ] fnargs = fnargs [ 1 : ] checkargs = args [ 1 : ] else : obj = None checkargs = args try : call_args = dict ( itertools . chain ( zip ( fnargs , checkargs ) , kwargs . items ( ) ) ) for name , default in zip ( reversed ( fnargs ) , reversed ( argspec . defaults or ( ) ) ) : if name not in call_args : call_args [ name ] = default converted = trafaret ( call_args ) except DataError as err : raise GuardError ( error = err . error ) return fn ( obj , ** converted ) if obj else fn ( ** converted ) decor . __doc__ = "guarded with %r\n\n" % trafaret + ( decor . __doc__ or "" ) return decor return wrapper | Decorator for protecting function with trafarets |
58,144 | def _clone_args ( self ) : keys = list ( self . keys ) kw = { } if self . allow_any or self . extras : kw [ 'allow_extra' ] = list ( self . extras ) if self . allow_any : kw [ 'allow_extra' ] . append ( '*' ) kw [ 'allow_extra_trafaret' ] = self . extras_trafaret if self . ignore_any or self . ignore : kw [ 'ignore_extra' ] = list ( self . ignore ) if self . ignore_any : kw [ 'ignore_any' ] . append ( '*' ) return keys , kw | return args to create new Dict clone |
58,145 | def merge ( self , other ) : ignore = self . ignore extra = self . extras if isinstance ( other , Dict ) : other_keys = other . keys ignore += other . ignore extra += other . extras elif isinstance ( other , ( list , tuple ) ) : other_keys = list ( other ) elif isinstance ( other , dict ) : return self . __class__ ( other , * self . keys ) else : raise TypeError ( 'You must merge Dict only with Dict' ' or list of Keys' ) return self . __class__ ( * ( self . keys + other_keys ) , ignore_extra = ignore , allow_extra = extra ) | Extends one Dict with other Dict Key s or Key s list or dict instance supposed for Dict |
58,146 | def get_deep_attr ( obj , keys ) : cur = obj for k in keys : if isinstance ( cur , Mapping ) and k in cur : cur = cur [ k ] continue else : try : cur = getattr ( cur , k ) continue except AttributeError : pass raise DataError ( error = 'Unexistent key' ) return cur | Helper for DeepKey |
58,147 | def construct ( arg ) : if isinstance ( arg , t . Trafaret ) : return arg elif isinstance ( arg , tuple ) or ( isinstance ( arg , list ) and len ( arg ) > 1 ) : return t . Tuple ( * ( construct ( a ) for a in arg ) ) elif isinstance ( arg , list ) : return t . List ( construct ( arg [ 0 ] ) ) elif isinstance ( arg , dict ) : return t . Dict ( { construct_key ( key ) : construct ( value ) for key , value in arg . items ( ) } ) elif isinstance ( arg , str ) : return t . Atom ( arg ) elif isinstance ( arg , type ) : if arg is int : return t . Int ( ) elif arg is float : return t . Float ( ) elif arg is str : return t . String ( ) elif arg is bool : return t . Bool ( ) else : return t . Type ( arg ) elif callable ( arg ) : return t . Call ( arg ) else : return arg | Shortcut syntax to define trafarets . |
58,148 | def subdict ( name , * keys , ** kw ) : trafaret = kw . pop ( 'trafaret' ) def inner ( data , context = None ) : errors = False preserve_output = [ ] touched = set ( ) collect = { } for key in keys : for k , v , names in key ( data , context = context ) : touched . update ( names ) preserve_output . append ( ( k , v , names ) ) if isinstance ( v , t . DataError ) : errors = True else : collect [ k ] = v if errors : for out in preserve_output : yield out elif collect : yield name , t . catch ( trafaret , collect ) , touched return inner | Subdict key . |
58,149 | def xor_key ( first , second , trafaret ) : trafaret = t . Trafaret . _trafaret ( trafaret ) def check_ ( value ) : if ( first in value ) ^ ( second in value ) : key = first if first in value else second yield first , t . catch_error ( trafaret , value [ key ] ) , ( key , ) elif first in value and second in value : yield first , t . DataError ( error = 'correct only if {} is not defined' . format ( second ) ) , ( first , ) yield second , t . DataError ( error = 'correct only if {} is not defined' . format ( first ) ) , ( second , ) else : yield first , t . DataError ( error = 'is required if {} is not defined' . format ( 'second' ) ) , ( first , ) yield second , t . DataError ( error = 'is required if {} is not defined' . format ( 'first' ) ) , ( second , ) return check_ | xor_key - takes first and second key names and trafaret . |
58,150 | def confirm_key ( name , confirm_name , trafaret ) : def check_ ( value ) : first , second = None , None if name in value : first = value [ name ] else : yield name , t . DataError ( 'is required' ) , ( name , ) if confirm_name in value : second = value [ confirm_name ] else : yield confirm_name , t . DataError ( 'is required' ) , ( confirm_name , ) if not ( first and second ) : return yield name , t . catch_error ( trafaret , first ) , ( name , ) yield confirm_name , t . catch_error ( trafaret , second ) , ( confirm_name , ) if first != second : yield confirm_name , t . DataError ( 'must be equal to {}' . format ( name ) ) , ( confirm_name , ) return check_ | confirm_key - takes name confirm_name and trafaret . |
58,151 | def get_capacity ( self , legacy = None ) : params = None if legacy : params = { 'legacy' : legacy } return self . call_api ( '/capacity' , params = params ) [ 'capacity' ] | Get capacity of all facilities . |
58,152 | def altcore_data ( self ) : ret = [ ] for symbol in self . supported_currencies ( project = 'altcore' , level = "address" ) : data = crypto_data [ symbol ] priv = data . get ( 'private_key_prefix' ) pub = data . get ( 'address_version_byte' ) hha = data . get ( 'header_hash_algo' ) shb = data . get ( 'script_hash_byte' ) supported = collections . OrderedDict ( ) supported [ 'name' ] = data [ 'name' ] supported [ 'alias' ] = symbol if pub is not None : supported [ 'pubkeyhash' ] = int ( pub ) if priv : supported [ 'privatekey' ] = priv supported [ 'scripthash' ] = shb if shb else 5 if 'transaction_form' in data : supported [ 'transactionForm' ] = data [ 'transaction_form' ] if 'private_key_form' in data : supported [ 'privateKeyForm' ] = data [ 'private_key_form' ] supported [ 'port' ] = data . get ( 'port' ) or None if hha not in ( None , 'double-sha256' ) : supported [ 'headerHashAlgo' ] = hha if data . get ( 'script_hash_algo' , 'double-sha256' ) not in ( None , 'double-sha256' ) : supported [ 'scriptHashAlgo' ] = data [ 'script_hash_algo' ] if data . get ( 'transaction_hash_algo' , 'double-sha256' ) not in ( None , 'double-sha256' ) : supported [ 'transactionHashAlgo' ] = data [ 'transaction_hash_algo' ] if data . get ( 'seed_nodes' ) : supported [ 'dnsSeeds' ] = data [ 'seed_nodes' ] ret . append ( supported ) return ret | Returns the crypto_data for all currencies defined in moneywagon that also meet the minimum support for altcore . Data is keyed according to the bitcore specification . |
58,153 | def from_unit_to_satoshi ( self , value , unit = 'satoshi' ) : if not unit or unit == 'satoshi' : return value if unit == 'bitcoin' or unit == 'btc' : return value * 1e8 convert = get_current_price ( self . crypto , unit ) return int ( value / convert * 1e8 ) | Convert a value to satoshis . units can be any fiat currency . By default the unit is satoshi . |
58,154 | def _get_utxos ( self , address , services , ** modes ) : return get_unspent_outputs ( self . crypto , address , services = services , ** modes ) | Using the service fallback engine get utxos from remote service . |
58,155 | def total_input_satoshis ( self ) : just_inputs = [ x [ 'input' ] for x in self . ins ] return sum ( [ x [ 'amount' ] for x in just_inputs ] ) | Add up all the satoshis coming from all input tx s . |
58,156 | def select_inputs ( self , amount ) : sorted_txin = sorted ( self . ins , key = lambda x : - x [ 'input' ] [ 'confirmations' ] ) total_amount = 0 for ( idx , tx_in ) in enumerate ( sorted_txin ) : total_amount += tx_in [ 'input' ] [ 'amount' ] if ( total_amount >= amount ) : break sorted_txin = sorted ( sorted_txin [ : idx + 1 ] , key = lambda x : x [ 'input' ] [ 'amount' ] ) for ( idx , tx_in ) in enumerate ( sorted_txin ) : value = tx_in [ 'input' ] [ 'amount' ] if ( total_amount - value < amount ) : break else : total_amount -= value self . ins = sorted_txin [ idx : ] return total_amount | Maximize transaction priority . Select the oldest inputs that are sufficient to cover the spent amount . Then remove any unneeded inputs starting with the smallest in value . Returns sum of amounts of inputs selected |
58,157 | def onchain_exchange ( self , withdraw_crypto , withdraw_address , value , unit = 'satoshi' ) : self . onchain_rate = get_onchain_exchange_rates ( self . crypto , withdraw_crypto , best = True , verbose = self . verbose ) exchange_rate = float ( self . onchain_rate [ 'rate' ] ) result = self . onchain_rate [ 'service' ] . get_onchain_exchange_address ( self . crypto , withdraw_crypto , withdraw_address ) address = result [ 'deposit' ] value_satoshi = self . from_unit_to_satoshi ( value , unit ) if self . verbose : print ( "Adding output of: %s satoshi (%.8f) via onchain exchange, converting to %s %s" % ( value_satoshi , ( value_satoshi / 1e8 ) , exchange_rate * value_satoshi / 1e8 , withdraw_crypto . upper ( ) ) ) self . outs . append ( { 'address' : address , 'value' : value_satoshi } ) | This method is like add_output but it sends to another |
58,158 | def fee ( self , value = None , unit = 'satoshi' ) : convert = None if not value : convert = get_current_price ( self . crypto , "usd" ) self . fee_satoshi = int ( 0.02 / convert * 1e8 ) verbose = "Using default fee of:" elif value == 'optimal' : self . fee_satoshi = get_optimal_fee ( self . crypto , self . estimate_size ( ) , verbose = self . verbose ) verbose = "Using optimal fee of:" else : self . fee_satoshi = self . from_unit_to_satoshi ( value , unit ) verbose = "Using manually set fee of:" if self . verbose : if not convert : convert = get_current_price ( self . crypto , "usd" ) fee_dollar = convert * self . fee_satoshi / 1e8 print ( verbose + " %s satoshis ($%.2f)" % ( self . fee_satoshi , fee_dollar ) ) | Set the miner fee if unit is not set assumes value is satoshi . If using optimal make sure you have already added all outputs . |
58,159 | def get_hex ( self , signed = True ) : total_ins_satoshi = self . total_input_satoshis ( ) if total_ins_satoshi == 0 : raise ValueError ( "Can't make transaction, there are zero inputs" ) total_outs_satoshi = sum ( [ x [ 'value' ] for x in self . outs ] ) if not self . fee_satoshi : self . fee ( ) change_satoshi = total_ins_satoshi - ( total_outs_satoshi + self . fee_satoshi ) if change_satoshi < 0 : raise ValueError ( "Input amount (%s) must be more than all output amounts (%s) plus fees (%s). You need more %s." % ( total_ins_satoshi , total_outs_satoshi , self . fee_satoshi , self . crypto . upper ( ) ) ) ins = [ x [ 'input' ] for x in self . ins ] if change_satoshi > 0 : if self . verbose : print ( "Adding change address of %s satoshis to %s" % ( change_satoshi , self . change_address ) ) change = [ { 'value' : change_satoshi , 'address' : self . change_address } ] else : change = [ ] if self . verbose : print ( "Inputs == Outputs, no change address needed." ) tx = mktx ( ins , self . outs + change ) if signed : for i , input_data in enumerate ( self . ins ) : if not input_data [ 'private_key' ] : raise Exception ( "Can't sign transaction, missing private key for input %s" % i ) tx = sign ( tx , i , input_data [ 'private_key' ] ) return tx | Given all the data the user has given so far make the hex using pybitcointools |
58,160 | def get_current_price ( crypto , fiat , services = None , convert_to = None , helper_prices = None , ** modes ) : fiat = fiat . lower ( ) args = { 'crypto' : crypto , 'fiat' : fiat , 'convert_to' : convert_to } if not services : services = get_optimal_services ( crypto , 'current_price' ) if fiat in services : try_services = services [ fiat ] result = _try_price_fetch ( try_services , args , modes ) if not isinstance ( result , Exception ) : return result if '*' in services : try_services = services [ '*' ] result = _try_price_fetch ( try_services , args , modes ) if not isinstance ( result , Exception ) : return result def _do_composite_price_fetch ( crypto , convert_crypto , fiat , helpers , modes ) : before = modes . get ( 'report_services' , False ) modes [ 'report_services' ] = True services1 , converted_price = get_current_price ( crypto , convert_crypto , ** modes ) if not helpers or convert_crypto not in helpers [ fiat ] : services2 , fiat_price = get_current_price ( convert_crypto , fiat , ** modes ) else : services2 , fiat_price = helpers [ fiat ] [ convert_crypto ] modes [ 'report_services' ] = before if modes . get ( 'report_services' , False ) : serv = CompositeService ( services1 , services2 , convert_crypto ) return [ serv ] , converted_price * fiat_price else : return converted_price * fiat_price all_composite_cryptos = [ 'btc' , 'ltc' , 'doge' , 'uno' ] if crypto in all_composite_cryptos : all_composite_cryptos . remove ( crypto ) for composite_attempt in all_composite_cryptos : if composite_attempt in services and services [ composite_attempt ] : result = _do_composite_price_fetch ( crypto , composite_attempt , fiat , helper_prices , modes ) if not isinstance ( result , Exception ) : return result raise result | High level function for getting current exchange rate for a cryptocurrency . If the fiat value is not explicitly defined it will try the wildcard service . 
if that does not work it tries converting to an intermediate cryptocurrency if available . |
58,161 | def get_onchain_exchange_rates ( deposit_crypto = None , withdraw_crypto = None , ** modes ) : from moneywagon . onchain_exchange import ALL_SERVICES rates = [ ] for Service in ALL_SERVICES : srv = Service ( verbose = modes . get ( 'verbose' , False ) ) rates . extend ( srv . onchain_exchange_rates ( ) ) if deposit_crypto : rates = [ x for x in rates if x [ 'deposit_currency' ] [ 'code' ] == deposit_crypto . upper ( ) ] if withdraw_crypto : rates = [ x for x in rates if x [ 'withdraw_currency' ] [ 'code' ] == withdraw_crypto . upper ( ) ] if modes . get ( 'best' , False ) : return max ( rates , key = lambda x : float ( x [ 'rate' ] ) ) return rates | Gets exchange rates for all defined on - chain exchange services . |
58,162 | def generate_keypair ( crypto , seed , password = None ) : if crypto in [ 'eth' , 'etc' ] : raise CurrencyNotSupported ( "Ethereums not yet supported" ) pub_byte , priv_byte = get_magic_bytes ( crypto ) priv = sha256 ( seed ) pub = privtopub ( priv ) priv_wif = encode_privkey ( priv , 'wif_compressed' , vbyte = priv_byte ) if password : from . bip38 import Bip38EncryptedPrivateKey priv_wif = str ( Bip38EncryptedPrivateKey . encrypt ( crypto , priv_wif , password ) ) compressed_pub = encode_pubkey ( pub , 'hex_compressed' ) ret = { 'public' : { 'hex_uncompressed' : pub , 'hex' : compressed_pub , 'address' : pubtoaddr ( compressed_pub , pub_byte ) } , 'private' : { 'wif' : priv_wif } } if not password : ret [ 'private' ] [ 'hex' ] = encode_privkey ( priv , 'hex_compressed' , vbyte = priv_byte ) ret [ 'private' ] [ 'hex_uncompressed' ] = encode_privkey ( priv , 'hex' , vbyte = priv_byte ) ret [ 'private' ] [ 'wif_uncompressed' ] = encode_privkey ( priv , 'wif' , vbyte = priv_byte ) return ret | Generate a private key and publickey for any currency given a seed . That seed can be random or a brainwallet phrase . |
58,163 | def sweep ( crypto , private_key , to_address , fee = None , password = None , ** modes ) : from moneywagon . tx import Transaction tx = Transaction ( crypto , verbose = modes . get ( 'verbose' , False ) ) tx . add_inputs ( private_key = private_key , password = password , ** modes ) tx . change_address = to_address tx . fee ( fee ) return tx . push ( ) | Move all funds by private key to another address . |
58,164 | def guess_currency_from_address ( address ) : if is_py2 : fixer = lambda x : int ( x . encode ( 'hex' ) , 16 ) else : fixer = lambda x : x first_byte = fixer ( b58decode_check ( address ) [ 0 ] ) double_first_byte = fixer ( b58decode_check ( address ) [ : 2 ] ) hits = [ ] for currency , data in crypto_data . items ( ) : if hasattr ( data , 'get' ) : version = data . get ( 'address_version_byte' , None ) if version is not None and version in [ double_first_byte , first_byte ] : hits . append ( [ currency , data [ 'name' ] ] ) if hits : return hits raise ValueError ( "Unknown Currency with first byte: %s" % first_byte ) | Given a crypto address find which currency it likely belongs to . Raises an exception if it can t find a match . Raises exception if address is invalid . |
58,165 | def service_table ( format = 'simple' , authenticated = False ) : if authenticated : all_services = ExchangeUniverse . get_authenticated_services ( ) else : all_services = ALL_SERVICES if format == 'html' : linkify = lambda x : "<a href='{0}' target='_blank'>{0}</a>" . format ( x ) else : linkify = lambda x : x ret = [ ] for service in sorted ( all_services , key = lambda x : x . service_id ) : ret . append ( [ service . service_id , service . __name__ , linkify ( service . api_homepage . format ( domain = service . domain , protocol = service . protocol ) ) , ", " . join ( service . supported_cryptos or [ ] ) ] ) return tabulate ( ret , headers = [ 'ID' , 'Name' , 'URL' , 'Supported Currencies' ] , tablefmt = format ) | Returns a string depicting all services currently installed . |
58,166 | def find_pair ( self , crypto = "" , fiat = "" , verbose = False ) : self . fetch_pairs ( ) if not crypto and not fiat : raise Exception ( "Fiat or Crypto required" ) def is_matched ( crypto , fiat , pair ) : if crypto and not fiat : return pair . startswith ( "%s-" % crypto ) if crypto and fiat : return pair == "%s-%s" % ( crypto , fiat ) if not crypto : return pair . endswith ( "-%s" % fiat ) matched_pairs = { } for Service , pairs in self . _all_pairs . items ( ) : matched = [ p for p in pairs if is_matched ( crypto , fiat , p ) ] if matched : matched_pairs [ Service ] = matched return matched_pairs | This utility is used to find an exchange that supports a given exchange pair . |
58,167 | def all_balances ( currency , services = None , verbose = False , timeout = None ) : balances = { } if not services : services = [ x ( verbose = verbose , timeout = timeout ) for x in ExchangeUniverse . get_authenticated_services ( ) ] for e in services : try : balances [ e ] = e . get_exchange_balance ( currency ) except NotImplementedError : if verbose : print ( e . name , "balance not implemented" ) except Exception as exc : if verbose : print ( e . name , "failed:" , exc . __class__ . __name__ , str ( exc ) ) return balances | Get balances for passed in currency for all exchanges . |
58,168 | def total_exchange_balances ( services = None , verbose = None , timeout = None , by_service = False ) : balances = defaultdict ( lambda : 0 ) if not services : services = [ x ( verbose = verbose , timeout = timeout ) for x in ExchangeUniverse . get_authenticated_services ( ) ] for e in services : try : more_balances = e . get_total_exchange_balances ( ) if by_service : balances [ e . __class__ ] = more_balances else : for code , bal in more_balances . items ( ) : balances [ code ] += bal except NotImplementedError : if verbose : print ( e . name , "total balance not implemented" ) except Exception as exc : if verbose : print ( e . name , "failed:" , exc . __class__ . __name__ , str ( exc ) ) return balances | Returns all balances for all currencies for all exchanges |
58,169 | def compress ( x , y ) : polarity = "02" if y % 2 == 0 else "03" wrap = lambda x : x if not is_py2 : wrap = lambda x : bytes ( x , 'ascii' ) return unhexlify ( wrap ( "%s%0.64x" % ( polarity , x ) ) ) | Given a x y coordinate encode in compressed format Returned is always 33 bytes . |
58,170 | def decrypt ( self , passphrase , wif = False ) : passphrase = normalize ( 'NFC' , unicode ( passphrase ) ) if is_py2 : passphrase = passphrase . encode ( 'utf8' ) if self . ec_multiply : raise Exception ( "Not supported yet" ) key = scrypt . hash ( passphrase , self . addresshash , 16384 , 8 , 8 ) derivedhalf1 = key [ 0 : 32 ] derivedhalf2 = key [ 32 : 64 ] aes = AES . new ( derivedhalf2 ) decryptedhalf2 = aes . decrypt ( self . encryptedhalf2 ) decryptedhalf1 = aes . decrypt ( self . encryptedhalf1 ) priv = decryptedhalf1 + decryptedhalf2 priv = unhexlify ( '%064x' % ( long ( hexlify ( priv ) , 16 ) ^ long ( hexlify ( derivedhalf1 ) , 16 ) ) ) pub = privtopub ( priv ) if self . compressed : pub = encode_pubkey ( pub , 'hex_compressed' ) addr = pubtoaddr ( pub , self . pub_byte ) if is_py2 : ascii_key = addr else : ascii_key = bytes ( addr , 'ascii' ) if sha256 ( sha256 ( ascii_key ) . digest ( ) ) . digest ( ) [ 0 : 4 ] != self . addresshash : raise Exception ( 'Bip38 password decrypt failed: Wrong password?' ) else : formatt = 'wif' if wif else 'hex' if self . compressed : return encode_privkey ( priv , formatt + '_compressed' , self . priv_byte ) else : return encode_privkey ( priv , formatt , self . priv_byte ) | BIP0038 non - ec - multiply decryption . Returns hex privkey . |
58,171 | def encrypt ( cls , crypto , privkey , passphrase ) : pub_byte , priv_byte = get_magic_bytes ( crypto ) privformat = get_privkey_format ( privkey ) if privformat in [ 'wif_compressed' , 'hex_compressed' ] : compressed = True flagbyte = b'\xe0' if privformat == 'wif_compressed' : privkey = encode_privkey ( privkey , 'hex_compressed' ) privformat = get_privkey_format ( privkey ) if privformat in [ 'wif' , 'hex' ] : compressed = False flagbyte = b'\xc0' if privformat == 'wif' : privkey = encode_privkey ( privkey , 'hex' ) privformat = get_privkey_format ( privkey ) pubkey = privtopub ( privkey ) addr = pubtoaddr ( pubkey , pub_byte ) passphrase = normalize ( 'NFC' , unicode ( passphrase ) ) if is_py2 : ascii_key = addr passphrase = passphrase . encode ( 'utf8' ) else : ascii_key = bytes ( addr , 'ascii' ) salt = sha256 ( sha256 ( ascii_key ) . digest ( ) ) . digest ( ) [ 0 : 4 ] key = scrypt . hash ( passphrase , salt , 16384 , 8 , 8 ) derivedhalf1 , derivedhalf2 = key [ : 32 ] , key [ 32 : ] aes = AES . new ( derivedhalf2 ) encryptedhalf1 = aes . encrypt ( unhexlify ( '%0.32x' % ( long ( privkey [ 0 : 32 ] , 16 ) ^ long ( hexlify ( derivedhalf1 [ 0 : 16 ] ) , 16 ) ) ) ) encryptedhalf2 = aes . encrypt ( unhexlify ( '%0.32x' % ( long ( privkey [ 32 : 64 ] , 16 ) ^ long ( hexlify ( derivedhalf1 [ 16 : 32 ] ) , 16 ) ) ) ) payload = b'\x01\x42' + flagbyte + salt + encryptedhalf1 + encryptedhalf2 return cls ( crypto , b58encode_check ( payload ) ) | BIP0038 non - ec - multiply encryption . Returns BIP0038 encrypted privkey . |
58,172 | def create_from_intermediate ( cls , crypto , intermediate_point , seed , compressed = True , include_cfrm = True ) : flagbyte = b'\x20' if compressed else b'\x00' payload = b58decode_check ( str ( intermediate_point ) ) ownerentropy = payload [ 8 : 16 ] passpoint = payload [ 16 : - 4 ] x , y = uncompress ( passpoint ) if not is_py2 : seed = bytes ( seed , 'ascii' ) seedb = hexlify ( sha256 ( seed ) . digest ( ) ) [ : 24 ] factorb = int ( hexlify ( sha256 ( sha256 ( seedb ) . digest ( ) ) . digest ( ) ) , 16 ) generatedaddress = pubtoaddr ( fast_multiply ( ( x , y ) , factorb ) ) wrap = lambda x : x if not is_py2 : wrap = lambda x : bytes ( x , 'ascii' ) addresshash = sha256 ( sha256 ( wrap ( generatedaddress ) ) . digest ( ) ) . digest ( ) [ : 4 ] encrypted_seedb = scrypt . hash ( passpoint , addresshash + ownerentropy , 1024 , 1 , 1 , 64 ) derivedhalf1 , derivedhalf2 = encrypted_seedb [ : 32 ] , encrypted_seedb [ 32 : ] aes = AES . new ( derivedhalf2 ) block1 = long ( seedb [ 0 : 16 ] , 16 ) ^ long ( hexlify ( derivedhalf1 [ 0 : 16 ] ) , 16 ) encryptedpart1 = aes . encrypt ( unhexlify ( '%0.32x' % block1 ) ) block2 = long ( hexlify ( encryptedpart1 [ 8 : 16 ] ) + seedb [ 16 : 24 ] , 16 ) ^ long ( hexlify ( derivedhalf1 [ 16 : 32 ] ) , 16 ) encryptedpart2 = aes . encrypt ( unhexlify ( '%0.32x' % block2 ) ) payload = b"\x01\x43" + flagbyte + addresshash + ownerentropy + encryptedpart1 [ : 8 ] + encryptedpart2 encrypted_pk = b58encode_check ( payload ) if not include_cfrm : return generatedaddress , encrypted_pk confirmation_code = Bip38ConfirmationCode . create ( flagbyte , ownerentropy , factorb , derivedhalf1 , derivedhalf2 , addresshash ) return generatedaddress , cls ( crypto , encrypted_pk ) , confirmation_code | Given an intermediate point given to us by owner generate an address and encrypted private key that can be decoded by the passphrase used to generate the intermediate point . |
58,173 | def generate_address ( self , passphrase ) : inter = Bip38IntermediatePoint . create ( passphrase , ownersalt = self . ownersalt ) public_key = privtopub ( inter . passpoint ) derived = scrypt . hash ( inter . passpoint , self . addresshash + inter . ownerentropy , 1024 , 1 , 1 , 64 ) derivedhalf1 , derivedhalf2 = derived [ : 32 ] , derived [ 32 : ] unencrypted_prefix = bytes_to_int ( self . pointbprefix ) ^ ( bytes_to_int ( derived [ 63 ] ) & 0x01 ) aes = AES . new ( derivedhalf2 ) block1 = aes . decrypt ( self . pointbx1 ) block2 = aes . decrypt ( self . pointbx2 ) raise Exception ( "Not done yet" ) return block2 = long ( hexlify ( pointb2 ) , 16 ) ^ long ( hexlify ( derivedhalf1 [ 16 : ] ) , 16 ) return pubtoaddr ( * fast_multiply ( pointb , passfactor ) ) | Make sure the confirm code is valid for the given password and address . |
58,174 | def push_tx ( self , crypto , tx_hex ) : url = "%s/pushtx" % self . base_url return self . post_url ( url , { 'hex' : tx_hex } ) . content | This method is untested . |
58,175 | def replay_block ( self , block_to_replay , limit = 5 ) : if block_to_replay == 'latest' : if self . verbose : print ( "Getting latest %s block header" % source . upper ( ) ) block = get_block ( self . source , latest = True , verbose = self . verbose ) if self . verbose : print ( "Latest %s block is #%s" % ( self . source . upper ( ) , block [ 'block_number' ] ) ) else : blocknum = block_to_replay if type ( block_to_replay ) == int else block_to_replay [ 'block_number' ] if blocknum < self . parent_fork_block or blocknum < self . child_fork_block : raise Exception ( "Can't replay blocks mined before the fork" ) if type ( block_to_replay ) is not dict : if self . verbose : print ( "Getting %s block header #%s" % ( self . source . upper ( ) , block_to_replay ) ) block = get_block ( self . source , block_number = int ( block_to_replay ) , verbose = self . verbose ) else : block = block_to_replay if self . verbose : print ( "Using %s for pushing to %s" % ( self . pusher . name , self . destination . upper ( ) ) ) print ( "Using %s for getting %s transactions" % ( self . tx_fetcher . name , self . source . upper ( ) ) ) print ( "Finished getting block header," , len ( block [ 'txids' ] ) , "transactions in block, will replay" , ( limit or "all of them" ) ) results = [ ] enforced_limit = ( limit or len ( block [ 'txids' ] ) ) for i , txid in enumerate ( block [ 'txids' ] [ : enforced_limit ] ) : print ( "outside" , txid ) self . _replay_tx ( txid , i ) | Replay all transactions in parent currency to passed in source currency . Block_to_replay can either be an integer or a block object . |
58,176 | def get_block_adjustments ( crypto , points = None , intervals = None , ** modes ) : from moneywagon import get_block all_points = [ ] if intervals : latest_block_height = get_block ( crypto , latest = True , ** modes ) [ 'block_number' ] interval = int ( latest_block_height / float ( intervals ) ) all_points = [ x * interval for x in range ( 1 , intervals - 1 ) ] if points : all_points . extend ( points ) all_points . sort ( ) adjustments = [ ] previous_point = 0 previous_time = ( crypto_data [ crypto . lower ( ) ] . get ( 'genesis_date' ) . replace ( tzinfo = pytz . UTC ) or get_block ( crypto , block_number = 0 , ** modes ) [ 'time' ] ) for point in all_points : if point == 0 : continue point_time = get_block ( crypto , block_number = point , ** modes ) [ 'time' ] length = point - previous_point minutes = ( point_time - previous_time ) . total_seconds ( ) / 60 rate = minutes / length adjustments . append ( [ previous_point , rate ] ) previous_time = point_time previous_point = point return adjustments | This utility is used to determine the actual block rate . The output can be directly copied to the blocktime_adjustments setting . |
58,177 | def _per_era_supply ( self , block_height ) : coins = 0 for era in self . supply_data [ 'eras' ] : end_block = era [ 'end' ] start_block = era [ 'start' ] reward = era [ 'reward' ] if not end_block or block_height <= end_block : blocks_this_era = block_height - start_block coins += blocks_this_era * reward break blocks_per_era = end_block - start_block coins += reward * blocks_per_era return coins | Calculate the coin supply based on eras defined in crypto_data . Some currencies don t have a simple algorithmically defined halfing schedule so coins supply has to be defined explicitly per era . |
58,178 | def _prepare_consensus ( FetcherClass , results ) : if hasattr ( FetcherClass , "strip_for_consensus" ) : to_compare = [ FetcherClass . strip_for_consensus ( value ) for ( fetcher , value ) in results ] else : to_compare = [ value for fetcher , value in results ] return to_compare , [ fetcher . _successful_service for fetcher , values in results ] | Given a list of results return a list that is simplified to make consensus determination possible . Returns two item tuple first arg is simplified list the second argument is a list of all services used in making these results . |
58,179 | def _get_results ( FetcherClass , services , kwargs , num_results = None , fast = 0 , verbose = False , timeout = None ) : results = [ ] if not num_results or fast : num_results = len ( services ) with futures . ThreadPoolExecutor ( max_workers = len ( services ) ) as executor : fetches = { } for service in services [ : num_results ] : tail = [ x for x in services if x is not service ] random . shuffle ( tail ) srv = FetcherClass ( services = [ service ] + tail , verbose = verbose , timeout = timeout ) fetches [ executor . submit ( srv . action , ** kwargs ) ] = srv if fast == 1 : raise NotImplementedError to_iterate , still_going = futures . wait ( fetches , return_when = futures . FIRST_COMPLETED ) for x in still_going : try : x . result ( timeout = 1.001 ) except futures . _base . TimeoutError : pass elif fast > 1 : raise Exception ( "fast level greater than 1 not yet implemented" ) else : to_iterate = futures . as_completed ( fetches ) for future in to_iterate : service = fetches [ future ] results . append ( [ service , future . result ( ) ] ) return results | Does the fetching in multiple threads of needed . Used by paranoid and fast mode . |
58,180 | def _do_private_mode ( FetcherClass , services , kwargs , random_wait_seconds , timeout , verbose ) : addresses = kwargs . pop ( 'addresses' ) results = { } with futures . ThreadPoolExecutor ( max_workers = len ( addresses ) ) as executor : fetches = { } for address in addresses : k = kwargs k [ 'address' ] = address random . shuffle ( services ) srv = FetcherClass ( services = services , verbose = verbose , timeout = timeout or 5.0 , random_wait_seconds = random_wait_seconds ) fetches [ executor . submit ( srv . action , ** k ) ] = ( srv , address ) to_iterate = futures . as_completed ( fetches ) for future in to_iterate : service , address = fetches [ future ] results [ address ] = future . result ( ) return results | Private mode is only applicable to address_balance unspent_outputs and historical_transactions . There will always be a list for the addresses argument . Each address goes to a random service . Also a random delay is performed before the external fetch for improved privacy . |
58,181 | def currency_to_protocol ( amount ) : if type ( amount ) in [ float , int ] : amount = "%.8f" % amount return int ( amount . replace ( "." , '' ) ) | Convert a string of currency units to protocol units . For instance converts 19 . 1 bitcoin to 1910000000 satoshis . |
58,182 | def to_rawtx ( tx ) : if tx . get ( 'hex' ) : return tx [ 'hex' ] new_tx = { } locktime = tx . get ( 'locktime' , 0 ) new_tx [ 'locktime' ] = locktime new_tx [ 'version' ] = tx . get ( 'version' , 1 ) new_tx [ 'ins' ] = [ { 'outpoint' : { 'hash' : str ( x [ 'txid' ] ) , 'index' : x [ 'n' ] } , 'script' : str ( x [ 'scriptSig' ] . replace ( ' ' , '' ) ) , 'sequence' : x . get ( 'sequence' , 0xFFFFFFFF if locktime == 0 else None ) } for x in tx [ 'inputs' ] ] new_tx [ 'outs' ] = [ { 'script' : str ( x [ 'scriptPubKey' ] ) , 'value' : x [ 'amount' ] } for x in tx [ 'outputs' ] ] return serialize ( new_tx ) | Take a tx object in the moneywagon format and convert it to the format that pybitcointools s serialize funcion takes then return in raw hex format . |
58,183 | def check_error ( self , response ) : if response . status_code == 500 : raise ServiceError ( "500 - " + response . content ) if response . status_code == 503 : if "DDoS protection by Cloudflare" in response . content : raise ServiceError ( "Foiled by Cloudfare's DDoS protection" ) raise ServiceError ( "503 - Temporarily out of service." ) if response . status_code == 429 : raise ServiceError ( "429 - Too many requests" ) if response . status_code == 404 : raise ServiceError ( "404 - Not Found" ) if response . status_code == 400 : raise ServiceError ( "400 - Bad Request" ) | If the service is returning an error this function should raise an exception . such as SkipThisService |
58,184 | def convert_currency ( self , base_fiat , base_amount , target_fiat ) : url = "http://api.fixer.io/latest?base=%s" % base_fiat data = self . get_url ( url ) . json ( ) try : return data [ 'rates' ] [ target_fiat . upper ( ) ] * base_amount except KeyError : raise Exception ( "Can not convert %s to %s" % ( base_fiat , target_fiat ) ) | Convert one fiat amount to another fiat . Uses the fixer . io service . |
58,185 | def fix_symbol ( self , symbol , reverse = False ) : if not self . symbol_mapping : return symbol for old , new in self . symbol_mapping : if reverse : if symbol == new : return old else : if symbol == old : return new return symbol | In comes a moneywagon format symbol and returned in the symbol converted to one the service can understand . |
58,186 | def parse_market ( self , market , split_char = '_' ) : crypto , fiat = market . lower ( ) . split ( split_char ) return ( self . fix_symbol ( crypto , reverse = True ) , self . fix_symbol ( fiat , reverse = True ) ) | In comes the market identifier directly from the service . Returned is the crypto and fiat identifier in moneywagon format . |
58,187 | def make_market ( self , crypto , fiat , seperator = "_" ) : return ( "%s%s%s" % ( self . fix_symbol ( crypto ) , seperator , self . fix_symbol ( fiat ) ) ) . lower ( ) | Convert a crypto and fiat to a market string . All exchanges use their own format for specifying markets . Subclasses can define their own implementation . |
58,188 | def _external_request ( self , method , url , * args , ** kwargs ) : self . last_url = url if url in self . responses . keys ( ) and method == 'get' : return self . responses [ url ] headers = kwargs . pop ( 'headers' , None ) custom = { 'User-Agent' : useragent } if headers : headers . update ( custom ) kwargs [ 'headers' ] = headers else : kwargs [ 'headers' ] = custom if self . timeout : kwargs [ 'timeout' ] = self . timeout start = datetime . datetime . now ( ) response = getattr ( requests , method ) ( url , verify = self . ssl_verify , * args , ** kwargs ) self . total_external_fetch_duration += datetime . datetime . now ( ) - start if self . verbose : print ( "Got Response: %s (took %s)" % ( url , ( datetime . datetime . now ( ) - start ) ) ) self . last_raw_response = response self . check_error ( response ) if method == 'get' : self . responses [ url ] = response return response | Wrapper for requests . get with useragent automatically set . And also all requests are reponses are cached . |
58,189 | def get_block ( self , crypto , block_hash = '' , block_number = '' , latest = False ) : raise NotImplementedError ( self . name + " does not support getting getting block data. " "Or rather it has no defined 'get_block' method." ) | Get block based on either block height block number or get the latest block . Only one of the previous arguments must be passed on . |
def make_order(self, crypto, fiat, amount, price, type="limit"):
    """Buy or sell crypto on an exchange using fiat balance.

    ``type`` can be fill-or-kill, post-only, market, or limit. To see
    which modes a service supports, consult ``make_order.supported_types``
    if one is defined. The base implementation always raises; subclasses
    that support trading must override this method.
    """
    message = (
        self.name + " does not support making orders. "
        "Or rather it has no defined 'make_order' method."
    )
    raise NotImplementedError(message)
def _try_services(self, method_name, *args, **kwargs):
    """Try each service in order until one returns a response.

    Only the bare minimum of exceptions from the service classes is
    caught, so that service bugs surface quickly and can be fixed.
    Failures are recorded in ``self._failed_services``; the first service
    that succeeds is stored in ``self._successful_service``.
    """
    # crypto comes either positionally (first arg) or as a keyword.
    crypto = ((args and args[0]) or kwargs['crypto']).lower()
    address = kwargs.get('address', '').lower()
    fiat = kwargs.get('fiat', '').lower()
    if not self.services:
        raise CurrencyNotSupported("No services defined for %s for %s" % (method_name, crypto))
    # Optional random pause (rate-limit friendliness / jitter).
    if self.random_wait_seconds > 0:
        pause_time = random.random() * self.random_wait_seconds
        if self.verbose:
            print("Pausing for: %.2f seconds" % pause_time)
        time.sleep(pause_time)
    for service in self.services:
        # Skip services that declare a supported-crypto whitelist
        # which does not include this crypto.
        if service.supported_cryptos and (crypto not in service.supported_cryptos):
            if self.verbose:
                print("SKIP:", "%s not supported for %s" % (crypto, service.__class__.__name__))
            continue
        try:
            if self.verbose:
                print("* Trying:", service, crypto, "%s%s" % (address, fiat))
            ret = getattr(service, method_name)(*args, **kwargs)
            # First success wins; remember who answered and return.
            self._successful_service = service
            return ret
        except (KeyError, IndexError, TypeError, ValueError,
                requests.exceptions.Timeout,
                requests.exceptions.ConnectionError) as exc:
            # Genuine failure (parse error or network problem): record and
            # fall through to the next service.
            if self.verbose:
                print("FAIL:", service, exc.__class__.__name__, exc)
            self._failed_services.append({
                'service': service,
                'error': "%s %s" % (exc.__class__.__name__, exc)
            })
        except NoService as exc:
            # Service declined the request; treated as a skip, not a crash.
            if self.verbose:
                print("SKIP:", exc.__class__.__name__, exc)
            self._failed_services.append({
                'service': service,
                'error': "Skipped: %s" % str(exc)
            })
        except NotImplementedError as exc:
            # Service simply lacks this method.
            if self.verbose:
                print("SKIP:", exc.__class__.__name__, exc)
            self._failed_services.append({
                'service': service,
                'error': "Not Implemented"
            })
    # No service returned: decide which terminal error to raise.
    if not self._failed_services:
        raise NotImplementedError("No Services defined for %s and %s" % (crypto, method_name))
    # If every service lacked the *multi variant, signal the caller to
    # fall back to private (per-item) mode.
    if set(x['error'] for x in self._failed_services) == set(['Not Implemented']) and method_name.endswith("multi"):
        raise RevertToPrivateMode("All services do not implement %s service" % method_name)
    failed_msg = ', '.join(["{service.name} -> {error}".format(**x) for x in self._failed_services])
    raise NoService(self.no_service_msg(*args, **kwargs) + "! Tried: " + failed_msg)
def uconcatenate(arrs, axis=0):
    """Concatenate a sequence of arrays along *axis*, preserving units."""
    joined = np.concatenate(arrs, axis=axis)
    return _validate_numpy_wrapper_units(joined, arrs)
def ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None):
    """Apply the cross product to two arrays, preserving units."""
    raw = np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis)
    # Units of a cross product multiply.
    product_units = arr1.units * arr2.units
    return unyt_array(raw, product_units, registry=registry)
def uintersect1d(arr1, arr2, assume_unique=False):
    """Find the sorted, unique elements common to both input arrays."""
    common = np.intersect1d(arr1, arr2, assume_unique=assume_unique)
    return _validate_numpy_wrapper_units(common, [arr1, arr2])
def uunion1d(arr1, arr2):
    """Find the union of two arrays, preserving units."""
    combined = np.union1d(arr1, arr2)
    return _validate_numpy_wrapper_units(combined, [arr1, arr2])
def unorm(data, ord=None, axis=None, keepdims=False):
    """Matrix or vector norm that preserves units."""
    result = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims)
    # A 0-d result becomes a scalar quantity; anything else stays an array.
    if result.shape == ():
        return unyt_quantity(result, data.units)
    return unyt_array(result, data.units)
def udot(op1, op2):
    """Matrix or vector dot product that preserves units."""
    # Operate on the bare ndarrays (.d), then reattach combined units.
    raw = np.dot(op1.d, op2.d)
    product_units = op1.units * op2.units
    if raw.shape == ():
        return unyt_quantity(raw, product_units)
    return unyt_array(raw, product_units)
def uhstack(arrs):
    """Stack arrays in sequence horizontally while preserving units."""
    stacked = np.hstack(arrs)
    return _validate_numpy_wrapper_units(stacked, arrs)
def ustack(arrs, axis=0):
    """Join a sequence of arrays along a new axis while preserving units."""
    stacked = np.stack(arrs, axis=axis)
    return _validate_numpy_wrapper_units(stacked, arrs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.