idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
55,900
def create_invoice_from_ticket(pk, list_lines):
    """Create an invoice from ticket lines by resolving them to their order lines.

    Returns the result of ``GenLineProduct.create_invoice_from_order`` on
    success, otherwise a dict with an ``'error'`` message.
    """
    context = {}
    if not list_lines:
        context['error'] = _('Lineas no seleccionadas')
        return context
    ticket_line_pks = [int(x) for x in list_lines]
    order_line_pks = [
        row[0]
        for row in SalesLineTicket.objects.values_list('line_order__pk').filter(pk__in=ticket_line_pks)
    ]
    if not order_line_pks:
        context['error'] = _('Lineas no relacionadas con pedido')
        return context
    lo = SalesLineOrder.objects.values_list('order__pk').filter(pk__in=order_line_pks)[:1]
    if lo and lo[0] and lo[0][0]:
        return GenLineProduct.create_invoice_from_order(lo[0][0], order_line_pks)
    context['error'] = _('Pedido no encontrado')
    return context
la pk y list_lines son de ticket necesitamos la info de las lineas de pedidos
55,901
def _check_values ( in_values ) : out_values = [ ] for value in in_values : out_values . append ( value ) return tuple ( out_values )
Check if values need to be converted before they get mogrified
55,902
def clone(srcpath, destpath, vcs=None):
    """Clone the existing repository at *srcpath* into *destpath*.

    If *vcs* is not given (or falsy), the type is probed from *srcpath*.
    """
    repo_type = vcs or probe(srcpath)
    repo_cls = _get_repo_class(repo_type)
    return repo_cls.clone(srcpath, destpath)
Clone an existing repository .
55,903
def probe(path):
    """Probe a repository at *path* for its type ('git', 'hg' or 'svn').

    Recognises working copies (.git/.hg directories), bare git repositories
    and svn repository layouts; raises UnknownVCSType otherwise.
    """
    import os
    from .common import UnknownVCSType

    def subdir(name):
        return os.path.isdir(os.path.join(path, name))

    def subfile(name):
        return os.path.isfile(os.path.join(path, name))

    if subdir('.git'):
        return 'git'
    if subdir('.hg'):
        return 'hg'
    # bare git repository layout
    if subfile('config') and subdir('objects') and subdir('refs') and subdir('branches'):
        return 'git'
    # svn repository layout
    if subfile('format') and subdir('conf') and subdir('db') and subdir('locks'):
        return 'svn'
    raise UnknownVCSType(path)
Probe a repository for its type .
55,904
def open(path, vcs=None):
    """Open the existing repository rooted at *path*.

    Probes the repository type when *vcs* is not supplied.
    """
    import os
    assert os.path.isdir(path), path + ' is not a directory'
    repo_cls = _get_repo_class(vcs or probe(path))
    return repo_cls(path)
Open an existing repository
55,905
def _check_attributes ( self , attributes , extra = None ) : extra = extra or ( ) unknown_keys = set ( attributes ) - set ( self . _possible_attributes ) - set ( extra ) if unknown_keys : logger . warning ( '%s got unknown attributes: %s' % ( self . __class__ . __name__ , unknown_keys ) )
Check if attributes given to the constructor can be used to instantiate a valid node .
55,906
def main(args=None):
    """Entry point for the tag CLI.

    Parses command-line arguments when none are supplied and dispatches
    to the matching entry in ``mains``.
    """
    if args is None:
        args = tag.cli.parser().parse_args()
    assert args.cmd in mains
    mains[args.cmd](args)
Entry point for the tag CLI .
55,907
def _build_request ( request ) : msg = bytes ( [ request [ 'cmd' ] ] ) if 'dest' in request : msg += bytes ( [ request [ 'dest' ] ] ) else : msg += b'\0' if 'sha' in request : msg += request [ 'sha' ] else : for dummy in range ( 64 ) : msg += b'0' logging . debug ( "Request (%d): %s" , len ( msg ) , msg ) return msg
Build message to transfer over the socket from a request .
55,908
def main():
    """Show example usage of the client API (runs forever once started)."""
    __async__ = True
    logging.basicConfig(format="%(levelname)-10s %(message)s", level=logging.DEBUG)
    if len(sys.argv) != 2:
        logging.error("Must specify configuration file")
        sys.exit()
    config = configparser.ConfigParser()
    config.read(sys.argv[1])
    password = config.get('default', 'password')
    host = config.get('default', 'host')
    port = config.getint('default', 'port')
    if __async__:
        client = Client(host, port, password, _callback)
    else:
        client = Client(host, port, password)
    status = client.messages()
    msg = status[0]
    print(msg)
    print(client.mp3(msg['sha'].encode('utf-8')))
    # Busy-wait so the (async) client keeps running.
    while True:
        continue
Show example using the API .
55,909
def start(self):
    """Start the asterisk mbox worker thread (no-op if already running)."""
    if self._thread:
        return
    logging.info("Starting asterisk mbox thread")
    # Drain any stale shutdown signals before launching the worker.
    try:
        while True:
            self.signal.get(False)
    except queue.Empty:
        pass
    self._thread = threading.Thread(target=self._loop)
    self._thread.setDaemon(True)
    self._thread.start()
Start thread .
55,910
def stop(self):
    """Stop the worker thread, closing the server socket if one is open."""
    if not self._thread:
        return
    self.signal.put("Stop")
    self._thread.join()
    sock = self._soc
    if sock:
        sock.shutdown()
        sock.close()
    self._thread = None
Stop thread .
55,911
def _recv_msg(self):
    """Read one framed message from the server.

    Frame layout: 1 command byte, 4-byte big-endian payload length, payload.
    Returns (command, payload).
    """
    command = ord(recv_blocking(self._soc, 1))
    raw_len = recv_blocking(self._soc, 4)
    msglen = (raw_len[0] << 24) | (raw_len[1] << 16) | (raw_len[2] << 8) | raw_len[3]
    return command, recv_blocking(self._soc, msglen)
Read a message from the server .
55,912
# Worker loop: maintain the server connection and multiplex between the
# shutdown signal queue, the client request queue, and the server socket.
# On (re)connect it clears any pending request and asks the server for the
# message list and CDR availability; a refused connection retries after a
# 5 second select timeout.  A CMD_MESSAGE_LIST request is answered from
# the cached self._status when possible (no callback, or explicit 'sync')
# instead of being sent to the server.  Lost connections (RuntimeError /
# ConnectionResetError) clear the pending request and trigger a reconnect.
def _loop ( self ) : request = { } connected = False while True : timeout = None sockets = [ self . request_queue , self . signal ] if not connected : try : self . _clear_request ( request ) self . _connect ( ) self . _soc . send ( _build_request ( { 'cmd' : cmd . CMD_MESSAGE_LIST } ) ) self . _soc . send ( _build_request ( { 'cmd' : cmd . CMD_MESSAGE_CDR_AVAILABLE } ) ) connected = True except ConnectionRefusedError : timeout = 5.0 if connected : sockets . append ( self . _soc ) readable , _writable , _errored = select . select ( sockets , [ ] , [ ] , timeout ) if self . signal in readable : break if self . _soc in readable : try : command , msg = self . _recv_msg ( ) self . _handle_msg ( command , msg , request ) except ( RuntimeError , ConnectionResetError ) : logging . warning ( "Lost connection" ) connected = False self . _clear_request ( request ) if self . request_queue in readable : request = self . request_queue . get ( ) self . request_queue . task_done ( ) if not connected : self . _clear_request ( request ) else : if ( request [ 'cmd' ] == cmd . CMD_MESSAGE_LIST and self . _status and ( not self . _callback or 'sync' in request ) ) : self . result_queue . put ( [ cmd . CMD_MESSAGE_LIST , self . _status ] ) request = { } else : self . _soc . send ( _build_request ( request ) )
Handle data .
55,913
def mp3(self, sha, **kwargs):
    """Get the raw MP3 data of the message identified by *sha*."""
    request = {'cmd': cmd.CMD_MESSAGE_MP3, 'sha': _get_bytes(sha)}
    return self._queue_msg(request, **kwargs)
Get raw MP3 of a message .
55,914
def delete(self, sha, **kwargs):
    """Delete the message identified by *sha*."""
    request = {'cmd': cmd.CMD_MESSAGE_DELETE, 'sha': _get_bytes(sha)}
    return self._queue_msg(request, **kwargs)
Delete a message .
55,915
def get_cdr(self, start=0, count=-1, **kwargs):
    """Request a range of CDR messages starting at *start* (count -1 means all)."""
    window = "{:d},{:d}".format(start, count)
    return self._queue_msg({'cmd': cmd.CMD_MESSAGE_CDR, 'sha': encode_to_sha(window)}, **kwargs)
Request range of CDR messages
55,916
def path(self) -> Path:
    """A Path joining the translated field names from get_path_pattern_list()
    with this object's name."""
    parts = list(self._iter_translated_field_names(self.get_path_pattern_list()))
    parts.append(self.get_name())
    return Path(*parts)
A Path for this name object joining field names from self . get_path_pattern_list with this object's name
55,917
def fold(self, predicate):
    """Apply *predicate* bottom-up: fold every child holder first, then this node.

    The return values of the children propagate upward via the mapping
    passed to *predicate*.
    """
    folded_children = {}
    for name, child in self._attributes.items():
        if isinstance(child, SerializableTypedAttributesHolder):
            folded_children[name] = child.fold(predicate)
    return predicate(self, folded_children)
Takes a predicate and applies it to each node starting from the leaves and making the return value propagate .
55,918
def the_one(cls):
    """Get the single global HelpUrlExpert object, creating it on first use."""
    instance = cls.THE_ONE
    if instance is None:
        instance = cls(settings.HELP_TOKENS_INI_FILE)
        cls.THE_ONE = instance
    return instance
Get the single global HelpUrlExpert object .
55,919
def get_config_value(self, section_name, option, default_option="default"):
    """Read *option* from *section_name*, falling back to *default_option*.

    The INI file is loaded lazily on first use.
    """
    if self.config is None:
        self.config = configparser.ConfigParser()
        self.config.read(self.ini_file_name)
    if option:
        try:
            return self.config.get(section_name, option)
        except configparser.NoOptionError:
            log.debug(
                "Didn't find a configuration option for '%s' section and '%s' option",
                section_name, option,
            )
    return self.config.get(section_name, default_option)
Read a value from the configuration with a default .
55,920
def url_for_token(self, token):
    """Find the full help URL for *token*.

    Looks up "book:tail" in the pages config, then appends the optional
    language (translated via the locales section) and version segments
    from settings before the tail.
    """
    book_url = self.get_config_value("pages", token)
    book, _sep, url_tail = book_url.partition(':')
    url = settings.HELP_TOKENS_BOOKS[book]
    lang = getattr(settings, "HELP_TOKENS_LANGUAGE_CODE", None)
    if lang is not None:
        url += "/" + self.get_config_value("locales", lang)
    version = getattr(settings, "HELP_TOKENS_VERSION", None)
    if version is not None:
        url += "/" + version
    return url + "/" + url_tail
Find the full URL for a help token .
55,921
def multi_load_data_custom(Channel, TraceTitle, RunNos, directoryPath='.', calcPSD=True, NPerSegmentPSD=1000000):
    """Load multiple datasets named with the LeCroy custom naming scheme at once.

    Files are located with search_data_custom and loaded in parallel with a
    process pool sized to the CPU count.
    """
    matching_files = search_data_custom(Channel, TraceTitle, RunNos, directoryPath)
    pool = _Pool(_cpu_count())
    loader = _partial(load_data, calcPSD=calcPSD, NPerSegmentPSD=NPerSegmentPSD)
    data = pool.map(loader, matching_files)
    pool.close()
    pool.terminate()
    pool.join()
    return data
Lets you load multiple datasets named with the LeCroy's custom naming scheme at once .
55,922
def search_data_custom(Channel, TraceTitle, RunNos, directoryPath='.'):
    """Create a list of full file paths matching the LeCroy custom naming scheme.

    Matches files containing 'C<Channel>' whose name is
    'C<Channel><TraceTitle><RunNo zero-padded to 5>.<ext>'.
    """
    candidates = [f for f in glob('{}/*'.format(directoryPath))
                  if 'C{}'.format(Channel) in f]
    paths = []
    for RunNo in RunNos:
        pattern = '*C{}'.format(Channel) + TraceTitle + str(RunNo).zfill(5) + '.*'
        paths.extend(_fnmatch.filter(candidates, pattern))
    print("loading the following files: {}".format(paths))
    return paths
Lets you create a list with full file paths of the files named with the LeCroy's custom naming scheme .
55,923
def calc_temp(Data_ref, Data):
    """Calculate the temperature of *Data* relative to *Data_ref*.

    The reference is assumed to be at 300 K; the result is also stored on
    ``Data.T``.
    """
    temperature = 300 * ((Data.A * Data_ref.Gamma) / (Data_ref.A * Data.Gamma))
    Data.T = temperature
    return temperature
Calculates the temperature of a data set relative to a reference . The reference is assumed to be at 300K .
55,924
def fit_curvefit(p0, datax, datay, function, **kwargs):
    """Fit *datay* against *datax* with *function* using scipy.optimize.curve_fit.

    Parameters:
        p0: initial parameter guesses.
        datax, datay: data arrays.
        function: model callable f(x, *params).
        **kwargs: forwarded to curve_fit.

    Returns:
        (pfit, perr): best-fit parameters and their 1-sigma uncertainties
        (NaN where a covariance diagonal entry is unavailable).
    """
    pfit, pcov = _curve_fit(function, datax, datay, p0=p0, epsfcn=0.0001, **kwargs)
    error = []
    for i in range(len(pfit)):
        # Was a bare `except:`; keep the best-effort NaN fallback but do not
        # swallow KeyboardInterrupt/SystemExit.  `_np.nan` replaces the
        # `_np.NaN` alias removed in NumPy 2.0.
        try:
            error.append(_np.absolute(pcov[i][i]) ** 0.5)
        except Exception:
            error.append(_np.nan)
    return pfit, _np.array(error)
Fits the data to a function using scipy . optimize . curve_fit
55,925
def moving_average(array, n=3):
    """Calculate the length-*n* moving average of *array* via a cumulative sum."""
    csum = _np.cumsum(array, dtype=float)
    csum[n:] -= csum[:-n]
    return csum[n - 1:] / n
Calculates the moving average of an array .
55,926
# Fit exponential relaxation theory to an autocorrelation trace.
# method='energy' fits a single parameter (Gamma); method='position' also
# fits the angular trap frequency (2*pi*TrapFreqGuess as initial guess).
# Optionally plots the data and fit with the time axis in microseconds and
# returns (Params_Fit, Params_Fit_Err, fig, ax) — fig/ax are None when
# MakeFig is False.
# NOTE(review): an unrecognised `method` leaves autocorrelation_fit and
# Params_Fit unassigned, so the code below would raise NameError — confirm
# callers only pass 'energy' or 'position'.
def fit_autocorrelation ( autocorrelation , time , GammaGuess , TrapFreqGuess = None , method = 'energy' , MakeFig = True , show_fig = True ) : datax = time datay = autocorrelation method = method . lower ( ) if method == 'energy' : p0 = _np . array ( [ GammaGuess ] ) Params_Fit , Params_Fit_Err = fit_curvefit ( p0 , datax , datay , _energy_autocorrelation_fitting_eqn ) autocorrelation_fit = _energy_autocorrelation_fitting_eqn ( _np . arange ( 0 , datax [ - 1 ] , 1e-7 ) , Params_Fit [ 0 ] ) elif method == 'position' : AngTrapFreqGuess = 2 * _np . pi * TrapFreqGuess p0 = _np . array ( [ GammaGuess , AngTrapFreqGuess ] ) Params_Fit , Params_Fit_Err = fit_curvefit ( p0 , datax , datay , _position_autocorrelation_fitting_eqn ) autocorrelation_fit = _position_autocorrelation_fitting_eqn ( _np . arange ( 0 , datax [ - 1 ] , 1e-7 ) , Params_Fit [ 0 ] , Params_Fit [ 1 ] ) if MakeFig == True : fig = _plt . figure ( figsize = properties [ "default_fig_size" ] ) ax = fig . add_subplot ( 111 ) ax . plot ( datax * 1e6 , datay , '.' , color = "darkblue" , label = "Autocorrelation Data" , alpha = 0.5 ) ax . plot ( _np . arange ( 0 , datax [ - 1 ] , 1e-7 ) * 1e6 , autocorrelation_fit , color = "red" , label = "fit" ) ax . set_xlim ( [ 0 , 30e6 / Params_Fit [ 0 ] / ( 2 * _np . pi ) ] ) legend = ax . legend ( loc = "best" , frameon = 1 ) frame = legend . get_frame ( ) frame . set_facecolor ( 'white' ) frame . set_edgecolor ( 'white' ) ax . set_xlabel ( "time (us)" ) ax . set_ylabel ( r"$\left | \frac{\langle x(t)x(t+\tau) \rangle}{\langle x(t)x(t) \rangle} \right |$" ) if show_fig == True : _plt . show ( ) return Params_Fit , Params_Fit_Err , fig , ax else : return Params_Fit , Params_Fit_Err , None , None
Fits exponential relaxation theory to data .
55,927
def IFFT_filter(Signal, SampleFreq, lowerFreq, upperFreq, PyCUDA=False):
    """Band-pass filter *Signal* by FFT -> zeroing bins -> IFFT.

    Bins below lowerFreq or above upperFreq are zeroed.  Negative-frequency
    bins always fall below lowerFreq for a positive band, hence the factor
    of 2 and the final real part.
    """
    if PyCUDA == True:
        spectrum = calc_fft_with_PyCUDA(Signal)
    else:
        print("starting fft")
        spectrum = scipy.fftpack.fft(Signal)
    print("starting freq calc")
    freqs = _np.fft.fftfreq(len(Signal)) * SampleFreq
    print("starting bin zeroing")
    spectrum[_np.where(freqs < lowerFreq)] = 0
    spectrum[_np.where(freqs > upperFreq)] = 0
    if PyCUDA == True:
        filtered = 2 * calc_ifft_with_PyCUDA(spectrum)
    else:
        print("starting ifft")
        filtered = 2 * scipy.fftpack.ifft(spectrum)
    print("done")
    return _np.real(filtered)
Filters data using FFT -> zeroing out FFT bins -> IFFT
55,928
def calc_fft_with_PyCUDA(Signal):
    """Calculate the FFT of *Signal* with the scikit-cuda library (PyCUDA backend).

    Runs a real-to-complex transform on the GPU and rebuilds the full
    complex spectrum from the half spectrum.
    """
    print("starting fft")
    samples = Signal.astype(_np.float32)
    gpu_in = _gpuarray.to_gpu(samples)
    gpu_out = _gpuarray.empty(len(samples) // 2 + 1, _np.complex64)
    plan = _Plan(samples.shape, _np.float32, _np.complex64)
    _fft(gpu_in, gpu_out, plan)
    half_spectrum = gpu_out.get()
    # Mirror the conjugate of the interior bins to recover the full spectrum.
    full_spectrum = _np.hstack(
        (half_spectrum, _np.conj(_np.flipud(half_spectrum[1:len(samples) // 2]))))
    print("fft done")
    return full_spectrum
Calculates the FFT of the passed signal by using the scikit - cuda library which relies on PyCUDA
55,929
def calc_ifft_with_PyCUDA(Signalfft):
    """Calculate the inverse FFT of *Signalfft* with the scikit-cuda library (PyCUDA backend).

    Feeds the non-negative-frequency half spectrum into a complex-to-real
    inverse transform and rescales the result.
    """
    print("starting ifft")
    spectrum = Signalfft.astype(_np.complex64)
    gpu_in = _gpuarray.to_gpu(spectrum[0:len(spectrum) // 2 + 1])
    gpu_out = _gpuarray.empty(len(spectrum), _np.float32)
    plan = _Plan(len(spectrum), _np.complex64, _np.float32)
    _ifft(gpu_in, gpu_out, plan)
    signal = gpu_out.get() / (2 * len(spectrum))
    print("ifft done")
    return signal
Calculates the inverse - FFT of the passed FFT - signal by using the scikit - cuda library which relies on PyCUDA
55,930
def make_butterworth_b_a(lowcut, highcut, SampleFreq, order=5, btype='band'):
    """Generate the (b, a) coefficients for a Butterworth IIR filter.

    lowcut/highcut are in Hz and normalised by the Nyquist frequency.
    'band' uses both cutoffs, 'low' only lowcut, 'high' only highcut;
    any other btype raises ValueError.
    """
    nyq = 0.5 * SampleFreq
    low, high = lowcut / nyq, highcut / nyq
    kind = btype.lower()
    if kind == 'band':
        Wn = [low, high]
    elif kind == 'low':
        Wn = low
    elif kind == 'high':
        Wn = high
    else:
        raise ValueError('Filter type unknown')
    return scipy.signal.butter(order, Wn, btype=btype)
Generates the b and a coefficients for a butterworth IIR filter .
55,931
def make_butterworth_bandpass_b_a(CenterFreq, bandwidth, SampleFreq, order=5, btype='band'):
    """Generate Butterworth band-pass (b, a) coefficients centred on CenterFreq
    with the given bandwidth."""
    half_band = bandwidth / 2
    return make_butterworth_b_a(CenterFreq - half_band, CenterFreq + half_band,
                                SampleFreq, order, btype)
Generates the b and a coefficients for a butterworth bandpass IIR filter .
55,932
def get_freq_response(a, b, show_fig=True, SampleFreq=(2 * pi), NumOfFreqs=500, whole=False):
    """Compute and plot the frequency and phase response of an IIR filter.

    Uses scipy.signal.freqz on the given (a, b) coefficient arrays.  When
    SampleFreq is left at 2*pi the x axis shows normalized frequency.

    Returns:
        (freqList, GainArray, PhaseDiffArray, fig1, ax1, fig2, ax2)
    """
    w, h = scipy.signal.freqz(b=b, a=a, worN=NumOfFreqs, whole=whole)
    freqList = w / (pi) * SampleFreq / 2.0
    # (removed unused local `himag`)
    GainArray = 20 * _np.log10(_np.abs(h))
    PhaseDiffArray = _np.unwrap(_np.arctan2(_np.imag(h), _np.real(h)))

    fig1 = _plt.figure()
    ax1 = fig1.add_subplot(111)
    ax1.plot(freqList, GainArray, '-', label="Specified Filter")
    ax1.set_title("Frequency Response")
    if SampleFreq == 2 * pi:
        # raw strings so the TeX escapes are not treated as string escapes
        ax1.set_xlabel((r"$\Omega$ - Normalized frequency "
                        r"($\pi$=Nyquist Frequency)"))
    else:
        ax1.set_xlabel("frequency (Hz)")
    ax1.set_ylabel("Gain (dB)")
    ax1.set_xlim([0, SampleFreq / 2.0])
    if show_fig == True:
        _plt.show()

    fig2 = _plt.figure()
    ax2 = fig2.add_subplot(111)
    ax2.plot(freqList, PhaseDiffArray, '-', label="Specified Filter")
    ax2.set_title("Phase Response")
    if SampleFreq == 2 * pi:
        ax2.set_xlabel((r"$\Omega$ - Normalized frequency "
                        r"($\pi$=Nyquist Frequency)"))
    else:
        ax2.set_xlabel("frequency (Hz)")
    ax2.set_ylabel("Phase Difference")
    ax2.set_xlim([0, SampleFreq / 2.0])
    if show_fig == True:
        _plt.show()
    return freqList, GainArray, PhaseDiffArray, fig1, ax1, fig2, ax2
This function takes an array of coefficients and finds the frequency response of the filter using scipy . signal . freqz . show_fig sets if the response should be plotted
55,933
def multi_plot_PSD(DataArray, xlim=(0, 500), units="kHz", LabelArray=None,
                   ColorArray=None, alphaArray=None, show_fig=True):
    """Plot the power spectral density of multiple data sets on the same axes.

    Parameters:
        DataArray: data objects exposing .freqs, .PSD and .filedir.
        xlim: x-axis limits in *units*.
        units: frequency unit label (its prefix scales the axis values).
        LabelArray / ColorArray / alphaArray: optional per-dataset legend
            labels, colors and alphas; None or empty means defaults.
        show_fig: call plt.show() before returning.

    Returns:
        (fig, ax)
    """
    unit_prefix = units[:-2]
    # None/empty defaults replace the mutable-default-argument anti-pattern
    # in the original signature; behaviour for callers is unchanged.
    if not LabelArray:
        LabelArray = ["DataSet {}".format(i) for i in range(len(DataArray))]
    if not ColorArray:
        ColorArray = [None] * len(DataArray)
    if not alphaArray:
        alphaArray = [None] * len(DataArray)
    fig = _plt.figure(figsize=properties['default_fig_size'])
    ax = fig.add_subplot(111)
    for i, data in enumerate(DataArray):
        ax.semilogy(unit_conversion(data.freqs, unit_prefix), data.PSD,
                    label=LabelArray[i], color=ColorArray[i], alpha=alphaArray[i])
    ax.set_xlabel("Frequency ({})".format(units))
    ax.set_xlim(xlim)
    ax.grid(which="major")
    legend = ax.legend(loc="best", frameon=1)
    frame = legend.get_frame()
    frame.set_facecolor('white')
    frame.set_edgecolor('white')
    ax.set_ylabel("PSD ($v^2/Hz$)")
    _plt.title('filedir=%s' % (DataArray[0].filedir))
    if show_fig == True:
        _plt.show()
    return fig, ax
plot the power spectral density for multiple data sets on the same axes .
55,934
def multi_plot_time(DataArray, SubSampleN=1, units='s', xlim=None, ylim=None,
                    LabelArray=None, show_fig=True):
    """Plot the time trace of multiple data sets on the same axes.

    Parameters:
        DataArray: data objects exposing .time.get_array() and .voltage.
        SubSampleN: plot every N-th sample.
        units: time unit label; its prefix scales the time axis.
        xlim / ylim: optional axis limits (in *units* for x).
        LabelArray: optional per-dataset legend labels; None/empty = defaults.
        show_fig: call plt.show() before returning.

    Returns:
        (fig, ax)
    """
    unit_prefix = units[:-1]
    # None default instead of a shared mutable [] default.
    if not LabelArray:
        LabelArray = ["DataSet {}".format(i) for i in range(len(DataArray))]
    fig = _plt.figure(figsize=properties['default_fig_size'])
    ax = fig.add_subplot(111)
    for i, data in enumerate(DataArray):
        ax.plot(unit_conversion(data.time.get_array()[::SubSampleN], unit_prefix),
                data.voltage[::SubSampleN], alpha=0.8, label=LabelArray[i])
    # BUG FIX: label was hard-coded to "time (s)" although the data above is
    # converted to *units* (the multi-subplot sibling already formats units).
    ax.set_xlabel("time ({})".format(units))
    if xlim is not None:
        ax.set_xlim(xlim)
    if ylim is not None:
        ax.set_ylim(ylim)
    ax.grid(which="major")
    legend = ax.legend(loc="best", frameon=1)
    frame = legend.get_frame()
    frame.set_facecolor('white')
    frame.set_edgecolor('white')
    ax.set_ylabel("voltage (V)")
    if show_fig == True:
        _plt.show()
    return fig, ax
plot the time trace for multiple data sets on the same axes .
55,935
def multi_subplots_time(DataArray, SubSampleN=1, units='s', xlim=None, ylim=None,
                        LabelArray=None, show_fig=True):
    """Plot the time trace of each data set on its own stacked subplot.

    Parameters:
        DataArray: data objects exposing .time.get_array() and .voltage.
        SubSampleN: plot every N-th sample.
        units: time unit label; its prefix scales the time axis.
        xlim / ylim: optional limits applied to every subplot.
        LabelArray: optional per-dataset legend labels; None/empty = defaults.
        show_fig: call plt.show() before returning.

    Returns:
        (fig, axs)
    """
    unit_prefix = units[:-1]
    NumDataSets = len(DataArray)
    # None default instead of a shared mutable [] default.
    if not LabelArray:
        LabelArray = ["DataSet {}".format(i) for i in range(NumDataSets)]
    fig, axs = _plt.subplots(NumDataSets, 1)
    if NumDataSets == 1:
        # BUG FIX: plt.subplots(1, 1) returns a bare Axes, which the
        # axs[i] indexing below would crash on.
        axs = _np.array([axs])
    for i, data in enumerate(DataArray):
        axs[i].plot(unit_conversion(data.time.get_array()[::SubSampleN], unit_prefix),
                    data.voltage[::SubSampleN], alpha=0.8, label=LabelArray[i])
        axs[i].set_xlabel("time ({})".format(units))
        axs[i].grid(which="major")
        axs[i].legend(loc="best")
        axs[i].set_ylabel("voltage (V)")
        if xlim is not None:
            axs[i].set_xlim(xlim)
        if ylim is not None:
            axs[i].set_ylim(ylim)
    if show_fig == True:
        _plt.show()
    return fig, axs
plot the time trace on multiple axes
55,936
def calc_autocorrelation(Signal, FFT=False, PyCUDA=False):
    """Calculate the normalised autocorrelation of *Signal*.

    FFT=True takes the Wiener-Khinchin route (|FFT|^2 then inverse FFT),
    optionally on the GPU with PyCUDA; otherwise scipy.signal.correlate
    is used directly.
    """
    if FFT == True:
        standardized = scipy.fftpack.ifftshift(
            (Signal - _np.average(Signal)) / _np.std(Signal))
        n, = standardized.shape
        # zero-pad in the middle to avoid circular wrap-around
        padded = _np.r_[standardized[:n // 2],
                        _np.zeros_like(standardized),
                        standardized[n // 2:]]
        if PyCUDA == True:
            spectrum = calc_fft_with_PyCUDA(padded)
        else:
            spectrum = scipy.fftpack.fft(padded)
        power = _np.absolute(spectrum) ** 2
        if PyCUDA == True:
            acf = calc_ifft_with_PyCUDA(power)
        else:
            acf = scipy.fftpack.ifft(power)
        return _np.real(acf)[:n // 2] / (_np.arange(n // 2)[::-1] + n // 2)
    centered = Signal - _np.mean(Signal)
    acf = scipy.signal.correlate(centered, centered, mode='full')
    return acf[acf.size // 2:] / acf[acf.size // 2]
Calculates the autocorrelation of a given Signal , either directly or via the FFT
55,937
def _GetRealImagArray ( Array ) : ImagArray = _np . array ( [ num . imag for num in Array ] ) RealArray = _np . array ( [ num . real for num in Array ] ) return RealArray , ImagArray
Returns the real and imaginary components of each element in an array and returns them in 2 resulting arrays .
55,938
def _GetComplexConjugateArray ( Array ) : ConjArray = _np . array ( [ num . conj ( ) for num in Array ] ) return ConjArray
Calculates the complex conjugate of each element in an array and returns the resulting array .
55,939
def fm_discriminator(Signal):
    """Calculate the digital FM discriminator of a real-valued time signal.

    Multiplies each analytic-signal sample by the conjugate of the previous
    one and returns the phase of the products.
    """
    analytic = _hilbert(Signal)
    # product of each sample with the conjugate of its predecessor
    stepped = analytic[1:] * _np.conj(analytic[:-1])
    return _np.arctan2(_np.imag(stepped), _np.real(stepped))
Calculates the digital FM discriminator from a real - valued time signal .
55,940
def find_collisions(Signal, tolerance=50):
    """Find collision events via shifts in the signal's instantaneous phase.

    Returns a boolean-like list, one entry per FM-discriminator sample.
    """
    fmd = fm_discriminator(Signal)
    baseline = _np.mean(fmd)
    return [_is_this_a_collision([value, baseline, tolerance]) for value in fmd]
Finds collision events in the signal from the shift in phase of the signal .
55,941
def count_collisions(Collisions):
    """Count unique collision events (False->True transitions) and their indices.

    Note: the previous-value flag starts as True, so a collision already in
    progress at index 0 is not counted.
    """
    indices = []
    previous = True
    for i, flag in enumerate(Collisions):
        if flag == True and previous == False:
            indices.append(i)
        previous = flag
    return len(indices), indices
Counts the number of unique collisions and gets the collision index .
55,942
def steady_state_potential(xdata, HistBins=100):
    """Calculate the steady-state potential -log(P(x)) from a position histogram.

    Used in fit_radius_from_potentials.  Returns (bins, potential) where the
    bin positions are the lower edges offset by one mean bin width.
    """
    import numpy as _np
    pops, edges = _np.histogram(xdata, HistBins)
    lower_edges = edges[0:-1]
    bins = lower_edges + _np.mean(_np.diff(lower_edges))
    probs = pops / float(_np.sum(pops))
    return bins, -_np.log(probs)
Calculates the steady state potential . Used in fit_radius_from_potentials .
55,943
def calc_z0_and_conv_factor_from_ratio_of_harmonics(z, z2, NA=0.999):
    """Calculate the physical z amplitude and the volts-to-metres conversion factor
    from the ratio of the z signal and its second harmonic.

    Parameters:
        z: z-motion signal array (volts).
        z2: second-harmonic signal array (volts).
        NA: numerical aperture used to estimate the beam waist.

    Returns:
        (z0, ConvFactor)
    """
    V1 = calc_mean_amp(z)
    V2 = calc_mean_amp(z2)
    ratio = V2 / V1
    beta = 4 * ratio
    laserWavelength = 1550e-9
    k0 = (2 * pi) / (laserWavelength)
    WaistSize = laserWavelength / (pi * NA)
    Zr = pi * WaistSize ** 2 / laserWavelength  # Rayleigh range
    z0 = beta / (k0 - 1 / Zr)
    ConvFactor = V1 / z0
    # (removed unused local T0 = 300)
    return z0, ConvFactor
Calculates the Conversion Factor and physical amplitude of motion in nm by comparison of the ratio of the heights of the z signal and second harmonic of z .
55,944
def calc_mass_from_z0(z0, w0):
    """Calculate the particle mass via equipartition (at 300 K) from the
    angular frequency *w0* and amplitude *z0* of the z signal."""
    T0 = 300
    return Boltzmann * T0 / (w0 ** 2 * z0 ** 2)
Calculates the mass of the particle using the equipartition from the angular frequency of the z signal and the average amplitude of the z signal in nms .
55,945
def calc_mass_from_fit_and_conv_factor(A, Damping, ConvFactor):
    """Calculate the particle mass (at 300 K) from the fitted A parameter, the
    fitted damping (angular units) and the conversion factor."""
    T0 = 300
    return 2 * Boltzmann * T0 / (pi * A) * ConvFactor ** 2 * Damping
Calculates mass from the A parameter from fitting the damping from fitting in angular units and the Conversion factor calculated from comparing the ratio of the z signal and first harmonic of z .
55,946
def unit_conversion(array, unit_prefix, current_prefix=""):
    """Convert *array* (or scalar) from one SI prefix scale to another.

    Raises ValueError for an unknown prefix.
    """
    UnitDict = {'E': 1e18, 'P': 1e15, 'T': 1e12, 'G': 1e9, 'M': 1e6, 'k': 1e3,
                '': 1, 'm': 1e-3, 'u': 1e-6, 'n': 1e-9, 'p': 1e-12,
                'f': 1e-15, 'a': 1e-18}
    if unit_prefix not in UnitDict:
        raise ValueError("You entered {} for the unit_prefix, this is not a valid prefix".format(unit_prefix))
    if current_prefix not in UnitDict:
        raise ValueError("You entered {} for the current_prefix, this is not a valid prefix".format(current_prefix))
    return array * (UnitDict[current_prefix] / UnitDict[unit_prefix])
Converts an array or value to of a certain unit scale to another unit scale .
55,947
def get_wigner(z, freq, sample_freq, histbins=200, show_plot=False):
    """Approximate the Wigner quasi-probability distribution of the motion.

    Slices z into single oscillation periods (phase -180 to 180 degrees),
    histograms position against phase, then applies the inverse radon
    transform (SART).  Returns (iradon_output, bin_centres).
    """
    phase, phase_slices = extract_slices(z, freq, sample_freq, show_plot=False)
    counts_array, bin_edges = histogram_phase(phase_slices, phase, histbins,
                                              show_plot=show_plot)
    bin_width = bin_edges[1] - bin_edges[0]
    bin_centres = bin_edges[:-1] + bin_width
    return _iradon_sart(counts_array, theta=phase), bin_centres
Calculates an approximation to the wigner quasi - probability distribution by splitting the z position array into slices of the length of one period of the motion . This slice is then associated with phase from - 180 to 180 degrees . These slices are then histogramed in order to get a distribution of counts of where the particle is observed at each phase . The 2d array containing the counts varying with position and phase is then passed through the inverse radon transformation using the Simultaneous Algebraic Reconstruction Technique approximation from the scikit - image package .
55,948
def plot_wigner3d(iradon_output, bin_centres, bin_centre_units="",
                  cmap=_cm.cubehelix_r, view=(10, -45), figsize=(10, 10)):
    """Plot the Wigner-space representation as a 3D surface with marginals.

    Parameters:
        iradon_output: 2D reconstruction array.
        bin_centres: positions used for both x and y axes.
        bin_centre_units: unit label for the axes.
        cmap, view, figsize: plot appearance options.

    Returns:
        (fig, ax)
    """
    fig = _plt.figure(figsize=figsize)
    ax = fig.add_subplot(111, projection='3d')
    resid1 = iradon_output.sum(axis=0)
    resid2 = iradon_output.sum(axis=1)
    x = bin_centres
    y = bin_centres
    xpos, ypos = _np.meshgrid(x, y)
    X = xpos
    Y = ypos
    Z = iradon_output
    ax.set_xlabel("x ({})".format(bin_centre_units))
    # BUG FIX: the second label was also applied with set_xlabel, which
    # overwrote the x label and left the y axis unlabelled.
    ax.set_ylabel("y ({})".format(bin_centre_units))
    # marginal distributions scaled to the surface height
    ax.scatter(_np.min(X) * _np.ones_like(y), y,
               resid2 / _np.max(resid2) * _np.max(Z), alpha=0.7)
    ax.scatter(x, _np.max(Y) * _np.ones_like(x),
               resid1 / _np.max(resid1) * _np.max(Z), alpha=0.7)
    surf = ax.plot_surface(X, Y, Z, cmap=cmap, linewidth=0, antialiased=False)
    fig.colorbar(surf, shrink=0.5, aspect=5)
    ax.view_init(view[0], view[1])
    return fig, ax
Plots the wigner space representation as a 3D surface plot .
55,949
# Plot the Wigner-space reconstruction as a central 2D pcolor heatmap with
# marginal bar charts (axis sums resid1/resid2) above and to the right, and
# a colorbar on the left.  The rect_* lists are [left, bottom, width,
# height] figure fractions laying out the three axes by hand; the final
# axis limits are made symmetric about zero and the marginal axes share
# the heatmap's limits.  Returns (fig, axWigner, axHistx, axHisty, cbar).
def plot_wigner2d ( iradon_output , bin_centres , cmap = _cm . cubehelix_r , figsize = ( 6 , 6 ) ) : xx , yy = _np . meshgrid ( bin_centres , bin_centres ) resid1 = iradon_output . sum ( axis = 0 ) resid2 = iradon_output . sum ( axis = 1 ) wigner_marginal_seperation = 0.001 left , width = 0.2 , 0.65 - 0.1 bottom , height = 0.1 , 0.65 - 0.1 bottom_h = height + bottom + wigner_marginal_seperation left_h = width + left + wigner_marginal_seperation cbar_pos = [ 0.03 , bottom , 0.05 , 0.02 + width ] rect_wigner = [ left , bottom , width , height ] rect_histx = [ left , bottom_h , width , 0.2 ] rect_histy = [ left_h , bottom , 0.2 , height ] fig = _plt . figure ( figsize = figsize ) axWigner = _plt . axes ( rect_wigner ) axHistx = _plt . axes ( rect_histx ) axHisty = _plt . axes ( rect_histy ) pcol = axWigner . pcolor ( xx , yy , iradon_output , cmap = cmap ) binwidth = bin_centres [ 1 ] - bin_centres [ 0 ] axHistx . bar ( bin_centres , resid2 , binwidth ) axHisty . barh ( bin_centres , resid1 , binwidth ) _plt . setp ( axHistx . get_xticklabels ( ) , visible = False ) _plt . setp ( axHisty . get_yticklabels ( ) , visible = False ) for tick in axHisty . get_xticklabels ( ) : tick . set_rotation ( - 90 ) cbaraxes = fig . add_axes ( cbar_pos ) cbar = fig . colorbar ( pcol , cax = cbaraxes , drawedges = False ) cbar . solids . set_edgecolor ( "face" ) cbar . solids . set_rasterized ( True ) cbar . ax . set_yticklabels ( cbar . ax . yaxis . get_ticklabels ( ) , y = 0 , rotation = 45 ) plotlimits = _np . max ( _np . abs ( bin_centres ) ) axWigner . axis ( ( - plotlimits , plotlimits , - plotlimits , plotlimits ) ) axHistx . set_xlim ( axWigner . get_xlim ( ) ) axHisty . set_ylim ( axWigner . get_ylim ( ) ) return fig , axWigner , axHistx , axHisty , cbar
Plots the wigner space representation as a 2D heatmap .
55,950
def get_time_data(self, timeStart=None, timeEnd=None):
    """Return the (time, voltage) arrays restricted to [timeStart, timeEnd].

    Defaults to the trace's full recorded range.
    """
    if timeStart == None:
        timeStart = self.timeStart
    if timeEnd == None:
        timeEnd = self.timeEnd
    time = self.time.get_array()
    start = _np.where(time == take_closest(time, timeStart))[0][0]
    end = _np.where(time == take_closest(time, timeEnd))[0][0]
    if end == len(time) - 1:
        end += 1  # include the final sample when the range runs to the end
    return time[start:end], self.voltage[start:end]
Gets the time and voltage data .
55,951
def plot_time_data(self, timeStart=None, timeEnd=None, units='s', show_fig=True):
    """Plot voltage against time between timeStart and timeEnd (given in seconds).

    Parameters:
        timeStart / timeEnd: range in seconds; defaults to the full trace.
        units: time unit label for the x axis; its prefix scales the values.
        show_fig: call plt.show() before returning.

    Returns:
        (fig, ax)
    """
    unit_prefix = units[:-1]
    if timeStart == None:
        timeStart = self.timeStart
    if timeEnd == None:
        timeEnd = self.timeEnd
    time = self.time.get_array()
    StartIndex = _np.where(time == take_closest(time, timeStart))[0][0]
    EndIndex = _np.where(time == take_closest(time, timeEnd))[0][0]
    fig = _plt.figure(figsize=properties['default_fig_size'])
    ax = fig.add_subplot(111)
    ax.plot(unit_conversion(time[StartIndex:EndIndex], unit_prefix),
            self.voltage[StartIndex:EndIndex])
    ax.set_xlabel("time ({})".format(units))
    ax.set_ylabel("voltage (V)")
    # BUG FIX: the x-limits were previously set in raw seconds while the
    # plotted data had been converted to *units*; convert them the same way.
    ax.set_xlim([unit_conversion(timeStart, unit_prefix),
                 unit_conversion(timeEnd, unit_prefix)])
    if show_fig == True:
        _plt.show()
    return fig, ax
plot time data against voltage data .
55,952
def plot_PSD(self, xlim=None, units="kHz", show_fig=True, timeStart=None, timeEnd=None, *args, **kwargs):
    """Plot the power spectral density, optionally recomputed over [timeStart, timeEnd].

    Extra positional/keyword arguments are forwarded to the semilogy call.
    Returns (fig, ax).
    """
    if timeStart == None and timeEnd == None:
        freqs, PSD = self.freqs, self.PSD
    else:
        freqs, PSD = self.get_PSD(timeStart=timeStart, timeEnd=timeEnd)
    unit_prefix = units[:-2]
    if xlim == None:
        xlim = [0, unit_conversion(self.SampleFreq / 2, unit_prefix)]
    fig = _plt.figure(figsize=properties['default_fig_size'])
    ax = fig.add_subplot(111)
    ax.semilogy(unit_conversion(freqs, unit_prefix), PSD, *args, **kwargs)
    ax.set_xlabel("Frequency ({})".format(units))
    ax.set_xlim(xlim)
    ax.grid(which="major")
    ax.set_ylabel("$S_{xx}$ ($V^2/Hz$)")
    if show_fig == True:
        _plt.show()
    return fig, ax
plot the power spectral density .
55,953
def calc_area_under_PSD(self, lowerFreq, upperFreq):
    """Sum the PSD bins whose frequencies lie between lowerFreq and upperFreq.

    The bounds are snapped to the nearest frequency bin before summing.
    """
    lowerBin = take_closest(self.freqs, lowerFreq)
    lowerIndex = int(_np.where(self.freqs == lowerBin)[0][0])
    upperBin = take_closest(self.freqs, upperFreq)
    upperIndex = int(_np.where(self.freqs == upperBin)[0][0])
    return sum(self.PSD[lowerIndex:upperIndex])
Sums the area under the PSD from lowerFreq to upperFreq .
55,954
def get_fit_auto(self, CentralFreq, MaxWidth=15000, MinWidth=500, WidthIntervals=500, MakeFig=True, show_fig=True, silent=False):
    """Try a range of fit widths around CentralFreq and keep the one with least error.

    Each candidate width is fitted via get_fit_from_peak; candidates are ranked
    by the summed squared relative errors of A, Gamma and OmegaTrap. The fit is
    then redone with the best width (this time optionally producing a figure).

    Returns (OmegaTrap, A, Gamma, fig, ax); raises ValueError if no width
    produced a usable fit.
    """
    MinTotalSumSquaredError = _np.infty
    # scan widths from MaxWidth down to MinWidth in steps of WidthIntervals
    for Width in _np.arange(MaxWidth, MinWidth - WidthIntervals, -WidthIntervals):
        try:
            OmegaTrap, A, Gamma, _, _ = self.get_fit_from_peak(
                CentralFreq - Width / 2,
                CentralFreq + Width / 2,
                silent=True, MakeFig=False, show_fig=False)
        except RuntimeError:
            # a failed fit still participates in ranking, but with NaN error
            _warnings.warn("Couldn't find good fit with width {}".format(Width), RuntimeWarning)
            val = _uncertainties.ufloat(_np.NaN, _np.NaN)
            OmegaTrap = val
            A = val
            Gamma = val
        # combined relative-error metric used to rank candidate widths
        TotalSumSquaredError = (A.std_dev / A.n) ** 2 + (Gamma.std_dev / Gamma.n) ** 2 + (OmegaTrap.std_dev / OmegaTrap.n) ** 2
        if TotalSumSquaredError < MinTotalSumSquaredError:
            MinTotalSumSquaredError = TotalSumSquaredError
            BestWidth = Width
    if silent != True:
        print("found best")
    try:
        # refit with the winning width, honouring the caller's figure options
        OmegaTrap, A, Gamma, fig, ax = self.get_fit_from_peak(
            CentralFreq - BestWidth / 2,
            CentralFreq + BestWidth / 2,
            MakeFig=MakeFig, show_fig=show_fig, silent=silent)
    except UnboundLocalError:
        # BestWidth was never assigned: every candidate fit failed
        raise ValueError("A best width was not found, try increasing the number of widths tried by either decreasing WidthIntervals or MinWidth or increasing MaxWidth")
    OmegaTrap = self.OmegaTrap
    A = self.A
    Gamma = self.Gamma
    self.FTrap = OmegaTrap / (2 * pi)
    return OmegaTrap, A, Gamma, fig, ax
Tries a range of regions to search for peaks and runs the one with the least error and returns the parameters with the least errors .
55,955
def calc_gamma_from_variance_autocorrelation_fit(self, NumberOfOscillations, GammaGuess=None, silent=False, MakeFig=True, show_fig=True):
    """Estimate the total damping Gamma from the autocorrelation of chunk variances.

    The voltage trace is split into chunks of NumberOfOscillations oscillations,
    the variance of each chunk is computed, and the autocorrelation of that
    variance series is fitted with an exponential relaxation.

    Returns
    -------
    tuple of (Gamma, fig, ax)
        fig and ax are None when MakeFig is False.

    Raises
    ------
    ValueError
        If the spectrum fit has not been done yet (self.FTrap missing).
    """
    try:
        SplittedArraySize = int(self.SampleFreq / self.FTrap.n) * NumberOfOscillations
    except (KeyError, AttributeError):
        # BUGFIX: the original built ValueError(...) without raising it, so
        # execution continued into a NameError on SplittedArraySize. A missing
        # self.FTrap attribute actually raises AttributeError, so catch it too.
        raise ValueError('You forgot to do the spectrum fit to specify self.FTrap exactly.')
    VoltageArraySize = len(self.voltage)
    # variance of each complete chunk; the trailing partial chunk is dropped
    SnippetsVariances = _np.var(
        self.voltage[:VoltageArraySize - _np.mod(VoltageArraySize, SplittedArraySize)]
        .reshape(-1, SplittedArraySize), axis=1)
    autocorrelation = calc_autocorrelation(SnippetsVariances)
    time = _np.array(range(len(autocorrelation))) * SplittedArraySize / self.SampleFreq
    if GammaGuess is None:
        # crude slope-based initial guess for the relaxation rate
        Gamma_Initial = (time[4] - time[0]) / (autocorrelation[0] - autocorrelation[4])
    else:
        Gamma_Initial = GammaGuess
    # a single call suffices; fig/ax are simply unused when MakeFig is False
    Params, ParamsErr, fig, ax = fit_autocorrelation(
        autocorrelation, time, Gamma_Initial, MakeFig=MakeFig, show_fig=show_fig)
    if not silent:
        print("\n")
        print("Big Gamma: {} +- {}% ".format(Params[0], ParamsErr[0] / Params[0] * 100))
    Gamma = _uncertainties.ufloat(Params[0], ParamsErr[0])
    if MakeFig:
        return Gamma, fig, ax
    else:
        return Gamma, None, None
Calculates the total damping, i.e. Gamma, by splitting the time trace into chunks of NumberOfOscillations oscillations and calculating the variance of each of these chunks. This array of variances is then used for the autocorrelation. The autocorrelation is fitted with an exponential relaxation function and the function returns the parameters with errors.
55,956
def calc_gamma_from_energy_autocorrelation_fit(self, GammaGuess=None, silent=False, MakeFig=True, show_fig=True):
    """Estimate the total damping Gamma from the autocorrelation of the energy.

    The energy at each sample is formed from position (voltage) and velocity
    (finite difference of voltage); its autocorrelation is fitted with an
    exponential relaxation.

    Returns
    -------
    tuple of (Gamma, fig, ax)
        fig and ax are None when MakeFig is False.
    """
    # energy ~ omega^2 x^2 + v^2, with v approximated by a finite difference
    autocorrelation = calc_autocorrelation(
        self.voltage[:-1] ** 2 * self.OmegaTrap.n ** 2
        + (_np.diff(self.voltage) * self.SampleFreq) ** 2)
    time = self.time.get_array()[:len(autocorrelation)]
    # fix: use 'is None' rather than '== None' (PEP8 E711)
    if GammaGuess is None:
        # crude slope-based initial guess for the relaxation rate
        Gamma_Initial = (time[4] - time[0]) / (autocorrelation[0] - autocorrelation[4])
    else:
        Gamma_Initial = GammaGuess
    # a single call suffices; fig/ax are simply unused when MakeFig is False
    Params, ParamsErr, fig, ax = fit_autocorrelation(
        autocorrelation, time, Gamma_Initial, MakeFig=MakeFig, show_fig=show_fig)
    if not silent:
        print("\n")
        print("Big Gamma: {} +- {}% ".format(Params[0], ParamsErr[0] / Params[0] * 100))
    Gamma = _uncertainties.ufloat(Params[0], ParamsErr[0])
    if MakeFig:
        return Gamma, fig, ax
    else:
        return Gamma, None, None
Calculates the total damping, i.e. Gamma, by calculating the energy at each point in time. This energy array is then used for the autocorrelation. The autocorrelation is fitted with an exponential relaxation function and the function returns the parameters with errors.
55,957
def extract_parameters(self, P_mbar, P_Error, method="chang"):
    """Derive radius, mass and conversion factor from the fitted A and Gamma.

    Delegates to the module-level extract_parameters function and stores the
    results (with uncertainties) on the instance.
    """
    values, errors = extract_parameters(P_mbar, P_Error,
                                        self.A.n, self.A.std_dev,
                                        self.Gamma.n, self.Gamma.std_dev,
                                        method=method)
    R, M, ConvFactor = values
    RErr, MErr, ConvFactorErr = errors
    self.Radius = _uncertainties.ufloat(R, RErr)
    self.Mass = _uncertainties.ufloat(M, MErr)
    self.ConvFactor = _uncertainties.ufloat(ConvFactor, ConvFactorErr)
    return self.Radius, self.Mass, self.ConvFactor
Extracts the Radius mass and Conversion factor for a particle .
55,958
def get_value(self, ColumnName, RunNo):
    """Return, as a float, the ColumnName entry for the row matching RunNo.

    RunNo is formatted to a string for comparison against the table column.
    """
    matching_rows = self.ORGTableData[self.ORGTableData.RunNo == '{}'.format(RunNo)]
    return float(matching_rows[ColumnName])
Retrieves the value of the column named ColumnName associated with a particular run number.
55,959
def steady_state_potential(xdata, HistBins=100):
    """Estimate the steady-state potential from a trace via Boltzmann statistics.

    The trace is histogrammed, the populations normalised to probabilities, and
    the potential taken as -log(p) (in units of kT).

    Parameters
    ----------
    xdata : array_like
        The position time trace.
    HistBins : int, optional
        Number of histogram bins (default 100).

    Returns
    -------
    tuple of (ndarray, ndarray)
        Bin centres (shifted by one bin width) and the potential values.
    """
    import numpy as np
    # fix: compute the histogram once instead of two identical calls
    pops, bin_edges = np.histogram(xdata, HistBins)
    bins = bin_edges[0:-1]
    bins = bins + np.mean(np.diff(bins))  # shift to bin centres
    pops = pops / float(np.sum(pops))     # normalise to probabilities
    return bins, -np.log(pops)
Calculates the steady state potential .
55,960
def finished(finished_status, update_interval, status_key, edit_at_key):
    """Build a pymongo filter matching all finished, recently-edited tasks.

    A task counts as finished when its status is at least finished_status and
    it was edited within the last update_interval seconds.
    """
    status_filter = {"$gte": finished_status}
    edited_filter = {"$gte": x_seconds_before_now(update_interval)}
    return {status_key: status_filter, edit_at_key: edited_filter}
Create dict query for pymongo that getting all finished task .
55,961
def unfinished(finished_status, update_interval, status_key, edit_at_key):
    """Build a pymongo filter matching tasks that are unfinished or stale.

    A task matches when its status is below finished_status, or when its last
    edit is older than update_interval seconds.
    """
    not_done = {status_key: {"$lt": finished_status}}
    stale = {edit_at_key: {"$lt": x_seconds_before_now(update_interval)}}
    return {"$or": [not_done, stale]}
Create dict query for pymongo that getting all unfinished task .
55,962
def getCommandLine(self):
    """Build the command line, prepending precursor and change-directory parts.

    Empty precursor/path attributes simply contribute nothing.
    """
    parts = []
    if self.precursor:
        parts.append(self.precursor + self.sep)
    if self.path:
        parts.append(self.cd + ' ' + self.path + self.sep)
    parts.append(PosixCommand.getCommandLine(self))
    return ''.join(parts)
Insert the precursor and change directory commands
55,963
def _policy_psets(policy_instances):
    """Return the permission sets using all of the given policy instances.

    An empty input matches the permission sets with no policy instances at all.
    """
    if len(policy_instances) == 0:
        return PermissionSet.objects.filter(policyinstance__isnull=True)
    qs = PermissionSet.objects.filter(policyinstance__policy__in=policy_instances)
    return qs.distinct()
Find all permission sets making use of all of a list of policy_instances . The input is an array of policy instances .
55,964
def _get_permission_set_tree(user):
    """Return the analysed permission-set tree for a user.

    A tree cached on the user instance is returned directly; otherwise one is
    generated (this function does NOT cache it). Raises ObjectDoesNotExist
    when an authenticated user has no permission set.
    """
    if hasattr(user, CACHED_PSET_PROPERTY_KEY):
        return getattr(user, CACHED_PSET_PROPERTY_KEY)
    if not user.is_authenticated():
        # anonymous users share one dedicated permission set
        return PermissionSet.objects.get(anonymous_user=True).tree()
    try:
        return user.permissionset.first().tree()
    except AttributeError:
        # first() returned None: no permission set exists for this user
        raise ObjectDoesNotExist
Helper to return the cached permission set tree from the user instance if set; otherwise generates and returns the analyzed permission set tree. Does not cache the tree automatically; that must be done explicitly.
55,965
def ensure_permission_set_tree_cached(user):
    """Cache the permission-set tree on the user instance (no-op if present).

    A missing permission set is silently ignored.
    """
    if hasattr(user, CACHED_PSET_PROPERTY_KEY):
        return
    try:
        tree = _get_permission_set_tree(user)
    except ObjectDoesNotExist:
        return
    setattr(user, CACHED_PSET_PROPERTY_KEY, tree)
Helper to cache permission set tree on user instance
55,966
def parsed(self):
    """Return the JSON-decoded content, decoding and caching it on first use."""
    cached = self._parsed
    if not cached:
        cached = json.loads(self.content)
        self._parsed = cached
    return cached
Get the JSON dictionary object which represents the content .
55,967
def cleanup_logger(self):
    """Close the log handler's file handle and detach it from the logger."""
    handler = self.log_handler
    handler.close()
    self.log.removeHandler(handler)
Clean up logger to close out file handles .
55,968
def update_configs(self, release):
    """Update the fedora-atomic.git config repositories for a given release.

    Maintains a local mirror of the config repo, checks the release branch out
    into the release's temp dir (recorded as release['git_dir']), and
    optionally deletes any pre-existing *.repo files from the checkout.
    """
    git_repo = release['git_repo']
    git_cache = release['git_cache']
    # keep a bare mirror so repeated runs only need a fetch
    if not os.path.isdir(git_cache):
        self.call(['git', 'clone', '--mirror', git_repo, git_cache])
    else:
        self.call(['git', 'fetch', '--all', '--prune'], cwd=git_cache)
    # clone the release branch from the mirror into the temp dir
    git_dir = release['git_dir'] = os.path.join(release['tmp_dir'], os.path.basename(git_repo))
    self.call(['git', 'clone', '-b', release['git_branch'], git_cache, git_dir])
    if release['delete_repo_files']:
        for repo_file in glob.glob(os.path.join(git_dir, '*.repo')):
            self.log.info('Deleting %s' % repo_file)
            os.unlink(repo_file)
Update the fedora - atomic . git repositories for a given release
55,969
def mock_cmd(self, release, *cmd, **kwargs):
    """Run a mock command in the chroot for the given release.

    Pass new_chroot=True to add the --new-chroot flag.
    """
    base = '{mock_cmd}'
    if kwargs.get('new_chroot') is True:
        base += ' --new-chroot'
    base += ' --configdir={mock_dir}'
    return self.call(base.format(**release).split() + list(cmd))
Run a mock command in the chroot for a given release
55,970
def generate_mock_config(self, release):
    """Dynamically generate the mock configuration for a release.

    Renders templates/mock.mako into <tmp_dir>/mock/<mock>.cfg, symlinking the
    system-wide mock defaults alongside it. Records the directory as
    release['mock_dir'].
    """
    mock_tmpl = pkg_resources.resource_string(__name__, 'templates/mock.mako')
    mock_dir = release['mock_dir'] = os.path.join(release['tmp_dir'], 'mock')
    mock_cfg = os.path.join(release['mock_dir'], release['mock'] + '.cfg')
    os.mkdir(mock_dir)
    # reuse the system-wide mock defaults alongside the generated config
    for cfg in ('site-defaults.cfg', 'logging.ini'):
        os.symlink('/etc/mock/%s' % cfg, os.path.join(mock_dir, cfg))
    with file(mock_cfg, 'w') as cfg:  # NOTE: Python 2 file() builtin
        mock_out = Template(mock_tmpl).render(**release)
        self.log.debug('Writing %s:\n%s', mock_cfg, mock_out)
        cfg.write(mock_out)
Dynamically generate our mock configuration
55,971
def mock_chroot(self, release, cmd, **kwargs):
    """Execute cmd inside the mock container for a release via mock --chroot."""
    return self.mock_cmd(release, '--chroot', cmd, **kwargs)
Run a command in the mock container for a release
55,972
def generate_repo_files(self, release):
    """Dynamically generate the yum repo configuration for a release.

    Renders templates/repo.mako into <git_dir>/<repo>.repo.
    """
    repo_tmpl = pkg_resources.resource_string(__name__, 'templates/repo.mako')
    repo_file = os.path.join(release['git_dir'], '%s.repo' % release['repo'])
    with file(repo_file, 'w') as repo:  # NOTE: Python 2 file() builtin
        repo_out = Template(repo_tmpl).render(**release)
        self.log.debug('Writing repo file %s:\n%s', repo_file, repo_out)
        repo.write(repo_out)
    self.log.info('Wrote repo configuration to %s', repo_file)
Dynamically generate our yum repo configuration
55,973
def ostree_init(self, release):
    """Initialize the OSTree repository for a release.

    Creates the parent directory when missing and runs the configured
    ostree_init command in the mock chroot only when the output repo does not
    exist yet.
    """
    out = release['output_dir'].rstrip('/')
    base = os.path.dirname(out)
    if not os.path.isdir(base):
        self.log.info('Creating %s', base)
        os.makedirs(base, mode=0755)  # NOTE: Python 2 octal literal (0o755)
    if not os.path.isdir(out):
        self.mock_chroot(release, release['ostree_init'])
Initialize the OSTree for a release
55,974
def ostree_compose(self, release):
    """Compose the OSTree in the mock container.

    Writes the treefile JSON, runs the configured compose command, and scrapes
    "<ref> => <commitid>" from its stdout.

    Returns (ref, commitid); both are None when no such line was printed.
    """
    start = datetime.utcnow()
    treefile = os.path.join(release['git_dir'], 'treefile.json')
    cmd = release['ostree_compose'] % treefile
    with file(treefile, 'w') as tree:  # NOTE: Python 2 file() builtin
        json.dump(release['treefile'], tree)
    out, err, rcode = self.mock_chroot(release, cmd, new_chroot=True)
    ref = None
    commitid = None
    # rpm-ostree prints "<ref> => <commitid>" on success; take the last one
    for line in out.split('\n'):
        if ' => ' in line:
            line = line.replace('\n', '')
            ref, _, commitid = line.partition(' => ')
    self.log.info('rpm-ostree compose complete (%s), ref %s, commitid %s',
                  datetime.utcnow() - start, ref, commitid)
    return ref, commitid
Compose the OSTree in the mock container
55,975
def update_ostree_summary(self, release):
    """Regenerate the ostree summary file for a release and return its path."""
    self.log.info('Updating the ostree summary for %s', release['name'])
    self.mock_chroot(release, release['ostree_summary'])
    return os.path.join(release['output_dir'], 'summary')
Update the ostree summary file and return a path to it
55,976
def sync_in(self, release):
    """Sync the canonical repo into the local working directory.

    Does nothing unless the canonical tree exists and rsync_in_objs is set.
    """
    canonical = release['canonical_dir']
    if not (os.path.exists(canonical) and release.get('rsync_in_objs')):
        return
    out = release['output_dir']
    if not os.path.isdir(out):
        self.log.info('Creating %s', out)
        os.makedirs(out)
    self.call(release['rsync_in_objs'])
    self.call(release['rsync_in_rest'])
Sync the canonical repo to our local working directory
55,977
def sync_out(self, release):
    """Sync the local tree out to the canonical location.

    Does nothing unless rsync_out_objs is set.
    """
    if not release.get('rsync_out_objs'):
        return
    canonical = release['canonical_dir']
    if not os.path.isdir(canonical):
        self.log.info('Creating %s', canonical)
        os.makedirs(canonical)
    self.call(release['rsync_out_objs'])
    self.call(release['rsync_out_rest'])
Sync our tree to the canonical location
55,978
def call(self, cmd, **kwargs):
    """Run cmd in a subprocess, log its output, and return (out, err, returncode).

    Extra kwargs are forwarded to subprocess.Popen. Raises a bare Exception on
    a non-zero return code.
    """
    if isinstance(cmd, basestring):  # NOTE: Python 2; accepts a command string
        cmd = cmd.split()
    self.log.info('Running %s', cmd)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    out, err = p.communicate()
    if out:
        self.log.info(out)
    if err:
        # stderr is only an error when the process actually failed
        if p.returncode == 0:
            self.log.info(err)
        else:
            self.log.error(err)
    if p.returncode != 0:
        self.log.error('returncode = %d' % p.returncode)
        raise Exception
    return out, err, p.returncode
A simple subprocess wrapper
55,979
def intersect(self, other):
    """Return the overlapping sub-range with other, or None when disjoint."""
    if not self.overlap(other):
        return None
    return Range(max(self._start, other.start), min(self._end, other.end))
Determine the interval of overlap between this range and another .
55,980
def overlap(self, other):
    """Return True when this range and other share at least one position.

    Half-open ranges [start, end) overlap iff each starts before the other ends.
    """
    # idiom fix: return the boolean expression directly instead of
    # 'if cond: return True / return False'
    return self._start < other.end and self._end > other.start
Determine whether this range overlaps with another .
55,981
def contains(self, other):
    """Determine whether other lies entirely within this range."""
    starts_at_or_before = self._start <= other.start
    ends_at_or_after = self._end >= other.end
    return starts_at_or_before and ends_at_or_after
Determine whether this range contains another .
55,982
def transform(self, offset):
    """Shift this range in place by offset; the shifted start must stay positive."""
    new_start = self._start + offset
    new_end = self._end + offset
    assert new_start > 0, ('offset {} invalid; resulting range [{}, {}) is '
                           'undefined'.format(offset, new_start, new_end))
    self._start = new_start
    self._end = new_end
Shift this range by the specified offset .
55,983
def run(cls, command, cwd=".", **kwargs):
    """Run command in a subprocess, collect output and returncode.

    The first word of command may be remapped via cls.COMMAND_MAP and the
    argument list transformed by cls.PREPROCESSOR_MAP /
    cls.POSTPROCESSOR_MAP hooks. Returns a CommandResult value object; an
    OSError is captured into the result rather than raised.
    """
    assert isinstance(command, six.string_types)
    command_result = CommandResult()
    command_result.command = command
    use_shell = cls.USE_SHELL
    if "shell" in kwargs:
        use_shell = kwargs.pop("shell")
    if six.PY2 and isinstance(command, six.text_type):
        # py2 shlex cannot handle unicode reliably; encode first
        command = codecs.encode(command, "utf-8")
    cmdargs = shlex.split(command)
    command0 = cmdargs[0]
    # allow the first word to be remapped to a real executable
    real_command = cls.COMMAND_MAP.get(command0, None)
    if real_command:
        cmdargs0 = real_command.split()
        cmdargs = cmdargs0 + cmdargs[1:]
    preprocessors = cls.PREPROCESSOR_MAP.get(command0)
    if preprocessors:
        cmdargs = cls.preprocess_command(preprocessors, cmdargs, command, cwd)
    try:
        process = subprocess.Popen(cmdargs,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   universal_newlines=True,
                                   shell=use_shell,
                                   cwd=cwd, **kwargs)
        out, err = process.communicate()
        if six.PY2:
            # decode captured bytes into text using the stream encoding
            default_encoding = 'UTF-8'
            out = six.text_type(out, process.stdout.encoding or default_encoding)
            err = six.text_type(err, process.stderr.encoding or default_encoding)
        process.poll()
        assert process.returncode is not None
        command_result.stdout = out
        command_result.stderr = err
        command_result.returncode = process.returncode
        if cls.DEBUG:
            print("shell.cwd={0}".format(kwargs.get("cwd", None)))
            print("shell.command: {0}".format(" ".join(cmdargs)))
            print("shell.command.output:\n{0};".format(command_result.output))
    except OSError as e:
        # e.g. executable not found: record the failure instead of raising
        command_result.stderr = u"OSError: %s" % e
        command_result.returncode = e.errno
        assert e.errno != 0
    postprocessors = cls.POSTPROCESSOR_MAP.get(command0)
    if postprocessors:
        command_result = cls.postprocess_command(postprocessors, command_result)
    return command_result
Make a subprocess call collect its output and returncode . Returns CommandResult instance as ValueObject .
55,984
def get_field_template(self, bound_field, template_name=None):
    """Choose the field template, using a fieldset template for multi-input widgets.

    The override only applies when no template other than the default one was
    selected by the parent class.
    """
    template_name = super().get_field_template(bound_field, template_name)
    widget = bound_field.field.widget
    is_multi_input = isinstance(widget, (forms.RadioSelect, forms.CheckboxSelectMultiple))
    if template_name == self.field_template and is_multi_input:
        return 'tapeforms/fields/foundation_fieldset.html'
    return template_name
Uses a special field template for widget with multiple inputs . It only applies if no other template than the default one has been defined .
55,985
def printer(self):
    """Print this PDA state's id, role tag and outgoing transitions (py2 print)."""
    print " ID " + repr(self.id)
    # self.type encodes the state's role: 0=start, 1=push, 2=pop, 3=read, 4=stop
    if self.type == 0:
        print " Tag: - "
        print " Start State - "
    elif self.type == 1:
        print " Push " + repr(self.sym)
    elif self.type == 2:
        print " Pop State " + repr(self.sym)
    elif self.type == 3:
        print " Read State " + repr(self.sym)
    elif self.type == 4:
        print " Stop State " + repr(self.sym)
    for j in self.trans:
        # NOTE(review): this condition reduces to len(self.trans[j]) >= 1
        if len(self.trans[j]) > 1 or (len(self.trans[j]) == 1):
            for symbol in self.trans[j]:
                print " On Symbol " + repr(symbol) + " Transition To State " + repr(j)
Prints PDA state attributes
55,986
def printer(self):
    """Print every state of the PDA (states 0..n inclusive) via each state's printer (py2 print)."""
    i = 0
    while i < self.n + 1:
        print "--------- State No --------" + repr(i)
        self.s[i].printer()
        i = i + 1
Prints PDA states and their attributes
55,987
def _ActionDatabase(self, cmd, args=None, commit=True, error=True):
    """Execute a SQL command against the database and return the fetched rows.

    OperationalError is re-raised when error is True, otherwise swallowed and
    None is returned. The transaction is committed when commit is True.
    """
    goodlogging.Log.Info("DB", "Database Command: {0} {1}".format(cmd, args),
                         verbosity=self.logVerbosity)
    with sqlite3.connect(self._dbPath) as db:
        try:
            result = db.execute(cmd) if args is None else db.execute(cmd, args)
        except sqlite3.OperationalError:
            if error is True:
                raise
            return None
        if commit is True:
            db.commit()
        return result.fetchall()
Do action on database .
55,988
def _PurgeTable(self, tableName):
    """Delete every row from tableName without dropping the table itself."""
    goodlogging.Log.Info("DB",
                         "Deleting all entries from table {0}".format(tableName),
                         verbosity=self.logVerbosity)
    self._ActionDatabase("DELETE FROM {0}".format(tableName))
Deletes all rows from given table without dropping table .
55,989
def GetConfigValue(self, fieldName):
    """Return the value matching fieldName in the Config table, or None.

    A fatal log error is raised when multiple rows match (corrupt database).
    """
    result = self._ActionDatabase("SELECT Value FROM Config WHERE Name=?", (fieldName,))
    if not result:
        # covers both a failed query (None) and an empty result set
        return None
    if len(result) == 1:
        goodlogging.Log.Info("DB",
                             "Found database match in config table {0}={1}".format(fieldName, result[0][0]),
                             verbosity=self.logVerbosity)
        return result[0][0]
    goodlogging.Log.Fatal("DB",
                          "Database corrupted - multiple matches found in config table {0}={1}".format(fieldName, result))
Match given field name in Config table and return corresponding value .
55,990
def SetConfigValue(self, fieldName, value):
    """Insert or update fieldName=value in the Config table."""
    existing = self.GetConfigValue(fieldName)
    if existing is None:
        goodlogging.Log.Info("DB",
                             "Adding {0}={1} to database config table".format(fieldName, value),
                             verbosity=self.logVerbosity)
        self._ActionDatabase("INSERT INTO Config VALUES (?,?)", (fieldName, value))
    else:
        goodlogging.Log.Info("DB",
                             "Updating {0} in database config table from {1} to {2}".format(fieldName, existing, value),
                             verbosity=self.logVerbosity)
        self._ActionDatabase("UPDATE Config SET Value=? WHERE Name=?", (value, fieldName))
Set value in Config table .
55,991
def _AddToSingleColumnTable(self, tableName, columnHeading, newValue):
    """Insert newValue into a single-column table unless it is already present."""
    existingRows = self._GetFromSingleColumnTable(tableName)
    alreadyPresent = existingRows is not None and any(
        row == newValue for row in existingRows)
    if alreadyPresent:
        goodlogging.Log.Info("DB",
                             "{0} already exists in {1} table".format(newValue, tableName),
                             verbosity=self.logVerbosity)
    else:
        goodlogging.Log.Info("DB",
                             "Adding {0} to {1} table".format(newValue, tableName),
                             verbosity=self.logVerbosity)
        self._ActionDatabase("INSERT INTO {0} VALUES (?)".format(tableName), (newValue,))
Add an entry to a table containing a single column . Checks existing table entries to avoid duplicate entries if the given value already exists in the table .
55,992
def AddShowToTVLibrary(self, showName):
    """Add a new show to the TVLibrary table and return its generated ShowID.

    A fatal log error is raised when the show already exists.
    """
    goodlogging.Log.Info("DB", "Adding {0} to TV library".format(showName),
                         verbosity=self.logVerbosity)
    existingShow = self.SearchTVLibrary(showName=showName)
    if existingShow is None:
        self._ActionDatabase("INSERT INTO TVLibrary (ShowName) VALUES (?)", (showName,))
        newShowID = self._ActionDatabase(
            "SELECT (ShowID) FROM TVLibrary WHERE ShowName=?", (showName,))[0][0]
        return newShowID
    else:
        goodlogging.Log.Fatal("DB",
                              "An entry for {0} already exists in the TV library".format(showName))
Add show to TVLibrary table . If the show already exists in the table a fatal error is raised .
55,993
def UpdateShowDirInTVLibrary(self, showID, showDir):
    """Set the show directory for the given show id in the TVLibrary table."""
    goodlogging.Log.Info("DB",
                         "Updating TV library for ShowID={0}: ShowDir={1}".format(showID, showDir))
    self._ActionDatabase("UPDATE TVLibrary SET ShowDir=? WHERE ShowID=?",
                         (showDir, showID))
Update show directory entry for given show id in TVLibrary table .
55,994
def SearchTVLibrary(self, showName=None, showID=None, showDir=None):
    """Search the TVLibrary table.

    At most one of showName, showID or showDir selects the lookup key
    (priority order: showDir, then showID, then showName); with no arguments
    the whole table is returned. Returns the matching row(s) or None. For a
    unique lookup, multiple matches trigger a fatal log error.
    """
    unique = True
    if showName is None and showID is None and showDir is None:
        # no filter: dump the whole table, multiple rows are expected
        goodlogging.Log.Info("DB", "Looking up all items in TV library", verbosity=self.logVerbosity)
        queryString = "SELECT * FROM TVLibrary"
        queryTuple = None
        unique = False
    elif showDir is not None:
        goodlogging.Log.Info("DB", "Looking up from TV library where ShowDir is {0}".format(showDir), verbosity=self.logVerbosity)
        queryString = "SELECT * FROM TVLibrary WHERE ShowDir=?"
        queryTuple = (showDir,)
    elif showID is not None:
        goodlogging.Log.Info("DB", "Looking up from TV library where ShowID is {0}".format(showID), verbosity=self.logVerbosity)
        queryString = "SELECT * FROM TVLibrary WHERE ShowID=?"
        queryTuple = (showID,)
    elif showName is not None:
        goodlogging.Log.Info("DB", "Looking up from TV library where ShowName is {0}".format(showName), verbosity=self.logVerbosity)
        queryString = "SELECT * FROM TVLibrary WHERE ShowName=?"
        queryTuple = (showName,)
    result = self._ActionDatabase(queryString, queryTuple, error=False)
    if result is None:
        return None
    elif len(result) == 0:
        return None
    elif len(result) == 1:
        goodlogging.Log.Info("DB", "Found match in TVLibrary: {0}".format(result), verbosity=self.logVerbosity)
        return result
    elif len(result) > 1:
        if unique is True:
            goodlogging.Log.Fatal("DB", "Database corrupted - multiple matches found in TV Library: {0}".format(result))
        else:
            goodlogging.Log.Info("DB", "Found multiple matches in TVLibrary: {0}".format(result), verbosity=self.logVerbosity)
            return result
Search TVLibrary table .
55,995
def SearchFileNameTable(self, fileName):
    """Look up fileName in the FileName table; return the matching ShowID or None.

    A fatal log error is raised on multiple matches (corrupt database).
    """
    goodlogging.Log.Info("DB",
                         "Looking up filename string '{0}' in database".format(fileName),
                         verbosity=self.logVerbosity)
    result = self._ActionDatabase("SELECT ShowID FROM FileName WHERE FileName=?",
                                  (fileName,), error=False)
    if result is None:
        goodlogging.Log.Info("DB",
                             "No match found in database for '{0}'".format(fileName),
                             verbosity=self.logVerbosity)
        return None
    if len(result) == 0:
        return None
    if len(result) == 1:
        goodlogging.Log.Info("DB", "Found file name match: {0}".format(result),
                             verbosity=self.logVerbosity)
        return result[0][0]
    goodlogging.Log.Fatal("DB",
                          "Database corrupted - multiple matches found in database table for: {0}".format(result))
Search FileName table .
55,996
def AddToFileNameTable(self, fileName, showID):
    """Record a fileName -> showID match in the FileName table.

    A fatal log error is raised when the file name is already present.
    """
    goodlogging.Log.Info("DB",
                         "Adding filename string match '{0}'={1} to database".format(fileName, showID),
                         verbosity=self.logVerbosity)
    if self.SearchFileNameTable(fileName) is None:
        self._ActionDatabase("INSERT INTO FileName (FileName, ShowID) VALUES (?,?)",
                             (fileName, showID))
    else:
        goodlogging.Log.Fatal("DB",
                              "An entry for '{0}' already exists in the FileName table".format(fileName))
Add entry to FileName table . If the file name and show id combination already exists in the table a fatal error is raised .
55,997
def SearchSeasonDirTable(self, showID, seasonNum):
    """Return the season directory for (showID, seasonNum), or None if absent.

    A fatal log error is raised on multiple matches (corrupt database).
    """
    goodlogging.Log.Info("DB",
                         "Looking up directory for ShowID={0} Season={1} in database".format(showID, seasonNum),
                         verbosity=self.logVerbosity)
    result = self._ActionDatabase("SELECT SeasonDir FROM SeasonDir WHERE ShowID=? AND Season=?",
                                  (showID, seasonNum), error=False)
    if result is None:
        goodlogging.Log.Info("DB", "No match found in database",
                             verbosity=self.logVerbosity)
        return None
    if len(result) == 0:
        return None
    if len(result) == 1:
        goodlogging.Log.Info("DB", "Found database match: {0}".format(result),
                             verbosity=self.logVerbosity)
        return result[0][0]
    goodlogging.Log.Fatal("DB",
                          "Database corrupted - multiple matches found in database table for: {0}".format(result))
Search SeasonDir table .
55,998
def AddSeasonDirTable(self, showID, seasonNum, seasonDir):
    """Record the season directory for (showID, seasonNum) in the SeasonDir table.

    A matching existing entry is a no-op; a conflicting entry is a fatal error.
    """
    goodlogging.Log.Info("DB",
                         "Adding season directory ({0}) to database for ShowID={1}, Season={2}".format(seasonDir, showID, seasonNum),
                         verbosity=self.logVerbosity)
    existingDir = self.SearchSeasonDirTable(showID, seasonNum)
    if existingDir is None:
        self._ActionDatabase("INSERT INTO SeasonDir (ShowID, Season, SeasonDir) VALUES (?,?,?)",
                             (showID, seasonNum, seasonDir))
    elif existingDir == seasonDir:
        goodlogging.Log.Info("DB", "A matching entry already exists in the SeasonDir table",
                             verbosity=self.logVerbosity)
    else:
        goodlogging.Log.Fatal("DB", "A different entry already exists in the SeasonDir table")
Add entry to SeasonDir table . If a different entry for season directory is found for the given show id and season number combination this raises a fatal error .
55,999
def PrintAllTables(self):
    """Print the contents of every known database table."""
    goodlogging.Log.Info("DB", "Database contents:\n")
    for tableName in self._tableDict.keys():
        self._PrintDatabaseTable(tableName)
Prints contents of every table .