idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
11,500
def get_rotation_parameters(phases, magnitudes):
    """Simulate one step of the uniformly-controlled rotation cascade.

    Consecutive (phase, magnitude) pairs are merged: each pair yields one
    RZ angle (phase difference), one RY angle (from the magnitude ratio),
    and a combined phase/magnitude for the next level.

    :param phases: sequence of phases, consumed in adjacent pairs.
    :param magnitudes: sequence of magnitudes, consumed in adjacent pairs.
    :return: tuple (z_thetas, y_thetas, new_phases, new_magnitudes).
    """
    z_thetas, y_thetas = [], []
    new_phases, new_magnitudes = [], []
    for even in range(0, len(phases), 2):
        phi, psi = phases[even], phases[even + 1]
        z_thetas.append(phi - psi)
        # Combined phase for the next level is the average of the pair.
        new_phases.append((phi + psi) / 2.)
        a, b = magnitudes[even], magnitudes[even + 1]
        if a == 0 and b == 0:
            # Both amplitudes vanish: the rotation angle is arbitrary, use 0.
            y_thetas.append(0)
        else:
            y_thetas.append(2 * np.arcsin((a - b) / (np.sqrt(2 * (a ** 2 + b ** 2)))))
        new_magnitudes.append(np.sqrt((a ** 2 + b ** 2) / 2.))
    return z_thetas, y_thetas, new_phases, new_magnitudes
Simulates one step of rotations .
11,501
def get_reversed_unification_program(angles, control_indices, target, controls, mode):
    """Build the reversed circuit for one unification step.

    Decomposes the uniformly controlled rotations into single-qubit
    rotations interleaved with CNOTs, then reverses the gate sequence.

    :param angles: rotation angles, one per decomposed gate slot.
    :param control_indices: 1-based indices into ``controls`` for each CNOT.
    :param target: target qubit of the rotations.
    :param controls: control qubits; if empty, no CNOTs are emitted.
    :param mode: 'phase' (RZ rotations) or 'magnitude' (RY rotations).
    :return: Program containing the reversed gate sequence.
    :raises ValueError: if mode is not recognized.
    """
    if mode == 'phase':
        rotation = RZ
    elif mode == 'magnitude':
        rotation = RY
    else:
        raise ValueError("mode must be \'phase\' or \'magnitude\'")
    gates = []
    for j, angle in enumerate(angles):
        if angle != 0:
            # Negated angle because the circuit is being reversed.
            gates.append(rotation(-angle, target))
        if len(controls) > 0:
            gates.append(CNOT(controls[control_indices[j] - 1], target))
    return Program().inst(gates[::-1])
Gets the Program representing the reversed circuit for the decomposition of the uniformly controlled rotations in a unification step .
11,502
def get_ancestors(self):
    """Return the list of this node's ancestors.

    Ordered from the immediate parent up to the root (a node whose
    ``parent`` is None). Returns an empty list for a root node.
    """
    ancestors = []
    current = self.parent
    while current is not None:
        ancestors.append(current)
        current = current.parent
    return ancestors
Returns a list of ancestors of the node, ordered from the immediate parent up to the root.
11,503
def bitwise_dot_product(bs0: str, bs1: str) -> str:
    """Return the bitwise dot product (mod 2) of two bit strings.

    :param bs0: first bit string, e.g. "101".
    :param bs1: second bit string; must be the same length as bs0.
    :return: "0" or "1".
    :raises ValueError: if the strings have different lengths.
    """
    if len(bs0) != len(bs1):
        raise ValueError("Bit strings are not of equal length")
    total = sum(int(a) * int(b) for a, b in zip(bs0, bs1))
    return str(total % 2)
A helper to calculate the bitwise dot-product between two strings representing bit-vectors.
11,504
def notebook_mode(m):
    """Configure whether this module assumes a Jupyter notebook environment.

    Sets the module-global progress-bar factory: ``tqdm.tnrange`` (notebook
    widget) when ``m`` is truthy, plain-terminal ``tqdm.trange`` otherwise.

    :param m: truthy to enable notebook mode.
    """
    global NOTEBOOK_MODE
    global TRANGE
    NOTEBOOK_MODE = m
    TRANGE = tqdm.tnrange if NOTEBOOK_MODE else tqdm.trange
Configure whether this module should assume that it is being run from a jupyter notebook . This sets some global variables related to how progress for long measurement sequences is indicated .
11,505
def sample_outcomes(probs, n):
    """Draw n samples from a discrete distribution over outcomes 0..k-1.

    Uses inverse-transform sampling against the cumulative distribution.

    :param probs: probabilities of outcomes 0, 1, ..., k-1 (sum to 1).
    :param n: number of samples to draw.
    :return: numpy array of n sampled outcome indices.
    """
    cdf = np.cumsum(probs)
    draws = np.random.rand(n)
    # First CDF entry exceeding the uniform draw is the sampled outcome.
    return np.array([np.where(r < cdf)[0][0] for r in draws])
For a discrete probability distribution `probs` with outcomes 0, 1, ..., k-1, draw n random samples.
11,506
def sample_bad_readout(program, num_samples, assignment_probs, cxn):
    """Sample measurement outcomes of a program under imperfect readout.

    Simulates the wavefunction on the QVM, converts amplitudes to outcome
    probabilities, skews them by the assignment-probability matrix, and
    samples from the resulting distribution.

    :param program: Quil program to simulate.
    :param num_samples: number of samples to draw.
    :param assignment_probs: readout assignment probability matrix.
    :param cxn: QVMConnection used to compute the wavefunction.
    :return: array of sampled outcomes.
    """
    wavefunction = cxn.wavefunction(program)
    outcome_probs = assignment_probs.dot(abs(wavefunction.amplitudes.ravel()) ** 2)
    return sample_outcomes(outcome_probs, num_samples)
Generate n samples of measuring all outcomes of a Quil program assuming the assignment probabilities assignment_probs by simulating the wave function on a qvm QVMConnection cxn
11,507
def plot_pauli_transfer_matrix(ptransfermatrix, ax, labels, title):
    """Visualize a Pauli transfer matrix on the given matplotlib axis.

    :param ptransfermatrix: square matrix of transfer coefficients in [-1, 1].
    :param ax: matplotlib Axes to draw on.
    :param labels: tick labels for the Pauli operator basis.
    :param title: plot title.
    :return: the same Axes, for chaining.
    """
    image = ax.imshow(ptransfermatrix, interpolation="nearest",
                      cmap=rigetti_3_color_cm, vmin=-1, vmax=1)
    dim = len(labels)
    plt.colorbar(image, ax=ax)
    # X axis: input operators, labels rotated for readability.
    ax.set_xticks(range(dim))
    ax.set_xlabel("Input Pauli Operator", fontsize=20)
    ax.set_xticklabels(labels, rotation=45)
    # Y axis: output operators.
    ax.set_yticks(range(dim))
    ax.set_ylabel("Output Pauli Operator", fontsize=20)
    ax.set_yticklabels(labels)
    ax.set_title(title, fontsize=25)
    ax.grid(False)
    return ax
Visualize the Pauli Transfer Matrix of a process .
11,508
def state_histogram(rho, ax=None, title="", threshold=0.001):
    """Plot a density matrix as a 3D bar chart.

    Bar height encodes |amplitude|; bar color encodes the complex phase.
    Bars whose magnitude is at or below ``threshold`` are made transparent.

    :param rho: density matrix (sparse, qutip-style: has .data.toarray()).
    :param ax: optional 3D axis; a new figure/Axes3D is created if None.
    :param title: plot title.
    :param threshold: magnitude below which bars are hidden.
    :return: the 3D axis used for plotting.
    """
    amplitudes = rho.data.toarray().ravel()
    n_qubits = int(round(np.log2(rho.shape[0])))
    if ax is None:
        fig = plt.figure(figsize=(10, 6))
        ax = Axes3D(fig, azim=-35, elev=35)

    cmap = rigetti_4_color_cm
    norm = mpl.colors.Normalize(-np.pi, np.pi)
    colors = cmap(norm(np.angle(amplitudes)))
    heights = abs(amplitudes)
    # Alpha channel: fully opaque above threshold, invisible otherwise.
    colors[:, 3] = 1.0 * (heights > threshold)

    dim = 2 ** n_qubits
    xs, ys = np.meshgrid(range(dim), range(dim))
    xs = xs.ravel()
    ys = ys.ravel()
    zs = np.zeros_like(xs)
    dxs = dys = np.ones_like(xs) * 0.8
    _ = ax.bar3d(xs, ys, zs, dxs, dys, heights, color=colors)

    ax.set_xticks(np.arange(dim) + .4)
    ax.set_xticklabels(basis_labels(n_qubits))
    ax.set_yticks(np.arange(dim) + .4)
    ax.set_yticklabels(basis_labels(n_qubits))
    ax.set_zlim3d([0, 1])

    # Separate colorbar axis showing the phase-to-color mapping.
    cax, kw = mpl.colorbar.make_axes(ax, shrink=.75, pad=.1)
    cb = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
    cb.set_ticks([-np.pi, -np.pi / 2, 0, np.pi / 2, np.pi])
    cb.set_ticklabels((r'$-\pi$', r'$-\pi/2$', r'$0$', r'$\pi/2$', r'$\pi$'))
    cb.set_label('arg')
    ax.view_init(azim=-55, elev=45)
    ax.set_title(title)
    return ax
Visualize a density matrix as a 3d bar plot with complex phase encoded as the bar color .
11,509
def bitlist_to_int(bitlist):
    """Convert a bit sequence (most significant bit first) to an unsigned int.

    Accepts any iterable of bit-like values ('0'/'1' characters, 0/1 ints);
    each value is reduced mod 2 via ``int(b) & 1``. Empty input yields 0.
    """
    value = 0
    for bit in bitlist:
        value = (value << 1) | (int(bit) & 1)
    return value
Convert a binary bitstring into the corresponding unsigned integer .
11,510
def sample_assignment_probs(qubits, nsamples, cxn):
    """Estimate the readout assignment probability matrix for some qubits.

    Prepares each computational basis state, measures it ``nsamples`` times
    asynchronously, histograms the observed outcomes, and feeds the
    histograms to ``estimate_assignment_probs``.

    :param qubits: qubits to characterize.
    :param nsamples: samples per basis-state preparation.
    :param cxn: connection supporting run_and_measure_async / wait_for_job.
    :return: the estimated assignment probability matrix.
    """
    dimension = 2 ** len(qubits)
    preps = basis_state_preps(*qubits)
    _log.info('Submitting jobs...')
    jobs = []
    for _, prep in izip(TRANGE(dimension), preps):
        jobs.append(cxn.run_and_measure_async(prep, qubits, nsamples))

    _log.info('Waiting for results...')
    hists = []
    for _, job_id in izip(TRANGE(dimension), jobs):
        job = cxn.wait_for_job(job_id)
        results = job.result()
        idxs = list(map(bitlist_to_int, results))
        hists.append(make_histogram(idxs, dimension))
    return estimate_assignment_probs(hists)
Sample the assignment probabilities of qubits using nsamples per measurement and then compute the estimated assignment probability matrix . See the docstring for estimate_assignment_probs for more information .
11,511
def run_in_parallel(programs, nsamples, cxn, shuffle=True):
    """Execute groups of Protoquil programs on disjoint qubits in parallel.

    :param programs: 2D collection of programs, one row per qubit group.
    :param nsamples: number of samples per program.
    :param cxn: connection used by ``_run_in_parallel``.
    :param shuffle: if True, randomize the order within each group and
        un-shuffle the results before returning.
    :return: array of results, aligned with the input program order.
    """
    if not shuffle:
        return _run_in_parallel(programs, nsamples, cxn)

    n_groups = len(programs)
    n_progs_per_group = len(programs[0])
    # One identity permutation per group, shuffled independently below.
    permutations = np.outer(np.ones(n_groups, dtype=int),
                            np.arange(n_progs_per_group, dtype=int))
    inverse_permutations = np.zeros_like(permutations)
    for row in range(n_groups):
        np.random.shuffle(permutations[row])
        inverse_permutations[row] = np.argsort(permutations[row])

    shuffled_programs = np.empty((n_groups, n_progs_per_group), dtype=object)
    for row, (group_progs, perm) in enumerate(zip(programs, permutations)):
        shuffled_programs[row] = [group_progs[k] for k in perm]

    shuffled_results = _run_in_parallel(shuffled_programs, nsamples, cxn)
    # Undo the shuffling so results line up with the caller's order.
    return np.array([group_results[perm]
                     for group_results, perm in zip(shuffled_results,
                                                    inverse_permutations)])
Take sequences of Protoquil programs on disjoint qubits and execute a single sequence of programs that executes the input programs in parallel . Optionally randomize within each qubit - specific sequence .
11,512
def remove_imaginary_terms(pauli_sums: PauliSum) -> PauliSum:
    """Drop the imaginary component of every coefficient in a Pauli sum.

    :param pauli_sums: the PauliSum to make real.
    :return: a new PauliSum with real coefficients only.
    :raises TypeError: if the argument is not a PauliSum.
    """
    if not isinstance(pauli_sums, PauliSum):
        raise TypeError("not a pauli sum. please give me one")
    # Seed with the zero operator so += accumulates into a PauliSum.
    real_sum = sI(0) * 0.0
    for term in pauli_sums:
        real_sum += term_with_coeff(term, term.coefficient.real)
    return real_sum
Remove the imaginary component of each term in a Pauli sum .
11,513
def get_rotation_program(pauli_term: PauliTerm) -> Program:
    """Generate a basis-change program that diagonalizes a Pauli term.

    X factors are rotated into the Z basis with RY(-pi/2), Y factors with
    RX(pi/2); Z factors need no rotation.

    :param pauli_term: the PauliTerm to diagonalize.
    :return: Program of pre-measurement rotations.
    :raises ValueError: if the term contains an operator other than X, Y, Z.
    """
    meas_basis_change = Program()
    for index, gate in pauli_term:
        if gate == 'X':
            meas_basis_change.inst(RY(-np.pi / 2, index))
        elif gate == 'Y':
            meas_basis_change.inst(RX(np.pi / 2, index))
        elif gate == 'Z':
            # Already diagonal in the computational basis.
            pass
        else:
            # Fix: the original raised a bare ValueError() with no message.
            raise ValueError(
                "Unknown Pauli operator '{}' on qubit {}; "
                "expected 'X', 'Y', or 'Z'".format(gate, index))
    return meas_basis_change
Generate a rotation program so that the pauli term is diagonal .
11,514
def controlled(m: np.ndarray) -> np.ndarray:
    """Make a one-qubit-controlled version of a unitary matrix.

    Returns the block matrix [[I, 0], [0, m]]: identity when the control
    qubit is |0>, ``m`` when it is |1>.

    :param m: square (n x n) matrix.
    :return: (2n x 2n) controlled matrix as an ndarray.
    """
    rows, cols = m.shape
    assert rows == cols
    n = rows
    I = np.eye(n)
    Z = np.zeros((n, n))
    # Fix: np.bmat is deprecated and returns np.matrix, contradicting the
    # declared np.ndarray return type; np.block yields a plain ndarray.
    controlled_m = np.block([[I, Z], [Z, m]])
    return controlled_m
Make a one - qubit - controlled version of a matrix .
11,515
def phase_estimation(U: np.ndarray, accuracy: int, reg_offset: int = 0) -> Program:
    """Generate a quantum phase estimation circuit for unitary U.

    :param U: unitary matrix whose eigenphase is to be estimated.
    :param accuracy: number of output (precision) qubits.
    :param reg_offset: offset into the classical register for results.
    :return: Program implementing phase estimation.
    """
    assert isinstance(accuracy, int)
    rows, cols = U.shape
    m = int(log2(rows))
    output_qubits = range(0, accuracy)
    U_qubits = range(accuracy, accuracy + m)

    prog = Program()
    ro = prog.declare('ro', 'BIT', len(output_qubits))
    # Put every output qubit into superposition.
    for qubit in output_qubits:
        prog.inst(H(qubit))
    # Apply controlled-U^(2^i) from each output qubit.
    for i in output_qubits:
        if i > 0:
            U = np.dot(U, U)  # square U to get U^(2^i)
        cU = controlled(U)
        name = "CONTROLLED-U{0}".format(2 ** i)
        prog.defgate(name, cU)
        prog.inst((name, i) + tuple(U_qubits))
    # Inverse QFT maps the accumulated phases onto the output register.
    prog = prog + inverse_qft(output_qubits)
    for i in output_qubits:
        prog.measure(i, ro[reg_offset + i])
    return prog
Generate a circuit for quantum phase estimation .
11,516
def binary_float_to_decimal_float(number: Union[float, str]) -> float:
    """Convert the fractional part of a binary float to a decimal float.

    e.g. "0.101" -> 0.625. Only the digits after the decimal point are
    interpreted; the sign is taken from the leading '-' (string input) or
    from np.sign (float input).

    :param number: binary float as a str (e.g. "-0.1") or float (e.g. 0.1).
    :return: the decimal value of the (signed) binary fraction.
    """
    if isinstance(number, str):
        n_sign = -1 if number[0] == '-' else 1
    elif isinstance(number, float):
        n_sign = np.sign(number)
        number = str(number)
    # Each fractional binary digit contributes val / 2^(position).
    deci = 0
    for ndx, val in enumerate(number.split('.')[-1]):
        deci += float(val) / 2 ** (ndx + 1)
    return deci * n_sign
Convert binary floating point to decimal floating point .
11,517
def measurements_to_bf(measurements: np.ndarray) -> float:
    """Convert a table of bit measurements into a binary fraction.

    Averages each bit column over the shots, rounds to the majority value,
    reverses the bit order, drops the first (sign/overflow) bit, and reads
    the rest as the digits of "0.xxx".

    :param measurements: 2D array-like of 0/1 measurement rows.
    :return: the resulting binary fraction as a float (e.g. 0.01).
    """
    # Accept plain lists as well as ndarrays (asarray is a no-op on the latter).
    measurements = np.asarray(measurements)
    stats = measurements.sum(axis=0) / len(measurements)
    digits = [str(int(b)) for b in np.round(stats[::-1][1:])]
    return float('0.' + ''.join(digits))
Convert measurements into gradient binary fraction .
11,518
def amplification_circuit(algorithm: Program, oracle: Program, qubits: List[int],
                          num_iter: int, decompose_diffusion: bool = False) -> Program:
    """Build num_iter rounds of amplitude amplification.

    :param algorithm: measurement-less state-preparation program.
    :param oracle: oracle marking the target states.
    :param qubits: qubits to operate on.
    :param num_iter: number of amplification rounds.
    :param decompose_diffusion: if True, use the decomposed diffusion program.
    :return: the assembled amplification Program.
    """
    program = Program()
    # Start from the uniform superposition over all qubits.
    program += Program().inst([H(qubit) for qubit in qubits])
    if decompose_diffusion:
        diffusion = decomposed_diffusion_program(qubits)
    else:
        diffusion = diffusion_program(qubits)
    defined_gates = (oracle.defined_gates + algorithm.defined_gates +
                     diffusion.defined_gates)
    # Each round: oracle, un-prepare, diffuse, re-prepare.
    for _ in range(num_iter):
        program += (oracle.instructions + algorithm.dagger().instructions +
                    diffusion.instructions + algorithm.instructions)
    # Re-attach every custom gate definition used by the sub-programs.
    for gate in defined_gates:
        program.defgate(gate.name, gate.matrix)
    return program
Returns a program that does num_iter rounds of amplification given a measurement - less algorithm an oracle and a list of qubits to operate on .
11,519
def _operator_generator(index, conj):
    """Generate the fermionic ladder operator at ``index`` (internal).

    Builds Z(0)...Z(index-1) * (X(index)/2 + conj*i/2 * Y(index)) and
    simplifies it.

    :param index: qubit index of the ladder operator.
    :param conj: +1 or -1, selecting raising vs. lowering.
    :return: the simplified PauliSum/PauliTerm.
    """
    zstring = PauliTerm('I', 0, 1.0)
    for j in range(index):
        zstring = zstring * PauliTerm('Z', j, 1.0)
    x_part = zstring * PauliTerm('X', index, 0.5)
    y_part = zstring * PauliTerm('Y', index, 0.5 * conj * 1.0j)
    combined = PauliTerm('I', 0, 1.0) * (x_part + y_part)
    return combined.simplify()
Internal method to generate the appropriate operator
11,520
def maxcut_qaoa(graph, steps=1, rand_seed=None, connection=None, samples=None,
                initial_beta=None, initial_gamma=None, minimizer_kwargs=None,
                vqe_option=None):
    """Set up a QAOA instance for MAX-CUT on a graph.

    :param graph: networkx Graph, or a list of edge tuples.
    :param steps: number of QAOA (beta, gamma) layers.
    :param rand_seed: seed for the QAOA parameter initialization.
    :param connection: quantum computer connection; a QVM sized to the
        graph is created if None.
    :param samples: number of samples for expectation estimation.
    :param initial_beta: initial beta angles (optional).
    :param initial_gamma: initial gamma angles (optional).
    :param minimizer_kwargs: kwargs for scipy.optimize.minimize
        (defaults to Nelder-Mead with loose tolerances).
    :param vqe_option: options dict for the VQE inner loop.
    :return: a configured QAOA instance.
    """
    # Accept an edge list by converting it to a networkx Graph.
    if not isinstance(graph, nx.Graph) and isinstance(graph, list):
        maxcut_graph = nx.Graph()
        for edge in graph:
            maxcut_graph.add_edge(*edge)
        graph = maxcut_graph.copy()

    # Cost: 0.5*(Z_i Z_j - 1) per edge; driver: -X_i per node.
    cost_operators = []
    for i, j in graph.edges():
        cost_operators.append(PauliTerm("Z", i, 0.5) * PauliTerm("Z", j) +
                              PauliTerm("I", 0, -0.5))
    driver_operators = [PauliSum([PauliTerm("X", i, -1.0)])
                        for i in graph.nodes()]

    if connection is None:
        connection = get_qc(f"{len(graph.nodes)}q-qvm")
    if minimizer_kwargs is None:
        minimizer_kwargs = {'method': 'Nelder-Mead',
                            'options': {'ftol': 1.0e-2, 'xtol': 1.0e-2,
                                        'disp': False}}
    if vqe_option is None:
        vqe_option = {'disp': print, 'return_all': True, 'samples': samples}

    return QAOA(connection, list(graph.nodes()), steps=steps,
                cost_ham=cost_operators, ref_ham=driver_operators,
                store_basis=True, rand_seed=rand_seed,
                init_betas=initial_beta, init_gammas=initial_gamma,
                minimizer=minimize, minimizer_kwargs=minimizer_kwargs,
                vqe_options=vqe_option)
Max cut set up method
11,521
def default_rotations(*qubits):
    """Yield tomography pre/post-rotation programs for any number of qubits.

    Iterates over the cartesian product of the tomography gate set, one
    gate per qubit, yielding one Program per combination.
    """
    for gate_combo in cartesian_product(TOMOGRAPHY_GATES.keys(), repeat=len(qubits)):
        tomography_program = Program()
        for qubit, gate in izip(qubits, gate_combo):
            tomography_program.inst(gate(qubit))
        yield tomography_program
Generates the Quil programs for the tomographic pre - and post - rotations of any number of qubits .
11,522
def default_channel_ops(nqubits):
    """Yield tomography pre/post-rotation operators as qutip tensor products.

    :param nqubits: number of qubits; each combination draws one operator
        per qubit from the tomography gate set.
    """
    for gate_combo in cartesian_product(TOMOGRAPHY_GATES.values(), repeat=nqubits):
        yield qt.tensor(*gate_combo)
Generate the tomographic pre - and post - rotations of any number of qubits as qutip operators .
11,523
def is_functional(cls):
    """Lazily check whether a convex SDP solver with positivity support exists.

    On first call, attempts to solve a small random positive-definite SDP;
    caches the outcome on the class so the probe runs only once.

    :return: True if the solver handled the positivity-constrained problem.
    """
    if not cls._tested:
        cls._tested = True
        np.random.seed(SEED)
        dim = 10
        mat = np.random.randn(dim, dim)
        posmat = mat.dot(mat.T)
        posvar = cvxpy.Variable(dim, dim)
        prob = cvxpy.Problem(
            cvxpy.Minimize((cvxpy.trace(posmat * posvar) + cvxpy.norm(posvar))),
            [posvar >> 0, cvxpy.trace(posvar) >= 1.])
        try:
            prob.solve(SOLVER)
            cls._functional = True
        except cvxpy.SolverError:
            _log.warning("No convex SDP solver found. You will not be able to solve"
                         " tomography problems with matrix positivity constraints.")
    return cls._functional
Checks lazily whether a convex solver is installed that handles positivity constraints .
11,524
def measure_wf_coefficients(prep_program, coeff_list, reference_state,
                            quantum_resource, variance_bound=1.0E-6):
    """Measure wavefunction coefficients relative to a reference state.

    First estimates the reference amplitude c0, then each requested
    coefficient as <ref|proj|state>/c0.

    :param prep_program: state preparation program.
    :param coeff_list: computational-basis indices whose amplitudes to measure.
    :param reference_state: index of the reference basis state.
    :param quantum_resource: backend for expectation estimation.
    :param variance_bound: target variance per estimated operator.
    :return: list of estimated amplitudes, one per entry of coeff_list.
    """
    num_qubits = len(prep_program.get_qubits())
    normalizer_ops = projector_generator(reference_state, reference_state)
    c0_coeff, _, _ = estimate_locally_commuting_operator(
        prep_program, normalizer_ops, variance_bound=variance_bound,
        quantum_resource=quantum_resource)
    c0_coeff = np.sqrt(c0_coeff)

    amplitudes = []
    for ii in coeff_list:
        if ii == reference_state:
            # The reference amplitude itself was already estimated.
            amplitudes.append(c0_coeff)
            continue
        bra = list(map(int, np.binary_repr(ii, width=num_qubits)))
        c_ii_op = projector_generator(reference_state, bra)
        result = estimate_locally_commuting_operator(
            prep_program, c_ii_op, variance_bound=variance_bound,
            quantum_resource=quantum_resource)
        amplitudes.append(result[0] / c0_coeff)
    return amplitudes
Measure a set of coefficients with a phase relative to the reference_state
11,525
def measure_pure_state(prep_program, reference_state, quantum_resource,
                       variance_bound=1.0E-6):
    """Measure every coefficient of the pure state made by prep_program.

    :param prep_program: state preparation program.
    :param reference_state: index of the reference basis state for phases.
    :param quantum_resource: backend for expectation estimation.
    :param variance_bound: target variance per estimated operator.
    :return: column vector (2^n x 1 ndarray) of measured amplitudes.
    """
    num_qubits = len(prep_program.get_qubits())
    all_indices = list(range(2 ** num_qubits))
    amplitudes = measure_wf_coefficients(prep_program, all_indices,
                                         reference_state, quantum_resource,
                                         variance_bound=variance_bound)
    return np.asarray(amplitudes).reshape((-1, 1))
Measure the coefficients of the pure state
11,526
def build(self):
    """Build the Program for this controlled gate.

    Resets the tracked gate-definition set to the standard gates, then
    recursively constructs the controlled operation.

    :return: the constructed Program.
    """
    self.defined_gates = set(STANDARD_GATE_NAMES)
    return self._recursive_builder(self.operation, self.gate_name,
                                   self.control_qubits, self.target_qubit)
Builds this controlled gate .
11,527
def _defgate(self, program, gate_name, gate_matrix):
    """Return a copy of ``program`` with ``gate_name`` defined.

    The definition is added only if the name is not already tracked in
    ``self.defined_gates``; the tracking set is updated accordingly.

    :param program: program to extend (not mutated).
    :param gate_name: name of the gate to define.
    :param gate_matrix: unitary matrix for the gate.
    :return: a new Program containing the original plus the definition.
    """
    extended = pq.Program()
    extended += program
    if gate_name not in self.defined_gates:
        extended.defgate(gate_name, gate_matrix)
        self.defined_gates.add(gate_name)
    return extended
Defines a gate named gate_name with matrix gate_matrix in program . In addition updates self . defined_gates to track what has been defined .
11,528
def parity_even_p(state, marked_qubits):
    """Return True when ``state`` has an even number of 1-bits at the marked positions.

    :param state: integer bitmask of measured qubit values.
    :param marked_qubits: iterable of bit positions to inspect.
    :return: True for even parity (including zero marked bits), else False.
    """
    assert isinstance(state, int), f"{state} is not an integer. Must call parity_even_p with an integer state."
    mask = 0
    for qubit in marked_qubits:
        mask |= 1 << qubit
    set_bits = bin(state & mask).count("1")
    return set_bits % 2 == 0
Calculates the parity of elements at indexes in marked_qubits
11,529
def vqe_run(self, variational_state_evolve, hamiltonian, initial_params,
            gate_noise=None, measurement_noise=None, jacobian=None, qc=None,
            disp=None, samples=None, return_all=False):
    """Run the VQE functional-minimization loop.

    :param variational_state_evolve: params -> pyquil state-prep program.
    :param hamiltonian: operator whose expectation is minimized.
    :param initial_params: starting point for the classical minimizer.
    :param gate_noise: gate noise for an auto-created noisy QVM.
    :param measurement_noise: measurement noise for an auto-created QVM.
    :param jacobian: optional gradient function of the objective.
    :param qc: quantum computer to use; a noisy QVM is built if None.
    :param disp: per-iteration display callback (None disables display).
    :param samples: shots per expectation estimate (None -> exact/wf mode).
    :param return_all: if True, record parameters/expectations per iteration.
    :return: OptResults with .x, .fun and optionally iteration history.
    """
    self._disp_fun = disp if disp is not None else lambda x: None
    iteration_params = []
    expectation_vals = []
    self._current_expectation = None
    if samples is None:
        print()
    if qc is None:
        qubits = hamiltonian.get_qubits()
        qc = QuantumComputer(name=f"{len(qubits)}q-noisy-qvm",
                             qam=QVM(gate_noise=gate_noise,
                                     measurement_noise=measurement_noise))
    else:
        self.qc = qc

    def objective_function(params):
        # Expectation of the Hamiltonian in the variational state.
        pyquil_prog = variational_state_evolve(params)
        mean_value = self.expectation(pyquil_prog, hamiltonian, samples, qc)
        self._current_expectation = mean_value
        return mean_value

    def print_current_iter(iter_vars):
        # Optimizer callback: report progress and optionally record history.
        self._disp_fun("\tParameters: {} ".format(iter_vars))
        if jacobian is not None:
            grad = jacobian(iter_vars)
            self._disp_fun("\tGrad-L1-Norm: {}".format(np.max(np.abs(grad))))
            self._disp_fun("\tGrad-L2-Norm: {} ".format(np.linalg.norm(grad)))
        self._disp_fun("\tE => {}".format(self._current_expectation))
        if return_all:
            iteration_params.append(iter_vars)
            expectation_vals.append(self._current_expectation)

    # Wire the callback/jacobian into the minimizer only if it accepts them.
    arguments = funcsigs.signature(self.minimizer).parameters.keys()
    if disp is not None and 'callback' in arguments:
        self.minimizer_kwargs['callback'] = print_current_iter
    args = [objective_function, initial_params]
    args.extend(self.minimizer_args)
    if 'jac' in arguments:
        self.minimizer_kwargs['jac'] = jacobian

    result = self.minimizer(*args, **self.minimizer_kwargs)

    if hasattr(result, 'status'):
        if result.status != 0:
            self._disp_fun("Classical optimization exited with an error index: %i"
                           % result.status)
    results = OptResults()
    if hasattr(result, 'x'):
        results.x = result.x
        results.fun = result.fun
    else:
        # Some minimizers return the bare parameter vector.
        results.x = result
    if return_all:
        results.iteration_params = iteration_params
        results.expectation_vals = expectation_vals
    return results
functional minimization loop .
11,530
def _get_path_from_parent(self, parent):
    """Return PathInfos describing the path from ``parent`` to this model.

    Returns an empty list when ``parent`` is this model itself; delegates
    to ``get_path_from_parent`` when the subclass provides one.
    """
    if hasattr(self, 'get_path_from_parent'):
        return self.get_path_from_parent(parent)
    if self.model is parent:
        return []
    model = self.concrete_model
    # Base chain runs child -> parent; reverse it so we walk downward.
    chain = model._meta.get_base_chain(parent) or []
    chain.reverse()
    chain.append(model)
    path = []
    for ancestor, child in zip(chain[:-1], chain[1:]):
        link = child._meta.get_ancestor_link(ancestor)
        path.extend(link.get_reverse_path_info())
    return path
Return a list of PathInfos containing the path from the parent model to the current model or an empty list if parent is not a parent of the current model .
11,531
def patience_sort(xs):
    """Patience-sort ``xs``, yielding (element, pile_index) for each element.

    Each element is placed on the leftmost pile whose top is >= it
    (via bisect_left on the pile tops); a new pile is started when no
    such pile exists.
    """
    pile_tops = []
    for x in xs:
        pile = bisect.bisect_left(pile_tops, x)
        if pile < len(pile_tops):
            pile_tops[pile] = x
        else:
            pile_tops.append(x)
        yield x, pile
Patience sort an iterable xs .
11,532
def longest_monotonic_subseq_length(xs):
    """Return the longest monotonic subsequence length of ``xs``.

    :return: tuple (max of increasing/decreasing lengths,
        increasing length minus decreasing length).
    """
    inc = longest_increasing_subseq_length(xs)
    dec = longest_decreasing_subseq_length(xs)
    return max(inc, dec), inc - dec
Return the length of the longest monotonic subsequence of xs second return value is the difference between increasing and decreasing lengths .
11,533
def longest_increasing_subsequence(xs):
    """Return one longest increasing subsequence of ``xs``.

    Runs patience sort, records for each element a back-pointer into the
    previous pile, then backtracks from the last pile to reconstruct the
    subsequence.
    """
    # piles[p+1] holds (value, index_of_predecessor_in_pile_p) pairs.
    piles = [[]]
    for x, p in patience_sort(xs):
        if p + 1 == len(piles):
            piles.append([])
        piles[p + 1].append((x, len(piles[p]) - 1))
    npiles = len(piles) - 1
    pointer = 0
    subseq = []
    # Walk the back-pointers from the deepest pile to the first.
    for pile in range(npiles, 0, -1):
        x, pointer = piles[pile][pointer]
        subseq.append(x)
    subseq.reverse()
    return subseq
Return a longest increasing subsequence of xs .
11,534
def backtracking(a, L, bestsofar):
    """Yield indices along the heaviest chain, starting from the heaviest end.

    :param a: unused (kept for interface compatibility).
    :param L: dict mapping weight -> end index; the max weight is chosen.
    :param bestsofar: dict mapping index -> (weight, predecessor index);
        a predecessor of -1 terminates the walk.
    """
    weight, idx = max(L.items())
    while idx != -1:
        yield idx
        weight, idx = bestsofar[idx]
Start with the heaviest weight and emit index
11,535
def mappability(args):
    """
    %prog mappability reference.fasta

    Build a GEM mappability pipeline for the reference: index, compute
    k-mer mappability, convert to wig/bigWig/bedGraph, and merge filtered
    intervals. Commands are registered with MakeManager and written out.
    """
    p = OptionParser(mappability.__doc__)
    p.add_option("--mer", default=50, type="int", help="User mer size")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())

    ref, = args
    K = opts.mer
    pf = ref.rsplit(".", 1)[0]
    mm = MakeManager()

    # 1. Index the reference with gem-indexer.
    gem = pf + ".gem"
    cmd = "gem-indexer -i {} -o {}".format(ref, pf)
    mm.add(ref, gem, cmd)

    # 2. Compute K-mer mappability.
    mer = pf + ".{}mer".format(K)
    mapb = mer + ".mappability"
    cmd = "gem-mappability -I {} -l {} -o {} -T {}".format(gem, K, mer, opts.cpus)
    mm.add(gem, mapb, cmd)

    # 3. Convert: mappability -> wig -> bigWig -> bedGraph.
    wig = mer + ".wig"
    cmd = "gem-2-wig -I {} -i {} -o {}".format(gem, mapb, mer)
    mm.add(mapb, wig, cmd)
    bw = mer + ".bw"
    cmd = "wigToBigWig {} {}.sizes {}".format(wig, mer, bw)
    mm.add(wig, bw, cmd)
    bg = mer + ".bedGraph"
    cmd = "bigWigToBedGraph {} {}".format(bw, bg)
    mm.add(bw, bg, cmd)

    # 4. Filter for uniquely mappable regions and merge into BED.
    merged = mer + ".filtered-1.merge.bed"
    cmd = "python -m jcvi.formats.bed filterbedgraph {} 1".format(bg)
    mm.add(bg, merged, cmd)

    mm.write()
%prog mappability reference . fasta
11,536
def freq(args):
    """
    %prog freq fastafile bamfile

    Call pooled-continuous variants with freebayes, filter by quality with
    vcffilter, and keep only the AO/RO/TYPE info fields.
    """
    p = OptionParser(freq.__doc__)
    p.add_option("--mindepth", default=3, type="int",
                 help="Minimum depth [default: %default]")
    p.add_option("--minqual", default=20, type="int",
                 help="Minimum quality [default: %default]")
    p.set_outfile()
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())

    fastafile, bamfile = args
    # Assemble the freebayes | vcffilter | vcfkeepinfo pipeline.
    cmd = "freebayes -f {0} --pooled-continuous {1}".format(fastafile, bamfile)
    cmd += " -F 0 -C {0}".format(opts.mindepth)
    cmd += ' | vcffilter -f "QUAL > {0}"'.format(opts.minqual)
    cmd += " | vcfkeepinfo - AO RO TYPE"
    sh(cmd, outfile=opts.outfile)
%prog freq fastafile bamfile
11,537
def frommaf(args):
    """
    %prog frommaf maffile

    Convert a maf-style SNP table into a minimal VCF, and optionally
    validate the REF alleles against a FASTA file given via --validate.
    """
    p = OptionParser(frommaf.__doc__)
    p.add_option("--validate",
                 help="Validate coordinates against FASTA [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())

    maf, = args
    snpfile = maf.rsplit(".", 1)[0] + ".vcf"
    fp = open(maf)
    fw = open(snpfile, "w")
    total = 0
    # Fixed placeholder VCF columns.
    id = "."
    qual = 20
    filter = "PASS"
    info = "DP=20"
    print("##fileformat=VCFv4.0", file=fw)
    print("#CHROM POS ID REF ALT QUAL FILTER INFO".replace(" ", "\t"), file=fw)
    for row in fp:
        atoms = row.split()
        c, pos, ref, alt = atoms[:4]
        try:
            c = int(c)
        # Fix: was a bare `except:` which also swallowed KeyboardInterrupt
        # and SystemExit; only non-numeric chromosome rows should be skipped.
        except ValueError:
            continue
        c = "chr{0:02d}".format(c)
        pos = int(pos)
        print("\t".join(str(x) for x in
                        (c, pos, id, ref, alt, qual, filter, info)), file=fw)
        total += 1
    fw.close()

    validate = opts.validate
    if not validate:
        return

    from jcvi.utils.cbook import percentage
    f = Fasta(validate)
    fp = open(snpfile)
    nsnps = 0
    for row in fp:
        if row[0] == '#':
            continue
        c, pos, id, ref, alt, qual, filter, info = row.split("\t")
        pos = int(pos)
        feat = dict(chr=c, start=pos, stop=pos)
        s = str(f.sequence(feat))
        assert s == ref, \
            "Validation error: {0} is {1} (expect: {2})".format(feat, s, ref)
        nsnps += 1
        if nsnps % 50000 == 0:
            logging.debug("SNPs parsed: {0}".format(percentage(nsnps, total)))
    logging.debug("A total of {0} SNPs validated and written to `{1}`.".
                  format(nsnps, snpfile))
%prog frommaf maffile
11,538
def libs(args):
    """
    %prog libs libfile

    Dump (lib_id, name, gb#) triples for the Medicago BAC libraries from
    the tracking database into `libfile`, one pipe-joined row per library.
    """
    p = OptionParser(libs.__doc__)
    p.set_db_opts(dbname="track", credentials=None)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())

    libfile, = args
    sqlcmd = "select library.lib_id, library.name, bac.gb# from library join bac on " + \
             "library.bac_id=bac.id where bac.lib_name='Medicago'"
    cur = connect(opts.dbname)
    results = fetchall(cur, sqlcmd)

    fw = open(libfile, "w")
    for lib_id, name, gb in results:
        # Fix: str.translate(None, "\n") is Python-2-only and raises
        # TypeError on Python 3; strip newlines with replace instead.
        name = name.replace("\n", "")
        if not gb:
            gb = "None"
        print("|".join((lib_id, name, gb)), file=fw)
    fw.close()
%prog libs libfile
11,539
def pull(args):
    """
    %prog pull libfile

    Pull the sequences (or fragments with --frag) for every library listed
    in `libfile` from the database, writing a .sql query file per library.
    """
    p = OptionParser(pull.__doc__)
    p.set_db_opts(dbname="mtg2", credentials=None)
    p.add_option("--frag", default=False, action="store_true",
                 help="The command to pull sequences from db [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())

    libfile, = args
    dbname = opts.dbname
    frag = opts.frag
    fp = open(libfile)
    hostname, username, password = get_profile()
    for row in fp:
        lib_id, name = row.split("|", 1)
        sqlfile = lib_id + ".sql"
        # Write the per-library SQL query only once.
        if not op.exists(sqlfile):
            fw = open(sqlfile, "w")
            print("select seq_name from sequence where seq_name like" +
                  " '{0}%' and trash is null".format(lib_id), file=fw)
            fw.close()
        if frag:
            cmd = "pullfrag -D {0} -n {1}.sql -o {1} -q -S {2}".format(
                dbname, lib_id, hostname)
            cmd += " -U {0} -P {1}".format(username, password)
        else:
            cmd = "pullseq -D {0} -n {1}.sql -o {1} -q".format(dbname, lib_id)
        sh(cmd)
%prog pull libfile
11,540
def read_record(fp, first_line=None):
    """Read one (possibly nested) record from a file of AMOS messages.

    :param fp: file object positioned at a message start (or just after,
        when ``first_line`` holds the already-read start line).
    :param first_line: optional start line consumed by the caller.
    :return: the parsed Message, with nested messages appended.
    :raises EOFError: on end of file.
    :raises Exception: on a malformed start line or field line.
    """
    if first_line is None:
        first_line = fp.readline()
    if not first_line:
        raise EOFError()
    match = _START.match(first_line)
    if not match:
        raise Exception('Bad start of message', first_line)
    message = Message(match.group(1))

    while True:
        row = fp.readline()

        # Multi-line field: accumulate until a '.' terminator, backing up
        # if a brace line (start/end of a message) is hit instead.
        match = _MULTILINE_FIELD.match(row)
        if match:
            key = match.group(1)
            val = ""
            while row:
                pos = fp.tell()
                row = fp.readline()
                if row[0] in '.':
                    break
                elif row[0] in '{}':
                    fp.seek(pos)
                    break
                val += row
            message.contents.append((key, val, True))
            continue

        # Simple single-line key:value field.
        match = _FIELD.match(row)
        if match:
            message.contents.append((match.group(1), match.group(2), False))
            continue

        # Nested message: recurse with the already-consumed start line.
        match = _START.match(row)
        if match:
            message.append(read_record(fp, row))
            continue

        if row[0] == '}':
            break
        raise Exception('Bad line', row)
    return message
Read a record from a file of AMOS messages
11,541
def filter(args):
    """
    %prog filter frgfile idsfile

    Keep only FRG fragments (and LKG mate links) whose read names appear
    in `idsfile`; write the survivors to <prefix>.filtered.frg and report
    survival percentages on stderr.
    """
    p = OptionParser(filter.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())

    frgfile, idsfile = args
    assert frgfile.endswith(".frg")
    fp = open(idsfile)
    allowed = set(x.strip() for x in fp)
    logging.debug("A total of {0} allowed ids loaded.".format(len(allowed)))

    newfrgfile = frgfile.replace(".frg", ".filtered.frg")
    fp = open(frgfile)
    fw = open(newfrgfile, "w")

    nfrags, discarded_frags = 0, 0
    nmates, discarded_mates = 0, 0
    for rec in iter_records(fp):
        if rec.type == "FRG":
            # Strip the a/b pair suffix before matching against allowed ids.
            readname = rec.get_field("acc").rstrip("ab")
            nfrags += 1
            if readname not in allowed:
                discarded_frags += 1
                continue
        if rec.type == "LKG":
            readname = rec.get_field("frg").rstrip("ab")
            nmates += 1
            if readname not in allowed:
                discarded_mates += 1
                continue
        print(rec, file=fw)

    survived_frags = nfrags - discarded_frags
    survived_mates = nmates - discarded_mates
    print("Survived fragments: {0}".
          format(percentage(survived_frags, nfrags)), file=sys.stderr)
    print("Survived mates: {0}".
          format(percentage(survived_mates, nmates)), file=sys.stderr)
%prog filter frgfile idsfile
11,542
def frg(args):
    """
    %prog frg frgfile

    Extract the sequences of all FRG records in an AMOS frg file into a
    FASTA file named after the input prefix.
    """
    p = OptionParser(frg.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(p.print_help())

    frgfile, = args
    fastafile = frgfile.rsplit(".", 1)[0] + ".fasta"
    fp = open(frgfile)
    fw = open(fastafile, "w")
    for rec in iter_records(fp):
        if rec.type != "FRG":
            continue
        name = rec.get_field("acc")
        seq = rec.get_field("seq")
        SeqIO.write([SeqRecord(Seq(seq), id=name, description="")], fw, "fasta")
    fw.close()
%prog frg frgfile
11,543
def asm(args):
    """
    %prog asm asmfile

    Extract contig (CCO) and scaffold (SCF) consensus sequences from a
    Celera Assembler .asm file into <prefix>.ctg.fasta and
    <prefix>.scf.fasta respectively.
    """
    p = OptionParser(asm.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(p.print_help())

    asmfile, = args
    prefix = asmfile.rsplit(".", 1)[0]
    ctgfastafile = prefix + ".ctg.fasta"
    scffastafile = prefix + ".scf.fasta"
    fp = open(asmfile)
    ctgfw = open(ctgfastafile, "w")
    scffw = open(scffastafile, "w")
    for rec in iter_records(fp):
        type = rec.type
        if type == "CCO":
            fw = ctgfw
            pp = "ctg"
        elif type == "SCF":
            fw = scffw
            pp = "scf"
        else:
            continue
        id = rec.get_field("acc")
        # Fix: str.translate(None, chars) is Python-2-only; delete unwanted
        # characters via str.maketrans's deletion form on Python 3.
        id = id.translate(str.maketrans("", "", "()")).split(",")[0]
        seq = rec.get_field("cns").translate(str.maketrans("", "", "-"))
        s = SeqRecord(Seq(seq), id=pp + id, description="")
        SeqIO.write([s], fw, "fasta")
        fw.flush()
    # Fix: the original closed only the last-used handle (and raised
    # NameError when no CCO/SCF record existed); close both explicitly.
    ctgfw.close()
    scffw.close()
%prog asm asmfile
11,544
def count(args):
    """
    %prog count frgfile

    Tally the number of records of each message type in an AMOS frg file
    and print the counts to stderr.
    """
    p = OptionParser(count.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(p.print_help())

    frgfile, = args
    fp = open(frgfile)
    type_counts = defaultdict(int)
    for rec in iter_records(fp):
        type_counts[rec.type] += 1
    for rec_type, cnt in sorted(type_counts.items()):
        print('{0}: {1}'.format(rec_type, cnt), file=sys.stderr)
%prog count frgfile
11,545
def prepare(args):
    """
    %prog prepare countfolder families

    Merge replicate count samples per (tissue, individual), then for each
    F1 sample assemble its parents' and its own counts into per-family
    merged count and group files under `families`.
    """
    p = OptionParser(prepare.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())

    counts, families = args
    countfiles = glob(op.join(counts, "*.count"))
    countsdb = defaultdict(list)
    for c in countfiles:
        rs = RiceSample(c)
        countsdb[(rs.tissue, rs.ind)].append(rs)

    # Merge technical replicates sharing the same (label, rep) key.
    key = lambda x: (x.label, x.rep)
    for (tissue, ind), rs in sorted(countsdb.items()):
        rs.sort(key=key)
        nrs = len(rs)
        # Fix: xrange is Python-2-only; use range (file is Python 3 —
        # it uses f-strings and print() elsewhere).
        for i in range(nrs):
            ri = rs[i]
            if not ri.working:
                continue
            for j in range(i + 1, nrs):
                rj = rs[j]
                if key(ri) != key(rj):
                    continue
                ri.merge(rj)
                rj.working = False
        countsdb[(tissue, ind)] = [x for x in rs if x.working]

    mkdir("families")
    for (tissue, ind), r in sorted(countsdb.items()):
        r = list(r)
        if r[0].label != "F1":
            continue
        P1, P2 = r[0].P1, r[0].P2
        P1, P2 = countsdb[(tissue, P1)], countsdb[(tissue, P2)]
        rs = P1 + P2 + r
        # Group labels: 1 = parent 1, 2 = parent 2, 3 = F1 offspring.
        groups = [1] * len(P1) + [2] * len(P2) + [3] * len(r)
        assert len(rs) == len(groups)
        outfile = "-".join((tissue, ind))
        merge_counts(rs, op.join(families, outfile))
        groupsfile = outfile + ".groups"
        fw = open(op.join(families, groupsfile), "w")
        print(",".join(str(x) for x in groups), file=fw)
        fw.close()
%prog prepare countfolder families
11,546
def outlier_cutoff(a, threshold=3.5):
    """Iglewicz & Hoaglin robust outlier cutoffs via the modified Z-score.

    :param a: sequence of numeric values.
    :param threshold: modified Z-score threshold (default 3.5).
    :return: tuple (lower bound, upper bound); values outside are outliers.
    """
    values = np.array(a, dtype=float)
    med = np.median(values)
    # Median absolute deviation from the median.
    mad = np.median(np.abs(values - med))
    # 0.67449 scales MAD to be consistent with the std of a normal dist.
    spread = threshold / .67449 * mad
    return med - spread, med + spread
Iglewicz and Hoaglin's robust outlier test; returns the cutoff values — lower bound and upper bound.
11,547
def bed(args):
    """
    %prog bed genes.ids

    Fetch bed records for the listed gene ids (one id per line) via
    `get_bed_from_phytozome` and write them to `<prefix>.bed`.
    """
    p = OptionParser(bed.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    idsfile, = args
    ids = set(x.strip() for x in open(idsfile))
    data = get_bed_from_phytozome(list(ids))

    pf = idsfile.rsplit(".", 1)[0]
    bedfile = pf + ".bed"
    fw = open(bedfile, "w")
    for i, row in enumerate(data):
        row = row.strip()
        if row == "":
            continue  # skip blank lines in the response
        print(row, file=fw)
    # NOTE(review): if `data` is empty, `i` is unbound here — confirm the
    # service never returns an empty iterable
    logging.debug("A total of {0} records written to `{1}`.".format(i + 1, bedfile))
%prog bed genes . ids
11,548
def bed(args):
    """
    %prog bed binfile fastafile

    Write intervals of at least `--cutoff` read depth as bed records; the
    score column carries the mean depth over each interval.
    """
    p = OptionParser(bed.__doc__)
    p.add_option("-o", dest="output", default="stdout",
                 help="Output file name [default: %default]")
    p.add_option("--cutoff", dest="cutoff", default=10, type="int",
                 help="Minimum read depth to report intervals [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    binfile, fastafile = args
    fw = must_open(opts.output, "w")
    cutoff = opts.cutoff
    assert cutoff >= 0, "Need non-negative cutoff"

    b = BinFile(binfile)
    ar = b.array

    fastasize, sizes, offsets = get_offsets(fastafile)
    s = Sizes(fastafile)
    for ctg, ctglen in s.iter_sizes():
        offset = offsets[ctg]
        subarray = ar[offset:offset + ctglen]
        # group consecutive positions by whether they pass the depth cutoff
        key = lambda x: x[1] >= cutoff
        for tf, array_elements in groupby(enumerate(subarray), key=key):
            array_elements = list(array_elements)
            if not tf:
                continue  # below-cutoff run: not reported
            start = array_elements[0][0] + 1
            end = array_elements[-1][0] + 1
            mean_depth = sum([x[1] for x in array_elements]) / len(array_elements)
            mean_depth = int(mean_depth)
            name = "na"
            # bed is 0-based half-open, hence start - 1
            print("\t".join(str(x) for x in
                  (ctg, start - 1, end, name, mean_depth)), file=fw)
%prog bed binfile fastafile
11,549
def query(args):
    """
    %prog query binfile fastafile ctgID baseID

    Report the stored depth value at a single (contig, 1-based position)
    coordinate.
    """
    p = OptionParser(query.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 4:
        sys.exit(not p.print_help())
    binfile, fastafile, ctgID, baseID = args
    b = BinFile(binfile, fastafile)
    ar = b.mmarray  # memory-mapped: avoids loading the whole array
    fastasize, sizes, offsets = get_offsets(fastafile)
    oi = offsets[ctgID] + int(baseID) - 1  # baseID is 1-based
    print("\t".join((ctgID, baseID, str(ar[oi]))))
%prog query binfile fastafile ctgID baseID
11,550
def count(args):
    """
    %prog count t.coveragePerBase fastafile

    Serialize per-base coverage into a `.bin` file: one uint8 per genome
    position, in contig-offset order.
    """
    p = OptionParser(count.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    coveragefile, fastafile = args
    countsfile = coveragefile.split(".")[0] + ".bin"
    if op.exists(countsfile):
        # refuse to clobber an existing binary file
        logging.error("`{0}` file exists. Remove before proceed.".format(countsfile))
        return
    fastasize, sizes, offsets = get_offsets(fastafile)
    logging.debug("Initialize array of uint8 with size {0}".format(fastasize))
    ar = np.zeros(fastasize, dtype=np.uint8)
    update_array(ar, coveragefile, sizes, offsets)
    ar.tofile(countsfile)
    logging.debug("Array written to `{0}`".format(countsfile))
%prog count t . coveragePerBase fastafile
11,551
def edges_to_path(edges):
    """
    Connect edges and return a path (topological order of the graph built
    from them); return None for an empty edge list.
    """
    if not edges:
        return None
    G = edges_to_graph(edges)
    path = nx.topological_sort(G)
    # NOTE(review): in networkx >= 2 topological_sort returns a generator,
    # not a list — confirm callers handle that
    return path
Connect edges and return a path .
11,552
def max_sum(a):
    """
    For an input array a, return (max_sum, start, end): the largest sum of a
    contiguous subarray and the inclusive index range that attains it
    (Kadane's algorithm).
    """
    # float("-inf") replaces the module-level `Infinity` global so the
    # function is self-contained; behavior is unchanged
    max_sum, max_start_index, max_end_index = float("-inf"), 0, 0
    current_max_sum = 0
    current_start_index = 0
    for current_end_index, x in enumerate(a):
        current_max_sum += x
        if current_max_sum > max_sum:
            max_sum, max_start_index, max_end_index = \
                current_max_sum, current_start_index, current_end_index
        if current_max_sum < 0:
            # running prefix is a net loss: restart the window after this element
            current_max_sum = 0
            current_start_index = current_end_index + 1
    return max_sum, max_start_index, max_end_index
For an input array a, return the largest sum together with the start and end indices of the range that attains it.
11,553
def silicosoma(args):
    """
    %prog silicosoma in.silico > out.soma

    Convert an in-silico digest site list into .soma fragment sizes (kb):
    one `<fragsize> 0` line per non-zero inter-site fragment.
    """
    p = OptionParser(silicosoma.__doc__)
    p.set_outfile()
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    silicofile, = args
    fp = must_open(silicofile)
    fw = must_open(opts.outfile, "w")
    next(fp)  # skip the header line
    # Fix: `fp.next()` is Python-2-only; use next(fp) as on the line above
    positions = [int(x) for x in next(fp).split()]
    for a, b in pairwise(positions):
        assert a <= b
        fragsize = int(round((b - a) / 1000.))  # bp -> kb
        if fragsize:
            print(fragsize, 0, file=fw)
%prog silicosoma in . silico > out . soma
11,554
def condense(args):
    """
    %prog condense OM.bed

    Collapse bed records sharing the same (seqid, start, end): each group is
    merged into one record whose name encodes the merged range.
    """
    from itertools import groupby
    from jcvi.assembly.patch import merge_ranges

    p = OptionParser(condense.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    bedfile, = args
    bed = Bed(bedfile, sorted=False)
    key = lambda x: (x.seqid, x.start, x.end)
    for k, sb in groupby(bed, key=key):
        sb = list(sb)
        b = sb[0]
        chr, start, end, strand = merge_ranges(sb)
        # rename the representative record after the merged span
        id = "{0}:{1}-{2}".format(chr, start, end)
        b.accn = id
        print(b)
%prog condense OM . bed
11,555
def chimera(args):
    """
    %prog chimera bedfile

    Find scaffolds that map to multiple chromosomes and report the junction
    regions between consecutive placements as `chimera.bed` break candidates.
    """
    p = OptionParser(chimera.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    bedfile, = args
    bed = Bed(bedfile)
    selected = select_bed(bed)
    mapped = defaultdict(set)  # scaffold => set of chromosomes it maps to
    chimerabed = "chimera.bed"
    fw = open(chimerabed, "w")
    for b in selected:
        scf = range_parse(b.accn).seqid
        chr = b.seqid
        mapped[scf].add(chr)

    nchimera = 0
    for s, chrs in sorted(mapped.items()):
        if len(chrs) == 1:
            continue  # consistently placed: not chimeric
        print("=" * 80, file=sys.stderr)
        print("{0} mapped to multiple locations: {1}".format(s,
              ",".join(sorted(chrs))), file=sys.stderr)
        ranges = []
        for b in selected:
            rr = range_parse(b.accn)
            scf = rr.seqid
            if scf == s:
                print(b, file=sys.stderr)
                ranges.append(rr)

        # emit the gap between each pair of consecutive placements on the
        # same seqid as a candidate break point
        ranges.sort(key=lambda x: (x.seqid, x.start, x.end))
        for a, b in pairwise(ranges):
            seqid = a.seqid
            if seqid != b.seqid:
                continue
            start, end = a.end, b.start
            if start > end:
                start, end = end, start
            chimeraline = "\t".join(str(x) for x in (seqid, start, end))
            print(chimeraline, file=fw)
            print(chimeraline, file=sys.stderr)
            nchimera += 1
    fw.close()
    logging.debug("A total of {0} junctions written to `{1}`.".format(nchimera, chimerabed))
%prog chimera bedfile
11,556
def select_bed(bed):
    """
    Return non-overlapping set of ranges, choosing high scoring blocks over
    low scoring alignments when there are conflicts (via range_chain).
    """
    ranges = [Range(x.seqid, x.start, x.end, float(x.score), i)
              for i, x in enumerate(bed)]
    selected, score = range_chain(ranges)
    # map the chained ranges back to their originating bed records
    selected = [bed[x.id] for x in selected]
    return selected
Return non - overlapping set of ranges choosing high scoring blocks over low scoring alignments when there are conflicts .
11,557
def fasta(args):
    """
    %prog fasta bedfile scf.fasta pseudomolecules.fasta

    Build an AGP of selected scaffold placements from the bed file, then
    build the pseudomolecule fasta from that AGP.
    """
    from jcvi.formats.sizes import Sizes
    from jcvi.formats.agp import OO, build

    p = OptionParser(fasta.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 3:
        sys.exit(not p.print_help())
    bedfile, scffasta, pmolfasta = args
    pf = bedfile.rsplit(".", 1)[0]
    bed = Bed(bedfile)
    selected = select_bed(bed)
    oo = OO()
    seen = set()
    sizes = Sizes(scffasta).mapping
    agpfile = pf + ".agp"
    agp = open(agpfile, "w")
    for b in selected:
        scf = range_parse(b.accn).seqid
        chr = b.seqid
        cs = (chr, scf)
        if cs not in seen:
            oo.add(chr, scf, sizes[scf], b.strand)
            seen.add(cs)
        else:
            # same scaffold placed twice on one chromosome: keep first only
            logging.debug("Seen {0}, ignored.".format(cs))

    oo.write_AGP(agp, gaptype="contig")
    agp.close()
    build([agpfile, scffasta, pmolfasta])
%prog fasta bedfile scf . fasta pseudomolecules . fasta
11,558
def bed(args):
    """
    %prog bed xmlfile

    Convert optical map alignment XML to bed, then sort the bed in place
    unless --nosort is given.
    """
    from jcvi.formats.bed import sort

    p = OptionParser(bed.__doc__)
    p.add_option("--blockonly", default=False, action="store_true",
                 help="Only print out large blocks, not fragments [default: %default]")
    p.add_option("--point", default=False, action="store_true",
                 help="Print accesssion as single point instead of interval")
    p.add_option("--scale", type="float",
                 help="Scale the OM distance by factor")
    p.add_option("--switch", default=False, action="store_true",
                 help="Switch reference and aligned map elements [default: %default]")
    p.add_option("--nosort", default=False, action="store_true",
                 help="Do not sort bed [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    xmlfile, = args
    bedfile = xmlfile.rsplit(".", 1)[0] + ".bed"
    om = OpticalMap(xmlfile)
    om.write_bed(bedfile, point=opts.point, scale=opts.scale,
                 blockonly=opts.blockonly, switch=opts.switch)
    if not opts.nosort:
        sort([bedfile, "--inplace"])
%prog bed xmlfile
11,559
def bam(args):
    """
    %prog snp input.gsnap ref.fasta

    Convert GSNAP output to a unique SAM file (via the external
    gsnap2gff3.pl helper) and index it; returns the SAM filename.
    """
    from jcvi.formats.sizes import Sizes
    from jcvi.formats.sam import index

    p = OptionParser(bam.__doc__)
    p.set_home("eddyyeh")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    gsnapfile, fastafile = args
    EYHOME = opts.eddyyeh_home
    pf = gsnapfile.rsplit(".", 1)[0]
    uniqsam = pf + ".unique.sam"
    samstats = uniqsam + ".stats"
    sizesfile = Sizes(fastafile).filename
    if need_update((gsnapfile, sizesfile), samstats):
        cmd = op.join(EYHOME, "gsnap2gff3.pl")
        cmd += " --format sam -i {0} -o {1}".format(gsnapfile, uniqsam)
        cmd += " -u -l {0} -p {1}".format(sizesfile, opts.cpus)
        sh(cmd)

    index([uniqsam])
    return uniqsam
%prog snp input . gsnap ref . fasta
11,560
def index(args):
    """
    %prog index database.fasta

    Wrapper for `gmap_build`. Same interface.
    """
    p = OptionParser(index.__doc__)
    p.add_option("--supercat", default=False, action="store_true",
                 help="Concatenate reference to speed up alignment")
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    dbfile, = args
    # delegate the actual build/caching to check_index
    check_index(dbfile, supercat=opts.supercat)
%prog index database . fasta Wrapper for gmap_build . Same interface .
11,561
def gmap(args):
    """
    %prog gmap database.fasta fastafile

    Run gmap of the fastafile against the indexed database, producing GFF3
    output; returns (gmapfile, logfile). Skips the run if the output is
    already up to date.
    """
    p = OptionParser(gmap.__doc__)
    p.add_option("--cross", default=False, action="store_true",
                 help="Cross-species alignment")
    p.add_option("--npaths", default=0, type="int",
                 help="Maximum number of paths to show."
                      " If set to 0, prints two paths if chimera"
                      " detected, else one.")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    dbfile, fastafile = args
    assert op.exists(dbfile) and op.exists(fastafile)
    prefix = get_prefix(fastafile, dbfile)
    logfile = prefix + ".log"
    gmapfile = prefix + ".gmap.gff3"
    if not need_update((dbfile, fastafile), gmapfile):
        logging.error("`{0}` exists. `gmap` already run.".format(gmapfile))
    else:
        dbdir, dbname = check_index(dbfile)
        cmd = "gmap -D {0} -d {1}".format(dbdir, dbname)
        cmd += " -f 2 --intronlength=100000"  # -f 2 selects gff3 gene output
        cmd += " -t {0}".format(opts.cpus)
        cmd += " --npaths {0}".format(opts.npaths)
        if opts.cross:
            cmd += " --cross-species"
        cmd += " " + fastafile

        sh(cmd, outfile=gmapfile, errfile=logfile)

    return gmapfile, logfile
%prog gmap database . fasta fastafile
11,562
def align(args):
    """
    %prog align database.fasta read1.fq read2.fq

    Run GSNAP alignment (single-end with two args, paired-end with three),
    optionally converting the output to NATIVE format afterwards.
    Returns (gsnapfile, logfile).
    """
    from jcvi.formats.fastq import guessoffset

    p = OptionParser(align.__doc__)
    p.add_option("--rnaseq", default=False, action="store_true",
                 help="Input is RNA-seq reads, turn splicing on")
    p.add_option("--native", default=False, action="store_true",
                 help="Convert GSNAP output to NATIVE format")
    p.set_home("eddyyeh")
    p.set_outdir()
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) == 2:
        logging.debug("Single-end alignment")
    elif len(args) == 3:
        logging.debug("Paired-end alignment")
    else:
        sys.exit(not p.print_help())

    dbfile, readfile = args[:2]
    outdir = opts.outdir
    assert op.exists(dbfile) and op.exists(readfile)
    prefix = get_prefix(readfile, dbfile)
    logfile = op.join(outdir, prefix + ".log")
    gsnapfile = op.join(outdir, prefix + ".gsnap")
    nativefile = gsnapfile.rsplit(".", 1)[0] + ".unique.native"
    if not need_update((dbfile, readfile), gsnapfile):
        logging.error("`{0}` exists. `gsnap` already run.".format(gsnapfile))
    else:
        dbdir, dbname = check_index(dbfile)
        cmd = "gsnap -D {0} -d {1}".format(dbdir, dbname)
        cmd += " -B 5 -m 0.1 -i 2 -n 3"
        if opts.rnaseq:
            cmd += " -N 1"  # turn on novel splicing
        cmd += " -t {0}".format(opts.cpus)
        cmd += " --gmap-mode none --nofails"
        if readfile.endswith(".gz"):
            cmd += " --gunzip"
        try:
            # quality offset 33 => sanger protocol, otherwise illumina
            offset = "sanger" if guessoffset([readfile]) == 33 else "illumina"
            cmd += " --quality-protocol {0}".format(offset)
        except AssertionError:
            pass  # offset could not be guessed; let gsnap use its default
        cmd += " " + " ".join(args[1:])
        sh(cmd, outfile=gsnapfile, errfile=logfile)

    if opts.native:
        EYHOME = opts.eddyyeh_home
        if need_update(gsnapfile, nativefile):
            cmd = op.join(EYHOME, "convert2native.pl")
            cmd += " --gsnap {0} -o {1}".format(gsnapfile, nativefile)
            cmd += " -proc {0}".format(opts.cpus)
            sh(cmd)

    return gsnapfile, logfile
%prog align database . fasta read1 . fq read2 . fq
11,563
def get_1D_overlap(eclusters, depth=1):
    """
    Find blocks that overlap in 1D; return cliques of block ids that are in
    conflict, i.e. whenever more than `depth` blocks are open at once.
    """
    conflicts = set()
    open_blocks = set()

    # Sweep line: one (seqid, pos, 0) event per left end and one
    # (seqid, pos, 1) event per right end, scanned in sorted order.
    events = []
    for idx, (seqid, left, right) in enumerate(eclusters):
        events.append((seqid, left, 0, idx))
        events.append((seqid, right, 1, idx))
    events.sort()

    prev_seqid = ""
    for seqid, pos, is_close, idx in events:
        if seqid != prev_seqid:
            open_blocks.clear()  # new chromosome: nothing carries over
        if is_close == 0:
            open_blocks.add(idx)
        else:
            open_blocks.remove(idx)

        if len(open_blocks) > depth:
            conflicts.add(tuple(sorted(open_blocks)))

        prev_seqid = seqid

    return conflicts
Find blocks that overlap in 1D; returns cliques of block ids that are in conflict.
11,564
def make_range(clusters, extend=0):
    """
    Convert to interval ends from a list of anchors.

    `extend` modifies the xmax, ymax boundary of the box, which can be
    positive or negative — very useful when we want to make the range as
    fuzzy as we specify.
    """
    eclusters = []
    for cluster in clusters:
        xlist, ylist, scores = zip(*cluster)
        score = _score(cluster)

        xchr, xmin = min(xlist)
        xchr, xmax = max(xlist)
        ychr, ymin = min(ylist)
        ychr, ymax = max(ylist)

        # apply the fuzziness to the upper boundaries
        xmax += extend
        ymax += extend
        # a negative extend can push max below min; keep the interval valid
        if xmax < xmin:
            xmin, xmax = xmax, xmin
        if ymax < ymin:
            ymin, ymax = ymax, ymin

        eclusters.append(((xchr, xmin, xmax), (ychr, ymin, ymax), score))

    return eclusters
Convert to interval ends from a list of anchors. `extend` modifies the xmax, ymax boundary of the box, which can be positive or negative — very useful when we want to make the range as fuzzy as we specify.
11,565
def get_constraints(clusters, quota=(1, 1), Nmax=0):
    """
    Check pairwise cluster comparison; if they overlap then mark edge as
    conflict. Returns (nodes, constraints_x, constraints_y) where nodes are
    1-based (id, score) pairs.
    """
    qa, qb = quota
    # shrink ranges by Nmax so tiny overlaps are tolerated
    eclusters = make_range(clusters, extend=-Nmax)
    nodes = [(i + 1, c[-1]) for i, c in enumerate(eclusters)]

    eclusters_x, eclusters_y, scores = zip(*eclusters)

    # conflicts along each axis, bounded by the per-axis quota
    constraints_x = get_1D_overlap(eclusters_x, qa)
    constraints_y = get_1D_overlap(eclusters_y, qb)

    return nodes, constraints_x, constraints_y
Check pairwise cluster comparison if they overlap then mark edge as conflict
11,566
def format_lp(nodes, constraints_x, qa, constraints_y, qb):
    """
    Format the problem as a CPLEX-style LP file, e.g.:

    Maximize
     4 x1 + 2 x2 + 3 x3 + x4
    Subject To
     x1 + x2 <= 1
    End
    """
    # NOTE(review): cStringIO is Python-2 only; io.StringIO on Python 3
    lp_handle = cStringIO.StringIO()

    lp_handle.write("Maximize\n ")
    records = 0
    for i, score in nodes:
        lp_handle.write("+ %d x%d " % (score, i))
        # wrap the objective line every 10 terms
        records += 1
        if records % 10 == 0:
            lp_handle.write("\n")
    lp_handle.write("\n")

    num_of_constraints = 0
    lp_handle.write("Subject To\n")
    for c in constraints_x:
        additions = " + ".join("x%d" % (x + 1) for x in c)
        lp_handle.write(" %s <= %d\n" % (additions, qa))
    num_of_constraints += len(constraints_x)

    # y-axis constraints are only written when they are a distinct set
    # (for self-matches the same set object is passed for both axes)
    if not (constraints_x is constraints_y):
        for c in constraints_y:
            additions = " + ".join("x%d" % (x + 1) for x in c)
            lp_handle.write(" %s <= %d\n" % (additions, qb))
        num_of_constraints += len(constraints_y)

    print("number of variables (%d), number of constraints (%d)" %
          (len(nodes), num_of_constraints), file=sys.stderr)

    lp_handle.write("Binary\n")
    for i, score in nodes:
        lp_handle.write(" x%d\n" % i)

    lp_handle.write("End\n")

    lp_data = lp_handle.getvalue()
    lp_handle.close()

    return lp_data
Maximize 4 x1 + 2 x2 + 3 x3 + x4 Subject To x1 + x2 < = 1 End
11,567
def solve_lp(clusters, quota, work_dir="work", Nmax=0,
             self_match=False, solver="SCIP", verbose=False):
    """
    Solve the formatted LP instance, falling back to the other solver when
    the preferred one returns no results.
    """
    # NOTE(review): quota is unpacked as (qb, qa), i.e. reversed — confirm
    # callers pass it in that order
    qb, qa = quota
    nodes, constraints_x, constraints_y = get_constraints(clusters, (qa, qb), Nmax=Nmax)

    if self_match:
        # self comparison: both axes share the union of the conflicts
        constraints_x = constraints_y = constraints_x | constraints_y

    lp_data = format_lp(nodes, constraints_x, qa, constraints_y, qb)

    # NOTE(review): an unrecognized `solver` value leaves filtered_list
    # unbound and raises at the return
    if solver == "SCIP":
        filtered_list = SCIPSolver(lp_data, work_dir, verbose=verbose).results
        if not filtered_list:
            print("SCIP fails... trying GLPK", file=sys.stderr)
            filtered_list = GLPKSolver(lp_data, work_dir, verbose=verbose).results
    elif solver == "GLPK":
        filtered_list = GLPKSolver(lp_data, work_dir, verbose=verbose).results
        if not filtered_list:
            print("GLPK fails... trying SCIP", file=sys.stderr)
            filtered_list = SCIPSolver(lp_data, work_dir, verbose=verbose).results

    return filtered_list
Solve the formatted LP instance
11,568
def print_maps_by_type(map_type, number=None):
    """
    Print all available maps of a given type.

    Parameters
    ----------
    map_type : str
        One of MAP_TYPES (case-insensitive).
    number : int, optional
        If given, only show maps that define this number of colors.

    Raises
    ------
    ValueError
        If map_type is not recognized.
    """
    map_type = map_type.lower().capitalize()
    if map_type not in MAP_TYPES:
        s = 'Invalid map type, must be one of {0}'.format(MAP_TYPES)
        raise ValueError(s)

    print(map_type)

    map_keys = sorted(COLOR_MAPS[map_type].keys())

    format_str = '{0:8} : {1}'

    for mk in map_keys:
        # number keys are stored as strings; sort them numerically
        num_keys = sorted(COLOR_MAPS[map_type][mk].keys(), key=int)
        if not number or str(number) in num_keys:
            num_str = '{' + ', '.join(num_keys) + '}'
            print(format_str.format(mk, num_str))
Print all available maps of a given type .
11,569
def get_map(name, map_type, number, reverse=False):
    """
    Return a BrewerMap representation of the specified color map.

    Parameters
    ----------
    name : str
        Color map name (case-insensitive).
    map_type : str
        One of MAP_TYPES (case-insensitive).
    number : int
        Number of defined colors.
    reverse : bool, optional
        If True, return the map with colors reversed (name gets a '_r'
        suffix).

    Raises
    ------
    ValueError
        If the type, name, or number is not recognized.
    """
    number = str(number)
    map_type = map_type.lower().capitalize()

    # check for valid type
    if map_type not in MAP_TYPES:
        s = 'Invalid map type, must be one of {0}'.format(MAP_TYPES)
        raise ValueError(s)

    # map lower-cased names back to canonical names for case-insensitivity
    map_names = dict((k.lower(), k) for k in COLOR_MAPS[map_type].keys())

    # check for valid name
    if name.lower() not in map_names:
        s = 'Invalid color map name {0!r} for type {1!r}.\n'
        s = s.format(name, map_type)
        valid_names = [str(k) for k in COLOR_MAPS[map_type].keys()]
        valid_names.sort()
        s += 'Valid names are: {0}'.format(valid_names)
        raise ValueError(s)

    name = map_names[name.lower()]

    # check for valid number of colors
    if number not in COLOR_MAPS[map_type][name]:
        s = 'Invalid number for map type {0!r} and name {1!r}.\n'
        s = s.format(map_type, str(name))
        valid_numbers = [int(k) for k in COLOR_MAPS[map_type][name].keys()]
        valid_numbers.sort()
        s += 'Valid numbers are : {0}'.format(valid_numbers)
        raise ValueError(s)

    colors = COLOR_MAPS[map_type][name][number]['Colors']

    if reverse:
        name += '_r'
        colors = [x for x in reversed(colors)]

    return BrewerMap(name, map_type, colors)
Return a BrewerMap representation of the specified color map .
11,570
def _load_maps_by_type(map_type):
    """
    Load all maps of a given type into a dictionary.

    Color maps are loaded as BrewerMap objects. The dictionary is keyed by
    map name, then by integer number of defined colors; a special 'max' key
    aliases the variant with the most colors.
    """
    seq_maps = COLOR_MAPS[map_type]

    loaded_maps = {}

    for map_name in seq_maps:
        loaded_maps[map_name] = {}

        for num in seq_maps[map_name]:
            inum = int(num)
            colors = seq_maps[map_name][num]['Colors']

            bmap = BrewerMap(map_name, map_type, colors)

            loaded_maps[map_name][inum] = bmap

        # alias the largest palette under 'max' (keys are numeric strings)
        max_num = int(max(seq_maps[map_name].keys(), key=int))
        loaded_maps[map_name]['max'] = loaded_maps[map_name][max_num]

    return loaded_maps
Load all maps of a given type into a dictionary .
11,571
def mpl_colors(self):
    """
    Colors expressed on the range 0-1 as used by matplotlib.
    """
    # scale each 0-255 RGB channel down to the 0-1 floats matplotlib expects
    return [tuple(channel / 255. for channel in color)
            for color in self.colors]
Colors expressed on the range 0 - 1 as used by matplotlib .
11,572
def get_mpl_colormap(self, **kwargs):
    """
    A color map that can be used in matplotlib plots. Requires matplotlib
    to be importable. Keyword arguments are passed to
    matplotlib.colors.LinearSegmentedColormap.from_list.

    Raises
    ------
    RuntimeError
        If matplotlib was not importable at module load time.
    """
    if not HAVE_MPL:
        raise RuntimeError('matplotlib not available.')

    cmap = LinearSegmentedColormap.from_list(self.name, self.mpl_colors, **kwargs)

    return cmap
A color map that can be used in matplotlib plots . Requires matplotlib to be importable . Keyword arguments are passed to matplotlib . colors . LinearSegmentedColormap . from_list .
11,573
def show_as_blocks(self, block_size=100):
    """
    Show colors in the IPython Notebook using ipythonblocks.

    Parameters
    ----------
    block_size : int, optional
        Size of displayed blocks, in pixels.
    """
    from ipythonblocks import BlockGrid

    # one row, one block per color
    grid = BlockGrid(self.number, 1, block_size=block_size)

    for block, color in zip(grid, self.colors):
        block.rgb = color

    grid.show()
Show colors in the IPython Notebook using ipythonblocks .
11,574
def colorbrewer2_url(self):
    """
    URL that can be used to view the color map at colorbrewer2.org.
    """
    template = 'http://colorbrewer2.org/index.html?type={0}&scheme={1}&n={2}'
    return template.format(self.type.lower(), self.name, self.number)
URL that can be used to view the color map at colorbrewer2 . org .
11,575
def summary(args):
    """
    %prog summary old.new.chain old.fasta new.fasta

    Report chain file stats: number of chains, ungapped alignment size, and
    the fraction of each fasta covered by the alignment.
    """
    from jcvi.formats.fasta import summary as fsummary
    from jcvi.utils.cbook import percentage, human_size

    p = OptionParser(summary.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 3:
        sys.exit(not p.print_help())
    chainfile, oldfasta, newfasta = args
    chain = Chain(chainfile)
    ungapped, dt, dq = chain.ungapped, chain.dt, chain.dq
    print("File `{0}` contains {1} chains.".format(chainfile, len(chain)), file=sys.stderr)
    print("ungapped={0} dt={1} dq={2}".format(
          human_size(ungapped), human_size(dt), human_size(dq)), file=sys.stderr)

    # coverage of the old (target) fasta
    oldreal, oldnn, oldlen = fsummary([oldfasta, "--outfile=/dev/null"])
    print("Old fasta (`{0}`) mapped: {1}".format(
          oldfasta, percentage(ungapped, oldreal)), file=sys.stderr)

    # coverage of the new (query) fasta
    newreal, newnn, newlen = fsummary([newfasta, "--outfile=/dev/null"])
    print("New fasta (`{0}`) mapped: {1}".format(
          newfasta, percentage(ungapped, newreal)), file=sys.stderr)
%prog summary old . new . chain old . fasta new . fasta
11,576
def fromagp(args):
    """
    %prog fromagp agpfile componentfasta objectfasta

    Generate a chain file from the AGP: each non-gap AGP line becomes one
    chain mapping a component interval onto its object coordinates.
    """
    from jcvi.formats.agp import AGP
    from jcvi.formats.sizes import Sizes

    p = OptionParser(fromagp.__doc__)
    p.add_option("--novalidate", default=False, action="store_true",
                 help="Do not validate AGP")
    opts, args = p.parse_args(args)
    if len(args) != 3:
        sys.exit(not p.print_help())
    agpfile, componentfasta, objectfasta = args
    chainfile = agpfile.rsplit(".", 1)[0] + ".chain"
    fw = open(chainfile, "w")
    agp = AGP(agpfile, validate=(not opts.novalidate))
    componentsizes = Sizes(componentfasta).mapping
    objectsizes = Sizes(objectfasta).mapping
    chain = "chain"
    score = 1000
    tStrand = "+"
    id = 0
    for a in agp:
        if a.is_gap:
            continue

        tName = a.component_id
        tSize = componentsizes[tName]
        tStart = a.component_beg
        tEnd = a.component_end
        tStart -= 1  # chain coordinates are 0-based half-open

        qName = a.object
        qSize = objectsizes[qName]
        qStrand = "-" if a.orientation == "-" else "+"
        qStart = a.object_beg
        qEnd = a.object_end
        if qStrand == '-':
            # chain format counts minus-strand coordinates from the other
            # end of the sequence
            _qStart = qSize - qEnd + 1
            _qEnd = qSize - qStart + 1
            qStart, qEnd = _qStart, _qEnd
        qStart -= 1

        id += 1
        size = a.object_span
        headerline = "\t".join(str(x) for x in (
             chain, score, tName, tSize, tStrand, tStart,
             tEnd, qName, qSize, qStrand, qStart, qEnd, id
        ))
        alignmentline = size
        print(headerline, file=fw)
        print(alignmentline, file=fw)
        print(file=fw)  # a blank line terminates each chain block

    fw.close()
    logging.debug("File written to `{0}`.".format(chainfile))
%prog fromagp agpfile componentfasta objectfasta
11,577
def blat(args):
    """
    %prog blat old.fasta new.fasta

    Generate a psl file by aligning new.fasta against old.fasta with blat
    (or the multi-threaded pblat when it is on PATH).
    """
    p = OptionParser(blat.__doc__)
    p.add_option("--minscore", default=100, type="int",
                 help="Matches minus mismatches gap penalty [default: %default]")
    p.add_option("--minid", default=98, type="int",
                 help="Minimum sequence identity [default: %default]")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    oldfasta, newfasta = args
    twobitfiles = []
    for fastafile in args:
        tbfile = faToTwoBit(fastafile)
        twobitfiles.append(tbfile)
    oldtwobit, newtwobit = twobitfiles
    # prefer pblat for multi-threading when available
    cmd = "pblat -threads={0}".format(opts.cpus) if which("pblat") else "blat"
    cmd += " {0} {1}".format(oldtwobit, newfasta)
    cmd += " -tileSize=12 -minScore={0} -minIdentity={1} ".format(opts.minscore, opts.minid)
    pslfile = "{0}.{1}.psl".format(*(op.basename(x).split('.')[0]
                                     for x in (newfasta, oldfasta)))
    cmd += pslfile
    sh(cmd)
%prog blat old . fasta new . fasta
11,578
def frompsl(args):
    """
    %prog frompsl old.new.psl old.fasta new.fasta

    Generate the UCSC liftOver chain file from the psl, following the
    axtChain / chainSort / chainNet / netChainSubset pipeline. Each step is
    skipped when its output is already up to date.
    """
    from jcvi.formats.sizes import Sizes

    p = OptionParser(frompsl.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 3:
        sys.exit(not p.print_help())
    pslfile, oldfasta, newfasta = args
    pf = oldfasta.split(".")[0]

    # chain together alignments with axtChain
    chainfile = pf + ".chain"
    twobitfiles = []
    for fastafile in (oldfasta, newfasta):
        tbfile = faToTwoBit(fastafile)
        twobitfiles.append(tbfile)
    oldtwobit, newtwobit = twobitfiles
    if need_update(pslfile, chainfile):
        cmd = "axtChain -linearGap=medium -psl {0}".format(pslfile)
        cmd += " {0} {1} {2}".format(oldtwobit, newtwobit, chainfile)
        sh(cmd)

    # sort the chain file
    sortedchain = chainfile.rsplit(".", 1)[0] + ".sorted.chain"
    if need_update(chainfile, sortedchain):
        cmd = "chainSort {0} {1}".format(chainfile, sortedchain)
        sh(cmd)

    # make alignment nets from the chains
    netfile = pf + ".net"
    oldsizes = Sizes(oldfasta).filename
    newsizes = Sizes(newfasta).filename
    if need_update((sortedchain, oldsizes, newsizes), netfile):
        cmd = "chainNet {0} {1} {2}".format(sortedchain, oldsizes, newsizes)
        cmd += " {0} /dev/null".format(netfile)
        sh(cmd)

    # subset the chains to create the liftOver chain file
    liftoverfile = pf + ".liftover.chain"
    if need_update((netfile, sortedchain), liftoverfile):
        cmd = "netChainSubset {0} {1} {2}".format(netfile, sortedchain, liftoverfile)
        sh(cmd)
%prog frompsl old . new . psl old . fasta new . fasta
11,579
def lastz_to_blast(row):
    """
    Convert the lastz tabular output to the blast tabular format.
    Obsolete after LASTZ version 1.02.40.
    """
    atoms = row.strip().split("\t")
    name1, name2, coverage, identity, nmismatch, ngap, \
            start1, end1, strand1, start2, end2, strand2, score = atoms
    identity = identity.replace("%", "")
    hitlen = coverage.split("/")[1]
    score = float(score)
    same_strand = (strand1 == strand2)
    if not same_strand:
        # blast tabular encodes the minus strand by swapped subject coords
        start2, end2 = end2, start2

    evalue = blastz_score_to_ncbi_expectation(score)
    score = blastz_score_to_ncbi_bits(score)
    evalue, score = "%.2g" % evalue, "%.1f" % score
    return "\t".join((name1, name2, identity, hitlen, nmismatch, ngap,
                      start1, end1, start2, end2, evalue, score))
Convert the lastz tabular output to the blast tabular format (see headers above). Obsolete after LASTZ version 1.02.40.
11,580
def lastz_2bit(t):
    """
    Used for formats other than BLAST, i.e. lav, maf, etc. which requires the
    database file to contain a single FASTA record.

    `t` is a tuple of (target fasta, query fasta, output file, lastz binary,
    extra args, mask flag, output format).
    """
    from subprocess import PIPE

    bfasta_fn, afasta_fn, outfile, lastz_bin, extra, mask, format = t
    ref_tags = [Darkspace]
    qry_tags = [Darkspace]
    ref_tags, qry_tags = add_mask(ref_tags, qry_tags, mask=mask)
    lastz_cmd = Lastz_template.format(lastz_bin, bfasta_fn, ref_tags,
                                      afasta_fn, qry_tags)
    if extra:
        lastz_cmd += " " + extra.strip()
    lastz_cmd += " --format={0}".format(format)

    # Fix: Popen(lastz_cmd) alone leaves proc.stdout as None (no pipe) and
    # crashes the read loop below; capture stdout and let the shell parse
    # the command string.
    proc = Popen(lastz_cmd, stdout=PIPE, shell=True)

    out_fh = open(outfile, "w")
    logging.debug("job <%d> started: %s" % (proc.pid, lastz_cmd))
    for row in proc.stdout:
        out_fh.write(row)
        out_fh.flush()
    logging.debug("job <%d> finished" % proc.pid)
Used for formats other than BLAST i . e . lav maf etc . which requires the database file to contain a single FASTA record .
11,581
def augustus(args):
    """
    %prog augustus fastafile

    Run parallel AUGUSTUS predictions on split fasta chunks, merge the
    per-chunk outputs, and optionally reformat the merged gff3.
    """
    p = OptionParser(augustus.__doc__)
    p.add_option("--species", default="maize",
                 help="Use species model for prediction")
    p.add_option("--hintsfile", help="Hint-guided AUGUSTUS")
    p.add_option("--nogff3", default=False, action="store_true",
                 help="Turn --gff3=off")
    p.set_home("augustus")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    fastafile, = args
    cpus = opts.cpus
    mhome = opts.augustus_home
    gff3 = not opts.nogff3
    suffix = ".gff3" if gff3 else ".out"
    cfgfile = op.join(mhome, "config/extrinsic/extrinsic.M.RM.E.W.cfg")

    outdir = mkdtemp(dir=".")
    fs = split([fastafile, outdir, str(cpus)])  # one chunk per cpu

    augustuswrap_params = partial(augustuswrap, species=opts.species,
                                  gff3=gff3, cfgfile=cfgfile,
                                  hintsfile=opts.hintsfile)
    g = Jobs(augustuswrap_params, fs.names)
    g.run()

    gff3files = [x.rsplit(".", 1)[0] + suffix for x in fs.names]
    outfile = fastafile.rsplit(".", 1)[0] + suffix
    FileMerger(gff3files, outfile=outfile).merge()
    shutil.rmtree(outdir)

    if gff3:
        from jcvi.annotation.reformat import augustus as reformat_augustus
        reformat_outfile = outfile.replace(".gff3", ".reformat.gff3")
        reformat_augustus([outfile, "--outfile={0}".format(reformat_outfile)])
%prog augustus fastafile
11,582
def star(args):
    """
    %prog star folder reference

    Run STAR on a folder of reads against the reference: a MakeManager
    Makefile is written that builds the genome index once and aligns each
    sample to a sorted BAM.
    """
    p = OptionParser(star.__doc__)
    p.add_option("--single", default=False, action="store_true",
                 help="Single end mapping")
    p.set_fastq_names()
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    # fix: original unpacked `folder, reference = args` twice; once suffices
    folder, reference = args
    cpus = opts.cpus
    mm = MakeManager()

    num = 1 if opts.single else 2  # files per sample: single- vs paired-end
    gd = "GenomeDir"
    mkdir(gd)
    STAR = "STAR --runThreadN {0} --genomeDir {1}".format(cpus, gd)

    # step 0: build the genome index
    genomeidx = op.join(gd, "Genome")
    if need_update(reference, genomeidx):
        cmd = STAR + " --runMode genomeGenerate"
        cmd += " --genomeFastaFiles {0}".format(reference)
        mm.add(reference, genomeidx, cmd)

    # step 1: align each sample to a coordinate-sorted BAM
    for p, prefix in iter_project(folder, opts.names, num):
        pf = "{0}_star".format(prefix)
        bamfile = pf + "Aligned.sortedByCoord.out.bam"
        cmd = STAR + " --readFilesIn {0}".format(" ".join(p))
        if p[0].endswith(".gz"):
            cmd += " --readFilesCommand zcat"
        cmd += " --outSAMtype BAM SortedByCoordinate"
        cmd += " --outFileNamePrefix {0}".format(pf)
        cmd += " --twopassMode Basic"
        cmd += " --outSAMstrandField intronMotif"
        cmd += " --outFilterIntronMotifs RemoveNoncanonical"
        mm.add(p, bamfile, cmd)

    mm.write()
%prog star folder reference
11,583
def cufflinks(args):
    """
    %prog cufflinks folder reference

    Run cufflinks on every bam in the folder, then merge the per-sample
    gtfs with cuffmerge. Writes a MakeManager Makefile.
    """
    p = OptionParser(cufflinks.__doc__)
    p.add_option("--gtf", help="Reference annotation [default: %default]")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    folder, reference = args
    cpus = opts.cpus
    gtf = opts.gtf
    transcripts = "transcripts.gtf"

    mm = MakeManager()
    gtfs = []
    # one cufflinks run per input bam
    for bam in iglob(folder, "*.bam"):
        pf = op.basename(bam).split(".")[0]
        outdir = pf + "_cufflinks"
        cmd = "cufflinks"
        cmd += " -o {0}".format(outdir)
        cmd += " -p {0}".format(cpus)
        if gtf:
            cmd += " -g {0}".format(gtf)
        cmd += " --frag-bias-correct {0}".format(reference)
        cmd += " --multi-read-correct"
        cmd += " {0}".format(bam)
        cgtf = op.join(outdir, transcripts)
        mm.add(bam, cgtf, cmd)
        gtfs.append(cgtf)

    # collect the per-sample gtf paths for cuffmerge
    assemblylist = "assembly_list.txt"
    cmd = 'find . -name "{0}" > {1}'.format(transcripts, assemblylist)
    mm.add(gtfs, assemblylist, cmd)

    mergedgtf = "merged/merged.gtf"
    cmd = "cuffmerge"
    cmd += " -o merged"
    cmd += " -p {0}".format(cpus)
    if gtf:
        cmd += " -g {0}".format(gtf)
    cmd += " -s {0}".format(reference)
    cmd += " {0}".format(assemblylist)
    mm.add(assemblylist, mergedgtf, cmd)

    mm.write()
%prog cufflinks folder reference
11,584
def tophat(args):
    """
    %prog tophat folder reference

    Run tophat on a folder of reads.
    """
    from jcvi.apps.bowtie import check_index
    from jcvi.formats.fastq import guessoffset

    p = OptionParser(tophat.__doc__)
    p.add_option("--gtf", help="Reference annotation [default: %default]")
    p.add_option("--single", default=False, action="store_true",
                 help="Single end mapping")
    p.add_option("--intron", default=15000, type="int",
                 help="Max intron size [default: %default]")
    p.add_option("--dist", default=-50, type="int",
                 help="Mate inner distance [default: %default]")
    p.add_option("--stdev", default=50, type="int",
                 help="Mate standard deviation [default: %default]")
    p.set_phred()
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    num = 1 if opts.single else 2
    folder, reference = args
    reference = check_index(reference)
    for reads, prefix in iter_project(folder, n=num):
        outdir = "{0}_tophat".format(prefix)
        outfile = op.join(outdir, "accepted_hits.bam")
        if op.exists(outfile):
            logging.debug("File `{0}` found. Skipping.".format(outfile))
            continue

        cmd = "tophat -p {0}".format(opts.cpus)
        if opts.gtf:
            cmd += " -G {0}".format(opts.gtf)
        cmd += " -o {0}".format(outdir)

        if num == 1:  # single-end
            a, = reads
        else:  # paired-end: mate-distance options only make sense here
            a, b = reads
            cmd += " --max-intron-length {0}".format(opts.intron)
            cmd += " --mate-inner-dist {0}".format(opts.dist)
            cmd += " --mate-std-dev {0}".format(opts.stdev)

        # Guess the quality offset from the first read file unless given
        phred = opts.phred or str(guessoffset([a]))
        if phred == "64":
            cmd += " --phred64-quals"
        cmd += " {0} {1}".format(reference, " ".join(reads))

        sh(cmd)
def hmean_int(a, a_min=5778, a_max=1149851):
    """Harmonic mean of an array, returns the closest int."""
    from scipy.stats import hmean

    # Clamp extreme values before averaging so outliers cannot dominate
    clipped = np.clip(a, a_min, a_max)
    return int(round(hmean(clipped)))
def golden_array ( a , phi = 1.61803398875 , lb = LB , ub = UB ) : counts = np . zeros ( BB , dtype = int ) for x in a : c = int ( round ( math . log ( x , phi ) ) ) if c < lb : c = lb if c > ub : c = ub counts [ c - lb ] += 1 return counts
Given list of ints we aggregate similar values so that it becomes an array of multiples of phi where phi is the golden ratio .
11,587
def heatmap(args):
    """
    %prog heatmap input.npy genome.json

    Plot a Hi-C contact-matrix heatmap from a .npy matrix and its JSON
    header (bin starts/sizes per contig, total bins, resolution).
    """
    p = OptionParser(heatmap.__doc__)
    p.add_option("--resolution", default=500000, type="int",
                 help="Resolution when counting the links")
    p.add_option("--chr", help="Plot this contig/chr only")
    p.add_option("--nobreaks", default=False, action="store_true",
                 help="Do not plot breaks (esp. if contigs are small)")
    opts, args, iopts = p.set_image_options(args, figsize="10x10",
                                            style="white", cmap="coolwarm",
                                            format="png", dpi=120)

    if len(args) != 2:
        sys.exit(not p.print_help())

    npyfile, jsonfile = args
    contig = opts.chr
    # Resolution stored in the JSON header wins over the command-line option
    header = json.loads(open(jsonfile).read())
    resolution = header.get("resolution", opts.resolution)
    logging.debug("Resolution set to {}".format(resolution))

    A = np.load(npyfile)
    if contig:
        # Restrict to the square sub-matrix of the requested contig
        contig_start = header["starts"][contig]
        contig_size = header["sizes"][contig]
        contig_end = contig_start + contig_size
        A = A[contig_start:contig_end, contig_start:contig_end]

    # log(1 + x) transform, then clamp to [vmin, vmax] for display contrast
    B = A.astype("float64")
    B += 1.0
    B = np.log(B)
    vmin, vmax = 1, 7
    B[B < vmin] = vmin
    B[B > vmax] = vmax
    logging.debug("Matrix log-transformation and thresholding ({}-{}) done"
                  .format(vmin, vmax))

    # Canvas
    fig = plt.figure(1, (iopts.w, iopts.h))
    root = fig.add_axes([0, 0, 1, 1])  # whole canvas
    ax = fig.add_axes([.05, .05, .9, .9])  # just the heatmap

    # BUGFIX: dict.values() is a view in Python 3 and does not support +=;
    # materialize it to a list before appending the sentinel total_bins
    breaks = list(header["starts"].values())
    breaks += [header["total_bins"]]
    breaks = sorted(breaks)[1:]
    if contig or opts.nobreaks:
        breaks = []
    plot_heatmap(ax, B, breaks, iopts, binsize=resolution)

    # Title
    pf = npyfile.rsplit(".", 1)[0]
    title = pf
    if contig:
        title += "-{}".format(contig)
    root.text(.5, .98, title, color="darkslategray", size=18,
              ha="center", va="center")

    normalize_axes(root)
    image_name = title + "." + iopts.format
    # Silence chatty backend logging while saving the figure
    logging.getLogger().setLevel(logging.CRITICAL)
    savefig(image_name, dpi=iopts.dpi, iopts=iopts)
def get_seqstarts(bamfile, N):
    """
    Go through the SQ headers and pull out all sequences with size
    greater than the resolution settings, i.e. contains at least a few cells.

    Returns (seqstarts, seqsize, total_bins) where seqstarts maps each
    retained sequence name to its starting bin offset, seqsize maps the
    name to its bin count, and total_bins is the grand total.
    """
    import pysam
    bamfile = pysam.AlignmentFile(bamfile, "rb")
    seqsize = {}
    for kv in bamfile.header["SQ"]:
        if kv["LN"] < 10 * N:
            continue
        # BUGFIX: use floor division so the bin count stays an int under
        # Python 3 (true division would propagate floats into the offsets)
        seqsize[kv["SN"]] = kv["LN"] // N + 1

    allseqs = natsorted(seqsize.keys())
    allseqsizes = np.array([seqsize[x] for x in allseqs])
    # Exclusive prefix sum: roll the cumulative sum right by one
    seqstarts = np.cumsum(allseqsizes)
    seqstarts = np.roll(seqstarts, 1)
    total_bins = seqstarts[0]  # the rolled-around element is the grand total
    seqstarts[0] = 0
    seqstarts = dict(zip(allseqs, seqstarts))

    return seqstarts, seqsize, total_bins
def get_distbins(start=100, bins=2500, ratio=1.01):
    """
    Get exponentially sized bins for link length classification.

    Returns (bins, binsizes): the rounded integer bin edges starting at
    `start` and growing geometrically by `ratio`, and their widths.
    """
    b = np.ones(bins, dtype="float64")
    # BUGFIX: the first edge was hard-coded to 100, silently ignoring the
    # `start` parameter (identical behavior for the default start=100)
    b[0] = start
    for i in range(1, bins):
        b[i] = b[i - 1] * ratio
    bins = np.around(b).astype(dtype="int")
    binsizes = np.diff(bins)
    return bins, binsizes
def simulate(args):
    """
    %prog simulate test

    Simulate CLM, IDS, BED and LAST files for a fake genome, to exercise
    the scaffolding pipeline end-to-end.
    """
    p = OptionParser(simulate.__doc__)
    p.add_option("--genomesize", default=10000000, type="int",
                 help="Genome size")
    p.add_option("--genes", default=1000, type="int", help="Number of genes")
    p.add_option("--contigs", default=100, type="int",
                 help="Number of contigs")
    p.add_option("--coverage", default=10, type="int", help="Link coverage")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    pf, = args
    GenomeSize = opts.genomesize
    Genes = opts.genes
    Contigs = opts.contigs
    Coverage = opts.coverage
    PE = 500  # paired-end insert size
    Links = int(GenomeSize * Coverage / PE)

    # Contig sizes drawn from a flat Dirichlet so they sum to GenomeSize
    # BUGFIX/modernize: np.round_ was removed in NumPy 2.0; np.round is
    # the supported equivalent
    ContigSizes, = np.random.dirichlet([1] * Contigs, 1) * GenomeSize
    ContigSizes = np.array(np.round(ContigSizes, decimals=0), dtype=int)
    ContigStarts = np.zeros(Contigs, dtype=int)
    ContigStarts[1:] = np.cumsum(ContigSizes)[:-1]

    # IDS file: one row per contig with a pseudo restriction-site count
    idsfile = pf + ".ids"
    fw = open(idsfile, "w")
    print("#Contig\tRECounts\tLength", file=fw)
    for i, s in enumerate(ContigSizes):
        # NOTE(review): `s / (4 ** 4)` is float under Python 3 — confirm
        # whether RECounts is expected to be integral
        print("tig{:04d}\t{}\t{}".format(i, s / (4 ** 4), s), file=fw)
    fw.close()

    # Gene positions, uniform over the genome
    # BUGFIX/modernize: np.random.random_integers was removed in NumPy 2.0;
    # randint(0, N) draws from [0, N), matching random_integers(0, N - 1)
    GenePositions = np.sort(np.random.randint(0, GenomeSize, size=Genes))
    write_last_and_beds(pf, GenePositions, ContigStarts)

    # Link starts uniform; link sizes follow ~1/x so short links dominate
    LinkStarts = np.sort(np.random.randint(0, GenomeSize, size=Links))
    a, b = 1e-7, 1e-3
    LinkSizes = np.array(np.round(1 / ((b - a) * np.random.rand(Links) + a),
                                  decimals=0), dtype="int")
    LinkEnds = LinkStarts + LinkSizes

    # Assign each link endpoint to a contig
    LinkStartContigs = np.searchsorted(ContigStarts, LinkStarts) - 1
    LinkEndContigs = np.searchsorted(ContigStarts, LinkEnds) - 1

    # Keep only links that span two different contigs and stay in range
    InterContigLinks = (LinkStartContigs != LinkEndContigs) & \
                       (LinkEndContigs != Contigs)
    ICLinkStartContigs = LinkStartContigs[InterContigLinks]
    ICLinkEndContigs = LinkEndContigs[InterContigLinks]
    ICLinkStarts = LinkStarts[InterContigLinks]
    ICLinkEnds = LinkEnds[InterContigLinks]

    write_clm(pf, ICLinkStartContigs, ICLinkEndContigs,
              ICLinkStarts, ICLinkEnds, ContigStarts, ContigSizes)
def write_last_and_beds(pf, GenePositions, ContigStarts):
    """
    Write LAST file, query and subject BED files for the simulated genes.
    """
    qbedfile = pf + "tigs.bed"
    sbedfile = pf + "chr.bed"
    lastfile = "{}tigs.{}chr.last".format(pf, pf)
    fw_qbed = open(qbedfile, "w")
    fw_sbed = open(sbedfile, "w")
    fw_last = open(lastfile, "w")

    # Map each gene position to its containing contig.
    # NOTE(review): side='left' searchsorted sends a gene sitting exactly on
    # a contig start to the previous contig (position 0 maps to index -1) —
    # confirm whether boundary positions can occur in the simulation
    GeneContigs = np.searchsorted(ContigStarts, GenePositions) - 1
    for i, (c, gstart) in enumerate(zip(GeneContigs, GenePositions)):
        gene = "gene{:05d}".format(i)
        tig = "tig{:04d}".format(c)
        cstart = gstart - ContigStarts[c]  # contig-relative coordinate
        print("\t".join(str(x) for x in (tig, cstart, cstart + 1, gene)),
              file=fw_qbed)
        print("\t".join(str(x) for x in ("chr1", gstart, gstart + 1, gene)),
              file=fw_sbed)
        # Perfect self-hit row in LAST tabular format
        lastatoms = [gene, gene, 100] + [0] * 8 + [100]
        print("\t".join(str(x) for x in lastatoms), file=fw_last)

    fw_qbed.close()
    fw_sbed.close()
    fw_last.close()
def write_clm(pf, ICLinkStartContigs, ICLinkEndContigs, ICLinkStarts,
              ICLinkEnds, ContigStarts, ContigSizes):
    """
    Write CLM file from simulated data.

    For every inter-contig link, record its distance to both ends of each
    contig, then emit the four orientation combinations (++, +-, -+, --).
    """
    def fmt(values):
        # Keep only positive distances, ascending, as strings
        return [str(v) for v in sorted(values) if v > 0]

    links_by_pair = defaultdict(list)
    for sc, ec, ls, le in zip(ICLinkStartContigs, ICLinkEndContigs,
                              ICLinkStarts, ICLinkEnds):
        sa = ContigStarts[sc]
        sb = sa + ContigSizes[sc]
        ea = ContigStarts[ec]
        eb = ea + ContigSizes[ec]
        if le >= eb:
            # Link runs off the end of the last contig — drop it
            continue
        links_by_pair[(sc, ec)].append((ls - sa, sb - ls, le - ea, eb - le))

    fw = open(pf + ".clm", "w")
    for (sc, ec), links in sorted(links_by_pair.items()):
        nlinks = len(links)
        if not nlinks:
            continue
        sname = "tig{:04d}".format(sc)
        ename = "tig{:04d}".format(ec)
        ff = fmt([b + c for a, b, c, d in links])
        fr = fmt([b + d for a, b, c, d in links])
        rf = fmt([a + c for a, b, c, d in links])
        rr = fmt([a + d for a, b, c, d in links])
        print("{}+ {}+\t{}\t{}".format(sname, ename, nlinks, " ".join(ff)),
              file=fw)
        print("{}+ {}-\t{}\t{}".format(sname, ename, nlinks, " ".join(fr)),
              file=fw)
        print("{}- {}+\t{}\t{}".format(sname, ename, nlinks, " ".join(rf)),
              file=fw)
        print("{}- {}-\t{}\t{}".format(sname, ename, nlinks, " ".join(rr)),
              file=fw)
    fw.close()
def density(args):
    """
    %prog density test.clm

    Estimate link densities of contigs; optionally dump them to a file,
    then run the flipping heuristics once on the activated tour.
    """
    p = OptionParser(density.__doc__)
    p.add_option("--save", default=False, action="store_true",
                 help="Write log densitites of contigs to file")
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    clmfile, = args
    clm = CLMFile(clmfile)
    pf = clmfile.rsplit(".", 1)[0]

    if opts.save:
        logdensities = clm.calculate_densities()
        densityfile = pf + ".density"
        with open(densityfile, "w") as fw:
            for name, logd in logdensities.items():
                s = clm.tig_to_size[name]
                print("\t".join(str(x) for x in (name, s, logd)), file=fw)
        logging.debug("Density written to `{}`".format(densityfile))

    tourfile = pf + ".tour"
    tour = clm.activate(tourfile=tourfile, backuptour=False)
    clm.flip_all(tour)
    clm.flip_whole(tour)
    clm.flip_one(tour)
def optimize(args):
    """
    %prog optimize test.clm

    Optimize contig order and orientation using the CLM file: an optional
    GA ordering phase followed by iterative orientation flipping.
    """
    p = OptionParser(optimize.__doc__)
    p.add_option("--skiprecover", default=False, action="store_true",
                 help="Do not import 'recover' contigs")
    p.add_option("--startover", default=False, action="store_true",
                 help="Do not resume from existing tour file")
    p.add_option("--skipGA", default=False, action="store_true",
                 help="Skip GA step")
    p.set_outfile(outfile=None)
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    clmfile, = args
    runGA = not opts.skipGA
    cpus = opts.cpus

    clm = CLMFile(clmfile, skiprecover=opts.skiprecover)
    tourfile = opts.outfile or clmfile.rsplit(".", 1)[0] + ".tour"
    if opts.startover:
        # NOTE(review): this leaves tourfile=None, which open() below would
        # reject unless clm.activate() compensates — confirm against CLMFile
        tourfile = None
    tour = clm.activate(tourfile=tourfile)

    fwtour = open(tourfile, "w")
    # Record the starting configuration
    print_tour(fwtour, clm.tour, "INIT", clm.active_contigs, clm.oo,
               signs=clm.signs)

    if runGA:
        for phase in range(1, 3):
            tour = optimize_ordering(fwtour, clm, phase, cpus)
            tour = clm.prune_tour(tour, cpus)

    # Flip orientations until both heuristics stop accepting changes
    phase = 1
    while True:
        tag1, tag2 = optimize_orientations(fwtour, clm, phase, cpus)
        if tag1 == REJECT and tag2 == REJECT:
            logging.debug("Terminating ... no more {}".format(ACCEPT))
            break
        phase += 1

    fwtour.close()
def optimize_orientations(fwtour, clm, phase, cpus):
    """
    Optimize the orientations of contigs by using heuristic flipping.

    Writes a tour snapshot after each step; returns the accept/reject tags
    of the flip_whole and flip_one heuristics.  `cpus` is currently unused
    but kept for a uniform interface with optimize_ordering.
    """
    contigs = clm.active_contigs
    tour = clm.tour
    oo = clm.oo

    print_tour(fwtour, tour, "FLIPALL{}".format(phase), contigs, oo,
               signs=clm.signs)
    tag1 = clm.flip_whole(tour)
    print_tour(fwtour, tour, "FLIPWHOLE{}".format(phase), contigs, oo,
               signs=clm.signs)
    tag2 = clm.flip_one(tour)
    print_tour(fwtour, tour, "FLIPONE{}".format(phase), contigs, oo,
               signs=clm.signs)

    return tag1, tag2
def iter_last_tour(tourfile, clm):
    """
    Extract the last tour from tourfile. The clm instance is also passed in
    to see if any contig is covered in the clm.
    """
    with open(tourfile) as fp:
        last_row = fp.readlines()[-1]
    raw_tour, raw_tour_o = separate_tour_and_o(last_row)

    tour = []
    tour_o = []
    for tc, to in zip(raw_tour, raw_tour_o):
        if tc not in clm.contigs:
            # Drop contigs unknown to the CLM
            logging.debug("Contig `{}` in file `{}` not found in `{}`"
                          .format(tc, tourfile, clm.idsfile))
            continue
        tour.append(tc)
        tour_o.append(to)

    return tour, tour_o
def iter_tours(tourfile, frames=1):
    """
    Extract tours from tourfile. Tourfile contains a set of contig
    configurations generated at each iteration of the genetic algorithm.
    Each configuration has two rows: first row contains iteration id and
    score, second row contains list of contigs separated by comma.

    Yields (i, label, tour, tour_o), keeping only every `frames`-th GA
    iteration.
    """
    fp = open(tourfile)
    i = 0
    # BUGFIX: j and label were unbound if a tour row preceded the first
    # '>' header; initialize and skip such rows (valid files unchanged)
    j = 0
    label = None
    for row in fp:
        if row[0] == '>':
            label = row[1:].strip()
            if label.startswith("GA"):
                pf, j, score = label.split("-", 2)
                j = int(j)
            else:
                j = 0
            i += 1
        else:
            if label is None or j % frames != 0:
                continue
            tour, tour_o = separate_tour_and_o(row)
            yield i, label, tour, tour_o
    fp.close()
def movie(args):
    """
    %prog movie test.tour test.clm ref.contigs.last

    Plot the optimization history as a movie of per-iteration frames.
    """
    p = OptionParser(movie.__doc__)
    p.add_option("--frames", default=500, type="int",
                 help="Only plot every N frames")
    p.add_option("--engine", default="ffmpeg", choices=("ffmpeg", "gifsicle"),
                 help="Movie engine, output MP4 or GIF")
    p.set_beds()
    opts, args, iopts = p.set_image_options(args, figsize="16x8",
                                            style="white", cmap="coolwarm",
                                            format="png", dpi=300)

    if len(args) != 3:
        sys.exit(not p.print_help())

    tourfile, clmfile, lastfile = args
    tourfile = op.abspath(tourfile)
    clmfile = op.abspath(clmfile)
    lastfile = op.abspath(lastfile)
    cwd = os.getcwd()
    odir = op.basename(tourfile).rsplit(".", 1)[0] + "-movie"
    anchorsfile, qbedfile, contig_to_beds = \
        prepare_synteny(tourfile, lastfile, odir, p, opts)

    arglist = []
    for i, label, tour, tour_o in iter_tours(tourfile, frames=opts.frames):
        padi = "{:06d}".format(i)
        # Serial-numbered copies so parallel frame jobs don't collide
        a, b = op.basename(anchorsfile).split(".", 1)
        ianchorsfile = a + "_" + padi + "." + b
        symlink(anchorsfile, ianchorsfile)

        # Rebuild the query BED in this tour's contig order/orientation
        qb = Bed()
        for contig, o in zip(tour, tour_o):
            if contig not in contig_to_beds:
                continue
            bedlines = contig_to_beds[contig][:]
            if o == '-':
                bedlines.reverse()
            for x in bedlines:
                qb.append(x)
        a, b = op.basename(qbedfile).split(".", 1)
        ibedfile = a + "_" + padi + "." + b
        qb.print_to_file(ibedfile)

        image_name = padi + "." + iopts.format
        tour = ",".join(tour)
        arglist.append([[tour, clmfile, ianchorsfile,
                         "--outfile", image_name, "--label", label]])

    Jobs(movieframe, arglist).run()
    os.chdir(cwd)
    make_movie(odir, odir, engine=opts.engine, format=iopts.format)
def prepare_ec(oo, sizes, M):
    """
    This prepares EC and converts from contig_id to an index.

    Returns (tour, tour_sizes, tour_M): an index tour 0..n-1, the contig
    sizes reordered by oo, and the contact matrix reordered on both axes.
    """
    reordered_sizes = [sizes.sizes[contig] for contig in oo]
    tour_sizes = np.array(reordered_sizes)
    tour_M = M[oo, :][:, oo]
    return range(len(oo)), tour_sizes, tour_M