idx int64 0 41.2k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
5,800 | public static byte [ ] toByteArray ( InputStream source , int bufferSize ) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; copy ( source , baos , bufferSize ) ; source . close ( ) ; return baos . toByteArray ( ) ; } | Reads the given input stream into a byte array and closes the input stream |
5,801 | public static String trim ( String value , String toTrim ) { int len = toTrim . length ( ) ; while ( value . startsWith ( toTrim ) ) { value = value . substring ( len ) ; } while ( value . endsWith ( toTrim ) ) { value = value . substring ( 0 , value . length ( ) - len ) ; } return value ; } | Trims the given string from the given value |
5,802 | public static String fullPath ( File file ) { notNull ( "file" , file ) ; try { return file . getCanonicalPath ( ) ; } catch ( IOException e ) { return file . getAbsolutePath ( ) ; } } | Gets the canonical path of the given file or if that throws an exception then gets the absolute path . |
5,803 | public static ByteBuffer toByteBuffer ( String text ) { notNull ( "text" , text ) ; if ( text . isEmpty ( ) ) { return ByteBuffer . allocate ( 0 ) ; } return ByteBuffer . wrap ( text . getBytes ( UTF_8 ) ) ; } | Converts a string to a ByteBuffer with UTF - 8 encoding . |
5,804 | public boolean writeHeaders ( MuRequest request , MuResponse response , Set < Method > allowedMethods ) { boolean written = writeHeadersInternal ( request , response , null ) ; if ( written ) { response . headers ( ) . set ( HeaderNames . ACCESS_CONTROL_ALLOW_METHODS , getAllowedString ( allowedMethods ) ) ; } return written ; } | Adds CORS headers to the response if needed . |
5,805 | public PathMatch matcher ( String rawPath ) { if ( rawPath . startsWith ( "/" ) ) { rawPath = rawPath . substring ( 1 ) ; } Matcher matcher = pattern . matcher ( rawPath ) ; if ( matcher . matches ( ) ) { HashMap < String , String > params = new HashMap < > ( ) ; for ( String namedGroup : namedGroups ) { params . put ( namedGroup , urlDecode ( matcher . group ( namedGroup ) ) ) ; } return new PathMatch ( true , params , matcher ) ; } else { return new PathMatch ( false , Collections . emptyMap ( ) , matcher ) ; } } | Matches the given raw path against this pattern . |
5,806 | public CORSConfigBuilder withAllowedOrigins ( Collection < String > allowedOrigins ) { if ( allowedOrigins != null ) { for ( String allowedOrigin : allowedOrigins ) { if ( ! allowedOrigin . startsWith ( "http://" ) && ! allowedOrigin . startsWith ( "https://" ) ) { throw new IllegalArgumentException ( allowedOrigin + " is invalid: origins much have an http:// or https:// prefix" ) ; } if ( allowedOrigin . lastIndexOf ( '/' ) > 8 ) { throw new IllegalArgumentException ( allowedOrigin + " is invalid: origins should not have any paths. Example origin: https://example.org" ) ; } } } this . allowedOrigins = allowedOrigins ; return this ; } | The origin values that CORS requests are allowed for or null to allow all origins . |
5,807 | public CookieBuilder withName ( String name ) { Mutils . notNull ( "name" , name ) ; boolean matches = name . matches ( "^[0-9A-Za-z!#$%&'*+\\-.^_`|~]+$" ) ; if ( ! matches ) { throw new IllegalArgumentException ( "A cookie name can only be alphanumeric ASCII characters or any of \"!#$%&'*+-.^_`|~\" (excluding quotes)" ) ; } this . name = name ; return this ; } | Sets the name of the cookie . |
5,808 | public static String htmlEscape ( String str , boolean avoidDoubleEscape ) { if ( str == null || str . length ( ) == 0 ) { return str ; } StringBuilder sb = new StringBuilder ( str . length ( ) + 16 ) ; for ( int i = 0 ; i < str . length ( ) ; i ++ ) { char c = str . charAt ( i ) ; switch ( c ) { case '&' : if ( avoidDoubleEscape && ( isHtmlCharEntityRef ( str , i ) ) ) { sb . append ( c ) ; } else { sb . append ( "&amp;" ) ; } break ; case '<' : sb . append ( "&lt;" ) ; break ; case '>' : sb . append ( "&gt;" ) ; break ; case '"' : sb . append ( "&quot;" ) ; break ; case '\'' : sb . append ( "&#x27;" ) ; break ; case '/' : sb . append ( "&#x2F;" ) ; break ; default : sb . append ( c ) ; } } return sb . toString ( ) ; } | Escapes a string for use in an HTML entity or HTML attribute . |
5,809 | public ResourceHandler build ( ) { if ( resourceProviderFactory == null ) { throw new IllegalStateException ( "No resourceProviderFactory has been set" ) ; } return new ResourceHandler ( resourceProviderFactory , pathToServeFrom , defaultFile , extensionToResourceType ) ; } | Creates the handler |
5,810 | public RestHandlerBuilder addResource ( Object ... resources ) { Mutils . notNull ( "resources" , resources ) ; this . resources . addAll ( asList ( resources ) ) ; return this ; } | Adds one or more rest resources to this handler |
5,811 | public void scenarioNotAllowed ( Scenario scenario , String filter ) { TestState testState = this . testState . get ( ) ; notifier . fireTestIgnored ( testState . currentStep ) ; notifier . fireTestIgnored ( testState . currentScenario ) ; } | Notify the IDE that the current step and scenario are not being executed . Reason is a JBehave meta tag is filtering out this scenario . |
5,812 | private static float computeFocus ( float viewSize , float drawableSize , float currentTranslation , float focusCoordinate ) { if ( currentTranslation > 0 && focusCoordinate < currentTranslation ) { return currentTranslation ; } else if ( currentTranslation < viewSize - drawableSize && focusCoordinate > currentTranslation + drawableSize ) { return drawableSize + currentTranslation ; } return focusCoordinate ; } | If our focal point is outside the image we will project it to our image bounds |
5,813 | public static VariantAnnotation parse ( String text ) { if ( text != null ) { return parse ( Arrays . asList ( text . split ( "\n" ) ) ) ; } return null ; } | Convert a text from VEP into a single Variant Annotation |
5,814 | public static VariantFileMetadata readVariantFileMetadata ( Path path , VariantFileMetadata fileMetadata ) throws IOException { Objects . requireNonNull ( path ) ; try ( InputStream is = FileUtils . newInputStream ( path ) ) { return readVariantFileMetadata ( new VariantVcfHtsjdkReader ( is , fileMetadata . toVariantStudyMetadata ( "" ) ) , fileMetadata ) ; } } | Reads the VariantSource from a Vcf file given a file Path |
5,815 | public static VariantFileMetadata readVariantFileMetadata ( VariantReader reader , VariantFileMetadata metadata ) throws IOException { Objects . requireNonNull ( reader ) ; if ( metadata == null ) { metadata = new VariantFileMetadata ( "" , "" ) ; } try { reader . open ( ) ; reader . pre ( ) ; metadata . setHeader ( reader . getVariantFileMetadata ( ) . getHeader ( ) ) ; metadata . setSampleIds ( reader . getVariantFileMetadata ( ) . getSampleIds ( ) ) ; metadata . setStats ( reader . getVariantFileMetadata ( ) . getStats ( ) ) ; reader . post ( ) ; } finally { reader . close ( ) ; } return metadata ; } | Reads the VariantSource from a Variant file given an initialized VariantReader |
5,816 | public static List < String > getSampleNames ( VariantStudyMetadata variantStudyMetadata ) { if ( variantStudyMetadata == null ) { return null ; } List < String > sampleNames = new ArrayList < > ( ) ; if ( variantStudyMetadata . getIndividuals ( ) != null ) { for ( org . opencb . biodata . models . metadata . Individual individual : variantStudyMetadata . getIndividuals ( ) ) { for ( Sample sample : individual . getSamples ( ) ) { if ( ! StringUtils . isEmpty ( sample . getId ( ) ) ) { sampleNames . add ( sample . getId ( ) ) ; } } } } return sampleNames ; } | Get sample names from a variant study metadata . |
5,817 | public List < MeasureTraitType . CustomAssertionScore > getCustomAssertionScore ( ) { if ( customAssertionScore == null ) { customAssertionScore = new ArrayList < MeasureTraitType . CustomAssertionScore > ( ) ; } return this . customAssertionScore ; } | Gets the value of the customAssertionScore property . |
5,818 | public List < MeasureSetType . Measure > getMeasure ( ) { if ( measure == null ) { measure = new ArrayList < MeasureSetType . Measure > ( ) ; } return this . measure ; } | Gets the value of the measure property . |
5,819 | public List < SampleType . Age > getAge ( ) { if ( age == null ) { age = new ArrayList < SampleType . Age > ( ) ; } return this . age ; } | Gets the value of the age property . |
5,820 | public static Map < Variant , List < String > > alldeNovoVariants ( Pedigree pedigree , Iterator < Variant > variantIterator ) { PedigreeManager pedigreeManager = new PedigreeManager ( pedigree ) ; List < Member > allChildren = pedigreeManager . getWithoutChildren ( ) ; Map < Variant , List < String > > retDenovoVariants = new HashMap < > ( ) ; while ( variantIterator . hasNext ( ) ) { Variant variant = variantIterator . next ( ) ; List < String > individualIds = new ArrayList < > ( ) ; Queue < String > queue = new LinkedList < > ( ) ; queue . addAll ( allChildren . stream ( ) . map ( Member :: getId ) . collect ( Collectors . toList ( ) ) ) ; while ( ! queue . isEmpty ( ) ) { String individualId = queue . remove ( ) ; Member childMember = pedigreeManager . getIndividualMap ( ) . get ( individualId ) ; if ( isDeNovoVariant ( childMember , variant ) ) { individualIds . add ( individualId ) ; } if ( childMember . getFather ( ) != null ) { if ( ! queue . contains ( childMember . getFather ( ) . getId ( ) ) ) { queue . add ( childMember . getFather ( ) . getId ( ) ) ; } } if ( childMember . getMother ( ) != null ) { if ( ! queue . contains ( childMember . getMother ( ) . getId ( ) ) ) { queue . add ( childMember . getMother ( ) . getId ( ) ) ; } } } if ( ! individualIds . isEmpty ( ) ) { retDenovoVariants . put ( variant , individualIds ) ; } } return retDenovoVariants ; } | Get all the de novo variants identified . |
5,821 | public static List < Variant > deNovoVariants ( Member member , Iterator < Variant > variantIterator ) { List < Variant > variantList = new ArrayList < > ( ) ; while ( variantIterator . hasNext ( ) ) { Variant variant = variantIterator . next ( ) ; if ( isDeNovoVariant ( member , variant ) ) { variantList . add ( variant ) ; } } return variantList ; } | Get all the de novo variants identified for the proband . |
5,822 | private static void validateGenotypes ( Map < String , Set < Integer > > gt , PedigreeManager pedigreeManager ) { List < Member > withoutChildren = pedigreeManager . getWithoutChildren ( ) ; Queue < String > queue = new LinkedList < > ( ) ; for ( Member member : withoutChildren ) { queue . add ( member . getId ( ) ) ; } while ( ! queue . isEmpty ( ) ) { String individualId = queue . remove ( ) ; Member member = pedigreeManager . getIndividualMap ( ) . get ( individualId ) ; processIndividual ( member , gt ) ; if ( member . getFather ( ) != null ) { if ( ! queue . contains ( member . getFather ( ) . getId ( ) ) ) { queue . add ( member . getFather ( ) . getId ( ) ) ; } } if ( member . getMother ( ) != null ) { if ( ! queue . contains ( member . getMother ( ) . getId ( ) ) ) { queue . add ( member . getMother ( ) . getId ( ) ) ; } } } } | Validates and removes any genotypes that do not make sense given the parent - child relation . This method should only be called under dominant , recessive and x - linked modes of inheritance . It does not support y - linked modes where the mother does not have a possible genotype . |
5,823 | public List < IsoformType . Name > getName ( ) { if ( name == null ) { name = new ArrayList < IsoformType . Name > ( ) ; } return this . name ; } | Gets the value of the name property . |
5,824 | private void obtainQualityMarks ( ) { int total = 0 ; for ( int i = 0 ; i < this . qualityScoresArray . length ; i ++ ) { total += this . qualityScoresArray [ i ] ; this . maximumQuality = Math . max ( this . qualityScoresArray [ i ] , this . maximumQuality ) ; this . minimumQuality = Math . min ( this . qualityScoresArray [ i ] , this . minimumQuality ) ; } this . averageQuality = ( double ) total / this . quality . length ( ) ; } | this method obtains the minimum maximum and average quality values from the quality scores array |
5,825 | private void obtainQualityScoresFromQualityString ( ) { int total = 0 ; this . maximumQuality = Integer . MIN_VALUE ; this . minimumQuality = Integer . MAX_VALUE ; qualityScoresArray = new int [ this . quality . length ( ) ] ; for ( int i = 0 ; i < this . quality . length ( ) ; i ++ ) { char c = this . quality . charAt ( i ) ; qualityScoresArray [ i ] = c - FastQ . SCALE_OFFSET [ this . encoding ] ; total += this . qualityScoresArray [ i ] ; this . maximumQuality = Math . max ( this . qualityScoresArray [ i ] , this . maximumQuality ) ; this . minimumQuality = Math . min ( this . qualityScoresArray [ i ] , this . minimumQuality ) ; } this . averageQuality = ( double ) total / this . quality . length ( ) ; } | This method obtains the quality scores array corresponding to the quality char sequence depending on the sequence s encoding and calculates sequence s average quality and maximum and minimum individual quality scores |
5,826 | public void changeEncoding ( int newEncoding ) { if ( this . encoding != newEncoding ) { int oldEncoding = this . encoding ; this . transformQualityScoresArray ( oldEncoding , newEncoding ) ; this . obtainQualityStringFromQualityScoresArray ( newEncoding ) ; this . encoding = newEncoding ; } } | Change the encoding of the sequence and recalculates the quality scores array |
5,827 | private void transformQualityScoresArray ( int oldEncoding , int newEncoding ) { if ( FastQ . SCALE_SCORE [ oldEncoding ] != FastQ . SCALE_SCORE [ newEncoding ] ) { Map < Integer , Integer > scoreMap ; if ( FastQ . SCALE_SCORE [ oldEncoding ] == FastQ . PHRED_SCORE_TYPE ) { scoreMap = FastQ . phredToSolexaMap ; } else { scoreMap = FastQ . solexaToPhredMap ; } for ( int i = 0 ; i < this . qualityScoresArray . length ; i ++ ) { if ( qualityScoresArray [ i ] < 10 ) { qualityScoresArray [ i ] = scoreMap . get ( qualityScoresArray [ i ] ) ; } } } } | Transform the quality scores array if the score types of the encodings are different |
5,828 | private void obtainQualityStringFromQualityScoresArray ( int encoding ) { char [ ] qualityChars = new char [ this . qualityScoresArray . length ] ; for ( int i = 0 ; i < this . qualityScoresArray . length ; i ++ ) { qualityChars [ i ] = ( char ) ( this . qualityScoresArray [ i ] + FastQ . SCALE_OFFSET [ encoding ] ) ; } this . quality = new String ( qualityChars ) ; } | Obtain the quality string in the indicated quality encoding from the quality scores array |
5,829 | public void trimSequenceTail ( int maxSize ) { this . setSeq ( this . sequence . substring ( 0 , maxSize ) ) ; this . setQuality ( this . quality . substring ( 0 , maxSize ) ) ; } | Trim the sequence s tail if it is longer than a determined size |
5,830 | public void rTrim ( int n ) { super . rTrim ( n ) ; this . setQuality ( this . quality . substring ( 0 , this . quality . length ( ) - n ) ) ; } | Trim the sequence removing the last n characters |
5,831 | public float getSequenceTailAverageQuality ( int numElements ) { float quality = - 1 ; if ( this . size ( ) >= numElements ) { int totalTailQuality = 0 ; for ( int i = 1 ; i <= numElements ; i ++ ) { totalTailQuality += this . qualityScoresArray [ this . size ( ) - i ] ; } quality = totalTailQuality / numElements ; } return quality ; } | Returns the average quality of the last elements of the sequence |
5,832 | public static boolean validQualityEncoding ( int encoding ) { return ( encoding == FastQ . SANGER_ENCODING || encoding == FastQ . SOLEXA_ENCODING || encoding == FastQ . ILLUMINA_ENCODING ) ; } | Check if the given quality encoding type is valid |
5,833 | static VcfSliceProtos . Fields buildDefaultFields ( List < Variant > variants ) { return buildDefaultFields ( variants , null , null ) ; } | With test visibility |
5,834 | public static Path index ( Path wigPath ) throws Exception { Path dbPath = wigPath . getParent ( ) . resolve ( WIG_DB ) ; ChunkFrequencyManager chunkFrequencyManager = new ChunkFrequencyManager ( dbPath ) ; int chunkSize = chunkFrequencyManager . getChunkSize ( ) ; String chromosome = null ; int step , span = 1 , start = 1 , end ; int startChunk , endChunk , partial ; boolean empty = true ; List < Integer > values = new ArrayList < > ( ) ; BufferedReader bufferedReader = FileUtils . newBufferedReader ( wigPath ) ; String line = bufferedReader . readLine ( ) ; while ( line != null ) { if ( WigUtils . isHeaderLine ( line ) ) { if ( ! empty ) { System . out . println ( "\tStoring " + values . size ( ) + " values for " + chromosome ) ; computeAndSaveMeanValues ( values , wigPath , chromosome , chunkSize , chunkFrequencyManager ) ; } System . out . println ( "Loading wig data:" + line ) ; if ( WigUtils . isVariableStep ( line ) ) { throw new UnsupportedOperationException ( "Wig coverage file with 'variableStep'" + " is not supported yet." ) ; } step = WigUtils . getStep ( line ) ; span = WigUtils . getSpan ( line ) ; start = WigUtils . getStart ( line ) ; chromosome = WigUtils . getChromosome ( line ) ; empty = true ; values = new ArrayList < > ( ) ; if ( start <= 0 ) { throw new UnsupportedOperationException ( "Wig coverage file with" + " 'start' <= 0, it must be greater than 0." ) ; } if ( start != 1 ) { for ( int i = 0 ; i < start ; i ++ ) { values . add ( 0 ) ; } } if ( step != 1 ) { throw new UnsupportedOperationException ( "Wig coverage file with" + " 'step' != 1 is not supported yet." ) ; } line = bufferedReader . readLine ( ) ; } else { if ( values != null ) { end = start + span - 1 ; startChunk = start / chunkSize ; endChunk = end / chunkSize ; for ( int chunk = startChunk , pos = startChunk * chunkSize ; chunk <= endChunk ; chunk ++ , pos += chunkSize ) { partial = Math . min ( end , pos + chunkSize ) - Math . max ( start , pos ) ; values . 
add ( partial * Integer . parseInt ( line ) ) ; empty = false ; } start += span ; } line = bufferedReader . readLine ( ) ; } } if ( ! empty ) { System . out . println ( "\tStoring " + values . size ( ) + " values for " + chromosome ) ; computeAndSaveMeanValues ( values , wigPath , chromosome , chunkSize , chunkFrequencyManager ) ; } return dbPath ; } | Index the entire Wig file content in a SQLite database managed by the ChunkFrequencyManager . |
5,835 | public static String getChromosome ( String headerLine ) throws InvalidObjectException { String chromosome = getHeaderInfo ( "chrom" , headerLine ) ; if ( chromosome == null ) { throw new InvalidObjectException ( "WigFile format, it could not find 'chrom' in the header line" ) ; } return chromosome ; } | Extract the chromosome value from the given Wig header line . |
5,836 | public static int getStart ( String headerLine ) throws InvalidObjectException { String str = getHeaderInfo ( "start" , headerLine ) ; if ( str == null ) { throw new InvalidObjectException ( "WigFile format, it could not find 'start' in the header line" ) ; } return Integer . parseInt ( str ) ; } | Extract the start value from the given Wig header line . |
5,837 | private static String getHeaderInfo ( String name , String headerLine ) { String [ ] fields = headerLine . split ( "[\t ]" ) ; for ( String field : fields ) { if ( field . startsWith ( name + "=" ) ) { String [ ] subfields = field . split ( "=" ) ; return subfields [ 1 ] ; } } return null ; } | Get information from a Wig header line . |
5,838 | public List < ProteinType . AlternativeName > getAlternativeName ( ) { if ( alternativeName == null ) { alternativeName = new ArrayList < ProteinType . AlternativeName > ( ) ; } return this . alternativeName ; } | Gets the value of the alternativeName property . |
5,839 | public List < ProteinType . SubmittedName > getSubmittedName ( ) { if ( submittedName == null ) { submittedName = new ArrayList < ProteinType . SubmittedName > ( ) ; } return this . submittedName ; } | Gets the value of the submittedName property . |
5,840 | public List < ProteinType . Domain > getDomain ( ) { if ( domain == null ) { domain = new ArrayList < ProteinType . Domain > ( ) ; } return this . domain ; } | Gets the value of the domain property . |
5,841 | public List < ProteinType . Component > getComponent ( ) { if ( component == null ) { component = new ArrayList < ProteinType . Component > ( ) ; } return this . component ; } | Gets the value of the component property . |
5,842 | public List < JAXBElement < String > > getProteinNameAndGeneSymbolAndUniprotId ( ) { if ( proteinNameAndGeneSymbolAndUniprotId == null ) { proteinNameAndGeneSymbolAndUniprotId = new ArrayList < JAXBElement < String > > ( ) ; } return this . proteinNameAndGeneSymbolAndUniprotId ; } | Gets the value of the proteinNameAndGeneSymbolAndUniprotId property . |
5,843 | private void checkQualitySize ( String id , String sequence , String quality ) throws FileFormatException { if ( sequence . length ( ) != quality . length ( ) ) { throw new FileFormatException ( "Quality and Sequence lenghts are different in Fasta " + id ) ; } } | Check that the sequence and quality strings have the same length |
5,844 | private void parseStructuralVariationAttributes ( String key , String value ) { if ( key == null || value == null ) { return ; } switch ( key ) { case SVINSSEQ_INFO : if ( VariantType . INSERTION . equals ( type ) ) { if ( alternates . size ( ) > 1 ) { throw new IllegalArgumentException ( "Found SVINSSEQ in a multi allelic variant!" ) ; } else { setCall ( start + ":" + reference + ":" + alternates . get ( 0 ) + ":" + 0 ) ; setAlternate ( reference + value ) ; } } break ; case LEFT_SVINSSEQ_INFO : if ( VariantType . INSERTION . equals ( type ) ) { sv . setLeftSvInsSeq ( value ) ; } break ; case RIGHT_SVINSSEQ_INFO : if ( VariantType . INSERTION . equals ( type ) ) { sv . setRightSvInsSeq ( value ) ; } break ; case CIPOS_INFO : String [ ] parts = value . split ( "," ) ; sv . setCiStartLeft ( start + Integer . parseInt ( parts [ 0 ] ) ) ; sv . setCiStartRight ( start + Integer . parseInt ( parts [ 1 ] ) ) ; break ; case CIEND_INFO : parts = value . split ( "," ) ; sv . setCiEndLeft ( end + Integer . parseInt ( parts [ 0 ] ) ) ; sv . setCiEndRight ( end + Integer . parseInt ( parts [ 1 ] ) ) ; break ; } } | Be aware! this method may change the main alternate |
5,845 | static String getQuality ( float quality ) { quality -= 1 ; if ( quality == - 1 ) { return null ; } else { String q = Float . toString ( quality ) ; if ( q . endsWith ( ".0" ) ) { return q . substring ( 0 , q . lastIndexOf ( "." ) ) ; } else { return q ; } } } | Decodes the Quality float value . |
5,846 | public List < ObservationSet . Method > getMethod ( ) { if ( method == null ) { method = new ArrayList < ObservationSet . Method > ( ) ; } return this . method ; } | Gets the value of the method property . |
5,847 | public List < ObservationSet . ObservedData > getObservedData ( ) { if ( observedData == null ) { observedData = new ArrayList < ObservationSet . ObservedData > ( ) ; } return this . observedData ; } | Gets the value of the observedData property . |
5,848 | static boolean isValidBase ( char base , boolean acceptAmbiguous ) { boolean isValidBase = PRECISE_BASES . contains ( base ) ; if ( ! isValidBase && acceptAmbiguous ) { isValidBase = N . equals ( base ) || AMBIGUOUS_BASES . contains ( base ) ; } return isValidBase ; } | Only accepts as a valid base A C G and T or IUPAC ambiguous if enabled |
5,849 | private boolean isAlleleCorrect ( String allele , boolean acceptAmbiguousBases ) { if ( StringUtils . isNotEmpty ( allele ) ) { for ( char base : allele . toCharArray ( ) ) { if ( ! isValidBase ( base , acceptAmbiguousBases ) ) { return false ; } } } return true ; } | Checks if all bases in the allele are valid bases . |
5,850 | public static void completeDifferencesFromReference ( Alignment alignment , String referenceSequence , long referenceSequenceStart ) throws ShortReferenceSequenceException { int offset = ( int ) ( alignment . getUnclippedStart ( ) - referenceSequenceStart ) ; String subRef ; String subRead ; if ( ( alignment . getFlags ( ) & Alignment . SEGMENT_UNMAPPED ) != 0 ) { return ; } List < Alignment . AlignmentDifference > newDifferences = new LinkedList < > ( ) ; for ( Alignment . AlignmentDifference alignmentDifference : alignment . getDifferences ( ) ) { Alignment . AlignmentDifference currentDifference = null ; switch ( alignmentDifference . getOp ( ) ) { case Alignment . AlignmentDifference . DELETION : try { if ( ! alignmentDifference . isAllSequenceStored ( ) ) { subRef = referenceSequence . substring ( alignmentDifference . getPos ( ) + offset , alignmentDifference . getPos ( ) + offset + alignmentDifference . getLength ( ) ) ; alignmentDifference . setSeq ( subRef ) ; } } catch ( StringIndexOutOfBoundsException e ) { throw new ShortReferenceSequenceException ( "ReferenceSequence Out of Bounds in Alignment.completeDifferences()" ) ; } currentDifference = alignmentDifference ; break ; case Alignment . AlignmentDifference . MATCH_MISMATCH : case Alignment . AlignmentDifference . MISMATCH : try { subRef = referenceSequence . substring ( alignmentDifference . getPos ( ) + offset , alignmentDifference . getPos ( ) + offset + alignmentDifference . getLength ( ) ) ; } catch ( StringIndexOutOfBoundsException e ) { throw new ShortReferenceSequenceException ( "ReferenceSequence Out of Bounds in Alignment.completeDifferences()" ) ; } subRead = alignmentDifference . getSeq ( ) ; newDifferences . addAll ( getMismatchDiff ( subRef , subRead , alignmentDifference . getPos ( ) ) ) ; break ; case Alignment . AlignmentDifference . HARD_CLIPPING : currentDifference = alignmentDifference ; break ; case Alignment . AlignmentDifference . 
SOFT_CLIPPING : currentDifference = alignmentDifference ; try { if ( alignmentDifference . isAllSequenceStored ( ) ) { subRef = referenceSequence . substring ( alignmentDifference . getPos ( ) + offset , alignmentDifference . getPos ( ) + offset + alignmentDifference . getLength ( ) ) ; if ( subRef . equals ( alignmentDifference . getSeq ( ) ) ) { currentDifference . setSeq ( null ) ; } } } catch ( StringIndexOutOfBoundsException e ) { } break ; case Alignment . AlignmentDifference . INSERTION : case Alignment . AlignmentDifference . PADDING : case Alignment . AlignmentDifference . SKIPPED_REGION : currentDifference = alignmentDifference ; break ; } if ( currentDifference != null ) { newDifferences . add ( currentDifference ) ; } } alignment . setDifferences ( newDifferences ) ; } | Compares all differences with the referenceSequence in order to reduce the stored sequence . Also adds sequence for deletion differences . |
5,851 | public boolean write ( Variant variant ) { StudyEntry studyEntry = variant . getStudy ( study ) ; if ( studyEntry == null ) { return true ; } dataOutputStream . print ( variant . getChromosome ( ) ) ; dataOutputStream . print ( TAB ) ; dataOutputStream . print ( variant . getStart ( ) ) ; dataOutputStream . print ( TAB ) ; if ( variant . getReference ( ) . isEmpty ( ) ) { dataOutputStream . print ( MISSING_ALLELE ) ; } else { dataOutputStream . print ( variant . getReference ( ) ) ; } dataOutputStream . print ( TAB ) ; if ( variant . getAlternate ( ) . isEmpty ( ) ) { dataOutputStream . print ( MISSING_ALLELE ) ; } else { dataOutputStream . print ( variant . getAlternate ( ) ) ; } dataOutputStream . print ( TAB ) ; for ( Iterator < String > cohortIterator = cohorts . iterator ( ) ; cohortIterator . hasNext ( ) ; ) { String cohort = cohortIterator . next ( ) ; VariantStats stats = studyEntry . getStats ( cohort ) ; if ( stats == null ) { dataOutputStream . print ( ".\t.\t.\t.\t." ) ; } else { int an = stats . getAlleleCount ( ) ; Integer ac = stats . getAltAlleleCount ( ) ; Float af = stats . getAltAlleleFreq ( ) ; if ( an >= 0 ) { dataOutputStream . print ( an ) ; } else { dataOutputStream . print ( MISSING_NUMBER ) ; } dataOutputStream . print ( TAB ) ; if ( ac >= 0 ) { dataOutputStream . print ( ac ) ; } else { dataOutputStream . print ( MISSING_NUMBER ) ; } dataOutputStream . print ( TAB ) ; if ( af >= 0 ) { dataOutputStream . print ( DECIMAL_FORMAT . format ( af ) ) ; } else { dataOutputStream . print ( MISSING_NUMBER ) ; } dataOutputStream . print ( TAB ) ; if ( stats . getGenotypeFreq ( ) != null && ! stats . getGenotypeFreq ( ) . isEmpty ( ) ) { PopulationFrequency frequency = converter . convert ( "" , "" , stats , "" , "" ) ; dataOutputStream . print ( frequency . getHetGenotypeFreq ( ) + TAB + frequency . getAltHomGenotypeFreq ( ) ) ; } else { dataOutputStream . print ( ".\t." ) ; } } if ( cohortIterator . hasNext ( ) ) { dataOutputStream . 
print ( TAB ) ; } else { dataOutputStream . print ( "\n" ) ; } } writtenVariants ++ ; return true ; } | Exports a variant . |
5,852 | public void insert ( Path filePath , String chromosome , List < Integer > values ) { Connection conn = null ; try { Class . forName ( "org.sqlite.JDBC" ) ; conn = DriverManager . getConnection ( "jdbc:sqlite:" + databasePath ) ; int fileId = getFileId ( filePath , conn ) ; if ( fileId == - 1 ) { throw new InternalError ( "Impossible to insert file '" + filePath + "' into the database '" + databasePath + "'" ) ; } if ( ! chunkIdMap . containsKey ( buildChunkMapKey ( chromosome , 1 ) ) ) { updateChromosomeChunks ( chromosome , values . size ( ) , conn ) ; } resetCounters ( ) ; int chunk64k = 0 ; PreparedStatement insertCoverage = conn . prepareStatement ( "insert into mean_coverage (chunk_id, " + " file_id, v1, v2, v3, v4, v5, v6, v7, v8) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" ) ; conn . setAutoCommit ( false ) ; for ( int v : values ) { chunkValues [ counter1 ] = ( byte ) Math . min ( v , 255 ) ; if ( ++ counter1 == 8 ) { packedChunkValues [ counter2 ] = bytesToLong ( chunkValues ) ; if ( ++ counter2 == 8 ) { insertPackedChunkValues ( insertCoverage , getChunkId ( chromosome , chunk64k ) , fileId , packedChunkValues ) ; chunk64k ++ ; Arrays . fill ( packedChunkValues , 0 ) ; counter2 = 0 ; } counter1 = 0 ; Arrays . fill ( chunkValues , ( byte ) 0 ) ; } } if ( counter1 > 0 || counter2 > 0 ) { packedChunkValues [ counter2 ] = bytesToLong ( chunkValues ) ; insertPackedChunkValues ( insertCoverage , getChunkId ( chromosome , chunk64k ) , fileId , packedChunkValues ) ; } insertCoverage . executeBatch ( ) ; conn . commit ( ) ; conn . setAutoCommit ( true ) ; conn . close ( ) ; } catch ( SQLException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . getMessage ( ) ) ; cleanConnectionClose ( conn , null ) ; } catch ( ClassNotFoundException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . getMessage ( ) ) ; e . printStackTrace ( ) ; } } | Insert values for the given chromosome a value per chunk . |
5,853 | public ChunkFrequency query ( Region region , Path filePath , int windowSize ) { return query ( region , filePath , windowSize , mean ( ) ) ; } | Query values for the given region and file . Values are grouped according to the windowSize and then a mean value is computed . |
5,854 | public BiFunction < Integer , Integer , Short > mean ( ) { return ( a , b ) -> ( short ) Math . min ( Math . round ( 1.0f * a / b ) , 255 ) ; } | Aggregation function to compute the mean for the chunk values . |
5,855 | public BiFunction < Integer , Integer , Short > addition ( ) { return ( a , b ) -> ( short ) Math . min ( a , 255 ) ; } | Aggregation function to compute the addition for the chunk values . |
5,856 | private void init ( ) { if ( databasePath . toFile ( ) . exists ( ) ) { initChunkMap ( ) ; int chunkSize = readChunkSize ( ) ; assert ( this . chunkSize == chunkSize ) ; if ( chunkSize == - 1 ) { throw new InternalError ( "Impossible to read chunk size from the database '" + databasePath + "'" ) ; } this . chunkSize = chunkSize ; logger . debug ( "Database was initialized previously. Nothing to do." ) ; return ; } Statement stmt = null ; Connection connection = null ; try { Class . forName ( "org.sqlite.JDBC" ) ; connection = DriverManager . getConnection ( "jdbc:sqlite:" + databasePath ) ; stmt = connection . createStatement ( ) ; String sql = "CREATE TABLE chunk " + "(id INTEGER PRIMARY KEY AUTOINCREMENT," + "chunk_id VARCHAR NOT NULL," + "chromosome VARCHAR NOT NULL, " + "start INTEGER NOT NULL, " + "end INTEGER NOT NULL); " + "CREATE UNIQUE INDEX chunk_id_idx ON chunk (chunk_id);" + "CREATE INDEX chrom_start_end_idx ON chunk (chromosome, start, end);" ; stmt . executeUpdate ( sql ) ; sql = "CREATE TABLE info " + "(id INTEGER PRIMARY KEY AUTOINCREMENT," + "name VARCHAR NOT NULL," + "value VARCHAR NOT NULL);" + "CREATE UNIQUE INDEX name_idx ON info (name);" ; stmt . executeUpdate ( sql ) ; sql = "insert into info (name, value) values ('chunkSize', '" + chunkSize + "');" ; stmt . executeUpdate ( sql ) ; sql = "CREATE TABLE file " + "(id INTEGER PRIMARY KEY AUTOINCREMENT," + "path VARCHAR NOT NULL," + "name VARCHAR NOT NULL);" + "CREATE UNIQUE INDEX path_idx ON file (path);" ; stmt . executeUpdate ( sql ) ; sql = "CREATE TABLE mean_coverage " + "(chunk_id INTEGER," + "file_id INTEGER," + "v1 INTEGER, " + "v2 INTEGER, " + "v3 INTEGER, " + "v4 INTEGER, " + "v5 INTEGER, " + "v6 INTEGER, " + "v7 INTEGER, " + "v8 INTEGER," + "PRIMARY KEY(chunk_id, file_id));" ; stmt . executeUpdate ( sql ) ; stmt . close ( ) ; connection . close ( ) ; } catch ( SQLException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . 
getMessage ( ) ) ; cleanConnectionClose ( connection , stmt ) ; } catch ( ClassNotFoundException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . getMessage ( ) ) ; e . printStackTrace ( ) ; } logger . debug ( "Initialized database successfully" ) ; } | Initialize database creating tables . |
5,857 | private void initChunkMap ( ) { Statement stmt = null ; Connection connection = null ; try { Class . forName ( "org.sqlite.JDBC" ) ; connection = DriverManager . getConnection ( "jdbc:sqlite:" + databasePath ) ; String sql = "SELECT id, chromosome, start FROM chunk" ; stmt = connection . createStatement ( ) ; ResultSet rs = stmt . executeQuery ( sql ) ; chunkIdMap = new HashMap < > ( ) ; while ( rs . next ( ) ) { chunkIdMap . put ( rs . getString ( "chromosome" ) + "_" + rs . getInt ( "start" ) , rs . getInt ( "id" ) ) ; } } catch ( SQLException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . getMessage ( ) ) ; cleanConnectionClose ( connection , stmt ) ; } catch ( ClassNotFoundException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . getMessage ( ) ) ; e . printStackTrace ( ) ; } } | Initialize the map of chunks . |
5,858 | private void updateChromosomeChunks ( String chromosome , int numChunks , Connection conn ) throws SQLException { String minorChunkSuffix = ( chunkSize / 1000 ) * 64 + "k" ; String sql = "insert into chunk (chunk_id, chromosome, start, end) values (?, ?, ?, ?)" ; PreparedStatement insertChunk = conn . prepareStatement ( sql ) ; conn . setAutoCommit ( false ) ; for ( int i = 0 , j = 1 ; i < numChunks ; i ++ , j += chunkSize64k ) { String chunkId = chromosome + "_" + i + "_" + minorChunkSuffix ; insertChunk . setString ( 1 , chunkId ) ; insertChunk . setString ( 2 , chromosome ) ; insertChunk . setInt ( 3 , j ) ; insertChunk . setInt ( 4 , j + chunkSize64k - 1 ) ; insertChunk . addBatch ( ) ; } insertChunk . executeBatch ( ) ; conn . commit ( ) ; conn . setAutoCommit ( true ) ; initChunkMap ( ) ; } | Insert all the chunks for the given chromosome and update the internal map of chunks if necessary . |
5,859 | private int readChunkSize ( ) { int chunkSize = - 1 ; Connection conn = null ; try { Class . forName ( "org.sqlite.JDBC" ) ; conn = DriverManager . getConnection ( "jdbc:sqlite:" + databasePath ) ; Statement stmt = conn . createStatement ( ) ; ResultSet rs = stmt . executeQuery ( "SELECT value FROM info where name = 'chunkSize';" ) ; while ( rs . next ( ) ) { chunkSize = Integer . parseInt ( rs . getString ( "value" ) ) ; break ; } conn . close ( ) ; } catch ( SQLException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . getMessage ( ) ) ; cleanConnectionClose ( conn , null ) ; } catch ( ClassNotFoundException e ) { logger . error ( e . getClass ( ) . getName ( ) + ": " + e . getMessage ( ) ) ; e . printStackTrace ( ) ; } return chunkSize ; } | Read chunk size from the database . |
5,860 | private void cleanConnectionClose ( Connection connection , Statement stmt ) { try { if ( connection != null ) { connection . rollback ( ) ; } if ( stmt != null ) { stmt . close ( ) ; } if ( connection != null ) { connection . close ( ) ; } } catch ( SQLException e1 ) { e1 . printStackTrace ( ) ; } } | Close connection and sql statement in a clean way . |
5,861 | private int getFileId ( Path filePath , Connection conn ) throws SQLException { int fileId = readFileId ( filePath , conn ) ; if ( fileId == - 1 ) { Statement stmt = conn . createStatement ( ) ; String insertFileSql = "insert into file (path, name) values ('" + filePath . getParent ( ) + "', '" + filePath . getFileName ( ) + "');" ; stmt . executeUpdate ( insertFileSql ) ; stmt . close ( ) ; fileId = readFileId ( filePath , conn ) ; if ( fileId == - 1 ) { throw new InternalError ( "Impossible to read the ID for the file " + filePath + " in database " + databasePath ) ; } stmt . close ( ) ; } return fileId ; } | Read file ID from the database . If it does not exits then insert it to the database and return the file ID . |
5,862 | private int readFileId ( Path filePath , Connection conn ) { int fileId = - 1 ; try { Statement stmt = conn . createStatement ( ) ; ResultSet rs = stmt . executeQuery ( "SELECT id FROM file where path = '" + filePath . getParent ( ) + "';" ) ; while ( rs . next ( ) ) { fileId = rs . getInt ( "id" ) ; } stmt . close ( ) ; } catch ( SQLException e ) { e . printStackTrace ( ) ; } return fileId ; } | Read file ID from the database . |
5,863 | private void insertPackedChunkValues ( PreparedStatement insertPStatement , int chunkId , int fileId , long [ ] packedValues ) throws SQLException { assert ( chunkId != - 1 ) ; insertPStatement . setInt ( 1 , chunkId ) ; insertPStatement . setInt ( 2 , fileId ) ; for ( int i = 0 ; i < 8 ; i ++ ) { insertPStatement . setLong ( i + 3 , packedValues [ i ] ) ; } insertPStatement . addBatch ( ) ; } | Insert packed values into the database using a prepared statement batch for efficiency . |
5,864 | private int getChunkId ( String chromosome , int chunk64k ) { int chunkId , pos = chunk64k * chunkSize64k + 1 ; String key = buildChunkMapKey ( chromosome , pos ) ; if ( chunkIdMap . containsKey ( key ) ) { chunkId = chunkIdMap . get ( key ) ; } else { throw new InternalError ( "Coverage chunk " + chromosome + ":" + pos + "-, not found in database (64-chunk : " + chunk64k + ")" ) ; } return chunkId ; } | Get the chunk ID from the map of chunks . |
5,865 | public Path createIndex ( ) throws IOException { Path indexPath = bamFile . getParent ( ) . resolve ( bamFile . getFileName ( ) . toString ( ) + ".bai" ) ; return createIndex ( indexPath ) ; } | Creates a index file for the BAM or CRAM input file . |
5,866 | public RegionCoverage coverage ( Region region , int windowSize ) throws IOException , AlignmentCoverageException { if ( Paths . get ( bamFile . toString ( ) + ".bw" ) . toFile ( ) . exists ( ) ) { return BamUtils . getCoverageFromBigWig ( region , windowSize , Paths . get ( bamFile . toString ( ) + ".bw" ) ) ; } else { if ( Paths . get ( bamFile . toString ( ) + COVERAGE_BIGWIG_EXTENSION ) . toFile ( ) . exists ( ) ) { return BamUtils . getCoverageFromBigWig ( region , windowSize , Paths . get ( this . bamFile . toString ( ) + COVERAGE_BIGWIG_EXTENSION ) ) ; } else { if ( windowSize == 1 ) { return coverage ( region , null , new AlignmentOptions ( ) ) ; } else { throw new AlignmentCoverageException ( "No bigwig file has been found and windowSize is > 1" ) ; } } } } | Return the coverage average given a window size from a BigWig file . This is expected to have the same name that the BAM file with . coverage . bw or . bw suffix . If no BigWig file is found and windowSize is 1 then we calculate te coverage from the BAM file . |
5,867 | public RegionCoverage coverage ( Region region , AlignmentFilters < SAMRecord > filters , AlignmentOptions options ) throws AlignmentCoverageException { if ( region . size ( ) > MAX_REGION_COVERAGE ) { throw new AlignmentCoverageException ( "Region size is bigger than MAX_REGION_COVERAGE [" + MAX_REGION_COVERAGE + "]" ) ; } RegionCoverage regionCoverage = new RegionCoverage ( region ) ; if ( options == null ) { options = new AlignmentOptions ( ) ; } SamRecordRegionCoverageCalculator calculator = new SamRecordRegionCoverageCalculator ( options . getMinBaseQuality ( ) ) ; try ( BamIterator < SAMRecord > iterator = iterator ( region , filters , options ) ) { while ( iterator . hasNext ( ) ) { SAMRecord next = iterator . next ( ) ; if ( ! next . getReadUnmappedFlag ( ) ) { calculator . update ( next , regionCoverage ) ; } } } catch ( Exception e ) { e . printStackTrace ( ) ; } return regionCoverage ; } | This method get some filters and calculate the coverage from the BAM file with the reads filtered . |
5,868 | public List < RegionCoverage > getUncoveredRegions ( Region region , int maxCoverage ) throws IOException , AlignmentCoverageException { List < RegionCoverage > uncoveredRegions = new ArrayList < > ( ) ; RegionCoverage coverageRegion = coverage ( region , 1 ) ; float [ ] coverages = new float [ region . size ( ) ] ; int i = 0 ; int pos = coverageRegion . getStart ( ) ; boolean isProcessing = false ; RegionCoverage uncoveredRegion = null ; for ( float coverage : coverageRegion . getValues ( ) ) { if ( coverage < maxCoverage ) { if ( ! isProcessing ) { uncoveredRegion = new RegionCoverage ( region . getChromosome ( ) , pos , 0 ) ; isProcessing = true ; i = 0 ; } coverages [ i ] = coverage ; i ++ ; } else { if ( isProcessing ) { uncoveredRegion . setEnd ( pos ) ; uncoveredRegion . setValues ( Arrays . copyOf ( coverages , i ) ) ; uncoveredRegions . add ( uncoveredRegion ) ; isProcessing = false ; } } pos ++ ; } if ( isProcessing ) { uncoveredRegion . setEnd ( pos - 1 ) ; uncoveredRegion . setValues ( Arrays . copyOf ( coverages , i ) ) ; uncoveredRegions . add ( uncoveredRegion ) ; } return uncoveredRegions ; } | Return a list of RegionCoverage with a coverage less than o equal to the input maximum coverage . |
5,869 | public List < Variant > create ( VariantStudyMetadata metadata , String line ) throws IllegalArgumentException , NotAVariantException { String [ ] fields = line . split ( "\t" ) ; if ( fields . length < 8 ) { throw new IllegalArgumentException ( "Not enough fields provided (min 8)" ) ; } String chromosome = fields [ 0 ] ; int position = Integer . parseInt ( fields [ 1 ] ) ; String id = fields [ 2 ] . equals ( "." ) ? null : fields [ 2 ] ; List < String > ids = id == null ? Collections . emptyList ( ) : Arrays . asList ( id . split ( ";" ) ) ; String reference = fields [ 3 ] . equals ( "." ) ? "" : fields [ 3 ] ; String alternate = fields [ 4 ] ; String [ ] alternateAlleles = alternate . split ( "," ) ; String mainAlternate = alternateAlleles [ 0 ] ; float quality = fields [ 5 ] . equals ( "." ) ? - 1 : Float . parseFloat ( fields [ 5 ] ) ; String filter = fields [ 6 ] . equals ( "." ) ? "" : fields [ 6 ] ; String info = fields [ 7 ] . equals ( "." ) ? "" : fields [ 7 ] ; String format = ( fields . length <= 8 || fields [ 8 ] . equals ( "." ) ) ? "" : fields [ 8 ] ; int end = position + reference . length ( ) - 1 ; Variant variant = new Variant ( chromosome , position , end , reference , mainAlternate ) ; List < AlternateCoordinate > secondaryAlternatesMap = Arrays . stream ( alternateAlleles , 1 , alternateAlleles . length ) . map ( a -> new AlternateCoordinate ( chromosome , null , null , null , a , null ) ) . collect ( Collectors . toList ( ) ) ; StudyEntry entry = new StudyEntry ( metadata . getId ( ) , secondaryAlternatesMap , Arrays . asList ( format . split ( ":" ) ) ) ; VariantFileMetadata fileMetadata = new VariantFileMetadata ( metadata . getFiles ( ) . get ( 0 ) ) ; entry . setFileId ( fileMetadata . getId ( ) ) ; variant . 
addStudyEntry ( entry ) ; try { parseSplitSampleData ( entry , fileMetadata , fields , reference , alternateAlleles ) ; setOtherFields ( variant , entry , fileMetadata , ids , quality , filter , info , format , alternateAlleles , line ) ; } catch ( NonStandardCompliantSampleField ex ) { Logger . getLogger ( VariantFactory . class . getName ( ) ) . log ( Level . SEVERE , String . format ( "Variant %s:%d:%s>%s will not be saved\n%s" , chromosome , position , reference , alternate , ex . getMessage ( ) ) ) ; } return Collections . singletonList ( variant ) ; } | Creates a list of Variant objects using the fields in a record of a VCF file . A new Variant object is created per allele so several of them can be created from a single line . |
5,870 | public boolean updateDefaultKeys ( String from , String to ) { if ( null == from ) { return false ; } if ( null == to ) { return false ; } if ( StringUtils . equals ( from , to ) ) { return false ; } String value = this . defaultValues . remove ( from ) ; if ( null == value ) { return false ; } this . defaultValues . put ( to , value ) ; return true ; } | Update a key |
5,871 | public Variant merge ( Variant current , Collection < Variant > load ) { isValidVariant ( current ) ; List < Pair < Variant , List < AlternateCoordinate > > > loadAlts = updateCollapseDeletions ( current , load . stream ( ) . map ( v -> new MutablePair < > ( v , buildAltList ( v ) ) ) . filter ( p -> hasAnyOverlap ( current , p . getLeft ( ) , p . getRight ( ) ) ) ) . collect ( Collectors . toList ( ) ) ; mergeVariants ( current , loadAlts ) ; return current ; } | Merge a collection of variants into one variant . |
5,872 | private List < Integer > alternatesToHash ( List < AlternateCoordinate > alternates ) { List < Integer > list = new ArrayList < > ( alternates . size ( ) ) ; for ( AlternateCoordinate a : alternates ) { int result = 1 ; result = 31 * result + a . getChromosome ( ) . hashCode ( ) ; result = 31 * result + a . getStart ( ) . hashCode ( ) ; result = 31 * result + a . getEnd ( ) . hashCode ( ) ; result = 31 * result + a . getReference ( ) . hashCode ( ) ; result = 31 * result + a . getAlternate ( ) . hashCode ( ) ; result = 31 * result + a . getType ( ) . hashCode ( ) ; list . add ( result ) ; } return list ; } | Create a list of alternates as Integers to speed up the creation of the VariantAlternateRearranger . |
5,873 | private List < Integer > collapseGT ( List < String > gtsStr ) { if ( gtsStr . isEmpty ( ) ) { return Collections . emptyList ( ) ; } if ( gtsStr . size ( ) == 1 ) { return Collections . singletonList ( 0 ) ; } List < Genotype > gts = gtsStr . stream ( ) . map ( Genotype :: new ) . collect ( Collectors . toList ( ) ) ; Predicate < Genotype > findAlts = gt -> Arrays . stream ( gt . getAllelesIdx ( ) ) . anyMatch ( i -> i > 0 ) ; Predicate < Genotype > findHomRef = gt -> Arrays . stream ( gt . getAllelesIdx ( ) ) . allMatch ( i -> i == 0 ) ; Predicate < Genotype > findOneRef = gt -> Arrays . stream ( gt . getAllelesIdx ( ) ) . anyMatch ( i -> i == 0 ) ; Predicate < Genotype > findNoCalls = gt -> Arrays . stream ( gt . getAllelesIdx ( ) ) . anyMatch ( i -> i < 0 ) ; List < Integer > oneAltAllele = getMatchingPositions ( gts , findAlts ) ; if ( ! oneAltAllele . isEmpty ( ) ) { return oneAltAllele ; } List < Integer > reference = getMatchingPositions ( gts , findHomRef ) ; if ( ! reference . isEmpty ( ) ) { return reference ; } List < Integer > oneReferenceAllele = getMatchingPositions ( gts , findOneRef ) ; if ( ! oneReferenceAllele . isEmpty ( ) ) { return oneReferenceAllele ; } List < Integer > nocalls = getMatchingPositions ( gts , findNoCalls ) ; if ( nocalls . size ( ) == gtsStr . size ( ) ) { return Collections . singletonList ( nocalls . get ( 0 ) ) ; } if ( this . collapseDeletions ) { throw new IllegalStateException ( "Not able to resolve GT: " + StringUtils . join ( gtsStr , "," ) ) ; } return IntStream . range ( 0 , gtsStr . size ( ) - 1 ) . boxed ( ) . collect ( Collectors . toList ( ) ) ; } | Collapses a list of GT to a minimal set . |
5,874 | public List < AlternateCoordinate > buildAltList ( Variant variant ) { AlternateCoordinate mainAlternate = getMainAlternate ( variant ) ; List < AlternateCoordinate > alternates = new ArrayList < > ( ) ; boolean emptyRefBlock = mainAlternate . getType ( ) . equals ( VariantType . NO_VARIATION ) && ( mainAlternate . getAlternate ( ) . isEmpty ( ) || mainAlternate . getAlternate ( ) . equals ( Allele . NO_CALL_STRING ) ) ; if ( ! emptyRefBlock ) { alternates . add ( mainAlternate ) ; } StudyEntry se = getStudy ( variant ) ; if ( se . getSecondaryAlternates ( ) != null ) { se . getSecondaryAlternates ( ) . forEach ( alt -> alternates . add ( copyAlt ( variant , alt ) ) ) ; } return alternates ; } | Build a list of all the alternates from a variant . Includes the main and the secondary alternates . |
5,875 | public static AlternateCoordinate getMainAlternate ( Variant variant ) { VariantType type ; switch ( variant . getType ( ) ) { case SNP : type = VariantType . SNV ; break ; case MNP : type = VariantType . MNV ; break ; case SV : type = VariantBuilder . inferType ( variant . getReference ( ) , variant . getAlternate ( ) ) ; break ; default : type = variant . getType ( ) ; } return new AlternateCoordinate ( variant . getChromosome ( ) , variant . getStart ( ) , variant . getEnd ( ) , variant . getReference ( ) , variant . getAlternate ( ) , type ) ; } | Get the variant as Alternate Coordinate . |
5,876 | public Boolean hasIndex ( ) { Boolean hasIndex = false ; if ( this . indexedFastaSequenceFile != null ) { hasIndex = this . indexedFastaSequenceFile . isIndexed ( ) ; } return hasIndex ; } | Checks if the set FASTA file is indexed |
5,877 | public void load ( Path path ) throws IOException { FileUtils . checkPath ( path ) ; logger . debug ( "Loading variant metadata from '{}'" , path . toAbsolutePath ( ) . toString ( ) ) ; variantMetadata = mapper . readValue ( path . toFile ( ) , VariantMetadata . class ) ; for ( VariantStudyMetadata variantStudyMetadata : variantMetadata . getStudies ( ) ) { if ( variantStudyMetadata . getIndividuals ( ) != null ) { for ( org . opencb . biodata . models . metadata . Individual individual : variantStudyMetadata . getIndividuals ( ) ) { for ( Sample sample : individual . getSamples ( ) ) { sample . getAnnotations ( ) . put ( INDIVIDUAL_ID , individual . getId ( ) ) ; sample . getAnnotations ( ) . put ( INDIVIDUAL_FAMILY , individual . getFamily ( ) ) ; sample . getAnnotations ( ) . put ( INDIVIDUAL_FATHER , individual . getFather ( ) ) ; sample . getAnnotations ( ) . put ( INDIVIDUAL_MOTHER , individual . getMother ( ) ) ; sample . getAnnotations ( ) . put ( INDIVIDUAL_SEX , individual . getSex ( ) ) ; sample . getAnnotations ( ) . put ( INDIVIDUAL_PHENOTYPE , individual . getPhenotype ( ) ) ; } } } } } | Load variant metadata file . |
5,878 | public VariantStudyMetadata getVariantStudyMetadata ( String studyId ) { if ( studyId != null ) { if ( variantMetadata . getStudies ( ) == null ) { variantMetadata . setStudies ( new ArrayList < > ( ) ) ; } for ( VariantStudyMetadata study : variantMetadata . getStudies ( ) ) { if ( studyId . equals ( study . getId ( ) ) ) { return study ; } } } else { logger . error ( "Study ID is null" ) ; } return null ; } | Retrieve the variant study metadata from its study ID . |
5,879 | public void addVariantDatasetMetadata ( VariantStudyMetadata variantStudyMetadata ) { if ( variantStudyMetadata != null ) { VariantStudyMetadata found = getVariantStudyMetadata ( variantStudyMetadata . getId ( ) ) ; if ( found == null ) { if ( variantMetadata . getStudies ( ) == null ) { variantMetadata . setStudies ( new ArrayList < > ( ) ) ; } variantMetadata . getStudies ( ) . add ( variantStudyMetadata ) ; } else { logger . error ( "Study ID already exists" ) ; } } } | Add a variant study metadata . Study ID must not exist . |
5,880 | public void printSummary ( ) { StringBuilder res = new StringBuilder ( ) ; res . append ( "Num. studies: " ) . append ( variantMetadata . getStudies ( ) . size ( ) ) . append ( "\n" ) ; int counter , studyCounter = 0 ; for ( VariantStudyMetadata study : variantMetadata . getStudies ( ) ) { studyCounter ++ ; res . append ( "\tStudy #" ) . append ( studyCounter ) . append ( ": " ) . append ( study . getId ( ) ) . append ( "\n" ) ; res . append ( "\tNum. files: " ) . append ( study . getFiles ( ) . size ( ) ) . append ( "\n" ) ; counter = 0 ; for ( VariantFileMetadata file : study . getFiles ( ) ) { counter ++ ; res . append ( "\t\tFile #" ) . append ( counter ) . append ( ": " ) . append ( file . getId ( ) ) ; res . append ( " (" ) . append ( file . getSampleIds ( ) . size ( ) ) . append ( " samples)\n" ) ; } res . append ( "\tNum. cohorts: " ) . append ( study . getCohorts ( ) . size ( ) ) . append ( "\n" ) ; counter = 0 ; for ( Cohort cohort : study . getCohorts ( ) ) { counter ++ ; res . append ( "\t\tCohort #" ) . append ( counter ) . append ( ": " ) . append ( cohort . getId ( ) ) ; res . append ( " (" ) . append ( cohort . getSampleIds ( ) . size ( ) ) . append ( " samples)\n" ) ; } } System . out . println ( res . toString ( ) ) ; } | Print to the standard output a summary of the variant metadata manager . |
5,881 | public void save ( Path filename , boolean pretty ) throws IOException { if ( filename == null || Files . exists ( filename ) ) { throw new IOException ( "File path not correct, either it is null or file already exists: " + filename ) ; } String text ; if ( pretty ) { text = mapper . writerWithDefaultPrettyPrinter ( ) . writeValueAsString ( variantMetadata ) ; } else { text = mapper . writeValueAsString ( variantMetadata ) ; } PrintWriter writer = new PrintWriter ( new FileOutputStream ( filename . toFile ( ) ) ) ; writer . write ( text ) ; writer . close ( ) ; } | Save variant metadata manager in JSON format into the given filename . |
5,882 | public List < CommentType . Link > getLink ( ) { if ( link == null ) { link = new ArrayList < CommentType . Link > ( ) ; } return this . link ; } | Gets the value of the link property . |
5,883 | protected static String getReferenceBase ( String chromosome , int from , int to , Map < Integer , Character > referenceAlleles ) { int length = to - from ; if ( length < 0 ) { throw new IllegalStateException ( "Sequence length is negative: chromosome " + chromosome + " from " + from + " to " + to ) ; } StringBuilder sb = new StringBuilder ( length ) ; for ( int i = from ; i < to ; i ++ ) { sb . append ( referenceAlleles . getOrDefault ( i , 'N' ) ) ; } return sb . toString ( ) ; } | Get bases from reference sequence . |
5,884 | public static void save ( Pedigree pedigree , Path pedigreePath ) throws IOException { final OutputStream os = new FileOutputStream ( pedigreePath . toFile ( ) ) ; if ( pedigree != null ) { writeHeader ( pedigree , os ) ; write ( pedigree , os ) ; } os . close ( ) ; } | Save a Pedigree object into a Pedigree format file . |
5,885 | public List < V > apply ( List < Variant > variants ) { List < V > gaVariants = new ArrayList < > ( variants . size ( ) ) ; for ( Variant variant : variants ) { String id = variant . toString ( ) ; List < String > variantIds = new ArrayList < > ( variant . getIds ( ) ) ; for ( StudyEntry study : variant . getStudies ( ) ) { List < String > alternates = new ArrayList < > ( study . getSecondaryAlternatesAlleles ( ) . size ( ) + 1 ) ; alternates . add ( variant . getAlternate ( ) ) ; alternates . addAll ( study . getSecondaryAlternatesAlleles ( ) ) ; Long time = 0L ; String variantSetId = study . getStudyId ( ) ; Map < String , List < String > > fileInfo = parseInfo ( study . getFiles ( ) ) ; List calls = parseCalls ( null , study ) ; Long start = Long . valueOf ( to0BasedStart ( variant . getStart ( ) ) ) ; Long end = Long . valueOf ( variant . getEnd ( ) ) ; @ SuppressWarnings ( "unchecked" ) V ga = ( V ) factory . newVariant ( id , variantSetId , variantIds , time , time , variant . getChromosome ( ) , start , end , variant . getReference ( ) , alternates , fileInfo , calls ) ; gaVariants . add ( ga ) ; } } return gaVariants ; } | Given a list of variants creates the equivalent set using the GA4GH API . |
5,886 | public static VariantContextWriter createVariantContextWriter ( OutputStream outputStream , SAMSequenceDictionary sequenceDictionary , Options ... options ) { VariantContextWriterBuilder builder = new VariantContextWriterBuilder ( ) . setOutputStream ( outputStream ) . setReferenceDictionary ( sequenceDictionary ) . setOption ( Options . WRITE_FULL_FORMAT_FIELD ) . unsetOption ( Options . INDEX_ON_THE_FLY ) ; if ( options != null ) { for ( Options option : options ) { builder . setOption ( option ) ; } } return builder . build ( ) ; } | Create a VariantContextWriter . |
5,887 | public static RegionCoverage getCoverageFromBigWig ( Region region , int windowSize , Path bigwigPath ) throws IOException { FileUtils . checkFile ( bigwigPath ) ; BigWigManager bigWigManager = new BigWigManager ( bigwigPath ) ; float [ ] avgCoverage = bigWigManager . groupBy ( region , windowSize ) ; return new RegionCoverage ( region , windowSize , avgCoverage ) ; } | Return the coverage average given a window size from the BigWig file passed . |
5,888 | public static void printWigFormatCoverage ( RegionCoverage regionCoverage , int span , boolean header , PrintWriter writer ) { if ( span < 1 ) { span = 1 ; } if ( header ) { writer . println ( "fixedStep chrom=" + regionCoverage . getChromosome ( ) + " start=1 step=1 span=" + span ) ; } float [ ] values = regionCoverage . getValues ( ) ; if ( span == 1 ) { for ( int i = 0 ; i < values . length ; i ++ ) { writer . println ( values [ i ] ) ; } } else { int counter = 0 ; int sum = 0 ; for ( int i = 0 ; i < values . length ; i ++ ) { counter ++ ; sum += values [ i ] ; if ( counter == span ) { writer . println ( sum / counter ) ; counter = 0 ; sum = 0 ; } } if ( counter > 0 ) { writer . println ( sum / counter ) ; } } } | Write in wig file format the coverage for the region given . It uses fixedStep with step equals to 1 . |
5,889 | public List < MethodType . MethodAttribute > getMethodAttribute ( ) { if ( methodAttribute == null ) { methodAttribute = new ArrayList < MethodType . MethodAttribute > ( ) ; } return this . methodAttribute ; } | Gets the value of the methodAttribute property . |
5,890 | public List < MethodType . ObsMethodAttribute > getObsMethodAttribute ( ) { if ( obsMethodAttribute == null ) { obsMethodAttribute = new ArrayList < MethodType . ObsMethodAttribute > ( ) ; } return this . obsMethodAttribute ; } | Gets the value of the obsMethodAttribute property . |
5,891 | protected void parseStats ( Variant variant , StudyEntry file , int numAllele , String reference , String [ ] alternateAlleles , Map < String , String > info ) { VariantStats vs = new VariantStats ( ) ; Map < String , String > stats = new LinkedHashMap < > ( ) ; for ( Map . Entry < String , String > entry : info . entrySet ( ) ) { String infoTag = entry . getKey ( ) ; String infoValue = entry . getValue ( ) ; if ( statsTags . contains ( infoTag ) ) { stats . put ( infoTag , infoValue ) ; } } calculate ( variant , file , numAllele , reference , alternateAlleles , stats , vs ) ; file . setStats ( StudyEntry . DEFAULT_COHORT , vs ) ; } | Looks for tags contained in statsTags and calculates stats parsing them . |
5,892 | public float [ ] query ( Region region ) throws IOException { BigWigIterator bigWigIterator = iterator ( region ) ; float [ ] values = new float [ region . getEnd ( ) - region . getStart ( ) + 1 ] ; while ( bigWigIterator . hasNext ( ) ) { WigItem wigItem = bigWigIterator . next ( ) ; for ( int i = wigItem . getStartBase ( ) , j = wigItem . getStartBase ( ) - region . getStart ( ) ; i <= wigItem . getEndBase ( ) ; i ++ , j ++ ) { values [ j ] = wigItem . getWigValue ( ) ; } } return values ; } | Query by a given region . |
5,893 | public BigWigIterator iterator ( Region region ) { return bbFileReader . getBigWigIterator ( region . getChromosome ( ) , region . getStart ( ) , region . getChromosome ( ) , region . getEnd ( ) , false ) ; } | Get the iterator for the given region . |
5,894 | public Path index ( Path bigwigPath ) throws Exception { ChunkFrequencyManager chunkFrequencyManager = new ChunkFrequencyManager ( indexPath ) ; int chunkSize = chunkFrequencyManager . getChunkSize ( ) ; int prevChunk = 0 , startChunk , endChunk , partial ; String currChrom , prevChrom = null ; List < Integer > values = new ArrayList < > ( ) ; BigWigIterator bwIterator = bbFileReader . getBigWigIterator ( ) ; while ( bwIterator . hasNext ( ) ) { WigItem wigItem = bwIterator . next ( ) ; currChrom = wigItem . getChromosome ( ) ; startChunk = wigItem . getStartBase ( ) / chunkSize ; endChunk = wigItem . getEndBase ( ) / chunkSize ; if ( prevChrom != currChrom ) { if ( values . size ( ) > 0 ) { WigUtils . computeAndSaveMeanValues ( values , bigwigPath , prevChrom , chunkSize , chunkFrequencyManager ) ; } currChrom = prevChrom ; } if ( prevChunk != startChunk ) { for ( int chunk = prevChunk ; chunk < startChunk ; chunk ++ ) { values . add ( 0 ) ; } } for ( int chunk = startChunk , pos = startChunk * chunkSize ; chunk <= endChunk ; chunk ++ , pos += chunkSize ) { partial = Math . min ( wigItem . getEndBase ( ) , pos + chunkSize ) - Math . max ( wigItem . getStartBase ( ) , pos ) ; values . add ( ( int ) ( partial * wigItem . getWigValue ( ) ) ) ; } prevChunk = endChunk ; } if ( values . size ( ) > 0 ) { WigUtils . computeAndSaveMeanValues ( values , bigwigPath , prevChrom , chunkSize , chunkFrequencyManager ) ; } return indexPath ; } | Index the entire Big Wig file content in a SQLite database . |
5,895 | public Path createIndex ( Path indexPath ) throws IOException { VCFFileReader reader = new VCFFileReader ( input . toFile ( ) , false ) ; if ( ! dataPath . getFileName ( ) . endsWith ( ".gz" ) ) { dataPath = Paths . get ( input + ".gz" ) ; System . out . println ( "Creating compressed file: " + dataPath ) ; VariantContextWriter writer = new VariantContextWriterBuilder ( ) . setOutputFile ( dataPath . toFile ( ) ) . setOutputFileType ( BLOCK_COMPRESSED_VCF ) . build ( ) ; writer . writeHeader ( reader . getFileHeader ( ) ) ; for ( VariantContext vc : reader ) { if ( vc != null ) { writer . add ( vc ) ; } } writer . close ( ) ; } if ( indexPath == null ) { this . indexPath = Paths . get ( dataPath + ".tbi" ) ; } else { this . indexPath = indexPath ; } System . out . println ( "Creating index file: " + this . indexPath ) ; IndexFactory . createTabixIndex ( dataPath . toFile ( ) , new VCFCodec ( ) , TabixFormat . VCF , reader . getFileHeader ( ) . getSequenceDictionary ( ) ) . write ( this . indexPath . toFile ( ) ) ; reader . close ( ) ; return this . indexPath ; } | Creates a VCF index file . |
5,896 | public List < VariantContext > query ( Region region ) throws Exception { return query ( region , null , new VariantOptions ( ) , VariantContext . class ) ; } | This method aims to provide a very simple safe and quick way of accessing to a small fragment of the VCF file . This must not be used in production for reading big data files . It returns a maximum of 10 000 variant records . |
5,897 | public List < SequenceListType . Sequence > getSequence ( ) { if ( sequence == null ) { sequence = new ArrayList < SequenceListType . Sequence > ( ) ; } return this . sequence ; } | Gets the value of the sequence property . |
5,898 | public static DiseasePanel load ( InputStream diseasePanelInputStream ) throws IOException { ObjectMapper objectMapper = new ObjectMapper ( ) ; return objectMapper . readValue ( diseasePanelInputStream , DiseasePanel . class ) ; } | Static method to load and parse a JSON string from an InputStream . |
5,899 | public void export ( Iterator < Variant > iterator ) { if ( writer == null ) { logger . error ( "Error exporting VCF data: exporter must be opened" ) ; return ; } while ( iterator . hasNext ( ) ) { try { VariantContext variantContext = variantConverter . convert ( iterator . next ( ) ) ; writer . add ( variantContext ) ; } catch ( Exception e ) { logger . error ( "Error exporting VCF data: {}" , e . getMessage ( ) , e ) ; } } } | Export variants in VCF file format from a variant iterator . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.