/**
 * Run the analysis: collect statistics on every read in the given stream,
 * then finalize the accumulated statistics.
 *
 * @param stream the stream of reads to analyze
 */
public void analyze( Iterable<SAMRecord> stream ) {
    for ( final SAMRecord currentRead : stream ) {
        update(currentRead);
    }
    finalizeStats();
}
/**
 * Construct a new read stream analyzer, providing an ArtificialSingleSampleReadStream that will
 * serve as the basis for comparison after the analysis is complete.
 *
 * @param originalStream the original ArtificialSingleSampleReadStream upon which the stream
 *                       that will be fed to the analyzer is based
 */
public ArtificialSingleSampleReadStreamAnalyzer( ArtificialSingleSampleReadStream originalStream ) {
    this.originalStream = originalStream;
    // Initialize all accumulated statistics to their empty-stream starting values
    reset();
}
public void run() { GATKSAMIterator downsamplingIter = new PerSampleDownsamplingReadsIterator(mergedReadStream.getGATKSAMIterator(), downsamplerFactory); if ( verifySortedness ) { downsamplingIter = new VerifyingSamIterator(downsamplingIter); } while ( downsamplingIter.hasNext() ) { SAMRecord read = downsamplingIter.next(); String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null; ArtificialSingleSampleReadStreamAnalyzer analyzer = perSampleStreamAnalyzers.get(sampleName); if ( analyzer != null ) { analyzer.update(read); } else { throw new ReviewedGATKException("bug: stream analyzer for sample " + sampleName + " not found"); } } for ( Map.Entry<String, ArtificialSingleSampleReadStreamAnalyzer> analyzerEntry : perSampleStreamAnalyzers.entrySet() ) { ArtificialSingleSampleReadStreamAnalyzer analyzer = analyzerEntry.getValue(); analyzer.finalizeStats(); // Validate the downsampled read stream for each sample individually analyzer.validate(); } // Allow memory used by this test to be reclaimed: mergedReadStream = null; perSampleArtificialReadStreams = null; perSampleStreamAnalyzers = null; } }
public void run() { streamAnalyzer= new ArtificialSingleSampleReadStreamAnalyzer(stream); streamAnalyzer.analyze(stream); // Check whether the observed properties of the stream match its nominal properties streamAnalyzer.validate(); } }
/**
 * Incorporate a single read into the running stream statistics, tracking
 * transitions between contigs and between stacks (runs of reads sharing an
 * alignment start) within a contig.
 *
 * @param read the next read in the stream; assumed to arrive in sorted order
 *             with unmapped reads last — TODO confirm against callers
 */
public void update( SAMRecord read ) {
    if ( read.getReadUnmappedFlag() ) {
        numUnmappedReads++;

        // First unmapped read following mapped reads: close out the final mapped
        // contig's stats. NOTE(review): presumably processContigChange() also
        // increments numContigs for a "new" contig; since unmapped reads have no
        // contig, that increment is undone here — confirm against processContigChange()
        if ( numUnmappedReads == 1 && lastRead != null ) {
            processContigChange();
            numContigs--;
        }
    }
    else if ( lastRead == null ) {
        // Very first (mapped) read of the stream: begin the first contig and stack
        numContigs = 1;
        currentContigNumStacks = 1;
        currentStackNumReads = 1;
    }
    else if ( ! read.getReferenceIndex().equals(lastRead.getReferenceIndex()) ) {
        // Read is on a different contig than the previous read
        processContigChange();
    }
    else if ( read.getAlignmentStart() != lastRead.getAlignmentStart() ) {
        // Same contig, new alignment start: previous stack is complete
        processStackChangeWithinContig(read);
    }
    else {
        // Same contig and same start position: read joins the current stack
        currentStackNumReads++;
    }

    // Per-read bookkeeping that applies to mapped and unmapped reads alike
    updateReadLength(read.getReadLength());
    allSamplesMatch = allSamplesMatch && readHasCorrectSample(read);
    totalReads++;
    lastRead = read;
}
public void run() { streamAnalyzer = new PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer(stream, targetCoverage); downsamplingIter = new DownsamplingReadsIterator(stream.getGATKSAMIterator(), new SimplePositionalDownsampler<SAMRecord>(targetCoverage)); streamAnalyzer.analyze(downsamplingIter); // Check whether the observed properties of the downsampled stream are what they should be streamAnalyzer.validate(); // Allow memory used by this test to be reclaimed stream = null; streamAnalyzer = null; downsamplingIter = null; } }
/**
 * Close out the current stack and begin a new one at this read's alignment
 * start, staying on the same contig.
 *
 * @param read the first read of the new stack
 */
private void processStackChangeWithinContig( SAMRecord read ) {
    // Gap between the previous stack's start and this read's start; computed
    // up front (lastRead is not modified by this method)
    final int gapToPreviousStack = read.getAlignmentStart() - lastRead.getAlignmentStart();

    currentContigNumStacks++;
    // The just-finished stack's size feeds the reads-per-stack statistics
    updateReadsPerStack(currentStackNumReads);
    currentStackNumReads = 1;
    updateDistanceBetweenStacks(gapToPreviousStack);
}