// Constructs the sharder over the scheduler's FilePointer stream, wrapping it in a
// PeekableIterator so downstream code can look ahead without consuming pointers.
// NOTE(review): the trailing "public void close() {" header is truncated in this view —
// its body lies outside this fragment; do not edit it here.
private IntervalSharder(final BAMScheduler scheduler, final GenomeLocParser parser) { wrappedIterator = new PeekableIterator<FilePointer>(scheduler); this.parser = parser; } public void close() {
/**
 * Pulls the next run of reads off the iterator whose first MIN_IDENTICAL_BASES bases
 * (on both reads of the pair) match those of the run's first read; only reads within
 * such a run can be compared to each other to identify duplicates.
 */
List<PairedReadSequence> getNextGroup(final PeekableIterator<PairedReadSequence> iterator) {
    final List<PairedReadSequence> group = new ArrayList<PairedReadSequence>();
    final PairedReadSequence head = iterator.next();
    group.add(head);

    // Keep pulling reads while they share the identical leading bases with the head read.
    while (iterator.hasNext() && sharesIdenticalPrefix(head, iterator.peek())) {
        group.add(iterator.next());
    }
    return group;
}

/** Returns true iff both reads of the two pairs agree on their first MIN_IDENTICAL_BASES bases. */
private boolean sharesIdenticalPrefix(final PairedReadSequence lhs, final PairedReadSequence rhs) {
    for (int i = 0; i < MIN_IDENTICAL_BASES; ++i) {
        if (lhs.read1[i] != rhs.read1[i] || lhs.read2[i] != rhs.read2[i]) {
            return false;
        }
    }
    return true;
}
/** Clean up underlying BAMSchedule file handles. */
public void close() {
    if (bamScheduleIterator == null) {
        return;
    }
    bamScheduleIterator.close();
}
/**
 * Resets the SAM reader position to its original state by rebuilding the chunk
 * iterator from the stored positions. An empty position list is signalled by a
 * next-block address of -1.
 */
private void initialize() {
    this.positionIterator = new PeekableIterator<GATKChunk>(positions.iterator());
    nextBlockAddress = positionIterator.hasNext() ? positionIterator.peek().getBlockStart() : -1;
}
// Fragment (start and end truncated in this view): closes the previous per-contig
// schedule iterator, rebuilds it for the new contig's loci, and fast-forwards past
// schedule entries that lie entirely before the current locus.
// NOTE(review): assumes bamScheduleIterator / currentContigIndex / dataSource /
// currentLocus are enclosing-class fields — surrounding code not visible here.
bamScheduleIterator.close(); lastReferenceSequenceLoaded = currentContigIndex; bamScheduleIterator = new PeekableIterator<BAMScheduleEntry>(new BAMSchedule(dataSource,lociInContig)); if(!bamScheduleIterator.hasNext()) return null; BAMScheduleEntry bamScheduleEntry = bamScheduleIterator.peek(); while(bamScheduleEntry != null && bamScheduleEntry.isBefore(currentLocus)) { bamScheduleIterator.next(); bamScheduleEntry = bamScheduleIterator.hasNext() ? bamScheduleIterator.peek() : null;
// Fragment (unbalanced braces; enclosing method truncated in this view): when the
// current contig's read iterator is exhausted, close it, open a fresh iterator over
// the shard's reads, and — if any reads exist — fill the shard and publish it as next.
if ( currentContigReadsIterator != null && ! currentContigReadsIterator.hasNext() ) { currentContigReadsIterator.close(); currentContigReadsIterator = new PeekableIterator<SAMRecord>(readsDataSource.getIterator(shard)); if ( currentContigReadsIterator.hasNext() ) { shard.fill(currentContigReadsIterator); nextShard = shard;
/** * The consumer has asked for a bounded set of locations. Prepare an iterator over those locations. * @param loci The list of locations to search and iterate over. */ private void populateFilteredIntervalList(final GenomeLocSortedSet loci) { this.loci = loci; if(!indexFiles.isEmpty()) { // If index data is available, start up the iterator. locusIterator = new PeekableIterator<GenomeLoc>(loci.iterator()); if(locusIterator.hasNext()) currentLocus = locusIterator.next(); advance(); } else { // Otherwise, seed the iterator with a single file pointer over the entire region. nextFilePointer = generatePointerOverEntireFileset(); for(GenomeLoc locus: loci) nextFilePointer.addLocation(locus); locusIterator = new PeekableIterator<GenomeLoc>(Collections.<GenomeLoc>emptyList().iterator()); } }
/** Returns true if there is another record available post-downsampling, false otherwise. */ @Override public boolean hasNext() { // The underlying iterator is always left at the next return-able read, so if it has a next read, so do we return this.underlyingIterator.hasNext(); }
/**
 * Adds all intervals that overlap the current reference locus to the intervalMap.
 * Relies on peek() returning null once the interval iterator is exhausted.
 *
 * @param refLocus the current reference locus
 */
private void addNewOverlappingIntervals(final GenomeLoc refLocus) {
    for (GenomeLoc interval = intervalListIterator.peek();
         interval != null && !interval.isPast(refLocus);
         interval = intervalListIterator.peek()) {
        intervalMap.put(interval, createIntervalStatistic(interval));
        intervalListIterator.next();  // consume the interval we just recorded
    }
}
/**
 * Produces the next window iterator, consuming one interval from the interval
 * iterator when one exists; otherwise builds a window over a null interval.
 */
public WindowMakerIterator next() {
    shardGenerated = true;
    return new WindowMakerIterator(intervalIterator == null ? null : intervalIterator.next());
}
/** True while aligned-read traversal is unfinished and the SAM iterator still has a record to peek. */
private boolean samHasMore() {
    if (finishedAlignedReads) {
        return false;
    }
    return samIterator.peek() != null;
}
@Test public void testToList() { final List<Integer> expected = Arrays.asList(1,2,3,4,5); final PeekableIterator<Integer> peeky = new PeekableIterator<>(expected.iterator()); final List<Integer> actual = peeky.toList(); Assert.assertEquals(actual, expected); Assert.assertEquals(peeky.toList(), new ArrayList<>()); // Should be empty the second time }
@Test
public void testToStream() {
    final List<Integer> inputs = Arrays.asList(1, 2, 3, 4, 5);
    final PeekableIterator<Integer> peeky = new PeekableIterator<>(inputs.iterator());

    // Mapping through the iterator's stream must match mapping the source list directly.
    final List<Integer> doubledViaIterator = peeky.stream().map(i -> i * 2).collect(Collectors.toList());
    final List<Integer> doubledViaList = inputs.stream().map(i -> i * 2).collect(Collectors.toList());
    Assert.assertEquals(doubledViaIterator, doubledViaList);
}
}
/** Returns the next object and advances the iterator. */
@Override
public Object next() {
    // Capture the buffered element before advance() overwrites it.
    final Object current = this.nextObject;
    advance();
    return current;
}
// Fragment (loop body truncated in this view): wraps the reader in a peekable
// iterator and, in the non-paired case, walks records two at a time before closing
// the iterator and reader and returning the accumulated count.
// NOTE(review): the statements between `second = it.next();` and `it.close();`
// are missing from this fragment — do not restructure here.
final PeekableIterator<SAMRecord> it = new PeekableIterator<SAMRecord>(reader.iterator()); if (!this.pairedReads) { while (it.hasNext()) { final SAMRecord first = it.next(); final SAMRecord second = it.next(); it.close(); CloserUtil.close(reader); return count;
// Fragment (garbled extraction — note the stray `);` and the truncated while body):
// iterates the sorted PairedReadSequence stream, then closes the iterator and
// cleans up the sorter's temporary files. The loop body between hasNext() and
// close() is missing from this view; leave the code untouched.
final PeekableIterator<PairedReadSequence> iterator = new PeekableIterator<PairedReadSequence>(sorter.iterator()); ); while (iterator.hasNext()) { iterator.close(); sorter.cleanup();
/** Returns true if there is another record available post-downsampling, false otherwise. */ @Override public boolean hasNext() { // The underlying iterator is always left at the next return-able read, so if it has a next read, so do we return this.underlyingIterator.hasNext(); }
/**
 * Process all remaining intervals once traversal is complete: first flush the
 * statistics still held in intervalMap, then report every interval the iterator
 * never reached as uncovered, and finally close the missing-targets output if open.
 *
 * @param result number of loci processed by the walker
 */
@Override
public void onTraversalDone(final Long result) {
    // Phase 1: emit stats for every interval still being tracked.
    for (final GenomeLoc tracked : intervalMap.keySet()) {
        outputStatsToVCF(intervalMap.get(tracked), UNCOVERED_ALLELE);
    }

    // Phase 2: intervals never visited by traversal are reported as uncovered.
    for (GenomeLoc pending = intervalListIterator.peek();
         pending != null;
         pending = intervalListIterator.peek()) {
        outputStatsToVCF(createIntervalStatistic(pending), UNCOVERED_ALLELE);
        intervalListIterator.next();
    }

    if (thresholds.missingTargets != null) {
        thresholds.missingTargets.close();
    }
}
public Shard next() { FilePointer current = filePointers.next(); // FilePointers have already been combined as necessary at the IntervalSharder level. No // need to do so again here. return new LocusShard(parser,readsDataSource,current.getLocations(),current.fileSpans); }