/**
 * Wraps the scheduler in a PeekableIterator so the next FilePointer can be examined
 * before it is consumed; the parser is retained for later locus construction.
 * NOTE(review): the trailing "public void close() {" is the start of the next method,
 * truncated in this view.
 */
private IntervalSharder(final BAMScheduler scheduler, final GenomeLocParser parser) { wrappedIterator = new PeekableIterator<FilePointer>(scheduler); this.parser = parser; } public void close() {
/**
 * Captures the reads data source, wraps the incoming FilePointer iterator so it can be
 * peeked without consuming, and retains the parser for locus construction.
 * NOTE(review): the trailing "public void close() {" is the start of the next method,
 * truncated in this view.
 */
public void initialize(final SAMDataSource readsDataSource, final Iterator<FilePointer> filePointers, final GenomeLocParser parser) { this.readsDataSource = readsDataSource; this.filePointers = new PeekableIterator<FilePointer>(filePointers); this.parser = parser; } public void close() {
/**
 * Sets up paired iteration over two variant streams: both sources are made peekable,
 * the per-side sample names are recorded, and a comparator built from the sequence
 * dictionary defines the contig ordering used when stepping the two sides in tandem.
 *
 * @param leftIterator  variant source for the left-hand side
 * @param leftSample    sample name associated with the left-hand contexts
 * @param rightIterator variant source for the right-hand side
 * @param rightSample   sample name associated with the right-hand contexts
 * @param dict          sequence dictionary defining the sort order
 */
public PairedVariantSubContextIterator(final Iterator<VariantContext> leftIterator, final String leftSample,
                                       final Iterator<VariantContext> rightIterator, final String rightSample,
                                       final SAMSequenceDictionary dict) {
    this.leftIterator  = new PeekableIterator<>(leftIterator);
    this.rightIterator = new PeekableIterator<>(rightIterator);
    this.leftSample    = leftSample;
    this.rightSample   = rightSample;
    this.comparator    = new VariantContextComparator(dict);
}
/**
 * For testing: builds a state whose ROD stream reads directly from the supplied
 * iterator; the dataSource and iterator fields are deliberately left unset (null).
 *
 * @param name     name reported by the overlapping-RODs stream
 * @param iterator already-peekable source of ROD record lists
 */
public RMDDataState(final String name, final PeekableIterator<RODRecordList> iterator) {
    this.dataSource = null;
    this.iterator = null;
    // The argument is already a PeekableIterator; pass it through directly instead of
    // wrapping it in a second, redundant PeekableIterator layer as before.
    this.stream = new IntervalOverlappingRODsFromStream(name, iterator);
}
/**
 * Prepares lock-step iteration over a left and a right variant stream. Each source is
 * wrapped for peeking, the sample name for each side is stored, and the supplied
 * sequence dictionary drives a VariantContextComparator used to order the two sides.
 *
 * @param leftIterator  left-side variant stream
 * @param leftSample    sample to pull from left-side contexts
 * @param rightIterator right-side variant stream
 * @param rightSample   sample to pull from right-side contexts
 * @param dict          dictionary defining contig order
 */
public PairedVariantSubContextIterator(final Iterator<VariantContext> leftIterator, final String leftSample,
                                       final Iterator<VariantContext> rightIterator, final String rightSample,
                                       final SAMSequenceDictionary dict) {
    this.leftSample = leftSample;
    this.rightSample = rightSample;
    this.leftIterator = new PeekableIterator<>(leftIterator);
    this.rightIterator = new PeekableIterator<>(rightIterator);
    this.comparator = new VariantContextComparator(dict);
}
public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) { this.dataSource = dataSource; this.iterator = iterator; this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator<>(iterator)); }
/**
 * Constructs a filtering view over a SAM record iterator: the backing iterator is made
 * peekable, the filter is stored, and the first candidate record is pre-fetched via
 * getNextRecord().
 *
 * @param iterator the backing iterator of SAM records
 * @param filter   the filter to apply (which may be a FilterAggregator)
 */
public FilteringSamIterator(final Iterator<SAMRecord> iterator, final SamRecordFilter filter) {
    this.filter = filter;
    this.iterator = new PeekableIterator<SAMRecord>(iterator);
    next = getNextRecord();
}
/**
 * Wraps the given record iterator for filtered iteration. The source becomes peekable,
 * the filter (possibly a FilterAggregator) is retained, and getNextRecord() primes the
 * first record to return.
 *
 * @param iterator backing SAM record iterator
 * @param filter   record filter to apply
 */
public FilteringSamIterator(final Iterator<SAMRecord> iterator, final SamRecordFilter filter) {
    this.iterator = new PeekableIterator<SAMRecord>(iterator);
    this.filter = filter;
    this.next = getNextRecord();
}
/**
 * Creates a filtering iterator over SAM records: stores a peekable view of the source
 * and the filter (which may be a FilterAggregator), then pre-loads the first record
 * through getNextRecord().
 *
 * @param iterator underlying SAM record iterator
 * @param filter   filter deciding which records pass through
 */
public FilteringIterator(final Iterator<SAMRecord> iterator, final SamRecordFilter filter) {
    this.filter = filter;
    this.iterator = new PeekableIterator<SAMRecord>(iterator);
    next = getNextRecord();
}
/**
 * Handles the case where the consumer supplied no intervals (null, meaning "iterate
 * over all available data"): installs an empty sorted locus set, an empty peekable
 * locus iterator, and a single pre-built FilePointer spanning the entire fileset.
 */
private void populateUnfilteredIntervalList(final GenomeLocParser parser) {
    this.loci = new GenomeLocSortedSet(parser);
    // No per-locus filtering: back the locus iterator with an empty iterator.
    locusIterator = new PeekableIterator<GenomeLoc>(Collections.<GenomeLoc>emptyIterator());
    nextFilePointer = generatePointerOverEntireFileset();
}
/**
 * Constructs a downsampling iterator upon the supplied iterator. Despite the older
 * wording mentioning Random, no java.util.Random is involved here: each record's fate
 * is decided by a seeded Murmur3 hash (this.hasher), so the decision is deterministic
 * for a given seed. maxHashValue is the acceptance threshold: MIN_VALUE plus the
 * proportion of the full 32-bit hash range.
 * NOTE(review): for proportion == 1.0 the (int) cast of the rounded range wraps to -1
 * and MIN_VALUE + (-1) wraps to MAX_VALUE — accepting everything, which appears to be
 * the intended result, but relies on int overflow; confirm before touching.
 */
/** Constructs a downsampling iterator upon the supplied iterator, using the Random as the source of randomness. */ ConstantMemoryDownsamplingIterator(final Iterator<SAMRecord> iterator, final double proportion, final int seed) { super(proportion); this.hasher = new Murmur3(seed); this.underlyingIterator = new PeekableIterator<SAMRecord>(iterator); final long range = (long) Integer.MAX_VALUE - (long) Integer.MIN_VALUE; this.maxHashValue = Integer.MIN_VALUE + (int) Math.round(range * proportion); advanceToNextAcceptedRead(); }
/**
 * Constructs a downsampling iterator over the supplied reads. The "Random" in the
 * original wording is stale: acceptance is decided by a seeded Murmur3 hash of each
 * record, making results reproducible for a fixed seed. The threshold maxHashValue
 * maps the requested proportion onto the signed 32-bit hash range starting at
 * Integer.MIN_VALUE, then advanceToNextAcceptedRead() primes the first survivor.
 * NOTE(review): at proportion == 1.0 the int cast/addition wraps around to
 * Integer.MAX_VALUE (accept-all) — correct-looking but overflow-dependent; verify.
 */
/** Constructs a downsampling iterator upon the supplied iterator, using the Random as the source of randomness. */ ConstantMemoryDownsamplingIterator(final Iterator<SAMRecord> iterator, final double proportion, final int seed) { super(proportion); this.hasher = new Murmur3(seed); this.underlyingIterator = new PeekableIterator<SAMRecord>(iterator); final long range = (long) Integer.MAX_VALUE - (long) Integer.MIN_VALUE; this.maxHashValue = Integer.MIN_VALUE + (int) Math.round(range * proportion); advanceToNextAcceptedRead(); }
/**
 * Builds per-sample read state management over the given read stream.
 *
 * @param source               iterator supplying the reads to manage
 * @param samples              sample names; per-sample managers are created in this order
 * @param LIBSDownsamplingInfo downsampling configuration shared by all per-sample managers
 * @param keepSubmittedReads   whether every submitted read is also retained in submittedReads
 */
public ReadStateManager(final Iterator<GATKSAMRecord> source, final List<String> samples, final LIBSDownsamplingInfo LIBSDownsamplingInfo, final boolean keepSubmittedReads) {
    this.samples = samples;
    this.keepSubmittedReads = keepSubmittedReads;
    this.iterator = new PeekableIterator<GATKSAMRecord>(source);
    this.submittedReads = new LinkedList<GATKSAMRecord>();
    // readStatesBySample is a linked hash map, so iteration later follows sample order.
    for (final String sampleName : samples) {
        readStatesBySample.put(sampleName, new PerSampleReadStateManager(LIBSDownsamplingInfo));
    }
    samplePartitioner = new SamplePartitioner<GATKSAMRecord>(LIBSDownsamplingInfo, samples);
}
/**
 * Resets the SAM reader position to its original state: rebuilds the peekable chunk
 * iterator from positions and primes nextBlockAddress with the first chunk's block
 * start, or -1 when there are no chunks.
 */
private void initialize() {
    this.positionIterator = new PeekableIterator<GATKChunk>(positions.iterator());
    nextBlockAddress = positionIterator.hasNext() ? positionIterator.peek().getBlockStart() : -1;
}
/**
 * Builds a window maker over a shard's reads: a locus-by-locus iterator (LIBS) is
 * layered over the shard's read stream using the shard's own downsampling and
 * deletion/unique-read settings, and both the resulting AlignmentContext stream and the
 * (optional) interval list are wrapped for peeking.
 *
 * @param shard           shard whose read properties drive LIBS configuration
 * @param genomeLocParser parser used by LIBS for locus construction
 * @param iterator        source of reads for this shard
 * @param intervals       intervals to window over; an empty list means no interval iterator
 * @param sampleNames     samples to track in the locus iterator
 */
public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, GATKSAMIterator iterator, List<GenomeLoc> intervals, Collection<String> sampleNames) {
    this.sourceInfo = shard.getReadProperties();
    this.readIterator = new GATKSAMRecordIterator(iterator);
    this.libs = new LocusIteratorByState(readIterator, sourceInfo.getDownsamplingMethod(), sourceInfo.includeReadsWithDeletionAtLoci(), sourceInfo.keepUniqueReadListInLIBS(), genomeLocParser, sampleNames);
    this.sourceIterator = new PeekableIterator<AlignmentContext>(libs);
    // Idiomatic emptiness check (was intervals.size() > 0).
    this.intervalIterator = !intervals.isEmpty() ? new PeekableIterator<GenomeLoc>(intervals.iterator()) : null;
}
public MergingPileupElementIterator(PerSamplePileupElementTracker<PE> tracker) { perSampleIterators = new PriorityQueue<PeekableIterator<PE>>(Math.max(1,tracker.getSamples().size()),new PileupElementIteratorComparator()); for(final String sample: tracker.getSamples()) { PileupElementTracker<PE> trackerPerSample = tracker.getElements(sample); if(trackerPerSample.size() != 0) perSampleIterators.add(new PeekableIterator<PE>(trackerPerSample.iterator())); } }
/** stream() over a PeekableIterator must yield the same elements as streaming the source list. */
@Test
public void testToStream() {
    final List<Integer> inputs = Arrays.asList(1, 2, 3, 4, 5);
    final PeekableIterator<Integer> peeky = new PeekableIterator<>(inputs.iterator());
    final List<Integer> doubledInputs = inputs.stream().map(i -> i * 2).collect(Collectors.toList());
    final List<Integer> doubledFromPeeky = peeky.stream().map(i -> i * 2).collect(Collectors.toList());
    Assert.assertEquals(doubledFromPeeky, doubledInputs);
}
}
@Test public void testToList() { final List<Integer> expected = Arrays.asList(1,2,3,4,5); final PeekableIterator<Integer> peeky = new PeekableIterator<>(expected.iterator()); final List<Integer> actual = peeky.toList(); Assert.assertEquals(actual, expected); Assert.assertEquals(peeky.toList(), new ArrayList<>()); // Should be empty the second time }
/** AssertingIterator must raise IllegalStateException when records violate coordinate order. */
@Test(expectedExceptions = IllegalStateException.class)
public void testAssertingIteratorCorrectlyFailsWhenOutOfOrder() {
    final List<SAMRecord> outOfOrder = Arrays.asList(createRecord(10, 1), createRecord(1, 1));
    final SamReader.AssertingIterator checkedIterator =
            new SamReader.AssertingIterator(new PeekableIterator<>(outOfOrder.iterator()));
    checkedIterator.assertSorted(SAMFileHeader.SortOrder.coordinate);
    while (checkedIterator.hasNext()) {
        checkedIterator.next(); // expected to throw on the second, out-of-order record
    }
}
/**
 * Walker start-up. Rejects runs without explicit -L intervals (pointing the user to
 * DepthOfCoverage for whole-genome work), then in order: allocates the interval map,
 * wraps the toolkit's intervals in a peekable iterator, collects the sample names from
 * the SAM header, writes the VCF header (one entry per sample), and pre-loads all
 * statistics plugins — done once here because plugin loading is costly on the JVM.
 * NOTE(review): statement order matters; vcfWriter and thresholds are fields
 * initialized elsewhere in this class.
 */
@Override public void initialize() { super.initialize(); if (getToolkit().getIntervals() == null || getToolkit().getIntervals().isEmpty()) throw new UserException("This tool only works if you provide one or more intervals (use the -L argument). If you want to run whole genome, use -T DepthOfCoverage instead."); intervalMap = new LinkedHashMap<>(INITIAL_HASH_SIZE); intervalListIterator = new PeekableIterator<>(getToolkit().getIntervals().iterator()); // get all of the unique sample names for the VCF Header samples = ReadUtils.getSAMFileSamples(getToolkit().getSAMFileHeader()); vcfWriter.writeHeader(new VCFHeader(getHeaderInfo(), samples)); // pre load all the statistics classes because it is costly to operate on the JVM and we only want to do it once. loadAllPlugins(thresholds); }