// Constructor: wraps the raw read iterator in a peeking, filtered iterator that keeps only
// aligned reads (AlignedFilter(true)) and, when ignoreDuplicates is set, drops duplicate reads.
// If the supplied iterator is Closeable it is registered in toClose so close() can release it.
// maxEvidenceWindow is the largest span a single piece of evidence can cover: the max of
// MAX_READ_LENGTH, MAX_READ_MAPPED_LENGTH, and (when pairing is supplied) the maximum
// concordant fragment size — presumably used to bound how far back buffered reads must be kept;
// TODO(review): confirm against the lookup logic elsewhere in this class.
// NOTE(review): the trailing "public void close() {" opens a method whose body is outside this
// view — left untouched.
public SequentialReferenceCoverageLookup(Iterator<SAMRecord> it, IdsvMetrics metrics, ReadPairConcordanceCalculator pairing, int windowSize, int category, boolean ignoreDuplicates) { this.pairing = pairing; if (it instanceof Closeable) toClose.add((Closeable)it); List<SamRecordFilter> filters = new ArrayList<>(); filters.add(new AlignedFilter(true)); if (ignoreDuplicates) { filters.add(new DuplicateReadFilter()); } this.reads = Iterators.peekingIterator(new FilteringSamIterator(it, new AggregateFilter(filters))); this.largestWindow = windowSize; this.maxEvidenceWindow = Math.max(metrics.MAX_READ_LENGTH, Math.max(metrics.MAX_READ_MAPPED_LENGTH, pairing != null ? pairing.maxConcordantFragmentSize() : 0)); this.category = category; } public void close() {
// Fragment (loop is not closed in this view): builds a single AggregateFilter from the
// surrounding `filters` list, then drains the `sam` iterator, incrementing `cnt` for each
// record that passes every filter (filterOut == false means the record is kept).
// NOTE(review): `filters`, `sam`, and `cnt` are declared outside this fragment — verify in context.
AggregateFilter aggregateFilter= new AggregateFilter(filters); while(sam.hasNext()){ SAMRecord rec= sam.next(); if( !aggregateFilter.filterOut(rec) ){ cnt++;
// Fragment (cut mid-branch): marks `passed` true when the record is mapped, survives the
// feature filter's aggregated SAM filters, and has a non-inverted alignment span
// (alignmentEnd >= alignmentStart).
// NOTE(review): constructing a new AggregateFilter per record looks wasteful — presumably this
// sits inside a per-record loop; consider hoisting if so (cannot confirm from this view).
AggregateFilter aggregateFilter= new AggregateFilter(this.getFeatureFilter().getSamRecordFilter()); boolean passed; if(!rec.getReadUnmappedFlag() && !aggregateFilter.filterOut(rec) && rec.getAlignmentEnd() >= rec.getAlignmentStart()){ passed= true;
/**
 * Applies filters such as duplicate removal that apply to all SAMRecord parsing.
 *
 * @param iterator raw reads
 * @param singleAlignmentPerRead whether secondary and supplementary alignments should be
 *        filtered out, leaving at most one alignment record per read
 * @return iterator with filtered records excluded; closing it also closes the source iterator
 */
public CloseableIterator<SAMRecord> applyCommonSAMRecordFilters(final CloseableIterator<SAMRecord> iterator, final boolean singleAlignmentPerRead) {
    // Vendor-failed reads are always excluded.
    List<SamRecordFilter> filters = Lists.<SamRecordFilter>newArrayList(new FailsVendorReadQualityFilter());
    if (singleAlignmentPerRead) {
        filters.add(new SecondaryOrSupplementaryFilter());
    }
    if (filterDuplicates) {
        filters.add(new DuplicateReadFilter());
    }
    // AutoClosingIterator ensures the underlying iterator is closed with the wrapper.
    return new AutoClosingIterator<SAMRecord>(new FilteringSamIterator(iterator, new AggregateFilter(filters)), iterator);
}
/**
 * Single-use iterator accessor: initializes the underlying SAM record stream on first call.
 *
 * @return iterator over all / all-covered locus positions in the reference, according to the
 *         {@code emitUncoveredLoci} setting
 * @throws IllegalStateException if called more than once
 */
@Override
public Iterator<K> iterator() {
    // Guard: this object is itself the iterator, so a second call would corrupt iteration state.
    if (samIterator != null) {
        throw new IllegalStateException("Cannot call iterator() more than once on " + this.getClass().getSimpleName());
    }
    // Choose interval-restricted or whole-file traversal.
    CloseableIterator<SAMRecord> records = (intervals != null)
            ? new SamRecordIntervalIteratorFactory().makeSamRecordIntervalIterator(samReader, intervals, useIndex)
            : samReader.iterator();
    // Optionally wrap with the configured record filters.
    if (samFilters != null) {
        records = new FilteringSamIterator(records, new AggregateFilter(samFilters));
    }
    samIterator = new PeekableIterator<>(records);
    return this;
}
/**
 * Lazily sets up SAM record traversal and returns this object as the locus iterator.
 * May only be invoked once per instance.
 *
 * @return iterator over all / all-covered locus positions in the reference, according to the
 *         {@code emitUncoveredLoci} setting
 * @throws IllegalStateException on a repeated call
 */
@Override
public Iterator<K> iterator() {
    if (samIterator != null) {
        throw new IllegalStateException("Cannot call iterator() more than once on " + this.getClass().getSimpleName());
    }
    CloseableIterator<SAMRecord> source;
    if (intervals == null) {
        // No interval list: iterate the entire reader.
        source = samReader.iterator();
    } else {
        // Restrict traversal to the requested intervals, honoring the index preference.
        source = new SamRecordIntervalIteratorFactory().makeSamRecordIntervalIterator(samReader, intervals, useIndex);
    }
    // Apply record-level filters when configured.
    final CloseableIterator<SAMRecord> filtered = (samFilters == null)
            ? source
            : new FilteringSamIterator(source, new AggregateFilter(samFilters));
    samIterator = new PeekableIterator<>(filtered);
    return this;
}
/**
 * Initializes the underlying SAM record stream and returns this SamLocusIterator.
 * A second call is an error: iteration state is held on this object.
 *
 * @return iterator over {@link LocusInfo} positions
 * @throws IllegalStateException if called more than once
 */
public Iterator<LocusInfo> iterator() {
    if (samIterator != null) {
        throw new IllegalStateException("Cannot call iterator() more than once on SamLocusIterator");
    }
    // Interval-restricted traversal when intervals are set, otherwise the whole reader.
    CloseableIterator<SAMRecord> records = (intervals != null)
            ? new SamRecordIntervalIteratorFactory().makeSamRecordIntervalIterator(samReader, intervals, useIndex)
            : samReader.iterator();
    // Wrap in the configured filters, if any.
    if (samFilters != null) {
        records = new FilteringIterator(records, new AggregateFilter(samFilters));
    }
    samIterator = new PeekableIterator<SAMRecord>(records);
    return this;
}
// Fragment: appends an insert-size filter (bounded by MINIMUM_INSERT_SIZE and
// effectiveMaxInsertSize) to the surrounding `filters` list, then combines all filters into
// the single `recordFilter` used downstream.
// NOTE(review): `filters`, `recordFilter`, and the bounds are declared outside this view.
filters.add(new InsertSizeFilter(MINIMUM_INSERT_SIZE, effectiveMaxInsertSize)); recordFilter = new AggregateFilter(filters);
// Fragment (duplicate of the snippet above in this file): adds an InsertSizeFilter with the
// configured minimum and effective maximum, then folds the filter list into `recordFilter`.
// NOTE(review): all referenced identifiers are declared outside this fragment — verify in context.
filters.add(new InsertSizeFilter(MINIMUM_INSERT_SIZE, effectiveMaxInsertSize)); recordFilter = new AggregateFilter(filters);
/** * Path for writing bfqs for single-end reads * * @param iterator the iterator with he SAM Records to write * @param filters the list of filters to be applied */ private void writeSingleEndBfqs(final Iterator<SAMRecord> iterator, final List<SamRecordFilter> filters) { // Open the codecs for writing int fileIndex = 0; initializeNextBfqFiles(fileIndex++); int records = 0; final FilteringSamIterator it = new FilteringSamIterator(iterator, new AggregateFilter(filters)); while (it.hasNext()) { final SAMRecord record = it.next(); records++; if (records % increment == 0) { record.setReadName(record.getReadName() + "/1"); writeFastqRecord(codec1, record); wrote++; if (wrote % 1000000 == 0) { log.info(wrote + " records processed."); } if (chunk > 0 && wrote % chunk == 0) { initializeNextBfqFiles(fileIndex++); } } } }
/** * Path for writing bfqs for single-end reads * * @param iterator the iterator with he SAM Records to write * @param filters the list of filters to be applied */ private void writeSingleEndBfqs(final Iterator<SAMRecord> iterator, final List<SamRecordFilter> filters) { // Open the codecs for writing int fileIndex = 0; initializeNextBfqFiles(fileIndex++); int records = 0; final FilteringSamIterator it = new FilteringSamIterator(iterator, new AggregateFilter(filters)); while (it.hasNext()) { final SAMRecord record = it.next(); records++; if (records % increment == 0) { record.setReadName(record.getReadName() + "/1"); writeFastqRecord(codec1, record); wrote++; if (wrote % 1000000 == 0) { log.info(wrote + " records processed."); } if (chunk > 0 && wrote % chunk == 0) { initializeNextBfqFiles(fileIndex++); } } } }
// Fragment (loop not closed in this view): adds the vendor-quality filter to the surrounding
// `filters` list, wraps iterator `it` in the aggregated filters, and drains it — the loop body
// continuing beyond this fragment presumably tallies or processes each passing record.
filters.add(new FailsVendorReadQualityFilter()); final FilteringSamIterator itr = new FilteringSamIterator(it, new AggregateFilter(filters)); while (itr.hasNext()) { itr.next();
// Fragment (duplicate of the snippet above; loop unclosed here): excludes vendor-failed reads,
// then iterates all records surviving the combined filters. `filters` and `it` come from the
// enclosing scope — verify in context.
filters.add(new FailsVendorReadQualityFilter()); final FilteringSamIterator itr = new FilteringSamIterator(it, new AggregateFilter(filters)); while (itr.hasNext()) { itr.next();
// Fragment (opens with the tail of an expression from outside this view): wraps the queried
// `samRecordIterator` with the aggregated `samFilters` before duplicate-set processing begins.
); final FilteringSamIterator filteredSamRecordIterator = new FilteringSamIterator(samRecordIterator, new AggregateFilter(samFilters)); log.info("Queried BAM, getting duplicate sets.");
// Fragment (duplicate of the snippet above; leading `);` closes a call from outside this view):
// applies `samFilters` to the BAM query results ahead of duplicate-set collection.
); final FilteringSamIterator filteredSamRecordIterator = new FilteringSamIterator(samRecordIterator, new AggregateFilter(samFilters)); log.info("Queried BAM, getting duplicate sets.");