/**
 * Records each of the supplied SAMRecords and reports whether any of them
 * caused a progress-logging event.
 *
 * @param recs the records to register with the progress logger
 * @return true if at least one record triggered logging, false otherwise
 */
public boolean record(final SAMRecord... recs) {
    boolean anyLogged = false;
    for (final SAMRecord current : recs) {
        // Always record the read; remember whether any call triggered logging.
        if (record(current)) {
            anyLogged = true;
        }
    }
    return anyLogged;
}
/**
 * Records that a given record has been processed and triggers logging if necessary.
 *
 * @param rec the record to register
 * @return boolean true if logging was triggered, false otherwise
 */
public synchronized boolean record(final SAMRecord rec) {
    // Unmapped reads carry no reference; record them under a null contig at position 0.
    final boolean unmapped = rec.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
    return unmapped
            ? record(null, 0)
            : record(rec.getReferenceName(), rec.getAlignmentStart());
}
/**
 * Adds the record to the output sink unless it is a secondary alignment being
 * filtered out; when progress logging fires, also reports how many reads have
 * been unmapped as suspected cross-species contamination so far.
 */
private void addIfNotFiltered(final Sink out, final SAMRecord rec) {
    final boolean keep = includeSecondaryAlignments || !rec.getNotPrimaryAlignmentFlag();
    if (!keep) {
        return;
    }
    out.add(rec);
    // Piggy-back the cross-species count on the periodic progress message.
    final boolean progressLogged = this.progress.record(rec);
    if (progressLogged && crossSpeciesReads > 0) {
        log.info(String.format(
                "%d Reads have been unmapped due to being suspected of being Cross-species contamination.",
                crossSpeciesReads));
    }
}
/**
 * Forwards the record to the sink when it is a primary alignment (or when
 * secondary alignments are configured to be included). On progress-log ticks,
 * additionally reports the running count of suspected cross-species reads.
 */
private void addIfNotFiltered(final Sink out, final SAMRecord rec) {
    if (!includeSecondaryAlignments && rec.getNotPrimaryAlignmentFlag()) {
        // Secondary alignment and we are not keeping those: drop it.
        return;
    }
    out.add(rec);
    if (this.progress.record(rec) && crossSpeciesReads > 0) {
        log.info(String.format(
                "%d Reads have been unmapped due to being suspected of being Cross-species contamination.",
                crossSpeciesReads));
    }
}
/**
 * Adds an edge to {@code opticalDistanceRelationGraph} for every pair of entries in
 * {@code groupList} whose physical locations are within {@code distance} of each other
 * (as judged by {@code closeEnoughShort}). Indices in {@code groupList} refer into
 * {@code wholeList}, and the graph edges use those same indices.
 *
 * @param wholeList the full list of physical locations being duplicate-grouped
 * @param groupList indices into {@code wholeList} forming the group to compare pairwise
 * @param logProgress whether to emit a progress-log entry per outer-loop element
 * @param progressLoggerForKeeper logger used for the workaround described below
 * @param distance maximum separation for two locations to be considered related
 * @param opticalDistanceRelationGraph graph receiving an edge per close pair
 */
private void fillGraphFromAGroup(final List<? extends PhysicalLocation> wholeList, final List<Integer> groupList, final boolean logProgress, final ProgressLogger progressLoggerForKeeper, final int distance, final GraphUtils.Graph<Integer> opticalDistanceRelationGraph) {
    for (int i = 0; i < groupList.size(); i++) {
        final int iIndex = groupList.get(i);
        final PhysicalLocation currentLoc = wholeList.get(iIndex);
        // The main point of adding this log and if statement (also below) is a workaround a bug in the JVM
        // which causes a deep exception (https://github.com/broadinstitute/picard/issues/472).
        // It seems that this is related to https://bugs.openjdk.java.net/browse/JDK-8033717 which
        // was closed due to non-reproducibility. We came across a bam file that evoked this error
        // every time we tried to duplicate-mark it. The problem seemed to be a duplicate-set of size 500,000,
        // and this loop seemed to kill the JVM for some reason. This logging statement (and the one in the
        // loop below) solved the problem.
        if (logProgress) {
            progressLoggerForKeeper.record(String.format("%d", currentLoc.getReadGroup()), currentLoc.getX());
        }
        // Pairwise comparison: only j > i to visit each unordered pair once.
        for (int j = i + 1; j < groupList.size(); j++) {
            final int jIndex = groupList.get(j);
            final PhysicalLocation other = wholeList.get(jIndex);
            if (closeEnoughShort(currentLoc, other, distance)) {
                opticalDistanceRelationGraph.addEdge(iIndex, jIndex);
            }
        }
    }
}
/** Incorporates the provided variant's data into the metric analysis. */ @Override public void accumulate(final VariantContext vc) { progress.record(vc.getContig(), vc.getStart()); if (!isVariantExcluded(vc)) { final String singletonSample = getSingletonSample(vc); updateSummaryMetric(summaryMetric, null, vc, singletonSample != null); // The summary metric has no genotype. vc.getSampleNames().stream() .filter(sampleName -> !vc.getGenotype(sampleName).isHomRef()) .forEach(sampleName -> updateDetailMetric(sampleMetricsMap.get(sampleName), vc.getGenotype(sampleName), vc, sampleName.equals(singletonSample))); } }
/**
 * Assigns the given evidence to the appropriate call.
 *
 * The breakend interval is first widened by the configured variant-calling margin,
 * then routed either to the single best matching breakpoint or to all overlapping
 * breakpoints depending on {@code assignEvidenceToSingleBreakpoint}.
 */
private void assignEvidence(DirectedEvidence evidence) {
    BreakendSummary bs = evidence.getBreakendSummary();
    // Widen the interval by the calling margin before deciding where it lands.
    bs = context.getVariantCallingParameters().withMargin(bs);
    if (assignEvidenceToSingleBreakpoint) {
        assignToBest(bs, evidence);
    } else {
        assignToAll(bs, evidence);
    }
    // Progress logging uses the SAMRecord backing the evidence, when one exists.
    if (evidence instanceof NonReferenceReadPair) {
        progressLogger.record(((NonReferenceReadPair)evidence).getLocalledMappedRead());
    } else if (evidence instanceof SingleReadEvidence) {
        progressLogger.record(((SingleReadEvidence)evidence).getSAMRecord());
    }
}

private void assignToBest(BreakendSummary bs, DirectedEvidence evidence) {
/** Incorporates the provided variant's data into the metric analysis. */ @Override public void accumulate(final VariantContext vc) { progress.record(vc.getContig(), vc.getStart()); if (!isVariantExcluded(vc)) { final String singletonSample = getSingletonSample(vc); updateSummaryMetric(summaryMetric, null, vc, singletonSample != null); // The summary metric has no genotype. vc.getSampleNames().stream() .filter(sampleName -> !vc.getGenotype(sampleName).isHomRef()) .forEach(sampleName -> updateDetailMetric(sampleMetricsMap.get(sampleName), vc.getGenotype(sampleName), vc, sampleName.equals(singletonSample))); } }
/**
 * Writes the given calls to {@code file} as VCF.
 *
 * When OUTPUT_TO_TEMP_FILE is enabled, records are written to a working file first
 * and atomically moved into place afterwards, so a partially-written output never
 * appears under the final name.
 *
 * @param file destination VCF file
 * @param calls variant records to write, in the order they should appear
 * @throws IOException if moving the temporary file into place fails
 */
protected void saveVcf(File file, Iterator<IdsvVariantContext> calls) throws IOException {
    File tmp = gridss.Defaults.OUTPUT_TO_TEMP_FILE ? FileSystemContext.getWorkingFileFor(file) : file;
    final ProgressLogger writeProgress = new ProgressLogger(log);
    // try-with-resources ensures the writer is closed (header/index finalized) on all paths.
    try (VariantContextWriter vcfWriter = getContext().getVariantContextWriter(tmp, getOutputHeader(), true)) {
        while (calls.hasNext()) {
            IdsvVariantContext record = calls.next();
            vcfWriter.add(record);
            writeProgress.record(record.getContig(), record.getStart());
        }
    }
    // Only move when we actually wrote to a distinct temporary file.
    if (tmp != file) {
        FileHelper.move(tmp, file, true);
    }
}

protected VCFHeader getInputHeader() {
/**
 * Writes the sorted variants to OUTPUT, optionally creating an index on the fly.
 *
 * @param outputHeader the VCF header to write ahead of the records
 * @param sortedOutput the variants, already in output order
 */
private void writeSortedOutput(final VCFHeader outputHeader, final SortingCollection<VariantContext> sortedOutput) {
    final ProgressLogger writeProgress = new ProgressLogger(log, 25000, "wrote", "records");
    // Only request on-the-fly indexing when the user asked for an index.
    final EnumSet<Options> options = CREATE_INDEX ? EnumSet.of(Options.INDEX_ON_THE_FLY) : EnumSet.noneOf(Options.class);
    // try-with-resources guarantees the writer is closed (and the output finalized)
    // even if add() throws; the original leaked the writer on error.
    try (final VariantContextWriter out = new VariantContextWriterBuilder().
            setReferenceDictionary(outputHeader.getSequenceDictionary()).
            setOptions(options).
            setOutputFile(OUTPUT).build()) {
        out.writeHeader(outputHeader);
        for (final VariantContext variantContext : sortedOutput) {
            out.add(variantContext);
            writeProgress.record(variantContext.getContig(), variantContext.getStart());
        }
    }
}
}
/**
 * Writes the sorted variants to OUTPUT, optionally creating an index on the fly.
 *
 * @param outputHeader the VCF header to write ahead of the records
 * @param sortedOutput the variants, already in output order
 */
private void writeSortedOutput(final VCFHeader outputHeader, final SortingCollection<VariantContext> sortedOutput) {
    final ProgressLogger writeProgress = new ProgressLogger(log, 25000, "wrote", "records");
    // Only request on-the-fly indexing when the user asked for an index.
    final EnumSet<Options> options = CREATE_INDEX ? EnumSet.of(Options.INDEX_ON_THE_FLY) : EnumSet.noneOf(Options.class);
    // try-with-resources guarantees the writer is closed (and the output finalized)
    // even if add() throws; the original leaked the writer on error.
    try (final VariantContextWriter out = new VariantContextWriterBuilder().
            setReferenceDictionary(outputHeader.getSequenceDictionary()).
            setOptions(options).
            setOutputFile(OUTPUT).build()) {
        out.writeHeader(outputHeader);
        for (final VariantContext variantContext : sortedOutput) {
            out.add(variantContext);
            writeProgress.record(variantContext.getContig(), variantContext.getStart());
        }
    }
}
}
private void filterReads(final FilteringSamIterator filteringIterator) { // get OUTPUT header from INPUT and overwrite it if necessary final SAMFileHeader fileHeader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(INPUT); final SAMFileHeader.SortOrder inputSortOrder = fileHeader.getSortOrder(); if (SORT_ORDER != null) { fileHeader.setSortOrder(SORT_ORDER); } if (FILTER == Filter.includePairedIntervals && fileHeader.getSortOrder() != SAMFileHeader.SortOrder.coordinate) { throw new UnsupportedOperationException("Input must be coordinate sorted to use includePairedIntervals"); } final boolean presorted = inputSortOrder.equals(fileHeader.getSortOrder()); log.info("Filtering [presorted=" + presorted + "] " + INPUT.getName() + " -> OUTPUT=" + OUTPUT.getName() + " [sortorder=" + fileHeader.getSortOrder().name() + "]"); // create OUTPUT file final SAMFileWriter outputWriter = new SAMFileWriterFactory().makeSAMOrBAMWriter(fileHeader, presorted, OUTPUT); final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Written"); while (filteringIterator.hasNext()) { final SAMRecord rec = filteringIterator.next(); outputWriter.addAlignment(rec); progress.record(rec); } filteringIterator.close(); outputWriter.close(); log.info(new DecimalFormat("#,###").format(progress.getCount()) + " SAMRecords written to " + OUTPUT.getName()); }
/** Creates a simple SAM file from a single fastq file. */
protected int doUnpaired(final FastqReader freader, final SAMFileWriter writer) {
    final ProgressLogger progress = new ProgressLogger(LOG);
    int readCount = 0;
    while (freader.hasNext()) {
        final FastqRecord frec = freader.next();
        // Build an unaligned, unpaired SAM record from the fastq entry.
        final SAMRecord srec = createSamRecord(
                writer.getFileHeader(),
                SequenceUtil.getSamReadNameFromFastqHeader(frec.getReadHeader()),
                frec,
                false);
        srec.setReadPairedFlag(false);
        writer.addAlignment(srec);
        progress.record(srec);
        readCount++;
    }
    return readCount;
}
/** Creates a simple SAM file from a single fastq file. */
protected int doUnpaired(final FastqReader freader, final SAMFileWriter writer) {
    int written = 0;
    final ProgressLogger progressLogger = new ProgressLogger(LOG);
    while (freader.hasNext()) {
        final FastqRecord fastq = freader.next();
        final String readName = SequenceUtil.getSamReadNameFromFastqHeader(fastq.getReadHeader());
        // false => this is not the second read of a pair.
        final SAMRecord sam = createSamRecord(writer.getFileHeader(), readName, fastq, false);
        sam.setReadPairedFlag(false);
        writer.addAlignment(sam);
        progressLogger.record(sam);
        written++;
    }
    return written;
}
/**
 * Converts INPUT to the format implied by OUTPUT's extension, copying every record.
 *
 * @return 0 on success
 * @throws PicardException if CREATE_INDEX is requested on non-coordinate-sorted output
 */
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT);
    final SAMFileWriter writer = new SAMFileWriterFactory().makeWriter(reader.getFileHeader(), true, OUTPUT, REFERENCE_SEQUENCE);
    // try/finally ensures reader and writer are closed even when the CREATE_INDEX
    // check throws or a record fails to write; the original leaked both on error.
    try {
        if (CREATE_INDEX && writer.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
            throw new PicardException("Can't CREATE_INDEX unless sort order is coordinate");
        }

        final ProgressLogger progress = new ProgressLogger(Log.getInstance(SamFormatConverter.class));
        for (final SAMRecord rec : reader) {
            writer.addAlignment(rec);
            progress.record(rec);
        }
    } finally {
        CloserUtil.close(reader);
        writer.close();
    }
    return 0;
}
}
/**
 * Converts INPUT to the format implied by OUTPUT's extension, copying every record.
 *
 * @return 0 on success
 * @throws PicardException if CREATE_INDEX is requested on non-coordinate-sorted output
 */
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT);
    final SAMFileWriter writer = new SAMFileWriterFactory().makeWriter(reader.getFileHeader(), true, OUTPUT, REFERENCE_SEQUENCE);
    // try/finally ensures reader and writer are closed even when the CREATE_INDEX
    // check throws or a record fails to write; the original leaked both on error.
    try {
        if (CREATE_INDEX && writer.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
            throw new PicardException("Can't CREATE_INDEX unless sort order is coordinate");
        }

        final ProgressLogger progress = new ProgressLogger(Log.getInstance(SamFormatConverter.class));
        for (final SAMRecord rec : reader) {
            writer.addAlignment(rec);
            progress.record(rec);
        }
    } finally {
        CloserUtil.close(reader);
        writer.close();
    }
    return 0;
}
}
/**
 * Reads INPUT, re-sorts it into SORT_ORDER via the writer's sorting collection,
 * and writes the result to OUTPUT.
 *
 * @return 0 on success
 */
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT);
    // Declaring the new order on the header makes the writer sort for us (presorted=false).
    reader.getFileHeader().setSortOrder(SORT_ORDER.getSortOrder());
    final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(reader.getFileHeader(), false, OUTPUT);
    writer.setProgressLogger(
            new ProgressLogger(log, (int) 1e7, "Wrote", "records from a sorting collection"));

    final ProgressLogger progress = new ProgressLogger(log, (int) 1e7, "Read");
    // try/finally ensures reader and writer are closed even when reading or
    // writing throws; the original leaked both on error (and had a stray ';').
    try {
        for (final SAMRecord rec : reader) {
            writer.addAlignment(rec);
            progress.record(rec);
        }
        log.info("Finished reading inputs, merging and writing to output now.");
    } finally {
        CloserUtil.close(reader);
        writer.close();
    }
    return 0;
}
}
/**
 * Reads INPUT, re-sorts it into SORT_ORDER via the writer's sorting collection,
 * and writes the result to OUTPUT.
 *
 * @return 0 on success
 */
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT);
    // Declaring the new order on the header makes the writer sort for us (presorted=false).
    reader.getFileHeader().setSortOrder(SORT_ORDER.getSortOrder());
    final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(reader.getFileHeader(), false, OUTPUT);
    writer.setProgressLogger(
            new ProgressLogger(log, (int) 1e7, "Wrote", "records from a sorting collection"));

    final ProgressLogger progress = new ProgressLogger(log, (int) 1e7, "Read");
    // try/finally ensures reader and writer are closed even when reading or
    // writing throws; the original leaked both on error (and had a stray ';').
    try {
        for (final SAMRecord rec : reader) {
            writer.addAlignment(rec);
            progress.record(rec);
        }
        log.info("Finished reading inputs, merging and writing to output now.");
    } finally {
        CloserUtil.close(reader);
        writer.close();
    }
    return 0;
}
}
/**
 * Copies every record from INPUT to OUTPUT with the supplied replacement header.
 *
 * @param replacementHeader the header to write and attach to each record
 * @throws PicardException if INPUT's sort order disagrees with the replacement header's
 */
private void standardReheader(final SAMFileHeader replacementHeader) {
    final SamReader recordReader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).validationStringency(ValidationStringency.SILENT).open(INPUT);
    // try/finally ensures the reader is closed even when the sort-order check
    // throws; the original leaked it (and the writer) on any error path.
    try {
        if (replacementHeader.getSortOrder() != recordReader.getFileHeader().getSortOrder()) {
            throw new PicardException("Sort orders of INPUT (" + recordReader.getFileHeader().getSortOrder().name() +
                    ") and HEADER (" + replacementHeader.getSortOrder().name() + ") do not agree.");
        }
        final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(replacementHeader, true, OUTPUT);
        try {
            final ProgressLogger progress = new ProgressLogger(Log.getInstance(ReplaceSamHeader.class));
            for (final SAMRecord rec : recordReader) {
                // Each record must reference the new header before being written.
                rec.setHeader(replacementHeader);
                writer.addAlignment(rec);
                progress.record(rec);
            }
        } finally {
            writer.close();
        }
    } finally {
        CloserUtil.close(recordReader);
    }
}
/**
 * Copies every record from INPUT to OUTPUT with the supplied replacement header.
 *
 * @param replacementHeader the header to write and attach to each record
 * @throws PicardException if INPUT's sort order disagrees with the replacement header's
 */
private void standardReheader(final SAMFileHeader replacementHeader) {
    final SamReader recordReader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).validationStringency(ValidationStringency.SILENT).open(INPUT);
    // try/finally ensures the reader is closed even when the sort-order check
    // throws; the original leaked it (and the writer) on any error path.
    try {
        if (replacementHeader.getSortOrder() != recordReader.getFileHeader().getSortOrder()) {
            throw new PicardException("Sort orders of INPUT (" + recordReader.getFileHeader().getSortOrder().name() +
                    ") and HEADER (" + replacementHeader.getSortOrder().name() + ") do not agree.");
        }
        final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(replacementHeader, true, OUTPUT);
        try {
            final ProgressLogger progress = new ProgressLogger(Log.getInstance(ReplaceSamHeader.class));
            for (final SAMRecord rec : recordReader) {
                // Each record must reference the new header before being written.
                rec.setHeader(replacementHeader);
                writer.addAlignment(rec);
                progress.record(rec);
            }
        } finally {
            writer.close();
        }
    } finally {
        CloserUtil.close(recordReader);
    }
}