// Reads only the header of the given SAM/BAM path and returns its sequence dictionary.
// NOTE(review): uses default factory settings (no reference configured) — presumably fine
// because only the header is parsed; confirm for CRAM inputs. Trailing "}," closes the
// enclosing enum constant, which starts outside this view.
@Override SAMSequenceDictionary extractDictionary(Path sam) { return SamReaderFactory.makeDefault().getFileHeader(sam).getSequenceDictionary(); } },
// Reads only the header of the given SAM/BAM path and returns its sequence dictionary.
// NOTE(review): duplicate of the sibling enum constant's implementation above — the two
// constants presumably differ only in how they are selected; confirm before consolidating.
// Trailing "}," closes the enclosing enum constant, which starts outside this view.
@Override SAMSequenceDictionary extractDictionary(Path sam) { return SamReaderFactory.makeDefault().getFileHeader(sam).getSequenceDictionary(); } },
/**
 * Tests that if we've set merging to false, constructing a SamFileHeaderMerger over BAMs
 * with different sequence dictionaries throws SequenceListsDifferException.
 */
@Test(expectedExceptions = SequenceUtil.SequenceListsDifferException.class)
public void testMergedException() {
    // Two BAMs whose sequence dictionaries intentionally disagree (chr1-10 vs chr5-9).
    // Fixed: Java-style array declaration and lowerCamelCase local (was "File INPUT[]").
    final File[] inputs = {
            new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome1to10.bam"),
            new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome5to9.bam")};
    final List<SAMFileHeader> headers = new ArrayList<>();
    for (final File inFile : inputs) {
        IOUtil.assertFileIsReadable(inFile);
        headers.add(SamReaderFactory.makeDefault().getFileHeader(inFile));
    }
    // mergeDictionaries=false: incompatible dictionaries must raise rather than be merged.
    new SamFileHeaderMerger(SAMFileHeader.SortOrder.unsorted, headers, false);
}
@Test public void testAddCommentsToBam() throws Exception { final File outputFile = File.createTempFile("addCommentsToBamTest.", BamFileIoUtils.BAM_FILE_EXTENSION); outputFile.deleteOnExit(); runIt(INPUT_FILE, outputFile, commentList); final SAMFileHeader newHeader = SamReaderFactory.makeDefault().getFileHeader(outputFile); // The original comments are massaged when they're added to the header. Perform the same massaging here, // and then compare the lists final List<String> massagedComments = new LinkedList<String>(); for (final String comment : commentList) { massagedComments.add(SAMTextHeaderCodec.COMMENT_PREFIX + comment); } Assert.assertEquals(newHeader.getComments(), massagedComments); outputFile.delete(); }
/**
 * Validates INPUT/OUTPUT, appends each COMMENT to INPUT's header, and block-copies the
 * reheadered BAM to OUTPUT.
 *
 * @return 0 on success; failures are reported via exceptions.
 */
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    // Block-copy reheadering only works on BAM; reject plain-text SAM inputs up front.
    if (INPUT.getAbsolutePath().endsWith(".sam")) {
        throw new PicardException("SAM files are not supported");
    }

    final SAMFileHeader header = SamReaderFactory.makeDefault()
            .referenceSequence(REFERENCE_SEQUENCE)
            .getFileHeader(INPUT);
    for (final String comment : COMMENT) {
        // An embedded newline would corrupt the line-oriented text header format.
        if (comment.contains("\n")) {
            throw new PicardException("Comments can not contain a new line");
        }
        header.addComment(comment);
    }
    BamFileIoUtils.reheaderBamFile(header, INPUT, OUTPUT, CREATE_MD5_FILE, CREATE_INDEX);
    return 0;
}
}
/**
 * Validates INPUT/OUTPUT, appends each COMMENT to INPUT's header, and block-copies the
 * reheadered BAM to OUTPUT.
 *
 * @return 0 on success; failures are reported via exceptions.
 */
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    // Block-copy reheadering only works on BAM; reject plain-text SAM inputs up front.
    if (INPUT.getAbsolutePath().endsWith(".sam")) {
        throw new PicardException("SAM files are not supported");
    }

    final SAMFileHeader header = SamReaderFactory.makeDefault()
            .referenceSequence(REFERENCE_SEQUENCE)
            .getFileHeader(INPUT);
    for (final String comment : COMMENT) {
        // An embedded newline would corrupt the line-oriented text header format.
        if (comment.contains("\n")) {
            throw new PicardException("Comments can not contain a new line");
        }
        header.addComment(comment);
    }
    BamFileIoUtils.reheaderBamFile(header, INPUT, OUTPUT, CREATE_MD5_FILE, CREATE_INDEX);
    return 0;
}
}
private void filterReads(final FilteringSamIterator filteringIterator) { // get OUTPUT header from INPUT and overwrite it if necessary final SAMFileHeader fileHeader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(INPUT); final SAMFileHeader.SortOrder inputSortOrder = fileHeader.getSortOrder(); if (SORT_ORDER != null) { fileHeader.setSortOrder(SORT_ORDER); } if (FILTER == Filter.includePairedIntervals && fileHeader.getSortOrder() != SAMFileHeader.SortOrder.coordinate) { throw new UnsupportedOperationException("Input must be coordinate sorted to use includePairedIntervals"); } final boolean presorted = inputSortOrder.equals(fileHeader.getSortOrder()); log.info("Filtering [presorted=" + presorted + "] " + INPUT.getName() + " -> OUTPUT=" + OUTPUT.getName() + " [sortorder=" + fileHeader.getSortOrder().name() + "]"); // create OUTPUT file final SAMFileWriter outputWriter = new SAMFileWriterFactory().makeSAMOrBAMWriter(fileHeader, presorted, OUTPUT); final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Written"); while (filteringIterator.hasNext()) { final SAMRecord rec = filteringIterator.next(); outputWriter.addAlignment(rec); progress.record(rec); } filteringIterator.close(); outputWriter.close(); log.info(new DecimalFormat("#,###").format(progress.getCount()) + " SAMRecords written to " + OUTPUT.getName()); }
/**
 * Builds an in-memory CRAM copy of BAM_FILE plus a .bai index for it, caching the raw
 * bytes (cramBytes, baiBytes) for the tests.
 *
 * @throws IOException if any of the temp files cannot be written or read
 */
@BeforeTest
public void prepare() throws IOException {
    Log.setGlobalLogLevel(Log.LogLevel.ERROR);
    // Fake reference matching the BAM's dictionary, so conversion needs no real fasta.
    source = new ReferenceSource(new FakeReferenceSequenceFile(
            SamReaderFactory.makeDefault().getFileHeader(BAM_FILE).getSequenceDictionary().getSequences()));
    cramBytes = cramFromBAM(BAM_FILE, source);

    cramFile = File.createTempFile(BAM_FILE.getName(), ".cram");
    cramFile.deleteOnExit();
    indexFile = new File(cramFile.getAbsolutePath() + ".bai");
    indexFile.deleteOnExit();

    // Fixed: the original leaked the output stream if write() threw.
    try (final FileOutputStream fos = new FileOutputStream(cramFile)) {
        fos.write(cramBytes);
    }
    // Fixed: the seekable stream was never closed; close it once indexing is done.
    try (final SeekableFileStream cramStream = new SeekableFileStream(cramFile)) {
        CRAMBAIIndexer.createIndex(cramStream, indexFile, null, ValidationStringency.STRICT);
    }
    baiBytes = readFile(indexFile);
}
/**
 * Builds an in-memory CRAM copy of BAM_FILE plus a .crai index for it, caching the raw
 * bytes (cramBytes, craiBytes) for the tests.
 *
 * @throws IOException if any of the temp files cannot be written or read
 */
@BeforeTest
public void prepare() throws IOException {
    Log.setGlobalLogLevel(Log.LogLevel.ERROR);
    // Fake reference matching the BAM's dictionary, so conversion needs no real fasta.
    source = new ReferenceSource(new FakeReferenceSequenceFile(
            SamReaderFactory.makeDefault().getFileHeader(BAM_FILE).getSequenceDictionary().getSequences()));
    tmpCramFile = File.createTempFile(BAM_FILE.getName(), ".cram");
    tmpCramFile.deleteOnExit();
    tmpCraiFile = new File(tmpCramFile.getAbsolutePath() + ".crai");
    tmpCraiFile.deleteOnExit();
    cramBytes = cramFromBAM(BAM_FILE, source);

    // Fixed: the original leaked this stream if write() threw.
    try (final FileOutputStream fos = new FileOutputStream(tmpCramFile)) {
        fos.write(cramBytes);
    }
    // Fixed: the original never closed "fios" (or the seekable input stream) at all,
    // leaking the .crai file handle.
    try (final FileOutputStream craiOut = new FileOutputStream(tmpCraiFile);
         final SeekableFileStream cramIn = new SeekableFileStream(tmpCramFile)) {
        CRAMCRAIIndexer.writeIndex(cramIn, craiOut);
    }
    craiBytes = readFile(tmpCraiFile);
}
private void filterReads(final FilteringSamIterator filteringIterator) { // get OUTPUT header from INPUT and overwrite it if necessary final SAMFileHeader fileHeader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(INPUT); final SAMFileHeader.SortOrder inputSortOrder = fileHeader.getSortOrder(); if (SORT_ORDER != null) { fileHeader.setSortOrder(SORT_ORDER); } if (FILTER == Filter.includePairedIntervals && fileHeader.getSortOrder() != SAMFileHeader.SortOrder.coordinate) { throw new UnsupportedOperationException("Input must be coordinate sorted to use includePairedIntervals"); } final boolean presorted = inputSortOrder.equals(fileHeader.getSortOrder()); log.info("Filtering [presorted=" + presorted + "] " + INPUT.getName() + " -> OUTPUT=" + OUTPUT.getName() + " [sortorder=" + fileHeader.getSortOrder().name() + "]"); // create OUTPUT file final SAMFileWriter outputWriter = new SAMFileWriterFactory().makeSAMOrBAMWriter(fileHeader, presorted, OUTPUT); final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Written"); while (filteringIterator.hasNext()) { final SAMRecord rec = filteringIterator.next(); outputWriter.addAlignment(rec); progress.record(rec); } filteringIterator.close(); outputWriter.close(); log.info(new DecimalFormat("#,###").format(progress.getCount()) + " SAMRecords written to " + OUTPUT.getName()); }
/**
 * Simple implementation of a gather operation that uses SamReaders and a SAMFileWriter in
 * order to concatenate multiple BAM files record-by-record.
 *
 * Fixed: readers leaked if iteration threw, and the writer leaked on any exception; both
 * are now released via try/finally. Also removed the pointless anonymous block around the
 * header initialization.
 */
private static void gatherNormally(final List<File> inputs, final File output, final boolean createIndex, final boolean createMd5, final File referenceFasta) {
    // Output inherits the first input's header; inputs are presumed header-compatible
    // (TODO confirm — no compatibility check is performed here).
    final SAMFileHeader header =
            SamReaderFactory.makeDefault().referenceSequence(referenceFasta).getFileHeader(inputs.get(0));
    final SAMFileWriter out = new SAMFileWriterFactory()
            .setCreateIndex(createIndex)
            .setCreateMd5File(createMd5)
            .makeSAMOrBAMWriter(header, true, output);
    try {
        for (final File f : inputs) {
            log.info("Gathering " + f.getAbsolutePath());
            final SamReader in = SamReaderFactory.makeDefault().referenceSequence(referenceFasta).open(f);
            try {
                for (final SAMRecord rec : in) {
                    out.addAlignment(rec);
                }
            } finally {
                CloserUtil.close(in);
            }
        }
    } finally {
        out.close();
    }
}
/**
 * Simple implementation of a gather operation that uses SamReaders and a SAMFileWriter in
 * order to concatenate multiple BAM files record-by-record.
 *
 * Fixed: readers leaked if iteration threw, and the writer leaked on any exception; both
 * are now released via try/finally. Also removed the pointless anonymous block around the
 * header initialization.
 */
private static void gatherNormally(final List<File> inputs, final File output, final boolean createIndex, final boolean createMd5, final File referenceFasta) {
    // Output inherits the first input's header; inputs are presumed header-compatible
    // (TODO confirm — no compatibility check is performed here).
    final SAMFileHeader header =
            SamReaderFactory.makeDefault().referenceSequence(referenceFasta).getFileHeader(inputs.get(0));
    final SAMFileWriter out = new SAMFileWriterFactory()
            .setCreateIndex(createIndex)
            .setCreateMd5File(createMd5)
            .makeSAMOrBAMWriter(header, true, output);
    try {
        for (final File f : inputs) {
            log.info("Gathering " + f.getAbsolutePath());
            final SamReader in = SamReaderFactory.makeDefault().referenceSequence(referenceFasta).open(f);
            try {
                for (final SAMRecord rec : in) {
                    out.addAlignment(rec);
                }
            } finally {
                CloserUtil.close(in);
            }
        }
    } finally {
        out.close();
    }
}
/**
 * Do the work after the command line has been parsed.
 * RuntimeExceptions may be thrown by this method and are reported appropriately.
 *
 * @return program exit status (0 on success).
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsReadable(HEADER);
    IOUtil.assertFileIsWritable(OUTPUT);

    final SAMFileHeader newHeader =
            SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(HEADER);

    // BAMs can be reheadered by fast block copy; anything else takes the standard path.
    if (!BamFileIoUtils.isBamFile(INPUT)) {
        standardReheader(newHeader);
    } else {
        blockCopyReheader(newHeader);
    }
    return 0;
}
@Override protected int doWork() { final List<String> sampleList = new ArrayList<String>(); for (final File input : INPUT) IOUtil.assertFileIsReadable(input); if (SEQUENCE_DICTIONARY != null) IOUtil.assertFileIsReadable(SEQUENCE_DICTIONARY); SAMSequenceDictionary samSequenceDictionary = null; if (SEQUENCE_DICTIONARY != null) { samSequenceDictionary = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(SEQUENCE_DICTIONARY).getSequenceDictionary(); CloserUtil.close(SEQUENCE_DICTIONARY); } // Gather up a file reader and file header for each input file. Check for sequence dictionary compatibility along the way. collectFileReadersAndHeaders(sampleList, samSequenceDictionary); // Create the merged output header from the input headers final VCFHeader outputHeader = new VCFHeader(VCFUtils.smartMergeHeaders(inputHeaders, false), sampleList); // Load entries into the sorting collection final SortingCollection<VariantContext> sortedOutput = sortInputs(inputReaders, outputHeader); // Output to the final file writeSortedOutput(outputHeader, sortedOutput); return 0; }
/**
 * Do the work after the command line has been parsed.
 * RuntimeExceptions may be thrown by this method and are reported appropriately.
 *
 * @return program exit status (0 on success).
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsReadable(HEADER);
    IOUtil.assertFileIsWritable(OUTPUT);

    final SAMFileHeader newHeader =
            SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(HEADER);

    // BAMs can be reheadered by fast block copy; anything else takes the standard path.
    if (!BamFileIoUtils.isBamFile(INPUT)) {
        standardReheader(newHeader);
    } else {
        blockCopyReheader(newHeader);
    }
    return 0;
}
@Override protected int doWork() { final List<String> sampleList = new ArrayList<String>(); for (final File input : INPUT) IOUtil.assertFileIsReadable(input); if (SEQUENCE_DICTIONARY != null) IOUtil.assertFileIsReadable(SEQUENCE_DICTIONARY); SAMSequenceDictionary samSequenceDictionary = null; if (SEQUENCE_DICTIONARY != null) { samSequenceDictionary = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(SEQUENCE_DICTIONARY).getSequenceDictionary(); CloserUtil.close(SEQUENCE_DICTIONARY); } // Gather up a file reader and file header for each input file. Check for sequence dictionary compatibility along the way. collectFileReadersAndHeaders(sampleList, samSequenceDictionary); // Create the merged output header from the input headers final VCFHeader outputHeader = new VCFHeader(VCFUtils.smartMergeHeaders(inputHeaders, false), sampleList); // Load entries into the sorting collection final SortingCollection<VariantContext> sortedOutput = sortInputs(inputReaders, outputHeader); // Output to the final file writeSortedOutput(outputHeader, sortedOutput); return 0; }
baseCount + " bases."); final SAMFileHeader toHeader = SamReaderFactory.makeDefault().getFileHeader(SEQUENCE_DICTIONARY); liftOver.validateToSequences(toHeader.getSequenceDictionary()); final IntervalList toIntervals = new IntervalList(toHeader);
baseCount + " bases."); final SAMFileHeader toHeader = SamReaderFactory.makeDefault().getFileHeader(SEQUENCE_DICTIONARY); liftOver.validateToSequences(toHeader.getSequenceDictionary()); final IntervalList toIntervals = new IntervalList(toHeader);
? SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(SEQUENCE_DICTIONARY).getSequenceDictionary() : fileHeader.getSequenceDictionary(); if (CREATE_INDEX && sequenceDictionary == null) {
? SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(SEQUENCE_DICTIONARY).getSequenceDictionary() : fileHeader.getSequenceDictionary(); if (CREATE_INDEX && sequenceDictionary == null) {