try { for ( String fileName : new XReadLines(new File(inputFileName), true, LIST_FILE_COMMENT_START) ) { unpackedReads.add(new SAMReaderID(fileName,parser.getTags(inputFileName))); unpackedReads.add(new SAMReaderID(inputFileName,inputFileNameTags)); unpackedReads.add(new SAMReaderID(inputFileName,inputFileNameTags)); unpackedReads.add(new SAMReaderID(inputFileName,inputFileNameTags));
/**
 * The engine must reject an input list in which more than one BAM file appears
 * multiple times: expects a UserException from checkForDuplicateSamFiles().
 */
@Test(expectedExceptions=UserException.class)
public void testDuplicateSamFileHandlingMultipleDuplicates() throws Exception {
    final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
    final Collection<SAMReaderID> bamReaders = new ArrayList<SAMReaderID>();

    // Register each of the two test BAMs twice, preserving the original ordering:
    // BAM, NORG, BAM, NORG.
    final String[] bamFileNames = { "exampleBAM.bam", "exampleNORG.bam", "exampleBAM.bam", "exampleNORG.bam" };
    for ( final String bamFileName : bamFileNames ) {
        bamReaders.add(new SAMReaderID(new File(publicTestDir + bamFileName), new Tags()));
    }

    engine.setSAMFileIDs(bamReaders);
    engine.checkForDuplicateSamFiles();
}
@Test public void testSAMReaderIDHashingAndEquality() { // Test to make sure that two SAMReaderIDs that point at the same file via an absolute vs. relative // path are equal according to equals() and have the same hash code final File relativePathToBAMFile = new File(publicTestDir + "exampleBAM.bam"); final File absolutePathToBAMFile = new File(relativePathToBAMFile.getAbsolutePath()); final SAMReaderID relativePathSAMReaderID = new SAMReaderID(relativePathToBAMFile, new Tags()); final SAMReaderID absolutePathSAMReaderID = new SAMReaderID(absolutePathToBAMFile, new Tags()); Assert.assertEquals(relativePathSAMReaderID, absolutePathSAMReaderID, "Absolute-path and relative-path SAMReaderIDs not equal according to equals()"); Assert.assertEquals(relativePathSAMReaderID.hashCode(), absolutePathSAMReaderID.hashCode(), "Absolute-path and relative-path SAMReaderIDs have different hash codes"); } }
/**
 * Registering the identical BAM file twice must be rejected:
 * expects a UserException from checkForDuplicateSamFiles().
 */
@Test(expectedExceptions=UserException.class)
public void testDuplicateSamFileHandlingSingleDuplicate() throws Exception {
    final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
    final Collection<SAMReaderID> bamReaders = new ArrayList<SAMReaderID>();

    // Same path added twice — the duplicate check should catch this.
    bamReaders.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
    bamReaders.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));

    engine.setSAMFileIDs(bamReaders);
    engine.checkForDuplicateSamFiles();
}
/**
 * Lines beginning with '#' in a BAM list file (including '#' preceded by
 * whitespace, and a bare '#') are comments: only the one uncommented BAM
 * should survive unpacking.
 */
@Test
public void testCommentSupportInBAMListFiles() throws Exception {
    final File bamListFile = createTempListFile("testCommentSupport",
            "#",
            publicTestDir + "exampleBAM.bam",
            "#" + publicTestDir + "foo.bam",
            " # " + publicTestDir + "bar.bam");

    // Only the non-comment line should be returned by the unpacking code.
    final List<SAMReaderID> expectedReaders = new ArrayList<SAMReaderID>();
    expectedReaders.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));

    performBAMListFileUnpackingTest(bamListFile, expectedReaders);
}
/**
 * Empty and whitespace-only lines in a BAM list file must be ignored:
 * only the real BAM entry should survive unpacking.
 */
@Test
public void testIgnoreBlankLinesInBAMListFiles() throws Exception {
    final File bamListFile = createTempListFile("testIgnoreBlankLines",
            "",
            publicTestDir + "exampleBAM.bam",
            "         ");

    // A single reader is expected — the blank lines contribute nothing.
    final List<SAMReaderID> expectedReaders = new ArrayList<SAMReaderID>();
    expectedReaders.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));

    performBAMListFileUnpackingTest(bamListFile, expectedReaders);
}
/**
 * The duplicate-BAM check must see through path spelling: a relative path
 * and its absolute form name the same file, so registering both must raise
 * a UserException.
 */
@Test(expectedExceptions=UserException.class)
public void testDuplicateSamFileHandlingAbsoluteVsRelativePath() {
    final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();

    final File relativeBAM = new File(publicTestDir + "exampleBAM.bam");
    final File absoluteBAM = new File(relativeBAM.getAbsolutePath());

    final Collection<SAMReaderID> bamReaders = new ArrayList<SAMReaderID>();
    bamReaders.add(new SAMReaderID(relativeBAM, new Tags()));
    bamReaders.add(new SAMReaderID(absoluteBAM, new Tags()));

    engine.setSAMFileIDs(bamReaders);
    engine.checkForDuplicateSamFiles();
}
// Verifies that the engine refuses legacy ReducedReads data: constructing a
// SAMDataSource over old.reduced.bam is expected to throw a UserException.
// NOTE(review): the SAMDataSource constructor arguments are positional — this
// overload takes fewer arguments than the one used in
// testFailOnReducedReadsRemovingProgramRecords; presumably a shorter
// convenience overload — confirm against SAMDataSource's declaration.
@Test(expectedExceptions = UserException.class) public void testFailOnReducedReads() { readers.add(new SAMReaderID(new File(privateTestDir + "old.reduced.bam"), new Tags())); SAMDataSource data = new SAMDataSource( referenceFile, readers, new ThreadAllocation(), null, genomeLocParser, false, ValidationStringency.SILENT, null, null, new ValidationExclusion(), new ArrayList<ReadFilter>(), false); }
// Same ReducedReads rejection as testFailOnReducedReads, but exercising the
// longer SAMDataSource overload (read transformers, base-quality override of
// (byte) -1, program-record removal enabled, IntervalMergingRule.ALL).
// Expected to throw a UserException before the data source is usable.
// NOTE(review): arguments are positional; the flag meanings are taken on faith
// from the overload's declaration — verify against SAMDataSource if edited.
@Test(expectedExceptions = UserException.class) public void testFailOnReducedReadsRemovingProgramRecords() { readers.add(new SAMReaderID(new File(privateTestDir + "old.reduced.bam"), new Tags())); SAMDataSource data = new SAMDataSource( referenceFile, readers, new ThreadAllocation(), null, genomeLocParser, false, ValidationStringency.SILENT, null, null, new ValidationExclusion(), new ArrayList<ReadFilter>(), Collections.<ReadTransformer>emptyList(), false, (byte) -1, true, false, null, IntervalMergingRule.ALL); } }
public void timeWalkerPerformance(final int reps) { for(int i = 0; i < reps; i++) { GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); // Establish the argument collection GATKArgumentCollection argCollection = new GATKArgumentCollection(); argCollection.referenceFile = new File(referenceFile); argCollection.samFiles = Collections.singletonList(inputFile.getAbsolutePath()); engine.setArguments(argCollection); // Bugs in the engine mean that this has to be set twice. engine.setSAMFileIDs(Collections.singletonList(new SAMReaderID(inputFile,new Tags()))); engine.setFilters(Collections.<ReadFilter>singletonList(new UnmappedReadFilter())); engine.setReferenceMetaDataFiles(Collections.<RMDTriplet>emptyList()); // Create the walker engine.setWalker(walkerType.create()); engine.execute(); } }
// End-to-end check of TraverseReadsNano over every read in the test BAM: after
// traversing all read shards, the engine's cumulative metrics must report
// exactly contigs.size() * numReadsPerContig reads seen, and the same number
// of traversal iterations (i.e. no read skipped or double-counted).
// NOTE(review): the long SAMDataSource constructor is positional; (byte)30 is
// presumably a base-quality threshold and the boolean flags' meanings come
// from the overload's declaration — confirm against SAMDataSource before
// reordering anything here.
@Test public void testCountsFromReadTraversal() { final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); engine.setGenomeLocParser(genomeLocParser); final Collection<SAMReaderID> samFiles = new ArrayList<>(); final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); samFiles.add(readerID); final SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser, false, ValidationStringency.STRICT, null, null, new ValidationExclusion(), new ArrayList<ReadFilter>(), new ArrayList<ReadTransformer>(), false, (byte)30, false, true, null, IntervalMergingRule.ALL); engine.setReadsDataSource(dataSource); final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); final DummyReadWalker walker = new DummyReadWalker(); traverseReadsNano.initialize(engine, walker, null); for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList<ReferenceOrderedDataSource>()); traverseReadsNano.traverse(walker, dataProvider, 0); dataProvider.close(); } Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); }
// Register the chromosome-6 selected NA12878 BAM (no tags) as a traversal input.
// NOTE(review): the path is built as validationDataLocation + "/..."; other call
// sites concatenate without a leading slash — presumably validationDataLocation
// does not end in a separator here, or File tolerates the doubled slash; confirm.
readers.add(new SAMReaderID(new File(validationDataLocation+"/NA12878.chrom6.SLX.SRP000032.2009_06.selected.bam"),new Tags()));
// Register the known-good b37-aligned BAM (path supplied by the b37GoodBAM
// constant) with an empty tag set.
readers.add(new SAMReaderID(new File(b37GoodBAM),new Tags()));
// Verifies per-filter accounting during read traversal: with EveryTenthReadFilter
// installed, the cumulative metrics' counts-by-filter map (keyed by the filter's
// simple class name) must show exactly one tenth of all reads filtered out.
// NOTE(review): the long SAMDataSource constructor is positional; (byte)30 is
// presumably a base-quality threshold and the boolean flags' meanings come from
// the overload's declaration — confirm before reordering anything here.
@Test public void testFilteredCounts() { final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); engine.setGenomeLocParser(genomeLocParser); final Collection<SAMReaderID> samFiles = new ArrayList<>(); final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); samFiles.add(readerID); final List<ReadFilter> filters = new ArrayList<>(); filters.add(new EveryTenthReadFilter()); final SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser, false, ValidationStringency.STRICT, null, null, new ValidationExclusion(), filters, new ArrayList<ReadTransformer>(), false, (byte)30, false, true, null, IntervalMergingRule.ALL); engine.setReadsDataSource(dataSource); final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); final DummyReadWalker walker = new DummyReadWalker(); traverseReadsNano.initialize(engine, walker, null); for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList<ReferenceOrderedDataSource>()); traverseReadsNano.traverse(walker, dataProvider, 0); dataProvider.close(); } Assert.assertEquals((long)engine.getCumulativeMetrics().getCountsByFilter().get(EveryTenthReadFilter.class.getSimpleName()), contigs.size() * numReadsPerContig / 10); }
// Test-fixture helper: builds a fresh engine and SAMDataSource over bamFile,
// initializes the given active-region traversal, shards the requested intervals
// with an ActiveRegionShardBalancer, and expands each shard's windows (via
// WindowMaker) into one LocusShardDataProvider per locus window. Returns the
// flattened provider list for the caller to traverse.
// NOTE(review): the SAMDataSource constructor arguments are positional; see the
// overload's declaration before changing any of the flags or the (byte)30 value.
private List<LocusShardDataProvider> createDataProviders(TraverseActiveRegions traverseActiveRegions, final Walker walker, List<GenomeLoc> intervals, File bamFile) { GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); engine.setGenomeLocParser(genomeLocParser); Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>(); SAMReaderID readerID = new SAMReaderID(bamFile, new Tags()); samFiles.add(readerID); SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser, false, ValidationStringency.STRICT, null, null, new ValidationExclusion(), new ArrayList<ReadFilter>(), new ArrayList<ReadTransformer>(), false, (byte)30, false, true, null, IntervalMergingRule.ALL); engine.setReadsDataSource(dataSource); final Set<String> samples = ReadUtils.getSAMFileSamples(dataSource.getHeader()); traverseActiveRegions.initialize(engine, walker); List<LocusShardDataProvider> providers = new ArrayList<LocusShardDataProvider>(); for (Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer())) { for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples)) { providers.add(new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>())); } } return providers; }
// Wrap the shared test BAM (testBAM — presumably a File created by the test
// fixture; confirm against the enclosing class) in an untagged SAMReaderID and
// register it as an input.
final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); samFiles.add(readerID);
/**
 * Builds the artificial test BAM used by this suite: creates a synthetic SAM
 * header with {@code numContigs} contigs, a "foo" read group for sample
 * "testSample", streams artificial reads into a temporary indexed BAM file,
 * and publishes it via the {@code testBAM} field. Both possible index file
 * names are scheduled for deletion on JVM exit.
 */
private void createTestBAM() {
    header = ArtificialSAMUtils.createArtificialSamHeader(numContigs, 1, 100000);

    SAMReadGroupRecord readGroup = new SAMReadGroupRecord("foo");
    readGroup.setSample("testSample");
    header.addReadGroup(readGroup);

    ArtificialSingleSampleReadStream artificialReads = new ArtificialSingleSampleReadStream(header, "foo", numContigs, numStacksPerContig, stackSize, stackSize, 1, 100, 50, 150, numUnmappedReads);

    final File testBAMFile = createTempFile("SAMDataSourceFillShardBoundaryTest", ".bam");
    SAMFileWriter bamWriter = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(header, true, testBAMFile);
    try {
        for ( SAMRecord read : artificialReads ) {
            bamWriter.addAlignment(read);
        }
    }
    finally {
        // Close even if a write throws, so the temp BAM handle is never leaked
        // and the index is flushed on the success path.
        bamWriter.close();
    }

    testBAM = new SAMReaderID(testBAMFile, new Tags());

    // The index may be named either foo.bai or foo.bam.bai depending on the
    // writer factory; schedule both candidates for deletion.
    new File(testBAM.getSamFilePath().replace(".bam", ".bai")).deleteOnExit();
    new File(testBAM.getSamFilePath() + ".bai").deleteOnExit();
}
}
// Wrap the shared test BAM (testBAM — presumably a File created by the test
// fixture; confirm against the enclosing class) in an untagged SAMReaderID and
// register it as an input.
final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); samFiles.add(readerID);