/** * Adds a Collection of intervals to the list of intervals. */ public void addall(final Collection<Interval> intervals) { //use this instead of addAll so that the contig checking happens. for (Interval interval : intervals) { add(interval); } }
/** * Adds a Collection of intervals to the list of intervals. */ public void addall(final Collection<Interval> intervals) { //use this instead of addAll so that the contig checking happens. for (Interval interval : intervals) { add(interval); } }
/**
 * Creates an IntervalList from the given sequence name.
 *
 * @param header       header to use to create IntervalList
 * @param sequenceName name of sequence in header
 * @return a new IntervalList with the given header that contains a single
 *         interval spanning the entire named sequence
 * @throws IllegalArgumentException if the header has no sequence with the given name
 */
public static IntervalList fromName(final SAMFileHeader header, final String sequenceName) {
    // Fail with a clear message rather than an opaque NullPointerException
    // when the sequence is absent from the header's dictionary.
    if (header.getSequence(sequenceName) == null) {
        throw new IllegalArgumentException("Sequence '" + sequenceName + "' not found in the provided header.");
    }
    final IntervalList ref = new IntervalList(header);
    ref.add(new Interval(sequenceName, 1, header.getSequence(sequenceName).getSequenceLength()));
    return ref;
}
/**
 * Creates an IntervalList from the given sequence name.
 *
 * @param header       header to use to create IntervalList
 * @param sequenceName name of sequence in header
 * @return a new IntervalList with the given header that contains a single
 *         interval spanning the entire named sequence
 * @throws IllegalArgumentException if the header has no sequence with the given name
 */
public static IntervalList fromName(final SAMFileHeader header, final String sequenceName) {
    // Fail with a clear message rather than an opaque NullPointerException
    // when the sequence is absent from the header's dictionary.
    if (header.getSequence(sequenceName) == null) {
        throw new IllegalArgumentException("Sequence '" + sequenceName + "' not found in the provided header.");
    }
    final IntervalList ref = new IntervalList(header);
    ref.add(new Interval(sequenceName, 1, header.getSequence(sequenceName).getSequenceLength()));
    return ref;
}
/** Adding an interval on a contig unknown to the header must be rejected. */
@Test(expectedExceptions = IllegalArgumentException.class)
public void TestFailAdd() {
    final IntervalList list = new IntervalList(this.fileHeader);
    // "blarg" is not a sequence in fileHeader, so add() should throw.
    list.add(new Interval("blarg", 1, 1));
}
/**
 * Adds a HaplotypeBlock to the map and updates all the relevant caches/indices.
 *
 * @param haplotypeBlock the block whose SNPs should be registered
 * @throws IllegalStateException if any SNP in the block has already been registered
 */
public void addHaplotype(final HaplotypeBlock haplotypeBlock) {
    // Validate every SNP up front so a duplicate does not leave the caches
    // partially updated (the original checked mid-mutation).
    for (final Snp snp : haplotypeBlock.getSnps()) {
        if (haplotypesBySnp.containsKey(snp)) {
            throw new IllegalStateException("Same snp name cannot be used twice: " + snp);
        }
    }
    this.haplotypeBlocks.add(haplotypeBlock);
    for (final Snp snp : haplotypeBlock.getSnps()) {
        this.haplotypesBySnp.put(snp, haplotypeBlock);
        this.haplotypesBySnpName.put(snp.getName(), haplotypeBlock);
        this.haplotypesBySnpLocus.put(toKey(snp.getChrom(), snp.getPos()), haplotypeBlock);
        this.snpsByPosition.put(toKey(snp.getChrom(), snp.getPos()), snp);
        this.intervals.add(new Interval(snp.getChrom(), snp.getPos(), snp.getPos(), false, snp.getName()));
    }
}
/** uniqued() must merge the two overlapping intervals whether or not names are concatenated. */
@Test
public void uniqueIntervalsWithoutNames() {
    final IntervalList list = new IntervalList(this.fileHeader);
    list.add(new Interval("1", 100, 200));
    list.add(new Interval("1", 500, 600));
    list.add(new Interval("1", 550, 700)); // overlaps the previous interval
    for (final boolean concatenateNames : new boolean[]{true, false}) {
        Assert.assertEquals(list.uniqued(concatenateNames).size(), 2);
    }
}
/**
 * Adds a HaplotypeBlock to the map and updates all the relevant caches/indices.
 *
 * @param haplotypeBlock the block whose SNPs should be registered
 * @throws IllegalStateException if any SNP in the block has already been registered
 */
public void addHaplotype(final HaplotypeBlock haplotypeBlock) {
    // Validate every SNP up front so a duplicate does not leave the caches
    // partially updated (the original checked mid-mutation).
    for (final Snp snp : haplotypeBlock.getSnps()) {
        if (haplotypesBySnp.containsKey(snp)) {
            throw new IllegalStateException("Same snp name cannot be used twice: " + snp);
        }
    }
    this.haplotypeBlocks.add(haplotypeBlock);
    for (final Snp snp : haplotypeBlock.getSnps()) {
        this.haplotypesBySnp.put(snp, haplotypeBlock);
        this.haplotypesBySnpName.put(snp.getName(), haplotypeBlock);
        this.haplotypesBySnpLocus.put(toKey(snp.getChrom(), snp.getPos()), haplotypeBlock);
        this.snpsByPosition.put(toKey(snp.getChrom(), snp.getPos()), snp);
        this.intervals.add(new Interval(snp.getChrom(), snp.getPos(), snp.getPos(), false, snp.getName()));
    }
}
/** Provides the expected result of subtracting (list1, list2) from list3. */
@DataProvider(name = "subtractData")
public Object[][] subtractData() {
    final IntervalList expected = new IntervalList(fileHeader);
    expected.add(new Interval("1", 201, 201));
    expected.add(new Interval("2", 401, 600));
    expected.add(new Interval("3", 50, 470));
    return new Object[][]{
            {CollectionUtil.makeList(list3), CollectionUtil.makeList(list1, list2), expected},
    };
}
/** A single-base interval directly on a SNP should yield exactly that variant. */
@Test
public void testSimpleOverlap() {
    final int position = 167166899;
    final IntervalList intervalList = new IntervalList(header);
    intervalList.add(new Interval("2", position, position));
    final VCFFileReader reader = getReader(CEU_TRIOS_SNPS_VCF);
    final Iterator<VariantContext> iterator =
            new ByIntervalListVariantContextIterator(reader, intervalList);
    Assert.assertTrue(iterator.hasNext());
    Assert.assertEquals(iterator.next().getStart(), position);
    Assert.assertFalse(iterator.hasNext());
    reader.close();
}
@Test public void testSimpleEnclosing() { final IntervalList intervalList = new IntervalList(header); intervalList.add(new Interval("12", 68921962, 68921962)); // deletion spans this final VCFFileReader reader = getReader(CEU_TRIOS_INDELS_VCF); final Iterator<VariantContext> iterator = new ByIntervalListVariantContextIterator(reader, intervalList); Assert.assertTrue(iterator.hasNext()); final VariantContext ctx = iterator.next(); Assert.assertEquals(ctx.getStart(), 68921960); Assert.assertEquals(ctx.getEnd(), 68921966); Assert.assertFalse(iterator.hasNext()); reader.close(); }
@Test public void testVariantOverlappingMultipleIntervalsIsReturnedOnlyOnce() { final IntervalList intervalList = new IntervalList(header); intervalList.add(new Interval("12", 68921962, 68921962)); // deletion spans this intervalList.add(new Interval("12", 68921964, 68921964)); // deletion spans this final VCFFileReader reader = getReader(CEU_TRIOS_INDELS_VCF); final Iterator<VariantContext> iterator = new ByIntervalListVariantContextIterator(reader, intervalList); Assert.assertTrue(iterator.hasNext()); final VariantContext ctx = iterator.next(); Assert.assertEquals(ctx.getStart(), 68921960); Assert.assertEquals(ctx.getEnd(), 68921966); Assert.assertFalse(iterator.hasNext()); reader.close(); } }
@DataProvider(name = "overlapsData") public Object[][] overlapData() { final IntervalList three_overlaps_one_and_two = new IntervalList(fileHeader); three_overlaps_one_and_two.add(new Interval("1", 25, 400)); three_overlaps_one_and_two.add(new Interval("2", 200, 600)); //three_overlaps_one_and_two.add(new Interval("3", 50, 470)); return new Object[][]{ new Object[]{CollectionUtil.makeList(list3), CollectionUtil.makeList(list1, list2), three_overlaps_one_and_two}, }; }
/** An interval on a contig with no variants should produce an empty iteration. */
@Test
public void testNoOverlapDifferentContig() {
    final IntervalList intervalList = new IntervalList(header);
    // Same coordinate as the SNP, but on contig "3" where no variant exists.
    intervalList.add(new Interval("3", 167166899, 167166899));
    final VCFFileReader reader = getReader(CEU_TRIOS_SNPS_VCF);
    final Iterator<VariantContext> iterator =
            new ByIntervalListVariantContextIterator(reader, intervalList);
    Assert.assertFalse(iterator.hasNext());
    reader.close();
}
/** Iterating an empty VCF should yield nothing regardless of the interval queried. */
@Test
public void testNoVariants() {
    final String firstContig = this.dict.getSequence(0).getSequenceName();
    final IntervalList intervalList = new IntervalList(header);
    intervalList.add(new Interval(firstContig, 1, 100));
    final VCFFileReader reader = getReader(EMPTY_VCF);
    final Iterator<VariantContext> iterator =
            new ByIntervalListVariantContextIterator(reader, intervalList);
    Assert.assertFalse(iterator.hasNext());
    reader.close();
}
/** Gets the intervals over which we will calculate metrics. */
protected IntervalList getIntervalsToExamine() {
    if (INTERVALS != null) {
        // An explicit interval file was supplied; use it verbatim.
        IOUtil.assertFileIsReadable(INTERVALS);
        return IntervalList.fromFile(INTERVALS);
    }
    // No file supplied: cover every sequence in the header end-to-end.
    final IntervalList intervals = new IntervalList(this.header);
    for (final SAMSequenceRecord rec : this.header.getSequenceDictionary().getSequences()) {
        intervals.add(new Interval(rec.getSequenceName(), 1, rec.getSequenceLength()));
    }
    return intervals;
}
/**
 * Writing an invalid interval to a Picard-style interval file and loading it
 * back should raise a UserException.
 */
@Test(dataProvider = "invalidIntervalTestData", expectedExceptions = UserException.class, enabled = true)
public void testInvalidPicardIntervalHandling(GenomeLocParser genomeLocParser,
                                              String contig,
                                              int intervalStart,
                                              int intervalEnd) throws Exception {
    final SAMFileHeader picardFileHeader = new SAMFileHeader();
    picardFileHeader.addSequence(genomeLocParser.getContigInfo(contig));

    final IntervalList picardIntervals = new IntervalList(picardFileHeader);
    picardIntervals.add(new Interval(contig, intervalStart, intervalEnd, true, "dummyname"));

    // Round-trip the (invalid) interval through a temp file.
    final File picardIntervalFile = createTempFile("testInvalidPicardIntervalHandling", ".intervals");
    picardIntervals.write(picardIntervalFile);

    final List<IntervalBinding<Feature>> intervalArgs = new ArrayList<IntervalBinding<Feature>>(1);
    intervalArgs.add(new IntervalBinding<Feature>(picardIntervalFile.getAbsolutePath()));

    // Loading should detect the invalid interval and throw.
    IntervalUtils.loadIntervals(intervalArgs, IntervalSetRule.UNION, IntervalMergingRule.ALL, 0, genomeLocParser);
}
/**
 * Builds a single-contig IntervalList for testing; a non-positive start
 * yields an empty list.
 */
private IntervalList buildIntervalList(final int start, final int end) {
    final SAMFileHeader header = new SAMFileHeader();
    header.addSequence(new SAMSequenceRecord("CONTIG", 100000000));
    final IntervalList intervals = new IntervalList(header);
    if (start > 0) {
        intervals.add(new Interval("CONTIG", start, end));
    }
    return intervals;
}
/** Inverting twice must round-trip back to the (uniqued) original list. */
@Test(dataProvider = "invertData")
public void testInvertSquared(final IntervalList list,
                              @SuppressWarnings("UnusedParameters") final IntervalList ignored) throws Exception {
    final IntervalList doubleInverted = IntervalList.invert(IntervalList.invert(list));

    // Copy the input so we can compare against its uniqued form.
    final IntervalList copy = new IntervalList(list.getHeader());
    list.forEach(copy::add);

    Assert.assertEquals(
            CollectionUtil.makeCollection(doubleInverted.iterator()),
            CollectionUtil.makeCollection(copy.uniqued().iterator()));
}
/**
 * Composes an IntervalList on the given chromosome from (start, end) pairs,
 * inheriting strand and name from the source interval containing each start.
 */
private static IntervalList composeIntervalList(final IntervalList source,
                                                final String chromosome,
                                                final int... segmentsByPair) {
    final IntervalList result = new IntervalList(source.getHeader());
    for (int pair = 0; pair < segmentsByPair.length; pair += 2) {
        final int start = segmentsByPair[pair];
        final int end = segmentsByPair[pair + 1];
        final Interval parent = lookupIntervalContainingLocus(source, chromosome, start);
        result.add(new Interval(chromosome, start, end, parent.isNegativeStrand(), parent.getName()));
    }
    return result;
}