/**
 * Builds a human-readable report describing each invalid argument value.
 *
 * @param invalidArgumentValues pairs of (argument definition, offending value); a {@code null}
 *                              value means the argument was supplied without a required value.
 * @return a newline-prefixed description per invalid argument (empty string for an empty collection).
 */
private static String formatArguments( Collection<Pair<ArgumentDefinition,String>> invalidArgumentValues ) {
    StringBuilder sb = new StringBuilder();
    for( Pair<ArgumentDefinition,String> invalidValue: invalidArgumentValues ) {
        // Use the Pair accessors consistently (the original mixed getSecond() with direct
        // .first/.second field access).
        final ArgumentDefinition definition = invalidValue.getFirst();
        final String value = invalidValue.getSecond();
        if( value == null )
            sb.append( String.format("%nArgument '--%s' requires a value but none was provided", definition.fullName) );
        else
            sb.append( String.format("%nArgument '--%s' has value of incorrect format: %s (should match %s)",
                                     definition.fullName, value, definition.validation) );
    }
    return sb.toString();
}
}
/**
 * Prints the final tally: reads ending in indels and reads ending in soft-clips.
 */
@Override
public void onTraversalDone(Pair<Long, Long> result) {
    final Long endingInIndels = result.getFirst();
    final Long endingInSoftClips = result.getSecond();
    out.println(String.format("\tReads ending in indels : %d\n\tReads ending in soft-clips: %d\n",
                              endingInIndels, endingInSoftClips));
}
}
/**
 * Tells whether a non-trivial right-hand non-variant flank was trimmed from the region.
 *
 * @return {@code true} when the right flank exists (i.e. is mapped), {@code false} otherwise.
 */
public boolean hasRightFlankingRegion() {
    return !(nonVariantFlanks.getSecond().isUnmapped());
}
/**
 * Returns the trimmed out right non-variant region, constructing it lazily on first access.
 *
 * @throws IllegalStateException if there is no right flank non-variant region.
 */
public ActiveRegion nonVariantRightFlankRegion() {
    // Guard clause first: an unmapped right flank means there is nothing to return.
    if (nonVariantFlanks.getSecond().isUnmapped())
        throw new IllegalStateException("there is no right flank non-variant trimmed out region");
    // Lazily build and cache the trimmed region.
    if (rightFlankRegion == null)
        rightFlankRegion = originalRegion.trim(nonVariantFlanks.getSecond(), originalRegion.getExtension());
    return rightFlankRegion;
}
/**
 * General interval reduce routine called after all of the traversals are done.
 * Logs each interval's location and delegates its reduce result to the per-interval handler.
 *
 * @param results interval reduce results
 */
public void onTraversalDone(List<Pair<GenomeLoc, ReduceType>> results) {
    for ( final Pair<GenomeLoc, ReduceType> intervalResult : results ) {
        logger.info(String.format("[INTERVAL REDUCE RESULT] at %s ", intervalResult.getFirst()));
        this.onTraversalDone(intervalResult.getSecond());
    }
}
/**
 * Emits one output line per interval: the interval followed by its GC fraction
 * (GC base count divided by interval length).
 */
public void onTraversalDone(List<Pair<GenomeLoc, Long>> results) {
    for ( final Pair<GenomeLoc, Long> intervalCount : results ) {
        final GenomeLoc interval = intervalCount.getFirst();
        final Long gcBases = intervalCount.getSecond();
        final double fractionGC = (double) gcBases / interval.size();
        out.println(interval + "\t" + fractionGC);
    }
}
}
/**
 * Folds every (eval, comp) variant-context pair into the running concordance metrics.
 *
 * @return the same {@code metrics} instance, updated with all pairs.
 */
public ConcordanceMetrics reduce(List<Pair<VariantContext,VariantContext>> evalCompList, ConcordanceMetrics metrics) {
    for ( final Pair<VariantContext,VariantContext> evalAndComp : evalCompList ) {
        final VariantContext eval = evalAndComp.getFirst();
        final VariantContext comp = evalAndComp.getSecond();
        metrics.update(eval, comp);
    }
    return metrics;
}
/**
 * Gets or constructs an event-block through the cache.
 * @param borders the source and sink vertex pair for the requested event block.
 * @return never {@code null}
 */
// Fixed the contract expression: it previously referenced the undefined name "border"
// instead of the parameter "borders", which breaks evaluation of the precondition.
@Requires("borders != null && borders.getFirst() != null && borders.getSecond() != null")
private EventBlock resolveEventBlock(final Pair<MultiDeBruijnVertex,MultiDeBruijnVertex> borders) {
    EventBlock result = eventBlockCache.get(borders);
    if (result == null) {
        // Cache miss: build the block lazily and remember it for subsequent lookups.
        result = new EventBlock(graph, borders.getFirst(), borders.getSecond());
        eventBlockCache.put(borders, result);
    }
    return result;
}
/**
 * Decodes an RU/NR covariate value and delegates to the (repeat unit, repeat count) overload.
 *
 * @param covariateValue the encoded repeat-unit / number-of-repeats covariate string
 */
public static String getBasesFromRUandNR(final String covariateValue) {
    final Pair<String,Integer> ruAndNr = getRUandNRfromCovariate(covariateValue);
    return getBasesFromRUandNR(ruAndNr.getFirst(), ruAndNr.getSecond());
}
public Pair<ExpandingArrayList<Long>, Long> reduce(Datum point, Pair<ExpandingArrayList<Long>, Long> sum) { ExpandingArrayList<Long> counts = sum.getFirst(); updateCounts(counts, point.nRodsAtThisLocation, 1); updateCounts(counts, 0, point.nSkippedBases); Pair<ExpandingArrayList<Long>, Long> r = new Pair<ExpandingArrayList<Long>, Long>(counts, point.nTotalBases + sum.getSecond()); //System.out.printf("Reduce: %s %s => %s%n", point, sum, r); return r; } }
/**
 * Verifies that commonPrefixAndSuffixOfVertices returns the expected shared prefix and suffix
 * of a set of sequence vertices built from the provided strings.
 */
@Test(dataProvider = "PrefixSuffixData")
public void testPrefixSuffixVertices(final List<String> strings, int expectedPrefixLen, int expectedSuffixLen) {
    final List<SeqVertex> v = new ArrayList<>();
    for ( final String s : strings ) {
        v.add(new SeqVertex(s));
    }
    final String expectedPrefix = strings.get(0).substring(0, expectedPrefixLen);
    final String expectedSuffix = strings.get(0).substring(strings.get(0).length() - expectedSuffixLen);
    final Pair<SeqVertex, SeqVertex> result = SharedVertexSequenceSplitter.commonPrefixAndSuffixOfVertices(v);
    // Fixed copy/paste bug: the prefix assertion previously reported "Failed suffix test".
    Assert.assertEquals(result.getFirst().getSequenceString(), expectedPrefix, "Failed prefix test");
    Assert.assertEquals(result.getSecond().getSequenceString(), expectedSuffix, "Failed suffix test");
    Assert.assertEquals(result.getFirst().isEmpty(), expectedPrefix.isEmpty());
    Assert.assertEquals(result.getSecond().isEmpty(), expectedSuffix.isEmpty());
}
/**
 * Checks that mapping a key to its stratifier states and back yields the same key, and that
 * getStratsAndStatesForKey agrees element-by-element with getStatesForKey.
 */
@Test(dataProvider = "StratificationStatesTestProvider")
public void testStratifierByKey(StratificationStatesTestProvider cfg) {
    final StratificationManager<IntegerStratifier, Integer> manager = createManager(cfg);
    for ( int key = 0; key < cfg.nStates; key++ ) {
        final List<Pair<IntegerStratifier, Object>> stratsAndStates = manager.getStratsAndStatesForKey(key);
        final List<Object> strats = manager.getStatesForKey(key);
        // Round-trip: the states list must map back to the key we started from.
        Assert.assertEquals((int)manager.get(strats), key, "Key -> strats -> key failed to return same key");
        for ( int stateIndex = 0; stateIndex < strats.size(); stateIndex++ ) {
            Assert.assertEquals(stratsAndStates.get(stateIndex).getSecond(), strats.get(stateIndex), "Strats and StratsAndStates differ");
        }
    }
}
}
private boolean containsDuplicateRecord( final File vcf, final GenomeLocParser parser ) { final List<Pair<GenomeLoc, HaplotypeCallerGenotypingEngine.Event>> VCs = new ArrayList<>(); try { for( final VariantContext vc : GATKVCFUtils.readVCF(vcf).getSecond() ) { VCs.add(new Pair<>(parser.createGenomeLoc(vc), new HaplotypeCallerGenotypingEngine.Event(vc))); } } catch( IOException e ) { throw new IllegalStateException("Somehow the temporary VCF from the integration test could not be read."); } final Set<Pair<GenomeLoc, HaplotypeCallerGenotypingEngine.Event>> VCsAsSet = new HashSet<>(VCs); return VCsAsSet.size() != VCs.size(); // The se will remove duplicate Events. }
/**
 * Integration test over 1:69512-69634: expects exactly 5 records in the combined gVCF.
 */
@Test
public void testTwoSpansManyBlocksInOne() throws Exception {
    final String cmd = baseTestString(" -L 1:69512-69634");
    final WalkerTestSpec spec = new WalkerTestSpec(cmd, 1, Arrays.asList(""));
    spec.disableShadowBCF();
    final File gVCF = executeTest("testTwoSpansManyBlocksInOne", spec).getFirst().get(0);
    final List<VariantContext> mergedVCs = GATKVCFUtils.readVCF(gVCF).getSecond();
    Assert.assertEquals(mergedVCs.size(), 5);
}
/**
 * Integration test at 1:69511: one sample has an ALT, the other has no data; expects a single
 * record at that position carrying both genotypes.
 */
@Test
public void testOneHasAltAndTwoHasNothing() throws Exception {
    final String cmd = baseTestString(" -L 1:69511");
    final WalkerTestSpec spec = new WalkerTestSpec(cmd, 1, Arrays.asList(""));
    spec.disableShadowBCF();
    final File gVCF = executeTest("testOneHasAltAndTwoHasNothing", spec).getFirst().get(0);
    final List<VariantContext> mergedVCs = GATKVCFUtils.readVCF(gVCF).getSecond();
    Assert.assertEquals(mergedVCs.size(), 1);
    final VariantContext onlyVC = mergedVCs.get(0);
    Assert.assertEquals(onlyVC.getStart(), 69511);
    Assert.assertEquals(onlyVC.getEnd(), 69511);
    Assert.assertEquals(onlyVC.getGenotypes().size(), 2);
}
private void verifySampleRenaming( final File outputVCF, final String newSampleName ) throws IOException { final Pair<VCFHeader, VCIterable<LineIterator>> headerAndVCIter = VCIterable.readAllVCs(outputVCF, new VCFCodec()); final VCFHeader header = headerAndVCIter.getFirst(); final VCIterable<LineIterator> iter = headerAndVCIter.getSecond(); // Verify that sample renaming occurred at both the header and record levels (checking only the first 10 records): Assert.assertEquals(header.getGenotypeSamples().size(), 1, "Wrong number of samples in output vcf header"); Assert.assertEquals(header.getGenotypeSamples().get(0), newSampleName, "Wrong sample name in output vcf header"); int recordCount = 0; while ( iter.hasNext() && recordCount < 10 ) { final VariantContext vcfRecord = iter.next(); Assert.assertEquals(vcfRecord.getSampleNames().size(), 1, "Wrong number of samples in output vcf record"); Assert.assertEquals(vcfRecord.getSampleNames().iterator().next(), newSampleName, "Wrong sample name in output vcf record"); recordCount++; } }
/**
 * Integration test at 1:69635: one sample has an ALT, the other a reference block; expects a
 * single triallelic record at that position carrying both genotypes.
 */
@Test
public void testOneHasAltAndTwoHasRefBlock() throws Exception {
    final String cmd = baseTestString(" -L 1:69635");
    final WalkerTestSpec spec = new WalkerTestSpec(cmd, 1, Arrays.asList(""));
    spec.disableShadowBCF();
    final File gVCF = executeTest("testOneHasAltAndTwoHasRefBlock", spec).getFirst().get(0);
    final List<VariantContext> mergedVCs = GATKVCFUtils.readVCF(gVCF).getSecond();
    Assert.assertEquals(mergedVCs.size(), 1);
    final VariantContext onlyVC = mergedVCs.get(0);
    Assert.assertEquals(onlyVC.getStart(), 69635);
    Assert.assertEquals(onlyVC.getEnd(), 69635);
    Assert.assertEquals(onlyVC.getNAlleles(), 3);
    Assert.assertEquals(onlyVC.getGenotypes().size(), 2);
}
/**
 * Asserts that a VCF file and a BCF file encode the same data: equal headers first, then
 * equal streams of variant contexts.
 */
public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException {
    final Pair<VCFHeader, VCIterable<LineIterator>> vcfData = VCIterable.readAllVCs(vcfFile, new VCFCodec());
    final Pair<VCFHeader, VCIterable<PositionalBufferedStream>> bcfData = VCIterable.readAllVCs(bcfFile, new BCF2Codec());
    final VCFHeader vcfHeader = vcfData.getFirst();
    final VCFHeader bcfHeader = bcfData.getFirst();
    assertVCFHeadersAreEqual(bcfHeader, vcfHeader);
    assertVariantContextStreamsAreEqual(bcfData.getSecond(), vcfData.getSecond());
}