/**
 * Supplies the initial reduce accumulator: zero for both running counts.
 *
 * @return a (0L, 0L) pair used as the reduce seed
 */
public Pair<Long, Long> reduceInit() {
    final Pair<Long, Long> zero = new Pair<Long, Long>(0L, 0L);
    return zero;
}
/**
 * Labels every sub-finder with the same edge cost: the log10 of a uniform choice
 * among all sub-finders, i.e. {@code log10(1 / subFinderCount)}, formatted to two decimals.
 *
 * @return (sub-finder, cost-label) pairs, preserving sub-finder iteration order
 */
@Override
public Set<Pair<? extends KBestSubHaplotypeFinder, String>> subFinderLabels() {
    final int count = subFinders.size();
    final String costLabel = String.format("%.2f", -Math.log10((double) count));
    final Set<Pair<? extends KBestSubHaplotypeFinder, String>> labels = new LinkedHashSet<>(count);
    for (final KBestSubHaplotypeFinder finder : subFinders) {
        labels.add(new Pair<>(finder, costLabel));
    }
    return labels;
}
/**
 * Collects, for every stratifier, the (stratifier, value) pairing recorded under the given key.
 *
 * @param key index into the per-key stratifier values table
 * @return one pair per stratifier, in stratifier order
 */
public List<Pair<K, Object>> getStratsAndStatesForKey(final int key) {
    final int stratifierCount = stratifiers.size();
    final List<Pair<K, Object>> pairs = new ArrayList<>(stratifierCount);
    for (int idx = 0; idx < stratifierCount; idx++) {
        pairs.add(new Pair<K, Object>(stratifiers.get(idx), stratifierValuesByKey.get(key).get(idx)));
    }
    return pairs;
}
/**
 * Creates a result indicating that there was no trimming to be done: the full target
 * region is callable, and both flanks are unmapped.
 *
 * @param emitReferenceConfidence whether reference confidence is being emitted
 * @param targetRegion            the untrimmed active region
 * @param padding                 padding used around the region
 * @param usableExtension         usable extension size
 * @param events                  variant events inside the region
 * @return a no-op trimming result whose callable region is the target region itself
 */
protected static Result noTrimming(final boolean emitReferenceConfidence,
                                   final ActiveRegion targetRegion,
                                   final int padding,
                                   final int usableExtension,
                                   final List<VariantContext> events) {
    final GenomeLoc regionLoc = targetRegion.getLocation();
    final Result result = new Result(emitReferenceConfidence, false, targetRegion, padding,
            usableExtension, events,
            new Pair<>(GenomeLoc.UNMAPPED, GenomeLoc.UNMAPPED),
            regionLoc, regionLoc, regionLoc, regionLoc);
    result.callableRegion = targetRegion;
    return result;
}
/**
 * Extracts the value of a command-line argument introduced by the given prefix.
 * Handles both the attached form ({@code -prefixVALUE}) and the separated form
 * ({@code -prefix VALUE}). Exits the process with status 1 when the tag appears
 * in separated form but no value follows it.
 *
 * @param prefix the argument tag to look for
 * @param argv   the full command-line argument array
 * @param i      index of the argument to inspect
 * @return pair of (extracted value, or null when argv[i] does not start with prefix;
 *         index of the next argument to inspect)
 */
static Pair<String, Integer> getArg(String prefix, String[] argv, int i) {
    String arg = null;
    if (argv[i].startsWith(prefix)) {
        arg = argv[i].substring(prefix.length());
        if (arg.length() == 0) {
            // separated form: the value lives in the next argument slot
            i++;
            if (i < argv.length) {
                arg = argv[i];
            } else {
                System.err.println("No value found after " + prefix + " argument tag");
                System.exit(1);
            }
        }
        i++;
    }
    return new Pair<String, Integer>(arg, i);
}
/**
 * Labels each edge sub-finder with its own edge score, formatted to four decimal
 * places and passed through {@code simplifyZeros}.
 *
 * @return (sub-finder, score-label) pairs, preserving sub-finder iteration order
 */
@Override
public Set<Pair<? extends KBestSubHaplotypeFinder, String>> subFinderLabels() {
    final Set<Pair<? extends KBestSubHaplotypeFinder, String>> labels = new LinkedHashSet<>(subFinders.size());
    for (final EdgeSubHaplotypeFinder finder : subFinders) {
        final String scoreLabel = simplifyZeros(String.format("%.4f", finder.edgeScore));
        labels.add(new Pair<>(finder, scoreLabel));
    }
    return labels;
}
/**
 * Creates a result indicating that no variation was found: the whole target region
 * becomes the left flank and every other location is unmapped.
 *
 * @param emitReferenceConfidence whether reference confidence is being emitted
 * @param targetRegion            the region in which no variation was found
 * @param padding                 padding used around the region
 * @param usableExtension         usable extension size
 * @return a variation-free result whose left flank is the target region itself
 */
protected static Result noVariation(final boolean emitReferenceConfidence,
                                    final ActiveRegion targetRegion,
                                    final int padding,
                                    final int usableExtension) {
    final Result result = new Result(emitReferenceConfidence, false, targetRegion, padding,
            usableExtension, Collections.<VariantContext>emptyList(),
            new Pair<>(targetRegion.getLocation(), GenomeLoc.UNMAPPED),
            GenomeLoc.UNMAPPED, GenomeLoc.UNMAPPED, GenomeLoc.UNMAPPED, GenomeLoc.UNMAPPED);
    result.leftFlankRegion = targetRegion;
    return result;
}
}
/**
 * Pairs up vertices from two sequence-keyed maps that share the same key, recording
 * each side's unmatched vertices into the supplied collections.
 *
 * @param left           sequence -> vertex map for the left graph
 * @param right          sequence -> vertex map for the right graph
 * @param unmatchedLeft  receives left vertices whose key is absent from {@code right}
 * @param unmatchedRight receives right vertices whose key is absent from {@code left}
 * @return (left vertex, right vertex) pairs that share a sequence key
 */
private static List<Pair<MultiDeBruijnVertex, MultiDeBruijnVertex>> equals$matchVertexBySequenceMaps(
        final Map<String, MultiDeBruijnVertex> left,
        final Map<String, MultiDeBruijnVertex> right,
        final Collection<MultiDeBruijnVertex> unmatchedLeft,
        final Collection<MultiDeBruijnVertex> unmatchedRight) {
    final List<Pair<MultiDeBruijnVertex, MultiDeBruijnVertex>> matches = new LinkedList<>();
    for (final Map.Entry<String, MultiDeBruijnVertex> entry : left.entrySet()) {
        final String key = entry.getKey();
        if (right.containsKey(key)) {
            matches.add(new Pair<>(entry.getValue(), right.get(key)));
        } else {
            unmatchedLeft.add(entry.getValue());
        }
    }
    for (final Map.Entry<String, MultiDeBruijnVertex> entry : right.entrySet()) {
        if (!left.containsKey(entry.getKey())) {
            unmatchedRight.add(entry.getValue());
        }
    }
    return matches;
}
/**
 * Registers a feature in both lookup structures: by its name and by its runtime class.
 *
 * @param data the feature to register
 */
public void addEntry(GATKFeature data) {
    final String featureName = data.getName();
    nameMap.put(featureName, data);
    classMap.add(new Pair<Class, GATKFeature>(data.getClass(), data));
}
/**
 * Finds a split point inside [startIndex, stopIndex): consumes at least {@code minLocs}
 * entries, then keeps growing the first half until it reaches half the total size while
 * reserving at least {@code maxLocs} entries for the second half.
 *
 * @return pair of (split index, accumulated size of the first half)
 */
private static Pair<Integer, Long> getFixedSplit(List<GenomeLoc> locs, long locsSize,
                                                 int startIndex, int stopIndex,
                                                 int minLocs, int maxLocs) {
    int index = startIndex;
    long accumulated = 0;
    // the first half always takes at least minLocs entries
    for (int consumed = 0; consumed < minLocs; consumed++) {
        accumulated += locs.get(index).size();
        index++;
    }
    final long halfSize = locsSize / 2;
    // grow toward half the total, but leave maxLocs entries for the second half
    while (index < (stopIndex - maxLocs) && accumulated < halfSize) {
        accumulated += locs.get(index).size();
        index++;
    }
    return new Pair<Integer, Long>(index, accumulated);
}
/**
 * Convenience function that takes a read and the start / stop clipping positions based on the forward
 * strand, and returns start/stop values appropriate for the strand of the read.
 *
 * @param read  the read whose strand determines the coordinate transformation
 * @param start inclusive start position in forward-strand coordinates
 * @param stop  inclusive stop position in forward-strand coordinates
 * @return (start, stop) pair; mirrored across the read length for negative-strand reads,
 *         unchanged otherwise
 */
private Pair<Integer, Integer> strandAwarePositions(GATKSAMRecord read, int start, int stop) {
    if (read.getReadNegativeStrandFlag())
        return new Pair<Integer, Integer>(read.getReadLength() - stop - 1, read.getReadLength() - start - 1);
    else
        return new Pair<Integer, Integer>(start, stop);
}
/**
 * Emits the reference base at the current locus as a one-character string, keyed by location.
 *
 * @return pair of (locus location, reference base as a string)
 */
public Pair<GenomeLoc, String> map(RefMetaDataTracker rodData, ReferenceContext ref, AlignmentContext context) {
    final char refBase = (char) ref.getBase();
    return new Pair<GenomeLoc, String>(context.getLocation(), String.valueOf(refBase));
}
/**
 * Supplies the initial reduce accumulator: an empty per-depth count list and a zero total.
 *
 * @return pair of (empty ExpandingArrayList, 0L)
 */
public Pair<ExpandingArrayList<Long>, Long> reduceInit() {
    // upper-case 'L' suffix: the original lower-case 'l' is easily misread as the digit '1'
    return new Pair<ExpandingArrayList<Long>, Long>(new ExpandingArrayList<Long>(), 0L);
}
/**
 * Mask a SNP (inserting N's in the sequence).
 *
 * @param tracker the Reference Metadata available at a particular site in the genome
 * @param context the locus context data
 * @return mask at the locus, or null if there is no SNP at that locus
 */
private Pair<GenomeLoc, String> maskSnp(final RefMetaDataTracker tracker, final AlignmentContext context) {
    for (final VariantContext vc : tracker.getValues(snpmask)) {
        if (!vc.isSNP()) {
            continue;
        }
        return new Pair<>(context.getLocation(), "N");
    }
    return null;
}
public static Pair<GenomeLocSortedSet, GenomeLocSortedSet> parseIntervalBindingsPair( final ReferenceSequenceFile referenceSequenceFile, final List<IntervalBinding<Feature>> intervals, final IntervalSetRule intervalSetRule, final IntervalMergingRule intervalMergingRule, final int intervalPadding, final List<IntervalBinding<Feature>> excludeIntervals) { GenomeLocParser genomeLocParser = new GenomeLocParser(referenceSequenceFile); // if include argument isn't given, create new set of all possible intervals GenomeLocSortedSet includeSortedSet = ((intervals == null || intervals.size() == 0) ? GenomeLocSortedSet.createSetFromSequenceDictionary(referenceSequenceFile.getSequenceDictionary()) : loadIntervals(intervals, intervalSetRule, intervalMergingRule, intervalPadding, genomeLocParser)); GenomeLocSortedSet excludeSortedSet = null; if (excludeIntervals != null && excludeIntervals.size() > 0) { excludeSortedSet = loadIntervals(excludeIntervals, IntervalSetRule.UNION, intervalMergingRule, intervalPadding, genomeLocParser); } return new Pair<GenomeLocSortedSet, GenomeLocSortedSet>(includeSortedSet, excludeSortedSet); }
public Pair<ExpandingArrayList<Long>, Long> reduce(Datum point, Pair<ExpandingArrayList<Long>, Long> sum) { ExpandingArrayList<Long> counts = sum.getFirst(); updateCounts(counts, point.nRodsAtThisLocation, 1); updateCounts(counts, 0, point.nSkippedBases); Pair<ExpandingArrayList<Long>, Long> r = new Pair<ExpandingArrayList<Long>, Long>(counts, point.nTotalBases + sum.getSecond()); //System.out.printf("Reduce: %s %s => %s%n", point, sum, r); return r; } }
private Pair<Integer, Integer> estNumberOfEvaluations(final AFCalculatorTestBuilder testBuilder, final VariantContext vc, final int nonTypePL) { final int evalOverhead = 2; // 2 final int maxEvalsPerSamplePerAC = 3; int minEvals = 0, maxEvals = 0; for ( final Allele alt : vc.getAlternateAlleles() ) { final int AC = vc.getCalledChrCount(alt); minEvals += AC + evalOverhead; // everyone is hom-var maxEvals += AC * maxEvalsPerSamplePerAC + 10; } return new Pair<Integer, Integer>(minEvals, maxEvals); }
private boolean containsDuplicateRecord( final File vcf, final GenomeLocParser parser ) { final List<Pair<GenomeLoc, HaplotypeCallerGenotypingEngine.Event>> VCs = new ArrayList<>(); try { for( final VariantContext vc : GATKVCFUtils.readVCF(vcf).getSecond() ) { VCs.add(new Pair<>(parser.createGenomeLoc(vc), new HaplotypeCallerGenotypingEngine.Event(vc))); } } catch( IOException e ) { throw new IllegalStateException("Somehow the temporary VCF from the integration test could not be read."); } final Set<Pair<GenomeLoc, HaplotypeCallerGenotypingEngine.Event>> VCsAsSet = new HashSet<>(VCs); return VCsAsSet.size() != VCs.size(); // The se will remove duplicate Events. }
/**
 * Merges two partial reduce results by summing their per-index counts element-wise
 * and adding their base totals.
 *
 * NOTE(review): when {@code nt.get(index)} is null, {@code nt.add(l)} APPENDS at the end
 * rather than setting position {@code index}; this is only correct if null entries never
 * occur before the end of the list — confirm ExpandingArrayList semantics.
 *
 * @param lhs left partial result (its counts are copied into a fresh list)
 * @param rhs right partial result
 * @return merged (per-index counts, total bases) pair
 */
@Override
public Pair<ExpandingArrayList<Long>, Long> treeReduce(Pair<ExpandingArrayList<Long>, Long> lhs, Pair<ExpandingArrayList<Long>, Long> rhs) {
    ExpandingArrayList<Long> nt = new ExpandingArrayList<Long>();
    nt.addAll(lhs.first);
    int index = 0;
    for (Long l : rhs.first) {
        // a null slot means lhs recorded no count at this index yet
        if (nt.get(index) == null)
            nt.add(l);
        else
            nt.set(index, nt.get(index) + l);
        index++;
    }
    return new Pair<ExpandingArrayList<Long>, Long>(nt, lhs.second + rhs.second);
}
/**
 * Classifies a read by the operator of its final non-hard-clip cigar element.
 *
 * @return pair of (1 if the read ends in an insertion or deletion else 0,
 *         1 if the read ends in a soft clip else 0)
 * @throws UserException.MalformedBAM if the read's cigar is entirely hard clips
 */
public Pair<Long, Long> map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker tracker) {
    // find the last cigar element that is not a hard clip
    CigarElement last = null;
    for (final CigarElement element : read.getCigar().getCigarElements()) {
        if (element.getOperator() != CigarOperator.HARD_CLIP) {
            last = element;
        }
    }
    if (last == null) {
        throw new UserException.MalformedBAM(read, "read does not have any bases, it's all hard clips");
    }
    final CigarOperator op = last.getOperator();
    final long endsInIndel = (op == CigarOperator.INSERTION || op == CigarOperator.DELETION) ? 1 : 0;
    final long endsInSoftClip = (op == CigarOperator.SOFT_CLIP) ? 1 : 0;
    return new Pair<Long, Long>(endsInIndel, endsInSoftClip);
}