/**
 * Combines two (counts, total) partial results by element-wise summing the two
 * count lists and adding the two totals.
 *
 * @param lhs left partial result: a list of per-index counts plus a running total
 * @param rhs right partial result: a list of per-index counts plus a running total
 * @return a new Pair whose list is the element-wise sum of the inputs and whose
 *         second element is the sum of the two totals
 */
@Override
public Pair<ExpandingArrayList<Long>, Long> treeReduce(Pair<ExpandingArrayList<Long>, Long> lhs, Pair<ExpandingArrayList<Long>, Long> rhs) {
    final ExpandingArrayList<Long> nt = new ExpandingArrayList<Long>();
    nt.addAll(lhs.first);
    int index = 0;
    for ( final Long l : rhs.first ) {
        if ( nt.get(index) == null )
            // FIX: previously this called nt.add(l), which appends at the END of the
            // list and is only correct when index == nt.size(). A null can also occur
            // as a hole in the middle (ExpandingArrayList pads with nulls on expanding
            // set), in which case add() put the count at the wrong index. set() writes
            // the value at the correct slot and expands the list when needed.
            nt.set(index, l);
        else
            nt.set(index, nt.get(index) + l); // sum overlapping counts
        index++;
    }
    return new Pair<ExpandingArrayList<Long>, Long>(nt, lhs.second + rhs.second);
}
private void maybeExpand(int index, E value) { if ( index >= size() ) { ensureCapacity(index+1); // make sure we have space to hold at least index + 1 elements // We need to add null items until we can safely set index to element for ( int i = size(); i <= index; i++ ) add(value); } }
/** Discards all values accumulated so far in {@code pVarInGaussian}. */
public void resetPVarInGaussian() { pVarInGaussian.clear(); }
/**
 * Put mapResult into this MapResultsQueue, associated with its jobID
 * @param mapResult a non-null map result
 */
public synchronized void put(final MapResult<MapType> mapResult) {
    if ( mapResult == null ) throw new IllegalArgumentException("mapResult cannot be null");
    // make sure that nothing is at the job id for map
    // NOTE(review): this is an assert, so the invariant is only checked when the JVM
    // runs with -ea; the first clause covers job ids beyond the queue's current size,
    // the second a null slot within it — presumably queue expands on set; verify.
    assert queue.size() < mapResult.getJobID() || queue.get(mapResult.getJobID()) == null;
    // store the result under its job id (indexing by jobID keeps results ordered)
    queue.set(mapResult.getJobID(), mapResult);
}
/**
 * Builds the fixture lists used by each test: an empty list, a list created with
 * initial capacity 10, a one-element list, and a list holding the values 1..10.
 */
@BeforeMethod
public void before() {
    empty = new ExpandingArrayList<Integer>();
    initCap10 = new ExpandingArrayList<Integer>(10);

    hasOne = new ExpandingArrayList<Integer>();
    hasOne.add(1);

    hasTen = new ExpandingArrayList<Integer>();
    for ( int value = 1; value <= 10; value++ )
        hasTen.add(value);
}
/** Provides the initial (empty) accumulator for the reduce phase. */
@Override public ExpandingArrayList<VariantDatum> reduceInit() { return new ExpandingArrayList<>(); }
/** Verifies that get() with a negative index throws IndexOutOfBoundsException. */
@Test (expectedExceptions=IndexOutOfBoundsException.class )
public void testSetBadGetNegative() {
    logger.warn("Executing testSetBadGetNegative");
    // negative indices must throw, unlike too-large indices which return null
    empty.get(-1);
}
@Override public ExpandingArrayList<VariantDatum> map( final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context ) { final ExpandingArrayList<VariantDatum> mapList = new ExpandingArrayList<>(); if( tracker == null ) { // For some reason RodWalkers get map calls with null trackers return mapList; } mapList.addAll( addOverlappingVariants(input, true, tracker, context) ); if( aggregate != null ) { mapList.addAll( addOverlappingVariants(aggregate, false, tracker, context) ); } return mapList; }
/**
 * Records one variant's probability of membership in this Gaussian.
 * @param pVar the probability value to append to {@code pVarInGaussian}
 */
public void assignPVarInGaussian( final double pVar ) { pVarInGaussian.add( pVar ); }
/**
 * Folds one map result into the running accumulator by appending all of its
 * datums; the accumulator itself is mutated and returned.
 */
@Override public ExpandingArrayList<VariantDatum> reduce( final ExpandingArrayList<VariantDatum> mapValue, final ExpandingArrayList<VariantDatum> reduceSum ) { reduceSum.addAll( mapValue ); return reduceSum; }
/**
 * Returns the element at the specified position in this list, or {@code null}
 * when the index is at or beyond the current size (instead of throwing, as a
 * plain ArrayList would).
 *
 * @param index the position to read; must be non-negative
 * @return the stored element, or null if {@code index >= size()}
 * @throws IndexOutOfBoundsException if {@code index < 0}
 */
public E get(int index) throws IndexOutOfBoundsException {
    // a negative index still falls through to super.get(), which throws
    return index < size() ? super.get(index) : null;
}
/**
 * Checks that a set() past the end expands the list (with the intermediate
 * slots becoming addressable) and that subsequent sets fill in the holes.
 */
@Test
public void testSetExpandingReset() {
    logger.warn("Executing testSetExpandingReset");
    Assert.assertEquals(0, empty.size());

    // write indices in DESCENDING order so the very first set() forces the expansion
    for ( int i = 3; i >= 0; i-- )
        empty.set(i, i);

    Assert.assertEquals(4, empty.size());
    for ( int i = 0; i < 4; i++ )
        Assert.assertEquals(i, (int)empty.get(i));
}
/**
 * Collects the subset of datums usable for evaluation: those that passed the
 * standard-deviation threshold and sit at neither a training nor an
 * anti-training site.
 *
 * @return a freshly built list of the evaluation-eligible datums (possibly empty)
 */
public List<VariantDatum> getEvaluationData() {
    final List<VariantDatum> evaluationData = new ExpandingArrayList<>();

    for( final VariantDatum datum : data ) {
        if( datum == null )
            continue; // data may contain holes

        final boolean eligible = !datum.failingSTDThreshold
                && !datum.atTrainingSite
                && !datum.atAntiTrainingSite;
        if( eligible ) {
            evaluationData.add( datum );
        }
    }

    return evaluationData;
}
/** Verifies that hasTen holds the values 1..10 at indices 0..9. */
@Test
public void testTenElements() {
    logger.warn("Executing testTenElements");
    int expected = 1;
    for ( int i = 0; i < 10; i++ ) {
        Assert.assertEquals(expected, (int)hasTen.get(i));
        expected++;
    }
}
/**
 * add a datum representing a variant site (or allele) to the data in {@code variants},
 * which represents the callset to be recalibrated
 * @param variants is modified by having a new VariantDatum added to it
 * @param isInput true when the variant comes from the input callset (as opposed to the aggregate set)
 * @param tracker the rod tracker used to look up overlapping training-set records
 * @param context the alignment context supplying the locus for training-set parsing
 * @param vc the VariantContext this datum summarizes
 * @param refAllele the reference allele to record on the datum
 * @param altAllele the alternate allele to record on the datum
 */
private void addDatum(final ExpandingArrayList<VariantDatum> variants, final boolean isInput, final RefMetaDataTracker tracker, final AlignmentContext context, final VariantContext vc, final Allele refAllele, final Allele altAllele) {
    final VariantDatum datum = new VariantDatum();

    // Populate the datum with lots of fields from the VariantContext, unfortunately the
    // VC is too big so we just pull in only the things we absolutely need.
    datum.referenceAllele = refAllele;
    datum.alternateAllele = altAllele;
    dataManager.decodeAnnotations(datum, vc, true); //BUGBUG: when run with HierarchicalMicroScheduler this is non-deterministic because order of calls depends on load of machine

    // loc is only needed for the input callset; aggregate datums carry no location
    datum.loc = (isInput ? getToolkit().getGenomeLocParser().createGenomeLoc(vc) : null);
    datum.originalQual = vc.getPhredScaledQual();
    datum.isSNP = vc.isSNP() && vc.isBiallelic();
    datum.isTransition = datum.isSNP && GATKVariantContextUtils.isTransition(vc);
    datum.isAggregate = !isInput;

    // Loop through the training data sets and if they overlap this locus (and allele, if
    // applicable) then update the prior and training status appropriately
    dataManager.parseTrainingSets(tracker, context.getLocation(), vc, datum, TRUST_ALL_POLYMORPHIC);

    // convert the Phred-scaled prior into log10-odds (logit) space
    final double priorFactor = QualityUtils.qualToProb(datum.prior);
    datum.prior = Math.log10(priorFactor) - Math.log10(1.0 - priorFactor);

    variants.add(datum);
}
/**
 * Merges two partial reduce results by appending all of lhs's datums onto rhs
 * and returning rhs (which is mutated in place).
 * NOTE(review): this appends lhs AFTER rhs, so the element order differs from a
 * sequential reduce; presumably downstream consumers do not rely on ordering —
 * verify before depending on it.
 */
@Override public ExpandingArrayList<VariantDatum> treeReduce( final ExpandingArrayList<VariantDatum> lhs, final ExpandingArrayList<VariantDatum> rhs ) { rhs.addAll( lhs ); return rhs; }