/** Records one newly opened connection in the active-connection counter. */
public void addConnection() {
    numActiveConnections.increment();
}
/**
 * Records that a preemption is pending for a task on the given host.
 *
 * <p>Increments the global pending-preemption counter, the (optional) metrics counter, and the
 * per-host tally. Guarded by {@code writeLock} because the per-host map is shared state.
 *
 * @param host host whose pending-preemption count should be bumped
 */
private void registerPendingPreemption(String host) {
    writeLock.lock();
    try {
        pendingPreemptions.incrementAndGet();
        // Metrics are optional (e.g. disabled in tests), hence the null guard.
        if (metrics != null) {
            metrics.incrPendingPreemptionTasksCount();
        }
        // computeIfAbsent replaces the manual get / null-check / put idiom.
        pendingPreemptionsPerHost.computeIfAbsent(host, h -> new MutableInt(0)).increment();
    } finally {
        writeLock.unlock();
    }
}
); synchronized (waitingForMonitor) { waitingFor.increment();
List<Versioned<byte[]>> retrieved = getResult.retrieved.get(key); MutableInt successCount = keyToSuccessCount.get(key); successCount.increment();
/**
 * Recursively flattens the HAVING query tree into {@code filterQueryMap}, assigning every node a
 * unique sequential id drawn from {@code currentId}.
 *
 * <p>Bug fix: the child loop previously called {@code currentId.increment()} after reading the
 * child id, but the recursive call also claims-and-increments the counter for the child node.
 * The double increment skipped one id per child and stored each child query under a map key that
 * did not match the id set on the query itself. The recursive call alone now advances the counter.
 *
 * @param tree           current node of the HAVING query tree
 * @param filterQueryMap output map from node id to flattened query; children are added here
 * @param currentId      mutable counter supplying the next unused node id
 * @return the flattened query for {@code tree} (callers put it into the map under its id)
 */
private static HavingFilterQuery traverseHavingFilterQueryAndPopulateMap(HavingQueryTree tree,
        Map<Integer, HavingFilterQuery> filterQueryMap, MutableInt currentId) {
    // Claim this node's id; recursion below advances the counter for descendants.
    int currentNodeId = currentId.intValue();
    currentId.increment();

    final List<Integer> filterIds = new ArrayList<>();
    if (null != tree.getChildren()) {
        for (final HavingQueryTree child : tree.getChildren()) {
            // The child's id is the counter value at the moment of recursion; the recursive
            // call performs the increment, so no extra increment here.
            int childNodeId = currentId.intValue();
            filterIds.add(childNodeId);
            final HavingFilterQuery filterQuery =
                    traverseHavingFilterQueryAndPopulateMap(child, filterQueryMap, currentId);
            filterQueryMap.put(childNodeId, filterQuery);
        }
    }

    HavingFilterQuery havingFilterQuery = new HavingFilterQuery();
    havingFilterQuery.setAggregationInfo(tree.getAggregationInfo());
    havingFilterQuery.setId(currentNodeId);
    havingFilterQuery.setNestedFilterQueryIds(filterIds);
    havingFilterQuery.setOperator(tree.getOperator());
    havingFilterQuery.setValue(tree.getValue());
    return havingFilterQuery;
}
/**
 * Recursively flattens the filter query tree into {@code filterQueryMap}, assigning every node a
 * unique sequential id drawn from {@code currentId}.
 *
 * <p>Bug fix: the child loop previously called {@code currentId.increment()} after reading the
 * child id, but the recursive call also claims-and-increments the counter for the child node.
 * The double increment skipped one id per child and stored each child query under a map key that
 * did not match the id set on the query itself. The recursive call alone now advances the counter.
 *
 * @param tree           current node of the filter query tree
 * @param filterQueryMap output map from node id to flattened query; children are added here
 * @param currentId      mutable counter supplying the next unused node id
 * @return the flattened query for {@code tree} (callers put it into the map under its id)
 */
private static FilterQuery traverseFilterQueryAndPopulateMap(FilterQueryTree tree,
        Map<Integer, FilterQuery> filterQueryMap, MutableInt currentId) {
    // Claim this node's id; recursion below advances the counter for descendants.
    int currentNodeId = currentId.intValue();
    currentId.increment();

    final List<Integer> f = new ArrayList<>();
    if (null != tree.getChildren()) {
        for (final FilterQueryTree c : tree.getChildren()) {
            // The child's id is the counter value at the moment of recursion; the recursive
            // call performs the increment, so no extra increment here.
            int childNodeId = currentId.intValue();
            f.add(childNodeId);
            final FilterQuery q = traverseFilterQueryAndPopulateMap(c, filterQueryMap, currentId);
            filterQueryMap.put(childNodeId, q);
        }
    }

    FilterQuery query = new FilterQuery();
    query.setColumn(tree.getColumn());
    query.setId(currentNodeId);
    query.setNestedFilterQueryIds(f);
    query.setOperator(tree.getOperator());
    query.setValue(tree.getValue());
    return query;
}
successCount.increment();
((System.nanoTime() - start) / Time.NS_PER_MS)); successCount.increment(); pipelineData.getResponses().add(response); failureDetector.recordSuccess(response.getNode(), response.getRequestTime());
/**
 * Increments the initial count for the given group, creating its counter on first sight.
 *
 * @param group group whose count is bumped
 * @throws IllegalStateException if counting has already been finalized (see {@code finishedAdding})
 */
public void addGroup(String group) {
    checkState(!finishedAdding);
    // computeIfAbsent replaces the manual get / null-check / put idiom.
    initialCounts.computeIfAbsent(group, g -> new MutableInt()).increment();
}
@Override
public Object visit(ASTEQNode node, Object data) {
    // Tally one EQ node; the shared counter is threaded through as the visitor's data argument.
    final MutableInt nodeCount = (MutableInt) data;
    nodeCount.increment();
    return data;
}
@Override
public Object visit(ASTNENode node, Object data) {
    // Tally one NE node; the shared counter is threaded through as the visitor's data argument.
    final MutableInt nodeCount = (MutableInt) data;
    nodeCount.increment();
    return data;
}
@Override
public Object visit(ASTGENode node, Object data) {
    // Tally one GE node; the shared counter is threaded through as the visitor's data argument.
    final MutableInt nodeCount = (MutableInt) data;
    nodeCount.increment();
    return data;
}
@Override
public Void answer(InvocationOnMock ignoredInvocation) throws Throwable {
    // Count the invocation and let the stubbed call succeed.
    callCount.increment();
    return null;
} }).given(operation).call();
@Override
public Void answer(InvocationOnMock ignoredInvocation) throws Throwable {
    // Count the invocation, then force a failure on every attempt.
    callCount.increment();
    throw new RuntimeException();
} }).given(operation).call();
@Override
public Void answer(InvocationOnMock ignoredInvocation) throws Throwable {
    // Count the invocation on the inner operation, then force a failure on every attempt.
    callCount.increment();
    throw new RuntimeException();
} }).given(innerOperation).call();
@Override
public Void answer(InvocationOnMock ignoredInvocation) throws Throwable {
    // Count the invocation, then fail with IllegalStateException on every attempt.
    callCount.increment();
    throw new IllegalStateException();
} }).given(operation).call();
@Override
public Void answer(InvocationOnMock ignoredInvocation) throws Throwable {
    callCount.increment();
    // Fail on the first attempt only; succeed from the second call onward.
    if (callCount.intValue() >= 2) {
        return null;
    }
    throw new RuntimeException();
} }).given(operation).call();
@Override public AbstractSpan answer(InvocationOnMock invocation) throws Throwable { counter.increment(); if (counter.longValue() >= 5) { // make it available on the fifth attempt return backEndSpan; } else { return null; } } }).when(spanDao).get(Matchers.eq(backEndIdent));
/**
 * Computes the fraction of pileup bases that support the reference allele, counting only bases
 * that match the first base of either the reference or the first alternate allele.
 *
 * @param stratifiedContext pileup context supplying the observed bases
 * @param vc                variant context supplying the alleles to count against
 * @return refCount / (refCount + altCount), or null when neither allele is observed
 */
private Double annotateWithPileup(final AlignmentContext stratifiedContext, final VariantContext vc) {
    // Seed a zero counter for the leading base of every allele in the variant context.
    final HashMap<Byte, MutableInt> alleleCounts = new HashMap<>();
    for (final Allele allele : vc.getAlleles()) {
        alleleCounts.put(allele.getBases()[0], new MutableInt(0));
    }

    // Tally pileup bases that correspond to a known allele; all other bases are ignored.
    for (final byte base : stratifiedContext.getBasePileup().getBases()) {
        final MutableInt count = alleleCounts.get(base);
        if (count != null) {
            count.increment();
        }
    }

    final int refCount = alleleCounts.get(vc.getReference().getBases()[0]).intValue();
    final int altCount = alleleCounts.get(vc.getAlternateAllele(0).getBases()[0]).intValue();
    final int total = refCount + altCount;
    return total == 0 ? null : ((double) refCount) / total;
}
@Test
public void functionRunnerShouldWorkIfNotSucceededOnMaxRetryReached() throws Exception {
    // The callable only reports success once it has been invoked MAX_RETRY times,
    // so the runner must exhaust every retry before it can stop.
    final MutableInt attempts = new MutableInt(0);
    new FunctionRunnerWithRetry(MAX_RETRY).execute(() -> {
        attempts.increment();
        return (Integer) attempts.getValue() == MAX_RETRY;
    });
    assertThat(attempts.getValue()).isEqualTo(MAX_RETRY);
}