/**
 * Adds a new observation vector to the sample, updating each per-dimension
 * running mean in place.
 *
 * @param v vector to add; must have the same length as the tracked means
 * @throws DimensionMismatchException if the vector does not have the right dimension
 */
public void increment(double[] v) throws DimensionMismatchException {
    if (v.length != means.length) {
        throw new DimensionMismatchException(v.length, means.length);
    }
    for (int k = 0; k < v.length; ++k) {
        means[k].increment(v[k]);
    }
}
/**
 * Folds one timing sample (in nanoseconds) into both running statistics.
 * Synchronized because the accumulators are shared across threads.
 *
 * @param timeNanos elapsed time of a single operation, in nanoseconds
 */
synchronized void recordTiming(long timeNanos) {
    stdevTimeNanos.increment(timeNanos);
    meanTimeNanos.increment(timeNanos);
}
/**
 * Load test: fires recommendation requests from many parallel workers and logs
 * running throughput / latency statistics.
 */
@Test
public void testRecommendLoad() throws Exception {
    // Total requests completed so far, across all workers.
    AtomicLong count = new AtomicLong();
    // Shared latency accumulator; guarded by synchronized(meanReqTimeNanos) below.
    Mean meanReqTimeNanos = new Mean();
    long start = System.nanoTime();
    int workers = LoadTestALSModelFactory.WORKERS;
    ExecUtils.doInParallel(workers, workers, true, i -> {
        // Per-worker RNG; seed mixes the worker index with the clock so workers diverge.
        RandomGenerator random =
            RandomManager.getRandom(Integer.toString(i).hashCode() ^ System.nanoTime());
        for (int j = 0; j < LoadTestALSModelFactory.REQS_PER_WORKER; j++) {
            // Pick a pseudo-random user and time a single /recommend call.
            String userID = "U" + random.nextInt(LoadTestALSModelFactory.USERS);
            long callStart = System.nanoTime();
            target("/recommend/" + userID).request()
                .accept(MediaType.APPLICATION_JSON_TYPE).get(LIST_ID_VALUE_TYPE);
            long timeNanos = System.nanoTime() - callStart;
            if (j > 0) {
                // Ignore first iteration's time as 'burn in'
                synchronized (meanReqTimeNanos) {
                    meanReqTimeNanos.increment(timeNanos);
                }
            }
            long currentCount = count.incrementAndGet();
            // Periodic progress log every 100 completed requests.
            if (currentCount % 100 == 0) {
                log(currentCount, meanReqTimeNanos, start);
            }
        }
    });
    // Final summary over the whole run.
    int totalRequests = workers * LoadTestALSModelFactory.REQS_PER_WORKER;
    log(totalRequests, meanReqTimeNanos, start);
}
/**
 * Records one user's popularity value in the shared running mean.
 * Synchronized because callers may run concurrently.
 *
 * @param pop popularity value for a single user
 */
private synchronized void addUser(double pop) {
    mean.increment(pop);
}
}
/**
 * Folds one user's average precision into the overall running mean.
 * Synchronized because the accumulator is shared across worker threads.
 *
 * @param ur per-user evaluation result
 */
synchronized void addUser(UserResult ur) {
    allMean.increment(ur.avgPrecision);
}
}
/**
 * Folds a single observation into the running min / max / sum / mean statistics.
 *
 * @param stat the observed value
 */
public void addStat(long stat) {
    if (stat < min) {
        min = stat;
    }
    if (stat > max) {
        max = stat;
    }
    sum += stat;
    mean.increment(stat);
}
/**
 * Measures the length of one user's recommendation list, accumulating it into
 * the shared mean so an aggregate list length can be reported.
 *
 * @param rec             the recommender under evaluation (unused here)
 * @param user            the test user (unused here)
 * @param targetLength    the requested list length (unused here)
 * @param recommendations the produced recommendation list
 * @param context         shared accumulator; updated under its own lock
 * @return a per-user result carrying the list length
 */
@Nonnull
@Override
public MetricResult measureUserRecList(Recommender rec, TestUser user, int targetLength,
                                       List<Long> recommendations, Mean context) {
    final int listSize = recommendations.size();
    synchronized (context) {
        context.increment(listSize);
    }
    return new LengthResult(listSize);
}
/**
 * Folds one user's reciprocal rank into the overall running mean.
 * Synchronized because the accumulator is shared across worker threads.
 *
 * @param ur per-user evaluation result
 */
synchronized void addUser(UserResult ur) {
    allMean.increment(ur.getRecipRank());
}
}
/**
 * Scores one user's recommendation list with normalized DCG: the list's discounted
 * cumulative gain divided by the gain of the ideal (best-possible) ordering of the
 * user's test items.
 */
@Nonnull
@Override
public MetricResult measureUserRecList(Recommender rec, TestUser user, int targetLength,
                                       List<Long> recommendations, Mean context) {
    // No list produced for this user: contribute nothing to the aggregate.
    if (recommendations == null) {
        return MetricResult.empty();
    }
    // Build item -> gain from the user's test history; non-numeric gain values are a hard error.
    Long2DoubleMap ratings = new Long2DoubleOpenHashMap();
    for (Entity e: user.getTestHistory()) {
        long item = e.getLong(CommonAttributes.ITEM_ID);
        Object av = e.get(gainAttribute);
        if (av instanceof Number) {
            ratings.put(item, ((Number) av).doubleValue());
        } else {
            throw new IllegalArgumentException("value " + av + " for attribute " + gainAttribute + " is not numeric");
        }
    }
    // Ideal list: test items sorted by descending gain, truncated to targetLength
    // (a negative targetLength means "no truncation").
    List<Long> ideal = ratings.keySet()
                              .stream()
                              .sorted(LongUtils.keyValueComparator(ratings).reversed())
                              .limit(targetLength >= 0 ? targetLength : ratings.size())
                              .collect(Collectors.toList());
    double idealGain = computeDCG(ideal, ratings);
    double gain = computeDCG(recommendations, ratings);
    // NOTE(review): if the user's test items all have zero gain, idealGain can be 0
    // and score becomes NaN/Infinity — confirm upstream filtering guarantees otherwise.
    double score = gain / idealGain;
    // context is shared across threads; update under its own lock.
    synchronized (context) {
        context.increment(score);
    }
    return MetricResult.singleton(columnName, score);
}
/**
 * Scores one user's predictions with normalized DCG: items ordered by predicted
 * score are compared against the ideal ordering by true rating, and the gain
 * ratio is accumulated and reported (both normalized and raw).
 */
@Nonnull
@Override
public MetricResult measureUser(TestUser user, ResultMap predictions, Mean context) {
    // Nothing predicted for this user: contribute nothing to the aggregate.
    if (predictions == null || predictions.isEmpty()) {
        return MetricResult.empty();
    }
    Long2DoubleMap ratings = user.getTestRatings();
    // Ideal ordering: test items by descending true rating.
    long[] ideal = ratings.keySet().toLongArray();
    LongArrays.quickSort(ideal, LongComparators.oppositeComparator(LongUtils.keyValueComparator(ratings)));
    // Actual ordering: predicted items by descending predicted score.
    long[] actual = LongUtils.asLongSet(predictions.keySet()).toLongArray();
    LongArrays.quickSort(actual, LongComparators.oppositeComparator(
        LongUtils.keyValueComparator(
            LongUtils.asLong2DoubleMap(predictions.scoreMap()))));
    double idealGain = computeDCG(ideal, ratings);
    double gain = computeDCG(actual, ratings);
    logger.debug("user {} has gain of {} (ideal {})", user.getUserId(), gain, idealGain);
    // NOTE(review): if idealGain is 0 (e.g. all test ratings have zero gain), the
    // score becomes NaN/Infinity — confirm upstream filtering guarantees otherwise.
    double score = gain / idealGain;
    // context is shared across threads; update under its own lock.
    synchronized (context) {
        context.increment(score);
    }
    // Report both the normalized score and the raw (unnormalized) gain.
    ImmutableMap.Builder<String,Double> results = ImmutableMap.builder();
    return MetricResult.fromMap(results.put(columnName, score)
                                       .put(columnName + ".Raw", gain)
                                       .build());
}
/**
 * Tallies an item score against this row, but only when the given row value
 * matches the row this counter represents; otherwise the call is a no-op.
 *
 * @param rowValue  candidate row key; ignored when null or not equal to this row's value
 * @param itemScore score to accumulate into the column stats and running mean
 */
public void count(Object rowValue, Double itemScore) {
    if (rowValue == null || !rowValue.equals(this.rowValue)) {
        return;
    }
    columns.addValue(itemScore);
    mean.increment(itemScore);
    rowTotal++;
}
/** * Mean/sigma linking coefficients are computed from the mean and standard deviation of item difficulty. * The summary statistics are computed in a storeless manner. This method allows for the incremental * update to item difficulty summary statistics by combining them with other summary statistics. * * @param mean item difficulty mean. * @param sd item difficulty standard deviation. */ public void incrementMeanSigma(Mean mean, StandardDeviation sd){//TODO check for correctness mean.increment(difficulty); sd.increment(difficulty); }
/**
 * Folds one data point into every aggregate tracked by this accumulator:
 * min, average, max, sum, the sample count, and all percentile estimators.
 *
 * @param dataPoint the sample to record; its numeric value is used
 */
public void increment(DataPoint<? extends Number> dataPoint) {
    final double v = dataPoint.getValue().doubleValue();
    min.increment(v);
    average.increment(v);
    max.increment(v);
    sum.increment(v);
    samples++;
    percentiles.forEach(p -> p.addValue(v));
}
/**
 * Folds one data point into every aggregate tracked by this accumulator:
 * min, average, max, sum, the sample count, and all percentile estimators.
 *
 * @param dataPoint the sample to record; its numeric value is used
 */
public void increment(DataPoint<? extends Number> dataPoint) {
    final double v = dataPoint.getValue().doubleValue();
    min.increment(v);
    average.increment(v);
    max.increment(v);
    sum.increment(v);
    samples++;
    percentiles.forEach(p -> p.addValue(v));
}
/**
 * Folds one data point into every aggregate tracked by this accumulator:
 * min, average, max, sum, the sample count, and all percentile estimators.
 *
 * @param dataPoint the sample to record; its numeric value is used
 */
public void increment(DataPoint<? extends Number> dataPoint) {
    final double v = dataPoint.getValue().doubleValue();
    min.increment(v);
    average.increment(v);
    max.increment(v);
    sum.increment(v);
    samples++;
    percentiles.forEach(p -> p.addValue(v));
}
/**
 * Computes the arithmetic mean of a collection of values, coercing each
 * element to a double first.
 *
 * @param values the values to average
 * @return the mean of the coerced values
 */
static private Double evaluate(Collection<?> values) {
    Mean meanStat = new Mean();
    for (Object rawValue : values) {
        Number coerced = (Number) TypeUtil.parseOrCast(DataType.DOUBLE, rawValue);
        meanStat.increment(coerced.doubleValue());
    }
    return meanStat.getResult();
}
}
/**
 * Computes the arithmetic mean of a collection of values, coercing each
 * element to a double first.
 *
 * @param values the values to average
 * @return the mean of the coerced values
 */
static private Double evaluate(Collection<?> values) {
    Mean statistic = new Mean();
    for (Object value : values) {
        // Cast to Number (not Double) so any numeric subtype returned by
        // parseOrCast is accepted — matches the sibling evaluate() and avoids
        // a ClassCastException on non-Double Number instances.
        Number number = (Number) TypeUtil.parseOrCast(DataType.DOUBLE, value);
        statistic.increment(number.doubleValue());
    }
    return statistic.getResult();
}
}
/**
 * Records one scored response: updates the per-score response summary and the
 * overall mean, standard deviation, and score/response-score correlation.
 *
 * @param score      the item score for this response
 * @param response   the response text
 * @param freqWeight frequency weight applied to the response tally
 */
public void increment(double score, String response, double freqWeight) {
    Double d = Double.valueOf(score);
    // Lazily create the per-score summary on first sight of this score
    // (computeIfAbsent replaces the manual get/null-check/put).
    TextItemResponseSummary irs =
        summaryTreeMap.computeIfAbsent(d, k -> new TextItemResponseSummary(variableName));
    irs.increment(response, freqWeight);
    mean.increment(score);
    sd.increment(score);
    pearsonCorrelation.increment(score, irs.getScoreAt(response));
}