/**
 * Returns a distinct copy of this histogram with coordinated-omission correction applied.
 * Synchronized so the copy is taken against a stable view of this histogram.
 */
@Override
public synchronized DoubleHistogram copyCorrectedForCoordinatedOmission(
        final double expectedIntervalBetweenValueSamples) {
    final DoubleHistogram correctedCopy = new DoubleHistogram(this);
    correctedCopy.addWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
    return correctedCopy;
}
/** Produces a distinct copy of this histogram, including all recorded data. */
@Override
public synchronized DoubleHistogram copy() {
    final DoubleHistogram duplicate = new DoubleHistogram(this);
    integerValuesHistogram.copyInto(duplicate.integerValuesHistogram);
    return duplicate;
}
/**
 * Create a copy of this histogram, complete with data and everything.
 *
 * @return A distinct copy of this histogram.
 */
public DoubleHistogram copy() {
    final DoubleHistogram result = new DoubleHistogram(
            configuredHighestToLowestValueRatio, getNumberOfSignificantValueDigits());
    // Carry over the currently-established auto-range before copying the counts.
    result.setTrackableValueRange(currentLowestValueInAutoRange, currentHighestValueLimitInAutoRange);
    integerValuesHistogram.copyInto(result.integerValuesHistogram);
    return result;
}
/**
 * Reconstructs a DoubleHistogram from an encoded byte buffer whose cookie identifies
 * either a non-compressed or a compressed DoubleHistogram payload.
 *
 * @param cookie the encoding cookie already read from the buffer
 * @param buffer buffer positioned at the DoubleHistogram header (digits, ratio, payload)
 * @param histogramClass concrete integer-histogram class to decode the inner values into
 * @param minBarForHighestToLowestValueRatio lower bound imposed on the decoded ratio
 * @throws DataFormatException if a compressed payload cannot be inflated
 * @throws IllegalStateException if the cookie is not a DoubleHistogram cookie
 */
private static DoubleHistogram constructHistogramFromBuffer(
        int cookie,
        final ByteBuffer buffer,
        final Class<? extends AbstractHistogram> histogramClass,
        final long minBarForHighestToLowestValueRatio) throws DataFormatException {
    final int significantValueDigits = buffer.getInt();
    final long highestToLowestValueRatio = buffer.getLong();
    final AbstractHistogram decodedValuesHistogram;
    if (isNonCompressedDoubleHistogramCookie(cookie)) {
        decodedValuesHistogram = AbstractHistogram.decodeFromByteBuffer(
                buffer, histogramClass, minBarForHighestToLowestValueRatio);
    } else if (isCompressedDoubleHistogramCookie(cookie)) {
        decodedValuesHistogram = AbstractHistogram.decodeFromCompressedByteBuffer(
                buffer, histogramClass, minBarForHighestToLowestValueRatio);
    } else {
        throw new IllegalStateException("The buffer does not contain a DoubleHistogram");
    }
    return new DoubleHistogram(
            highestToLowestValueRatio,
            significantValueDigits,
            histogramClass,
            decodedValuesHistogram);
}
/**
 * Get a copy of this histogram, corrected for coordinated omission.
 * <p>
 * To compensate for the loss of sampled values when a recorded value is larger than the expected
 * interval between value samples, the new histogram will include an auto-generated additional series of
 * decreasingly-smaller (down to the expectedIntervalBetweenValueSamples) value records for each count found
 * in the current histogram that is larger than the expectedIntervalBetweenValueSamples.
 *
 * Note: This is a post-correction method, as opposed to the at-recording correction method provided
 * by {@link #recordValueWithExpectedInterval(double, double) recordValueWithExpectedInterval}. The two
 * methods are mutually exclusive, and only one of the two should be used on a given data set to correct
 * for the same coordinated omission issue.
 * <p>
 * See notes in the description of the Histogram calls for an illustration of why this corrective behavior is
 * important.
 *
 * @param expectedIntervalBetweenValueSamples If expectedIntervalBetweenValueSamples is larger than 0, add
 *                                            auto-generated value records as appropriate if value is larger
 *                                            than expectedIntervalBetweenValueSamples
 * @return a copy of this histogram, corrected for coordinated omission.
 */
public DoubleHistogram copyCorrectedForCoordinatedOmission(final double expectedIntervalBetweenValueSamples) {
    final DoubleHistogram targetHistogram =
            new DoubleHistogram(configuredHighestToLowestValueRatio, getNumberOfSignificantValueDigits());
    // Match the source's established auto-range before adding the corrected values.
    targetHistogram.setTrackableValueRange(currentLowestValueInAutoRange, currentHighestValueLimitInAutoRange);
    targetHistogram.addWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
    return targetHistogram;
}
new DoubleHistogram(3); accumulatedDoubleHistogram.reset(); accumulatedDoubleHistogram.setAutoResize(true); new DoubleHistogram(3) : new Histogram(3);
/**
 * Builds the empty form of this aggregation: a fresh auto-resizing histogram
 * with no recorded values, wrapped in an InternalHDRPercentiles instance.
 */
@Override
public InternalAggregation buildEmptyAggregation() {
    final DoubleHistogram emptyState = new DoubleHistogram(numberOfSignificantValueDigits);
    emptyState.setAutoResize(true);
    return new InternalHDRPercentiles(name, keys, emptyState, keyed, format,
            pipelineAggregators(), metaData());
}
}
/**
 * Builds the empty form of this aggregation: a fresh auto-resizing histogram
 * with no recorded values, wrapped in an InternalHDRPercentileRanks instance.
 */
@Override
public InternalAggregation buildEmptyAggregation() {
    final DoubleHistogram emptyState = new DoubleHistogram(numberOfSignificantValueDigits);
    emptyState.setAutoResize(true);
    return new InternalHDRPercentileRanks(name, keys, emptyState, keyed, format,
            pipelineAggregators(), metaData());
}
/**
 * Merges the histogram state of every shard-level aggregation into one
 * histogram, then wraps the merged result via createReduced.
 */
@Override
public AbstractInternalHDRPercentiles doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
    DoubleHistogram combined = null;
    for (final InternalAggregation aggregation : aggregations) {
        final AbstractInternalHDRPercentiles shard = (AbstractInternalHDRPercentiles) aggregation;
        if (combined == null) {
            // Seed the merged histogram from the first shard's state and let it grow as needed.
            combined = new DoubleHistogram(shard.state);
            combined.setAutoResize(true);
        }
        combined.add(shard.state);
    }
    return createReduced(getName(), keys, combined, keyed, pipelineAggregators(), getMetaData());
}
public static DoubleHistogram toDoubleHistogram(Histogram source, int numberOfSignificantValueDigits) { DoubleHistogram doubles = new DoubleHistogram(numberOfSignificantValueDigits); // Do max value first, to avoid max value updates on each iteration: int otherMaxIndex = source.countsArrayIndex(source.getMaxValue()); long count = source.getCountAtIndex(otherMaxIndex); doubles.recordValueWithCount(source.valueFromIndex(otherMaxIndex), count); // Record the remaining values, up to but not including the max value: for (int i = 0; i < otherMaxIndex; i++) { count = source.getCountAtIndex(i); if (count > 0) { doubles.recordValueWithCount(source.valueFromIndex(i), count); } } return doubles; } }
@Override public void collect(int doc, long bucket) throws IOException { states = bigArrays.grow(states, bucket + 1); DoubleHistogram state = states.get(bucket); if (state == null) { state = new DoubleHistogram(numberOfSignificantValueDigits); // Set the histogram to autosize so it can resize itself as // the data range increases. Resize operations should be // rare as the histogram buckets are exponential (on the top // level). In the future we could expose the range as an // option on the request so the histogram can be fixed at // initialisation and doesn't need resizing. state.setAutoResize(true); states.set(bucket, state); } if (values.advanceExact(doc)) { final int valueCount = values.docValueCount(); for (int i = 0; i < valueCount; i++) { state.recordValue(values.nextValue()); } } } };
/** Returns a full, independent copy of this histogram and all of its recorded data. */
@Override
public synchronized DoubleHistogram copy() {
    final DoubleHistogram copied = new DoubleHistogram(this);
    this.integerValuesHistogram.copyInto(copied.integerValuesHistogram);
    return copied;
}
/** Produces a single zero-valued summary row for the given relationship type and property. */
Stream<SimilaritySummaryResult> emptyStream(String writeRelationshipType, String writeProperty) {
    final SimilaritySummaryResult emptySummary = SimilaritySummaryResult.from(
            0,
            new AtomicLong(0),
            writeRelationshipType,
            writeProperty,
            false,
            new DoubleHistogram(5));
    return Stream.of(emptySummary);
}
UntypedMetric(MetricSettings metricSettings) { this.metricSettings = metricSettings; if (metricSettings == null || !metricSettings.isHistogram()) { histogram = null; } else { histogram = new DoubleHistogram(metricSettings.getSignificantdigits()); } }
/**
 * Create a copy of this histogram, complete with data and everything.
 *
 * @return A distinct copy of this histogram.
 */
public DoubleHistogram copy() {
    final DoubleHistogram copied = new DoubleHistogram(
            configuredHighestToLowestValueRatio,
            getNumberOfSignificantValueDigits());
    // Reproduce the source's established auto-range, then transfer the counts.
    copied.setTrackableValueRange(currentLowestValueInAutoRange, currentHighestValueLimitInAutoRange);
    integerValuesHistogram.copyInto(copied.integerValuesHistogram);
    return copied;
}
/**
 * Returns the empty-result aggregation: an InternalHDRPercentiles backed by a
 * value-free, auto-resizing histogram.
 */
@Override
public InternalAggregation buildEmptyAggregation() {
    final DoubleHistogram histogram = new DoubleHistogram(numberOfSignificantValueDigits);
    histogram.setAutoResize(true);
    return new InternalHDRPercentiles(
            name, keys, histogram, keyed, format, pipelineAggregators(), metaData());
}
}
/**
 * Returns the empty-result aggregation: an InternalHDRPercentileRanks backed by a
 * value-free, auto-resizing histogram.
 */
@Override
public InternalAggregation buildEmptyAggregation() {
    final DoubleHistogram histogram = new DoubleHistogram(numberOfSignificantValueDigits);
    histogram.setAutoResize(true);
    return new InternalHDRPercentileRanks(
            name, keys, histogram, keyed, format, pipelineAggregators(), metaData());
}
/**
 * Produces an empty InternalHDRPercentileRanks whose backing histogram holds no
 * values and is allowed to auto-resize as data arrives.
 */
@Override
public InternalAggregation buildEmptyAggregation() {
    final DoubleHistogram blankState = new DoubleHistogram(numberOfSignificantValueDigits);
    blankState.setAutoResize(true);
    return new InternalHDRPercentileRanks(name, keys, blankState, keyed, format,
            pipelineAggregators(), metaData());
}
/**
 * Produces an empty InternalHDRPercentiles whose backing histogram holds no
 * values and is allowed to auto-resize as data arrives.
 */
@Override
public InternalAggregation buildEmptyAggregation() {
    final DoubleHistogram blankState = new DoubleHistogram(numberOfSignificantValueDigits);
    blankState.setAutoResize(true);
    return new InternalHDRPercentiles(name, keys, blankState, keyed, formatter,
            pipelineAggregators(), metaData());
}
/**
 * Produces an empty InternalHDRPercentileRanks backed by a value-free histogram
 * configured to auto-resize as data is recorded.
 */
@Override
public InternalAggregation buildEmptyAggregation() {
    final DoubleHistogram blankState = new DoubleHistogram(numberOfSignificantValueDigits);
    blankState.setAutoResize(true);
    return new InternalHDRPercentileRanks(name, keys, blankState, keyed, formatter,
            pipelineAggregators(), metaData());
}