// Serializes recording into the underlying histogram: the override adds
// `synchronized` around the superclass call so a single instance can be
// shared across threads.
// ArrayIndexOutOfBoundsException is unchecked; it is declared here to
// mirror the superclass signature and make the failure mode visible to
// callers. NOTE(review): presumably thrown for values outside the
// histogram's trackable range — confirm against the superclass contract.
@Override
public synchronized void recordValue(final double value) throws ArrayIndexOutOfBoundsException {
    super.recordValue(value);
}
/**
 * Aggregation update: records one sample. Integer samples accumulate in the
 * long-valued histogram ({@code values}) until the first floating-point
 * sample arrives, at which point the accumulated data is migrated into a
 * {@code DoubleHistogram} and all subsequent samples are recorded there.
 * The requested percentile list is remembered for the final result.
 */
@UserAggregationUpdate
public void aggregate(
        @Name("value") Number value,
        @Name(value = "percentiles", defaultValue = "[0.5,0.75,0.9,0.95,0.99]") List<Double> percentiles) {
    if (value != null) {
        if (doubles == null && (value instanceof Double || value instanceof Float)) {
            // First floating-point sample: convert the long histogram and
            // switch to double-based recording from now on.
            this.doubles = HistogramUtil.toDoubleHistogram(values, 5);
            doubles.recordValue(value.doubleValue());
            values = null;
        } else if (doubles != null) {
            // Already migrated: everything is recorded as a double.
            doubles.recordValue(value.doubleValue());
        } else {
            values.recordValue(value.longValue());
        }
    }
    this.percentiles = percentiles;
}
/**
 * Aggregation update: records one sample. Integer samples accumulate in the
 * long-valued histogram ({@code values}) until the first floating-point
 * sample arrives, at which point the accumulated data is migrated into a
 * {@code DoubleHistogram} and all subsequent samples are recorded there.
 * The requested percentile list is remembered for the final result.
 */
@UserAggregationUpdate
public void aggregate(
        @Name("value") Number value,
        @Name(value = "percentiles", defaultValue = "[0.5,0.75,0.9,0.95,0.99]") List<Double> percentiles) {
    if (value != null) {
        if (doubles == null && (value instanceof Double || value instanceof Float)) {
            // First floating-point sample: convert the long histogram and
            // switch to double-based recording from now on.
            this.doubles = HistogramUtil.toDoubleHistogram(values, 5);
            doubles.recordValue(value.doubleValue());
            values = null;
        } else if (doubles != null) {
            // Already migrated: everything is recorded as a double.
            doubles.recordValue(value.doubleValue());
        } else {
            values.recordValue(value.longValue());
        }
    }
    this.percentiles = percentiles;
}
/**
 * Records this result's similarity score into the given histogram.
 * Out-of-range values are deliberately dropped (best-effort statistics)
 * rather than failing the whole computation.
 */
void record(DoubleHistogram histogram) {
    try {
        histogram.recordValue(similarity);
    } catch (ArrayIndexOutOfBoundsException ignored) {
        // Value outside the histogram's trackable range: skip it.
    }
}

// Reversed natural order. Flipping the operands instead of negating the
// result (`-o1.compareTo(o2)`) avoids integer overflow in the corner case
// where compareTo returns Integer.MIN_VALUE.
static Comparator<SimilarityResult> ASCENDING = (o1, o2) -> o2.compareTo(o1);
// Serializes recording into the underlying histogram: the override adds
// `synchronized` around the superclass call so a single instance can be
// shared across threads.
// ArrayIndexOutOfBoundsException is unchecked; it is declared here to
// mirror the superclass signature and make the failure mode visible to
// callers. NOTE(review): presumably thrown for values outside the
// histogram's trackable range — confirm against the superclass contract.
@Override
public synchronized void recordValue(final double value) throws ArrayIndexOutOfBoundsException {
    super.recordValue(value);
}
/**
 * Records this result's similarity score into the given histogram.
 * Out-of-range values are deliberately dropped (best-effort statistics)
 * rather than failing the whole computation.
 */
void record(DoubleHistogram histogram) {
    try {
        histogram.recordValue(similarity);
    } catch (ArrayIndexOutOfBoundsException ignored) {
        // Value outside the histogram's trackable range: skip it.
    }
}

// Reversed natural order. Flipping the operands instead of negating the
// result (`-o1.compareTo(o2)`) avoids integer overflow in the corner case
// where compareTo returns Integer.MIN_VALUE.
static Comparator<SimilarityResult> ASCENDING = (o1, o2) -> o2.compareTo(o1);
// Adds every value of the given document to the per-bucket DoubleHistogram,
// creating the bucket's histogram lazily the first time that bucket ordinal
// is seen.
@Override
public void collect(int doc, long bucket) throws IOException {
    // Make sure the backing array is large enough for this bucket ordinal.
    states = bigArrays.grow(states, bucket + 1);
    DoubleHistogram state = states.get(bucket);
    if (state == null) {
        state = new DoubleHistogram(numberOfSignificantValueDigits);
        // Set the histogram to autosize so it can resize itself as
        // the data range increases. Resize operations should be
        // rare as the histogram buckets are exponential (on the top
        // level). In the future we could expose the range as an
        // option on the request so the histogram can be fixed at
        // initialisation and doesn't need resizing.
        state.setAutoResize(true);
        states.set(bucket, state);
    }
    // Record each of the document's values, if it has any.
    if (values.advanceExact(doc)) {
        final int valueCount = values.docValueCount();
        for (int i = 0; i < valueCount; i++) {
            state.recordValue(values.nextValue());
        }
    }
}
};
/**
 * Adds one sample to the appropriate histogram. Samples that are NaN or
 * lie outside [minValue, maxValue] are silently dropped. Negative samples
 * are stored as their magnitude in the negative-value histogram.
 */
protected void add(final double num) {
    // Reject out-of-range and NaN samples up front. (NaN fails both range
    // comparisons, so it is caught by the explicit isNaN check.)
    final boolean outOfRange = num < minValue || num > maxValue;
    if (outOfRange || Double.isNaN(num)) {
        return;
    }
    if (num < 0) {
        // Histograms hold non-negative values; record the magnitude.
        getNegativeHistogram().recordValue(-num);
    } else {
        positiveHistogram.recordValue(num);
    }
}
/**
 * Records one gauge sample: updates the running min/max/sum/count and,
 * when a histogram is attached, feeds the value into it as well.
 */
void put(Number x) {
    outputFormat = AssumedType.GAUGE;
    current = x.doubleValue();
    if (histogram != null) {
        histogram.recordValue(current);
    }
    if (count > 0) {
        // Math.max/Math.min (rather than comparisons) keep NaN propagation
        // identical to the running-aggregate semantics.
        max = Math.max(max, current);
        min = Math.min(min, current);
        sum += current;
    } else {
        // First sample seeds all aggregates.
        max = current;
        min = current;
        sum = current;
    }
    ++count;
}
// Adds every value of the given document to the per-bucket DoubleHistogram,
// creating the bucket's histogram lazily the first time that bucket ordinal
// is seen.
@Override
public void collect(int doc, long bucket) throws IOException {
    // Make sure the backing array is large enough for this bucket ordinal.
    states = bigArrays.grow(states, bucket + 1);
    DoubleHistogram state = states.get(bucket);
    if (state == null) {
        state = new DoubleHistogram(numberOfSignificantValueDigits);
        // Set the histogram to autosize so it can resize itself as
        // the data range increases. Resize operations should be
        // rare as the histogram buckets are exponential (on the top
        // level). In the future we could expose the range as an
        // option on the request so the histogram can be fixed at
        // initialisation and doesn't need resizing.
        state.setAutoResize(true);
        states.set(bucket, state);
    }
    // Position the values source on this doc and record each value.
    values.setDocument(doc);
    final int valueCount = values.count();
    for (int i = 0; i < valueCount; i++) {
        state.recordValue(values.valueAt(i));
    }
}
};
// Adds every value of the given document to the per-bucket DoubleHistogram,
// creating the bucket's histogram lazily the first time that bucket ordinal
// is seen.
@Override
public void collect(int doc, long bucket) throws IOException {
    // Make sure the backing array is large enough for this bucket ordinal.
    states = bigArrays.grow(states, bucket + 1);
    DoubleHistogram state = states.get(bucket);
    if (state == null) {
        state = new DoubleHistogram(numberOfSignificantValueDigits);
        // Set the histogram to autosize so it can resize itself as
        // the data range increases. Resize operations should be
        // rare as the histogram buckets are exponential (on the top
        // level). In the future we could expose the range as an
        // option on the request so the histogram can be fixed at
        // initialisation and doesn't need resizing.
        state.setAutoResize(true);
        states.set(bucket, state);
    }
    // Position the values source on this doc and record each value.
    values.setDocument(doc);
    final int valueCount = values.count();
    for (int i = 0; i < valueCount; i++) {
        state.recordValue(values.valueAt(i));
    }
}
};
// Adds every value of the given document to the per-bucket DoubleHistogram,
// creating the bucket's histogram lazily the first time that bucket ordinal
// is seen.
@Override
public void collect(int doc, long bucket) throws IOException {
    // Make sure the backing array is large enough for this bucket ordinal.
    states = bigArrays.grow(states, bucket + 1);
    DoubleHistogram state = states.get(bucket);
    if (state == null) {
        state = new DoubleHistogram(numberOfSignificantValueDigits);
        // Set the histogram to autosize so it can resize itself as
        // the data range increases. Resize operations should be
        // rare as the histogram buckets are exponential (on the top
        // level). In the future we could expose the range as an
        // option on the request so the histogram can be fixed at
        // initialisation and doesn't need resizing.
        state.setAutoResize(true);
        states.set(bucket, state);
    }
    // Record each of the document's values, if it has any.
    if (values.advanceExact(doc)) {
        final int valueCount = values.docValueCount();
        for (int i = 0; i < valueCount; i++) {
            state.recordValue(values.nextValue());
        }
    }
}
};
// Adds every value of the given document to the per-bucket DoubleHistogram,
// creating the bucket's histogram lazily the first time that bucket ordinal
// is seen.
@Override
public void collect(int doc, long bucket) throws IOException {
    // Make sure the backing array is large enough for this bucket ordinal.
    states = bigArrays.grow(states, bucket + 1);
    DoubleHistogram state = states.get(bucket);
    if (state == null) {
        state = new DoubleHistogram(numberOfSignificantValueDigits);
        // Set the histogram to autosize so it can resize itself as
        // the data range increases. Resize operations should be
        // rare as the histogram buckets are exponential (on the top
        // level). In the future we could expose the range as an
        // option on the request so the histogram can be fixed at
        // initialisation and doesn't need resizing.
        state.setAutoResize(true);
        states.set(bucket, state);
    }
    // Record each of the document's values, if it has any.
    if (values.advanceExact(doc)) {
        final int valueCount = values.docValueCount();
        for (int i = 0; i < valueCount; i++) {
            state.recordValue(values.nextValue());
        }
    }
}
};