/**
 * Update {@code AgentDigest} in the cache with another {@code AgentDigest}.
 *
 * @param key   histogram key
 * @param value {@code AgentDigest} to be merged
 */
public void put(HistogramKey key, @Nonnull AgentDigest value) {
  cache.asMap().compute(key, (k, v) -> {
    if (v == null) {
      // Keep the earliest dispatch time if this key is already indexed (e.g. its digest
      // was moved to the backing store but not yet dispatched). The previous
      // keyIndex.put() overwrote the indexed time unconditionally, which could replace
      // an earlier deadline and delay dispatch; this matches the compute-min pattern
      // used by the other put() overloads and by the merge branch below.
      keyIndex.compute(key, (k1, v1) -> (
          v1 != null && v1 < value.getDispatchTimeMillis() ?
              v1 : value.getDispatchTimeMillis()));
      return value;
    } else {
      // Merge into the existing digest; index the earlier of the two dispatch times.
      keyIndex.compute(key, (k1, v1) -> (
          v1 != null && v1 < v.getDispatchTimeMillis() ?
              v1 : v.getDispatchTimeMillis()));
      v.add(value);
      return v;
    }
  });
}
/**
 * Update {@code AgentDigest} in the cache with another {@code AgentDigest}.
 *
 * @param key   histogram key
 * @param value {@code AgentDigest} to be merged
 */
public void put(HistogramKey key, @Nonnull AgentDigest value) {
  cache.asMap().compute(key, (k, v) -> {
    if (v == null) {
      // Keep the earliest dispatch time if this key is already indexed (e.g. its digest
      // was moved to the backing store but not yet dispatched). The previous
      // keyIndex.put() overwrote the indexed time unconditionally, which could replace
      // an earlier deadline and delay dispatch; this matches the compute-min pattern
      // used by the other put() overloads and by the merge branch below.
      keyIndex.compute(key, (k1, v1) -> (
          v1 != null && v1 < value.getDispatchTimeMillis() ?
              v1 : value.getDispatchTimeMillis()));
      return value;
    } else {
      // Merge into the existing digest; index the earlier of the two dispatch times.
      keyIndex.compute(key, (k1, v1) -> (
          v1 != null && v1 < v.getDispatchTimeMillis() ?
              v1 : v.getDispatchTimeMillis()));
      v.add(value);
      return v;
    }
  });
}
/**
 * Merge a {@code Histogram} into the {@code AgentDigest} cached under the given key. When no
 * digest exists yet for the key, a new one is created with the supplied compression and
 * time-to-dispatch settings.
 *
 * @param key         histogram key
 * @param value       a {@code Histogram} to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch in milliseconds for new bins
 */
public void put(HistogramKey key, Histogram value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sample for this key: create a fresh digest with its dispatch deadline.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    // Index the key under the earliest known dispatch time.
    long dispatchTime = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, indexed) ->
        indexed != null && indexed < dispatchTime ? indexed : dispatchTime);
    mergeHistogram(digest, value);
    return digest;
  });
}
/**
 * Merge a {@code Histogram} into the {@code AgentDigest} cached under the given key. When no
 * digest exists yet for the key, a new one is created with the supplied compression and
 * time-to-dispatch settings.
 *
 * @param key         histogram key
 * @param value       a {@code Histogram} to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch in milliseconds for new bins
 */
public void put(HistogramKey key, Histogram value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sample for this key: create a fresh digest with its dispatch deadline.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    // Index the key under the earliest known dispatch time.
    long dispatchTime = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, indexed) ->
        indexed != null && indexed < dispatchTime ? indexed : dispatchTime);
    mergeHistogram(digest, value);
    return digest;
  });
}
/**
 * Merge a single double value into the {@code AgentDigest} cached under the given key. When no
 * digest exists yet for the key, a new one is created with the supplied compression and
 * time-to-dispatch settings.
 *
 * @param key         histogram key
 * @param value       value to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch for new bins
 */
public void put(HistogramKey key, double value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sample for this key: create a fresh digest with its dispatch deadline.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    // Index the key under the earliest known dispatch time.
    long dispatchTime = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, indexed) ->
        indexed != null && indexed < dispatchTime ? indexed : dispatchTime);
    digest.add(value);
    return digest;
  });
}
/**
 * Merge a single double value into the {@code AgentDigest} cached under the given key. When no
 * digest exists yet for the key, a new one is created with the supplied compression and
 * time-to-dispatch settings.
 *
 * @param key         histogram key
 * @param value       value to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch for new bins
 */
public void put(HistogramKey key, double value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sample for this key: create a fresh digest with its dispatch deadline.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    // Index the key under the earliest known dispatch time.
    long dispatchTime = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, indexed) ->
        indexed != null && indexed < dispatchTime ? indexed : dispatchTime);
    digest.add(value);
    return digest;
  });
}
// NOTE(review): statement fragment — the enclosing method (and its try/catch structure)
// is outside this view; code left byte-identical, comments only.
// Log the dispatch failure for this entry; bookkeeping below still runs.
logger.log(Level.SEVERE, "Failed dispatching entry " + k, e);
// Record how late this digest is being processed relative to its scheduled dispatch time.
dispatchLagMillis.update(System.currentTimeMillis() - v.getDispatchTimeMillis());
// Drop the current entry (presumably `index` is an Iterator over the key index —
// TODO confirm in the enclosing method).
index.remove();
// Count the entry as dispatched regardless of the failure logged above.
dispatchedCount.incrementAndGet();
// NOTE(review): statement fragment — the enclosing method (and its try/catch structure)
// is outside this view; code left byte-identical, comments only.
// Log the dispatch failure for this entry; bookkeeping below still runs.
logger.log(Level.SEVERE, "Failed dispatching entry " + k, e);
// Record how late this digest is being processed relative to its scheduled dispatch time.
dispatchLagMillis.update(System.currentTimeMillis() - v.getDispatchTimeMillis());
// Drop the current entry (presumably `index` is an Iterator over the key index —
// TODO confirm in the enclosing method).
index.remove();
// Count the entry as dispatched regardless of the failure logged above.
dispatchedCount.incrementAndGet();
// NOTE(review): statement fragment — start of an index-rebuild pass; the loop's closing
// brace is outside this view. Code left byte-identical, comments only.
logger.info("Started: Indexing histogram accumulator");
// Rebuild the in-memory key index from the persisted backing store: each key is indexed
// under its digest's scheduled dispatch time.
for (Map.Entry<HistogramKey, AgentDigest> entry : this.backingStore.entrySet()) {
  keyIndex.put(entry.getKey(), entry.getValue().getDispatchTimeMillis());
// NOTE(review): statement fragment — start of an index-rebuild pass; the loop's closing
// brace is outside this view. Code left byte-identical, comments only.
logger.info("Started: Indexing histogram accumulator");
// Rebuild the in-memory key index from the persisted backing store: each key is indexed
// under its digest's scheduled dispatch time.
for (Map.Entry<HistogramKey, AgentDigest> entry : this.backingStore.entrySet()) {
  keyIndex.put(entry.getKey(), entry.getValue().getDispatchTimeMillis());
@Override public void run() { for (Utils.HistogramKey key : digests.keySet()) { digests.compute(key, (k, v) -> { if (v == null) { return null; } // Remove and add to shipping queue if (v.getDispatchTimeMillis() < clock.millisSinceEpoch()) { try { ReportPoint out = Utils.pointFromKeyAndDigest(k, v); output.add(out); dispatchCounter.inc(); } catch (Exception e) { logger.log(Level.SEVERE, "Failed dispatching entry " + k, e); } return null; } return v; }); } } }
@Override public void run() { for (Utils.HistogramKey key : digests.keySet()) { digests.compute(key, (k, v) -> { if (v == null) { return null; } // Remove and add to shipping queue if (v.getDispatchTimeMillis() < clock.millisSinceEpoch()) { try { ReportPoint out = Utils.pointFromKeyAndDigest(k, v); output.add(out); dispatchCounter.inc(); } catch (Exception e) { logger.log(Level.SEVERE, "Failed dispatching entry " + k, e); } return null; } return v; }); } } }