@NotNull @Override public AgentDigest read(Bytes in, long size, @Nullable AgentDigest using) { Preconditions.checkArgument(size >= FIXED_SIZE); short compression = in.readShort(); if (using == null || using.compression != compression) { using = new AgentDigest(compression, in.readLong()); } else { using.dispatchTimeMillis = in.readLong(); } using.totalWeight = 0d; using.lastUsedCell = (int) ((size - FIXED_SIZE) / PER_CENTROID_SIZE); using.tempUsed = 0; using.unmergedWeight = 0D; // need explicit nulling of weight past lastUsedCell Arrays.fill(using.weight, using.lastUsedCell, using.weight.length, 0D); for (int i = 0; i < using.lastUsedCell; ++i) { float weight = in.readFloat(); using.weight[i] = weight; using.mean[i] = in.readFloat(); using.totalWeight += weight; } return using; }
@NotNull @Override public AgentDigest read(Bytes in, long size, @Nullable AgentDigest using) { Preconditions.checkArgument(size >= FIXED_SIZE); short compression = in.readShort(); if (using == null || using.compression != compression) { using = new AgentDigest(compression, in.readLong()); } else { using.dispatchTimeMillis = in.readLong(); } using.totalWeight = 0d; using.lastUsedCell = (int) ((size - FIXED_SIZE) / PER_CENTROID_SIZE); using.tempUsed = 0; using.unmergedWeight = 0D; // need explicit nulling of weight past lastUsedCell Arrays.fill(using.weight, using.lastUsedCell, using.weight.length, 0D); for (int i = 0; i < using.lastUsedCell; ++i) { float weight = in.readFloat(); using.weight[i] = weight; using.mean[i] = in.readFloat(); using.totalWeight += weight; } return using; }
/**
 * Update {@code AgentDigest} in the cache with a {@code Histogram} value. If such {@code AgentDigest} does not exist
 * for the specified key, it will be created with the specified compression and ttlMillis settings.
 *
 * @param key         histogram key
 * @param value       a {@code Histogram} to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch in milliseconds for new bins
 */
public void put(HistogramKey key, Histogram value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, v) -> {
    // Reuse the existing digest or create one with a fresh dispatch deadline.
    final AgentDigest digest;
    if (v == null) {
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    } else {
      digest = v;
    }
    // Track the earliest known dispatch time for this key. This was previously
    // duplicated verbatim in both branches; consolidated here (same order:
    // index update before merge).
    final long dispatchTimeMillis = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, v1) ->
        (v1 != null && v1 < dispatchTimeMillis ? v1 : dispatchTimeMillis));
    mergeHistogram(digest, value);
    return digest;
  });
}
/**
 * Update {@code AgentDigest} in the cache with a {@code Histogram} value. If such {@code AgentDigest} does not exist
 * for the specified key, it will be created with the specified compression and ttlMillis settings.
 *
 * @param key         histogram key
 * @param value       a {@code Histogram} to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch in milliseconds for new bins
 */
public void put(HistogramKey key, Histogram value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, v) -> {
    // Resolve the target digest: existing cache entry, or a new bin with the
    // configured compression and TTL (counting the creation).
    final AgentDigest digest;
    if (v == null) {
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    } else {
      digest = v;
    }
    // Keep the minimum dispatch time per key in keyIndex. The original repeated
    // this computation in both branches; it is factored out here unchanged.
    final long dispatchTimeMillis = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, v1) ->
        (v1 != null && v1 < dispatchTimeMillis ? v1 : dispatchTimeMillis));
    mergeHistogram(digest, value);
    return digest;
  });
}
/**
 * Update {@code AgentDigest} in the cache with a double value. If such {@code AgentDigest} does not exist for
 * the specified key, it will be created with the specified compression and ttlMillis settings.
 *
 * @param key         histogram key
 * @param value       value to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch for new bins
 */
public void put(HistogramKey key, double value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, v) -> {
    // Reuse the existing digest or create one with a fresh dispatch deadline.
    final AgentDigest digest;
    if (v == null) {
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    } else {
      digest = v;
    }
    // Track the earliest known dispatch time for this key. This logic was
    // previously duplicated in both branches; consolidated here (same order:
    // index update before the value is added).
    final long dispatchTimeMillis = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, v1) ->
        (v1 != null && v1 < dispatchTimeMillis ? v1 : dispatchTimeMillis));
    digest.add(value);
    return digest;
  });
}
/**
 * Update {@code AgentDigest} in the cache with a double value. If such {@code AgentDigest} does not exist for
 * the specified key, it will be created with the specified compression and ttlMillis settings.
 *
 * @param key         histogram key
 * @param value       value to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis   default time-to-dispatch for new bins
 */
public void put(HistogramKey key, double value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, v) -> {
    // Resolve the target digest: existing cache entry, or a new bin with the
    // configured compression and TTL (counting the creation).
    final AgentDigest digest;
    if (v == null) {
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    } else {
      digest = v;
    }
    // Keep the minimum dispatch time per key in keyIndex. The original repeated
    // this computation in both branches; it is factored out here unchanged.
    final long dispatchTimeMillis = digest.getDispatchTimeMillis();
    keyIndex.compute(key, (k1, v1) ->
        (v1 != null && v1 < dispatchTimeMillis ? v1 : dispatchTimeMillis));
    digest.add(value);
    return digest;
  });
}