/**
 * Rotates to the next sample slot: on first pass through the ring a fresh
 * sample is allocated and appended; afterwards the existing slot is recycled
 * by resetting its state for the new window.
 */
private Sample advance(MetricConfig config, long timeMs) {
    this.current = (this.current + 1) % config.samples();
    boolean slotAllocated = this.current < samples.size();
    if (!slotAllocated) {
        Sample fresh = newSample(timeMs);
        this.samples.add(fresh);
        return fresh;
    }
    Sample recycled = current(timeMs);
    recycled.reset(timeMs);
    return recycled;
}
/**
 * Resets every sample whose window ended at least {@code samples * timeWindowMs}
 * ago, so expired data cannot contribute to subsequent measurements.
 */
protected void purgeObsoleteSamples(MetricConfig config, long now) {
    final long expireAge = config.samples() * config.timeWindowMs();
    for (Sample candidate : samples) {
        boolean expired = now - candidate.lastWindowMs >= expireAge;
        if (expired) {
            candidate.reset(now);
        }
    }
}
public static void initialize() { final MetricConfig metricConfig = new MetricConfig() .samples(100) .timeWindow( 1000, TimeUnit.MILLISECONDS ); final List<MetricsReporter> reporters = new ArrayList<>(); reporters.add(new JmxReporter("io.confluent.ksql.metrics")); // Replace all static contents other than Time to ensure they are cleaned for tests that are // not aware of the need to initialize/cleanup this test, in case test processes are reused. // Tests aware of the class clean everything up properly to get the state into a clean state, // a full, fresh instantiation here ensures something like KsqlEngineMetricsTest running after // another test that used MetricsCollector without running cleanUp will behave correctly. metrics = new Metrics(metricConfig, reporters, new SystemTime()); collectorMap = new ConcurrentHashMap<>(); }
public long windowSize(MetricConfig config, long now) { // purge old samples before we compute the window size stat.purgeObsoleteSamples(config, now); /* * Here we check the total amount of time elapsed since the oldest non-obsolete window. * This give the total windowSize of the batch which is the time used for Rate computation. * However, there is an issue if we do not have sufficient data for e.g. if only 1 second has elapsed in a 30 second * window, the measured rate will be very high. * Hence we assume that the elapsed time is always N-1 complete windows plus whatever fraction of the final window is complete. * * Note that we could simply count the amount of time elapsed in the current window and add n-1 windows to get the total time, * but this approach does not account for sleeps. SampledStat only creates samples whenever record is called, * if no record is called for a period of time that time is not accounted for in windowSize and produces incorrect results. */ long totalElapsedTimeMs = now - stat.oldest(now).lastWindowMs; // Check how many full windows of data we have currently retained int numFullWindows = (int) (totalElapsedTimeMs / config.timeWindowMs()); int minFullWindows = config.samples() - 1; // If the available windows are less than the minimum required, add the difference to the totalElapsedTime if (numFullWindows < minFullWindows) totalElapsedTimeMs += (minFullWindows - numFullWindows) * config.timeWindowMs(); return totalElapsedTimeMs; }
_thread.setDaemon(true); MetricConfig metricConfig = new MetricConfig().samples(60).timeWindow(1000, TimeUnit.MILLISECONDS); List<MetricsReporter> reporters = new ArrayList<>(); reporters.add(new JmxReporter(JMX_PREFIX));
_handleNewPartitionsExecutor = Executors.newSingleThreadScheduledExecutor(new HandleNewPartitionsThreadFactory()); MetricConfig metricConfig = new MetricConfig().samples(60).timeWindow(1000, TimeUnit.MILLISECONDS); List<MetricsReporter> reporters = new ArrayList<>(); reporters.add(new JmxReporter(JMX_PREFIX));
@Before
public void setup() {
    this.config = new MetricConfig().eventWindow(50).samples(2);
    this.time = new MockTime();
    MetricsReporter reporter = new JmxReporter();
    this.metrics = new Metrics(config, Arrays.asList(reporter), time, true);
}
.samples(100)); final Sensor sensor = metrics.sensor("sensor");
MetricConfig metricConfig = new MetricConfig().samples(config.getInt(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG)) .timeWindow(config.getLong(ProducerConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS) .recordLevel(Sensor.RecordingLevel.forName(config.getString(ProducerConfig.METRICS_RECORDING_LEVEL_CONFIG)))
@Test
public void testOldDataHasNoEffect() {
    final long windowMs = 100;
    final int samples = 2;
    MetricConfig config =
        new MetricConfig().timeWindow(windowMs, TimeUnit.MILLISECONDS).samples(samples);
    Max max = new Max();
    max.record(config, 50, time.milliseconds());
    // Advance past the full retention period so the recorded value expires.
    time.sleep(samples * windowMs);
    assertEquals(Double.NaN, max.measure(config, time.milliseconds()), EPS);
}
MetricConfig metricConfig = new MetricConfig().samples(config.getInt(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG)) .timeWindow(config.getLong(ConsumerConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS) .recordLevel(Sensor.RecordingLevel.forName(config.getString(ConsumerConfig.METRICS_RECORDING_LEVEL_CONFIG)))
Collections.singletonMap(AdminClientConfig.CLIENT_ID_CONFIG, clientId)); Map<String, String> metricTags = Collections.singletonMap("client-id", clientId); MetricConfig metricConfig = new MetricConfig().samples(config.getInt(AdminClientConfig.METRICS_NUM_SAMPLES_CONFIG)) .timeWindow(config.getLong(AdminClientConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS) .recordLevel(Sensor.RecordingLevel.forName(config.getString(AdminClientConfig.METRICS_RECORDING_LEVEL_CONFIG)))
double elapsedSecs = (config.timeWindowMs() * (config.samples() - 1)) / 1000.0; assertEquals(String.format("Occurrences(0...%d) = %f", count, count / elapsedSecs), count / elapsedSecs, metricValueFunc.apply(metrics.metrics().get(metrics.metricName("test.occurences", "grp1"))), EPS);
@Test public void testRateWindowing() throws Exception { MetricConfig cfg = new MetricConfig().samples(3); Sensor s = metrics.sensor("test.sensor", cfg); MetricName rateMetricName = metrics.metricName("test.rate", "grp1"); int count = cfg.samples() - 1; double elapsedSecs = (cfg.timeWindowMs() * (cfg.samples() - 1) + cfg.timeWindowMs() / 2) / 1000.0; time.sleep(cfg.timeWindowMs() * cfg.samples()); assertEquals(0, (Double) rateMetric.metricValue(), EPS); assertEquals(0, (Double) countRateMetric.metricValue(), EPS);
@Test public void testEventWindowing() { Count count = new Count(); MetricConfig config = new MetricConfig().eventWindow(1).samples(2); count.record(config, 1.0, time.milliseconds()); count.record(config, 1.0, time.milliseconds()); assertEquals(2.0, count.measure(config, time.milliseconds()), EPS); count.record(config, 1.0, time.milliseconds()); // first event times out assertEquals(2.0, count.measure(config, time.milliseconds()), EPS); }
@Test public void testTimeWindowing() { Count count = new Count(); MetricConfig config = new MetricConfig().timeWindow(1, TimeUnit.MILLISECONDS).samples(2); count.record(config, 1.0, time.milliseconds()); time.sleep(1); count.record(config, 1.0, time.milliseconds()); assertEquals(2.0, count.measure(config, time.milliseconds()), EPS); time.sleep(1); count.record(config, 1.0, time.milliseconds()); // oldest event times out assertEquals(2.0, count.measure(config, time.milliseconds()), EPS); }
/**
 * Some implementations of SampledStat make sense to return the initial value
 * when there are no values set
 */
@Test
public void testSampledStatReturnsInitialValueWhenNoValuesExist() {
    final long windowMs = 100;
    final int samples = 2;
    MetricConfig config =
        new MetricConfig().timeWindow(windowMs, TimeUnit.MILLISECONDS).samples(samples);
    Count count = new Count();
    Rate.SampledTotal sampledTotal = new Rate.SampledTotal();
    count.record(config, 50, time.milliseconds());
    sampledTotal.record(config, 50, time.milliseconds());
    // Let every sample expire; these stats fall back to their initial value 0.
    time.sleep(samples * windowMs);
    assertEquals(0, count.measure(config, time.milliseconds()), EPS);
    assertEquals(0.0, sampledTotal.measure(config, time.milliseconds()), EPS);
}
new Percentile(metrics.metricName("test.p50", "grp1"), 50), new Percentile(metrics.metricName("test.p75", "grp1"), 75)); MetricConfig config = new MetricConfig().eventWindow(50).samples(2); Sensor sensor = metrics.sensor("test", config); sensor.add(percs);
/** * Some implementations of SampledStat make sense to return NaN * when there are no values set rather than the initial value */ @Test public void testSampledStatReturnsNaNWhenNoValuesExist() { // This is tested by having a SampledStat with expired Stats, // because their values get reset to the initial values. Max max = new Max(); Min min = new Min(); Avg avg = new Avg(); long windowMs = 100; int samples = 2; MetricConfig config = new MetricConfig().timeWindow(windowMs, TimeUnit.MILLISECONDS).samples(samples); max.record(config, 50, time.milliseconds()); min.record(config, 50, time.milliseconds()); avg.record(config, 50, time.milliseconds()); time.sleep(samples * windowMs); assertEquals(Double.NaN, max.measure(config, time.milliseconds()), EPS); assertEquals(Double.NaN, min.measure(config, time.milliseconds()), EPS); assertEquals(Double.NaN, avg.measure(config, time.milliseconds()), EPS); }
MetricConfig config = new MetricConfig().timeWindow(1, TimeUnit.SECONDS).samples(10);