private static TimelineMetric createDummyMetric(long ts, Long value) {
  TimelineMetric metric = new TimelineMetric();
  metric.setId("dummy_metric");
  metric.addValue(ts, value);
  metric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
  return metric;
}
/**
 * Keep the greater value of incoming and base. Stateless operation.
 *
 * @param incoming Metric a
 * @param base Metric b
 * @param state Operation state (not used)
 * @return the greater value of a and b
 */
@Override
public TimelineMetric exec(TimelineMetric incoming, TimelineMetric base,
    Map<Object, Object> state) {
  if (base == null) {
    return incoming;
  }
  Number incomingValue = incoming.getSingleDataValue();
  Number aggregateValue = base.getSingleDataValue();
  if (aggregateValue == null) {
    aggregateValue = Long.MIN_VALUE;
  }
  if (TimelineMetricCalculator.compare(incomingValue, aggregateValue) > 0) {
    base.addValue(incoming.getSingleDataTimestamp(), incomingValue);
  }
  return base;
}
},
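// Illustrative sketch (not from the source): driving the MAX operation above
// directly with two single-value metrics. The "PEAK_MEMORY" id, timestamps and
// values are made up for the example; only the TimelineMetric and
// TimelineMetricOperation APIs shown elsewhere in this section are assumed.
TimelineMetric first = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
first.setId("PEAK_MEMORY");
first.addValue(1L, 100L);

TimelineMetric second = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
second.setId("PEAK_MEMORY");
second.addValue(2L, 250L);

// MAX ignores the state map, so null is fine here; the greater incoming value
// (250L) is recorded on the base metric at the incoming timestamp.
TimelineMetric max = TimelineMetricOperation.MAX.exec(second, first, null);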
private void storeFlowMetrics(byte[] rowKey, Set<TimelineMetric> metrics,
    Attribute... attributes) throws IOException {
  for (TimelineMetric metric : metrics) {
    byte[] metricColumnQualifier =
        stringKeyConverter.encode(metric.getId());
    Map<Long, Number> timeseries = metric.getValues();
    for (Map.Entry<Long, Number> timeseriesEntry : timeseries.entrySet()) {
      Long timestamp = timeseriesEntry.getKey();
      ColumnRWHelper.store(rowKey, flowRunTable, FlowRunColumnPrefix.METRIC,
          metricColumnQualifier, timestamp, timeseriesEntry.getValue(),
          attributes);
    }
  }
}
private TimelineMetric getTimelineMetric(String name, long timestamp,
    Number value) {
  TimelineMetric metric = new TimelineMetric();
  metric.setId(name);
  metric.addValue(timestamp, value);
  return metric;
}
/**
 * Get the latest timeline metric as single value type.
 *
 * @param metric Incoming timeline metric
 * @return A single-value metric carrying the latest data point of the
 *         incoming metric
 */
public static TimelineMetric getLatestSingleValueMetric(
    TimelineMetric metric) {
  if (metric.getType() == Type.SINGLE_VALUE) {
    return metric;
  } else {
    TimelineMetric singleValueMetric = new TimelineMetric(Type.SINGLE_VALUE);
    Long firstKey = metric.values.firstKey();
    if (firstKey != null) {
      Number firstValue = metric.values.get(firstKey);
      singleValueMetric.addValue(firstKey, firstValue);
    }
    return singleValueMetric;
  }
}
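// Illustrative sketch (not from the source): collapsing a time-series metric
// to its most recent data point with the helper above. The "MEMORY" id and
// values are made up; the sketch relies on the values map being ordered
// newest-first, as the firstKey() call in getLatestSingleValueMetric implies.
TimelineMetric series = new TimelineMetric(TimelineMetric.Type.TIME_SERIES);
series.setId("MEMORY");
series.addValue(1L, 10L);
series.addValue(2L, 20L);
series.addValue(3L, 30L);

TimelineMetric latest = TimelineMetric.getLatestSingleValueMetric(series);
// latest is a SINGLE_VALUE metric holding only the newest point (3L -> 30L).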
private static TimelineMetric getTimeSeriesMetric(String id,
    TimelineMetricOperation op, Map<Long, Number> metricValues) {
  TimelineMetric m = new TimelineMetric();
  m.setId(id);
  m.setType(Type.TIME_SERIES);
  m.setRealtimeAggregationOp(op);
  m.setValues(metricValues);
  return m;
}
TimelineMetric metric1 =
    new TimelineMetric(TimelineMetric.Type.TIME_SERIES);
metric1.setId("test metric id 1");
metric1.addValue(1L, 1.0F);
metric1.addValue(3L, 3.0D);
metric1.addValue(2L, 2);
Assert.assertEquals(TimelineMetric.Type.TIME_SERIES, metric1.getType());
Iterator<Map.Entry<Long, Number>> itr =
    metric1.getValues().entrySet().iterator();
Map.Entry<Long, Number> entry = itr.next();
Assert.assertEquals(new Long(3L), entry.getKey());

TimelineMetric metric2 =
    new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
metric2.setId("test metric id 1");
metric2.addValue(3L, (short) 3);
Assert.assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric2.getType());
Assert.assertTrue(
    metric2.getValues().values().iterator().next() instanceof Short);

Map<Long, Number> points = new HashMap<>();
points.put(4L, 4.0D);
points.put(5L, 5.0D);
try {
  metric2.setValues(points);
  Assert.fail();
} catch (IllegalArgumentException e) {
  // expected: a single-value metric rejects a multi-point value map
}
try {
  metric2.addValues(points);
  Assert.fail();
} catch (IllegalArgumentException e) {
  // expected: the same restriction applies to addValues
}
/**
 * Helper method for reading and deserializing {@link TimelineMetric} objects
 * using the specified column prefix. The timeline metrics are then added to
 * the given timeline entity.
 *
 * @param entity {@link TimelineEntity} object.
 * @param result {@link Result} object retrieved from backend.
 * @param columnPrefix Metric column prefix
 * @throws IOException if any exception is encountered while reading metrics.
 */
protected void readMetrics(TimelineEntity entity, Result result,
    ColumnPrefix<?> columnPrefix) throws IOException {
  NavigableMap<String, NavigableMap<Long, Number>> metricsResult =
      ColumnRWHelper.readResultsWithTimestamps(
          result, columnPrefix, stringKeyConverter);
  for (Map.Entry<String, NavigableMap<Long, Number>> metricResult :
      metricsResult.entrySet()) {
    TimelineMetric metric = new TimelineMetric();
    metric.setId(metricResult.getKey());
    // Simply assume that if the value set contains more than one element,
    // the metric is a TIME_SERIES metric; otherwise it's a SINGLE_VALUE
    // metric.
    TimelineMetric.Type metricType = metricResult.getValue().size() > 1
        ? TimelineMetric.Type.TIME_SERIES : TimelineMetric.Type.SINGLE_VALUE;
    metric.setType(metricType);
    metric.addValues(metricResult.getValue());
    entity.addMetric(metric);
  }
}
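// Illustrative sketch (not from the source): the same one-point vs. many-point
// type deduction used in readMetrics above, applied to a plain value map
// instead of an HBase Result. The "READ_BYTES" id and values are made up.
Map<Long, Number> readings = new HashMap<>();
readings.put(1L, 40L);
readings.put(2L, 55L);

TimelineMetric.Type deducedType = readings.size() > 1
    ? TimelineMetric.Type.TIME_SERIES : TimelineMetric.Type.SINGLE_VALUE;
TimelineMetric reconstructed = new TimelineMetric(deducedType);
reconstructed.setId("READ_BYTES");
reconstructed.addValues(readings);
// With two data points the metric is rebuilt as a TIME_SERIES metric.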
TimelineMetric aggregatedMetric = TimelineMetric.aggregateTo(m1, null);
assertEquals(10000L, aggregatedMetric.getSingleDataValue());
aggregatedMetric = TimelineMetric.aggregateTo(m2, aggregatedMetric);
assertEquals(30000L, aggregatedMetric.getSingleDataValue());

TimelineMetric m2New = getSingleValueMetric("MEGA_BYTES_MILLIS",
    TimelineMetricOperation.SUM, ts, 10000L);
aggregatedMetric = TimelineMetric.aggregateTo(m2New, aggregatedMetric, state);
assertEquals(20000L, aggregatedMetric.getSingleDataValue());

TimelineMetric aggregatedMax = TimelineMetric.aggregateTo(m3, null);
assertEquals(150L, aggregatedMax.getSingleDataValue());
aggregatedMax = TimelineMetric.aggregateTo(m4, aggregatedMax);
assertEquals(170L, aggregatedMax.getSingleDataValue());

TimelineMetric m5 = getSingleValueMetric(/* metric id elided in source */,
    TimelineMetricOperation.AVG, ts, 150L);
try {
  TimelineMetric.aggregateTo(m5, null);
  fail("Taking average among metrics is not supported! ");
} catch (UnsupportedOperationException e) {
  // expected: AVG is not supported for real-time aggregation
}
Set<TimelineMetric> metrics = resultEntity.getMetrics();
for (TimelineMetric m : metrics) {
  if (m.getId().startsWith("HDFS_BYTES_WRITE")) {
    assertEquals(100 * n, m.getSingleDataValue().intValue());
  } else if (m.getId().startsWith("VCORES_USED")) {
    assertEquals(3 * n, m.getSingleDataValue().intValue());
  } else if (m.getId().startsWith("TXN_FINISH_TIME")) {
    assertEquals(n - 1, m.getSingleDataValue());
  } else {
    fail("Unrecognized metric! " + m.getId());
  }
}

for (TimelineMetric m : metrics) {
  if (m.getId().equals("HDFS_BYTES_WRITE")) {
    assertEquals(100 * n, m.getSingleDataValue().intValue());
  } else if (m.getId().equals("VCORES_USED")) {
    assertEquals(3 * n, m.getSingleDataValue().intValue());
  } else if (m.getId().equals("TXN_FINISH_TIME")) {
    assertEquals(n - 1, m.getSingleDataValue());
  } else {
    fail("Unrecognized metric! " + m.getId());
  }
}
TimelineMetric aggregatedMetric =
    aggregatedEntity.getMetrics().iterator().next();
assertEquals(750L, aggregatedMetric.getValues().values().iterator().next());
assertEquals(TimelineMetricOperation.SUM,
    aggregatedMetric.getRealtimeAggregationOp());

assertTrue(aggregatedMetric.getValues().isEmpty());
assertEquals(TimelineMetricOperation.NOP,
    aggregatedMetric.getRealtimeAggregationOp());

assertEquals(150L, aggregatedMetric.getValues().values().iterator().next());
assertEquals(TimelineMetricOperation.SUM,
    aggregatedMetric.getRealtimeAggregationOp());
private static TimelineMetric getSingleValueMetric(String id,
    TimelineMetricOperation op, long timestamp, long value) {
  TimelineMetric m = new TimelineMetric();
  m.setId(id);
  m.setType(Type.SINGLE_VALUE);
  m.setRealtimeAggregationOp(op);
  Map<Long, Number> metricValues = new HashMap<Long, Number>();
  metricValues.put(timestamp, value);
  m.setValues(metricValues);
  return m;
}
@Test
public void testPutEntities() throws Exception {
  TimelineV2Client client =
      TimelineV2Client.createTimelineClient(ApplicationId.newInstance(0, 1));
  try {
    // Set the timeline service address manually.
    client.setTimelineCollectorInfo(CollectorInfo.newInstance(
        collectorManager.getRestServerBindAddress()));
    client.init(conf);
    client.start();
    TimelineEntity entity = new TimelineEntity();
    entity.setType("test entity type");
    entity.setId("test entity id");
    TimelineMetric metric =
        new TimelineMetric(TimelineMetric.Type.TIME_SERIES);
    metric.setId("test metric id");
    metric.addValue(1L, 1.0D);
    metric.addValue(2L, 2.0D);
    entity.addMetric(metric);
    client.putEntities(entity);
    client.putEntitiesAsync(entity);
  } finally {
    client.stop();
  }
}
long currentTimeMillis = System.currentTimeMillis();
if (pmemUsage != ResourceCalculatorProcessTree.UNAVAILABLE) {
  TimelineMetric memoryMetric = new TimelineMetric();
  memoryMetric.setId(ContainerMetric.MEMORY.toString());
  memoryMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
  memoryMetric.addValue(currentTimeMillis, pmemUsage);
  entity.addMetric(memoryMetric);
}
TimelineMetric cpuMetric = new TimelineMetric();
cpuMetric.setId(ContainerMetric.CPU.toString());
cpuMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
cpuMetric.addValue(currentTimeMillis,
    Math.round(cpuUsagePercentPerCore));
entity.addMetric(cpuMetric);
@Override
public TimelineMetric exec(TimelineMetric incoming, TimelineMetric base,
    Map<Object, Object> state) {
  if (base == null) {
    return incoming;
  }
  Number incomingValue = incoming.getSingleDataValue();
  Number aggregateValue = base.getSingleDataValue();
  Number result =
      TimelineMetricCalculator.sum(incomingValue, aggregateValue);

  // If the operation state holds the previously aggregated value from this
  // source, subtract it so the new report replaces the old one rather than
  // being added on top of it.
  if (state != null) {
    Object prevMetric = state.get(PREV_METRIC_STATE_KEY);
    if (prevMetric instanceof TimelineMetric) {
      result = TimelineMetricCalculator.sub(result,
          ((TimelineMetric) prevMetric).getSingleDataValue());
    }
  }
  base.addValue(incoming.getSingleDataTimestamp(), result);
  return base;
}
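// Illustrative sketch (not from the source): stateful SUM aggregation in which
// a new report replaces a previous one. The "BYTES_WRITTEN" id, timestamps and
// values are made up; it assumes PREV_METRIC_STATE_KEY is the state key the
// SUM operation consults, as in the method above.
TimelineMetric base = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
base.setId("BYTES_WRITTEN");
base.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
base.addValue(1L, 300L);           // current aggregate

TimelineMetric prevReport =
    new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
prevReport.setId("BYTES_WRITTEN");
prevReport.addValue(1L, 100L);     // value previously reported by this source

TimelineMetric newReport =
    new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
newReport.setId("BYTES_WRITTEN");
newReport.addValue(2L, 120L);      // updated value from the same source

Map<Object, Object> state = new HashMap<>();
state.put(TimelineMetricOperation.PREV_METRIC_STATE_KEY, prevReport);
TimelineMetric updated =
    TimelineMetricOperation.SUM.exec(newReport, base, state);
// The aggregate becomes 300 + 120 - 100 = 320, recorded at timestamp 2L.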
/**
 * Stores the {@linkplain TimelineMetric} information from the
 * {@linkplain TimelineEntity} object.
 */
private <T extends BaseTable<T>> void storeMetrics(
    byte[] rowKey, Set<TimelineMetric> metrics,
    ColumnPrefix<T> columnPrefix, TypedBufferedMutator<T> table)
    throws IOException {
  if (metrics != null) {
    for (TimelineMetric metric : metrics) {
      byte[] metricColumnQualifier =
          stringKeyConverter.encode(metric.getId());
      Map<Long, Number> timeseries = metric.getValues();
      for (Map.Entry<Long, Number> timeseriesEntry : timeseries.entrySet()) {
        Long timestamp = timeseriesEntry.getKey();
        ColumnRWHelper.store(rowKey, table, columnPrefix,
            metricColumnQualifier, timestamp, timeseriesEntry.getValue());
      }
    }
  }
}
TimelineMetric m1 = new TimelineMetric();
m1.setId("HDFS_BYTES_WRITE");
m1.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
long ts = System.currentTimeMillis();
m1.addValue(ts - 20000, 100L);
metrics.add(m1);

TimelineMetric m2 = new TimelineMetric();
m2.setId("VCORES_USED");
m2.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
m2.addValue(ts - 20000, 3L);
metrics.add(m2);

TimelineMetric m3 = new TimelineMetric();
m3.setId("UNRELATED_VALUES");
m3.addValue(ts - 20000, 3L);
metrics.add(m3);

TimelineMetric m4 = new TimelineMetric();
m4.setId("TXN_FINISH_TIME");
m4.setRealtimeAggregationOp(TimelineMetricOperation.MAX);
m4.addValue(ts - 20000, i);
metrics.add(m4);
te.addEntity(entity);

TimelineMetric metric = new TimelineMetric();
String metricId = "CPU";
metric.setId(metricId);
metric.setType(TimelineMetric.Type.SINGLE_VALUE);
metric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
metric.addValue(1425016501000L, 1234567L);
entity11.addEvent(event);
Set<TimelineMetric> metrics = new HashSet<TimelineMetric>();
TimelineMetric metric1 = new TimelineMetric();
metric1.setId("metric1");
metric1.setType(TimelineMetric.Type.SINGLE_VALUE);
metric1.addValue(1425016502006L, 113);
metrics.add(metric1);
TimelineMetric metric2 = new TimelineMetric();
metric2.setId("metric2");
metric2.setType(TimelineMetric.Type.TIME_SERIES);
metric2.addValue(1425016502016L, 34);
metrics.add(metric2);
entity11.setMetrics(metrics);

entity12.addConfigs(configs);
metrics.clear();
TimelineMetric metric12 = new TimelineMetric();
metric12.setId("metric2");
metric12.setType(TimelineMetric.Type.TIME_SERIES);
metric12.addValue(1425016502032L, 48);
metric12.addValue(1425016502054L, 51);
metrics.add(metric12);
TimelineMetric metric3 = new TimelineMetric();
metric3.setId("metric3");
metric3.setType(TimelineMetric.Type.SINGLE_VALUE);
metric3.addValue(1425016502060L, 23L);
metrics.add(metric3);
entity12.setMetrics(metrics);