/**
 * Wraps a single sample into a {@link GenericMetricEntity}.
 *
 * @param timestamp  metric timestamp in milliseconds
 * @param metricName metric prefix name
 * @param value      the single sample value to store
 * @param tags       tag map attached to the entity
 * @return the populated metric entity
 */
private static GenericMetricEntity metricWrapper(Long timestamp, String metricName, double value, Map<String, String> tags) {
    GenericMetricEntity entity = new GenericMetricEntity();
    entity.setPrefix(metricName);
    entity.setTimestamp(timestamp);
    entity.setValue(new double[] {value});
    entity.setTags(tags);
    return entity;
}
/**
 * Folds one new sample into the entity's running aggregate.
 *
 * @param entity  entity holding the current aggregate in value[0]
 * @param aggFunc aggregation function to apply (MAX or AVG)
 * @param value   the new sample
 * @param count   number of samples already folded into the aggregate
 * @throws IllegalArgumentException for any unsupported aggregation function
 */
private void updateEntityAggValue(GenericMetricEntity entity, HadoopClusterConstants.AggregateFunc aggFunc, double value, double count) {
    double lastValue = entity.getValue()[0];
    switch (aggFunc) {
        case MAX:
            entity.setValue(new double[] {Math.max(lastValue, value)});
            return;
        case AVG:
            // Incremental mean. Keep full double precision: the previous
            // (long) cast silently truncated the fractional part even though
            // the metric value is stored as a double.
            double avgValue = (lastValue * count + value) / (count + 1);
            entity.setValue(new double[] {avgValue});
            return;
        default:
            throw new IllegalArgumentException("Illegal aggregation function: " + aggFunc);
    }
}
/**
 * Flattens a metric entity into a stream-event map with the fields
 * site, user, timestamp, metric and value.
 *
 * @param entity source metric entity
 * @return a mutable map describing the stream event
 */
private Map<String, Object> buildStreamEvent(GenericMetricEntity entity) {
    Map<String, String> tags = entity.getTags();
    Map<String, Object> event = new HashMap<>();
    event.put("site", tags.get("site"));
    event.put("user", tags.get("user"));
    event.put("timestamp", entity.getTimestamp());
    event.put("metric", entity.getPrefix());
    event.put("value", entity.getValue()[0]);
    return event;
}
// NOTE(review): incomplete fragment — the enclosing method header, the loop that
// defines `i`, and the closing braces are outside this view. Do not assume `i`
// is bounded by e.getValue().length from this span alone; confirm in the full file.
// Lazily create the reusable single-timestamp shadow entity on first use.
if(singleMetricEntity == null) singleMetricEntity = new GenericMetricShadowEntity();
GenericMetricEntity e = (GenericMetricEntity)logAPIEntity;
if(e.getValue()!=null) {
    int count = e.getValue().length; // number of samples packed into this entity
    @SuppressWarnings("unused")
    Class<?> cls = ed.getMetricDefinition().getSingleTimestampEntityClass(); // presumably forces class resolution — TODO confirm why it is kept
    // Copy tags and the i-th sample into the shadow entity (`i` defined outside this fragment).
    singleMetricEntity.setTags(e.getTags());
    singleMetricEntity.setValue(e.getValue()[i]);
// NOTE(review): incomplete fragment — `aggregatorColumns` and the braces closing
// the loop/guard are outside this view. As written, `columnNames`/`columnValues`
// are unused within the visible span, and the accumulation appears to run only
// when the timestamp key was absent; in the full file the `}` closing the
// containsKey guard likely sits right after the 0L initialization — confirm.
Map<String, String> tags = entity.getTags();
for (List<String> columnNames : this.aggregateColumns) {
    List<String> columnValues = new ArrayList<>();
    // Initialize the bucket for this timestamp on first sight...
    if (!this.aggregateValues.get(aggregatorColumns).containsKey(entity.getTimestamp())) {
        this.aggregateValues.get(aggregatorColumns).put(entity.getTimestamp(), 0L);
        // ...then fold this entity's first value into the bucket (truncated to long).
        Long previous = this.aggregateValues.get(aggregatorColumns).get(entity.getTimestamp());
        this.aggregateValues.get(aggregatorColumns).put(entity.getTimestamp(), previous + (long)entity.getValue()[0]);
@Override public void entityCreated(TaggedLogAPIEntity entity) throws Exception { GenericMetricEntity e = (GenericMetricEntity)entity; double[] value = e.getValue(); if (value != null) { int count = value.length; @SuppressWarnings("unused") Class<?> cls = ed.getMetricDefinition().getSingleTimestampEntityClass(); for (int i = 0; i < count; i++) { long ts = entity.getTimestamp() + i * ed.getMetricDefinition().getInterval(); // exclude those entity which is not within the time range in search condition. [start, end) if (ts < start || ts >= end) { continue; } single.setTimestamp(ts); single.setTags(entity.getTags()); single.setValue(e.getValue()[i]); for (EntityCreationListener l : listeners) { l.entityCreated(single); } } } }
@Override public void entityCreated(TaggedLogAPIEntity entity) throws Exception{ GenericMetricEntity e = (GenericMetricEntity)entity; double[] value = e.getValue(); if(value != null) { int count =value.length; @SuppressWarnings("unused") Class<?> cls = ed.getMetricDefinition().getSingleTimestampEntityClass(); for (int i = 0; i < count; i++) { long ts = entity.getTimestamp() + i * ed.getMetricDefinition().getInterval(); // exclude those entity which is not within the time range in search condition. [start, end) if (ts < start || ts >= end) { continue; } single.setTimestamp(ts); single.setTags(entity.getTags()); single.setValue(e.getValue()[i]); for (EntityCreationListener l : _listeners) { l.entityCreated(single); } } } }
/**
 * Builds a single-value metric entity and queues it on metricEntities.
 *
 * @param metricName metric prefix name
 * @param tags       tag map attached to the entity
 * @param timestamp  metric timestamp in milliseconds
 * @param value      the single sample value
 * @throws Exception reserved for callers; nothing in this body throws
 */
private void createMetric(String metricName, Map<String, String> tags, long timestamp, double value) throws Exception {
    GenericMetricEntity metric = new GenericMetricEntity();
    metric.setTimestamp(timestamp);
    metric.setPrefix(metricName);
    metric.setTags(tags);
    metric.setValue(new double[] {value});
    this.metricEntities.add(metric);
}
/**
 * Builds a metric entity from the given samples and sends it through the
 * underlying client, returning {@code this} for call chaining.
 *
 * @param metricName metric prefix name
 * @param timestamp  metric timestamp in milliseconds
 * @param tags       tag map attached to the entity
 * @param values     one or more sample values
 * @return this sender, for fluent chaining
 * @throws IOException                 on transport failure from the underlying send
 * @throws EagleServiceClientException on service-side failure from the underlying send
 */
public MetricSender send(String metricName, long timestamp, Map<String, String> tags, double... values) throws IOException, EagleServiceClientException {
    GenericMetricEntity entity = new GenericMetricEntity();
    entity.setTimestamp(timestamp);
    entity.setTags(tags);
    entity.setPrefix(metricName);
    entity.setValue(values);
    super.send(entity);
    return this;
}
/**
 * Packages the given samples as a GenericMetricEntity and forwards it to the
 * parent client's send, enabling fluent chaining via the returned sender.
 *
 * @param metricName metric prefix name
 * @param timestamp  metric timestamp in milliseconds
 * @param tags       tag map attached to the entity
 * @param values     one or more sample values
 * @return this sender, for fluent chaining
 * @throws IOException                 on transport failure from the underlying send
 * @throws EagleServiceClientException on service-side failure from the underlying send
 */
public MetricSender send(String metricName, long timestamp, Map<String, String> tags, double... values) throws IOException, EagleServiceClientException {
    GenericMetricEntity metricEntity = new GenericMetricEntity();
    metricEntity.setPrefix(metricName);
    metricEntity.setTags(tags);
    metricEntity.setTimestamp(timestamp);
    metricEntity.setValue(values);
    super.send(metricEntity);
    return this;
}
/**
 * Accumulates {@code value} into the metric bucket identified by
 * (metricName, tags, timestamp), creating a zero-valued entity on first use.
 *
 * @param metricName metric prefix name
 * @param tags       tag map identifying the bucket
 * @param timestamp  metric timestamp in milliseconds
 * @param value      amount to add to the bucket's running total
 */
private void createMetric(String metricName, Map<String, String> tags, long timestamp, int value) {
    // NOTE: the key relies on tags.toString(), whose ordering depends on the map implementation.
    String key = metricName + tags.toString() + " " + timestamp;
    GenericMetricEntity entity = appMetricEntities.computeIfAbsent(key, k -> {
        GenericMetricEntity fresh = new GenericMetricEntity();
        fresh.setTags(tags);
        fresh.setTimestamp(timestamp);
        fresh.setPrefix(metricName);
        fresh.setValue(new double[] {0.0});
        return fresh;
    });
    entity.setValue(new double[] {entity.getValue()[0] + value});
}
/**
 * Wraps a set of sample values into a GenericMetricEntity whose prefix is
 * derived from the given field name via buildMetricName.
 *
 * @param timestamp metric timestamp in milliseconds
 * @param field     field name used to build the metric prefix
 * @param values    sample values to store
 * @param tags      tag map attached to the entity
 * @return the populated metric entity
 */
protected GenericMetricEntity metricWrapper(Long timestamp, String field, double[] values, Map<String, String> tags) {
    GenericMetricEntity entity = new GenericMetricEntity();
    entity.setPrefix(buildMetricName(field));
    entity.setTimestamp(timestamp);
    entity.setTags(tags);
    entity.setValue(values);
    return entity;
}
}
/**
 * Creates a HADOOP_LOG_METRIC_NAME entity tagged with this task's
 * appId / site / taskId and carrying {@code count} as its single value.
 *
 * @param timestamp metric timestamp in milliseconds
 * @param count     sample value to store
 * @return the populated metric entity
 */
private GenericMetricEntity generateMetric(long timestamp, long count) {
    Map<String, String> tags = new HashMap<>();
    tags.put("appId", appId);
    tags.put("site", site);
    tags.put("taskId", String.valueOf(taskId));
    GenericMetricEntity metric = new GenericMetricEntity();
    metric.setPrefix(HADOOP_LOG_METRIC_NAME);
    metric.setTimestamp(timestamp);
    metric.setTags(tags);
    metric.setValue(new double[] {count});
    return metric;
}
/**
 * Accumulates {@code value} into the metric bucket identified by
 * (metricName, tags, timestamp) inside the supplied entity map, creating a
 * zero-valued entity on first use.
 *
 * @param appMetricEntities bucket map keyed by metric name + tags + timestamp
 * @param timestamp         metric timestamp in milliseconds
 * @param tags              tag map identifying the bucket
 * @param metricName        metric prefix name
 * @param value             amount to add to the bucket's running total
 */
private void createMetric(Map<String, GenericMetricEntity> appMetricEntities, long timestamp, Map<String, String> tags, String metricName, int value) {
    // NOTE: the key relies on tags.toString(), whose ordering depends on the map implementation.
    String key = metricName + tags.toString() + " " + timestamp;
    GenericMetricEntity entity = appMetricEntities.computeIfAbsent(key, k -> {
        GenericMetricEntity fresh = new GenericMetricEntity();
        fresh.setTags(tags);
        fresh.setTimestamp(timestamp);
        fresh.setPrefix(metricName);
        fresh.setValue(new double[] {0.0});
        return fresh;
    });
    entity.setValue(new double[] {entity.getValue()[0] + value});
}
/**
 * Builds a per-user count metric for a sliding window.
 *
 * @param timestamp    metric timestamp in milliseconds
 * @param metricFormat format string taking the window length in minutes
 * @param user         raw user (UGI) string; parsed before tagging
 * @param count        sample value to store
 * @param windowLen    window length in milliseconds
 * @return the populated metric entity
 */
private GenericMetricEntity buildMetricEntity(long timestamp, String metricFormat, String user, long count, int windowLen) {
    GenericMetricEntity entity = new GenericMetricEntity();
    entity.setTimestamp(timestamp);
    // Store the count directly; the previous Double.valueOf(count) boxed and
    // immediately unboxed for no benefit.
    entity.setValue(new double[] {count});
    entity.setPrefix(String.format(metricFormat, windowLen / 60000)); // ms -> minutes
    Map<String, String> tags = new HashMap<>();
    tags.put("site", config.getString("siteId"));
    tags.put("user", LogParseUtil.parseUserFromUGI(user));
    entity.setTags(tags);
    return entity;
}
public static GenericMetricEntity convert(String name, Metric metric) { //TODO: add other type metric support EagleMetricKey metricName = MetricKeyCodeDecoder.decodeTSMetricKey(name); if (metric instanceof EagleCounterMetric) { EagleCounterMetric counter = (EagleCounterMetric)metric; GenericMetricEntity entity = new GenericMetricEntity(); entity.setPrefix(metricName.metricName); entity.setValue(new double[]{counter.getValue()}); entity.setTags(metricName.tags); entity.setTimestamp(metricName.timestamp); return entity; } else if (metric instanceof EagleGaugeMetric) { EagleGaugeMetric gauge = (EagleGaugeMetric)metric; GenericMetricEntity entity = new GenericMetricEntity(); entity.setPrefix(metricName.metricName); entity.setValue(new double[]{gauge.getValue()}); entity.setTags(metricName.tags); entity.setTimestamp(metricName.timestamp); return entity; } throw new RuntimeException("Not support this metric type for now!"); } }
public static GenericMetricEntity convert(String name, Metric metric) { //TODO: add other type metric support EagleMetricKey metricName = MetricKeyCodeDecoder.decodeTSMetricKey(name); if (metric instanceof EagleCounterMetric) { EagleCounterMetric counter = (EagleCounterMetric) metric; GenericMetricEntity entity = new GenericMetricEntity(); entity.setPrefix(metricName.metricName); entity.setValue(new double[] {counter.getValue()}); entity.setTags(metricName.tags); entity.setTimestamp(metricName.timestamp); return entity; } else if (metric instanceof EagleGaugeMetric) { EagleGaugeMetric gauge = (EagleGaugeMetric) metric; GenericMetricEntity entity = new GenericMetricEntity(); entity.setPrefix(metricName.metricName); entity.setValue(new double[] {gauge.getValue()}); entity.setTags(metricName.tags); entity.setTimestamp(metricName.timestamp); return entity; } throw new RuntimeException("Not support this metric type for now!"); } }
/**
 * Builds the daily job-count metric for the given day and job state.
 *
 * @param calendar day the metric belongs to (its millis become the timestamp)
 * @param state    job status used as the JOB_STATUS tag
 * @param count    number of jobs observed
 * @return the populated metric entity
 */
private GenericMetricEntity generateEntity(GregorianCalendar calendar, String state, int count) {
    GenericMetricEntity metricEntity = new GenericMetricEntity();
    metricEntity.setTimestamp(calendar.getTimeInMillis());
    metricEntity.setPrefix(String.format(Constants.HADOOP_HISTORY_TOTAL_METRIC_FORMAT, Constants.JOB_LEVEL, Constants.JOB_COUNT_PER_DAY));
    metricEntity.setValue(new double[] {count});
    // Plain HashMap instead of the previous double-brace initializer: the
    // anonymous subclass retained a reference to the enclosing instance and
    // required @SuppressWarnings("serial").
    Map<String, String> baseTags = new HashMap<>();
    baseTags.put("site", appConfig.getJobHistoryEndpointConfig().site);
    baseTags.put(MRJobTagName.JOB_STATUS.toString(), state);
    metricEntity.setTags(baseTags);
    return metricEntity;
}
}
/**
 * Folds a sample into the cluster-level metric bucket for its aggregation
 * interval, creating a zero-valued entity on first use and tracking how many
 * samples each bucket has absorbed.
 *
 * @param metricName metric prefix name
 * @param timestamp  sample timestamp in milliseconds
 * @param value      sample value
 * @param aggFunc    aggregation function applied to the bucket
 */
private void createMetric(String metricName, long timestamp, double value, HadoopClusterConstants.AggregateFunc aggFunc) {
    // Track the newest raw timestamp seen across all samples.
    maxTimestamp = Math.max(maxTimestamp, timestamp);
    // Align the sample onto its aggregation-interval bucket.
    long bucketTimestamp = timestamp / AGGREGATE_INTERVAL * AGGREGATE_INTERVAL;
    MetricKey key = new MetricKey(metricName, bucketTimestamp);
    GenericMetricEntity entity = clusterMetricEntities.get(key);
    if (entity == null) {
        entity = new GenericMetricEntity();
        entity.setTags(buildMetricTags());
        entity.setTimestamp(bucketTimestamp);
        entity.setPrefix(metricName);
        entity.setValue(new double[] {0.0});
        clusterMetricEntities.put(key, entity);
    }
    clusterMetricCounts.putIfAbsent(key, 0);
    int sampleCount = clusterMetricCounts.get(key);
    updateEntityAggValue(entity, aggFunc, value, sampleCount);
    clusterMetricCounts.put(key, sampleCount + 1);
}