/**
 * Builds a query that returns the all-time total (SUM) of a single counter metric
 * for the given tag context.
 *
 * @param context    slice-by tags identifying the entity the counter belongs to
 * @param metricName fully-qualified name of the counter metric
 * @return a totals query (start/end of 0 with max resolution, no group-by)
 */
private MetricDataQuery getTotalCounterQuery(Map<String, String> context, String metricName) {
  // start=0, end=0 with Integer.MAX_VALUE resolution means "aggregate across all time".
  long startTs = 0;
  long endTs = 0;
  int resolution = Integer.MAX_VALUE;
  return new MetricDataQuery(startTs, endTs, resolution, metricName,
                             AggregationFunction.SUM, context, new ArrayList<String>());
}
/**
 * Translates a {@link MetricDataQuery} into the equivalent {@link CubeQuery}.
 * The aggregation is left as {@code null}, letting the cube auto-select one.
 *
 * @param query the metric query to translate
 * @return a cube query carrying the same time range, resolution, limit,
 *         metrics, slice/group tags, and interpolator
 */
private CubeQuery buildCubeQuery(MetricDataQuery query) {
  // null aggregation => cube picks the aggregation automatically.
  String aggregation = null;
  return new CubeQuery(aggregation,
                       query.getStartTs(), query.getEndTs(),
                       query.getResolution(), query.getLimit(),
                       query.getMetrics(),
                       query.getSliceByTags(), query.getGroupByTags(),
                       query.getInterpolator());
}
@Nullable private String getAggregation(MetricDataQuery query) { // We mostly rely on auto-selection of aggregation during query (in which case null is returned from // this method). In some specific cases we need to help resolve the aggregation though. Set<String> tagNames = ImmutableSet.<String>builder() .addAll(query.getSliceByTags().keySet()).addAll(query.getGroupByTags()).build(); if (tagNames.contains(Constants.Metrics.Tag.FLOW)) { // NOTE: BY_FLOWLET_QUEUE agg has only producer and consumer metrics if (tagNames.contains(Constants.Metrics.Tag.PRODUCER) || tagNames.contains(Constants.Metrics.Tag.CONSUMER)) { return BY_FLOWLET_QUEUE; } else { return BY_FLOW; } } return null; }
private void computeProcessBusyness(MetricDataQuery query, TimeSeriesResponse.Builder builder) throws Exception { PeekingIterator<TimeValue> tuplesReadItor = Iterators.peekingIterator(queryTimeSeries(new MetricDataQuery(query, "system.process.tuples.read", AggregationFunction.SUM))); PeekingIterator<TimeValue> eventsProcessedItor = Iterators.peekingIterator(queryTimeSeries(new MetricDataQuery(query, "system.process.events.processed", AggregationFunction.SUM))); long resultTimeStamp = query.getStartTs(); for (int i = 0; i < query.getLimit(); i++) { long tupleRead = 0; long eventProcessed = 0; if (tuplesReadItor.hasNext() && tuplesReadItor.peek().getTimestamp() == resultTimeStamp) { tupleRead = tuplesReadItor.next().getValue(); } if (eventsProcessedItor.hasNext() && eventsProcessedItor.peek().getTimestamp() == resultTimeStamp) { eventProcessed = eventsProcessedItor.next().getValue(); } if (eventProcessed != 0) { int busyness = (int) ((float) tupleRead / eventProcessed * 100); builder.addData(resultTimeStamp, busyness > 100 ? 100 : busyness); } else { // If the scan result doesn't have value for a timestamp, we add 0 to the returned result for that timestamp. builder.addData(resultTimeStamp, 0); } resultTimeStamp += query.getResolution(); } }
new MetricDataQuery(new MetricDataQuery(query, "system.process.events.processed", AggregationFunction.SUM), ImmutableList.of(Constants.Metrics.Tag.FLOWLET_QUEUE)); Map<String, Long> processedPerQueue = getTotalsWithSingleGroupByTag(groupByQueueName); Map<String, String> sliceByTags = Maps.newHashMap(query.getSliceByTags()); written = getTotals(new MetricDataQuery(new MetricDataQuery(query, sliceByTags), "system.process.events.out", AggregationFunction.SUM)); sliceByTags.put(Constants.Metrics.Tag.NAMESPACE, query.getSliceByTags().get(Constants.Metrics.Tag.NAMESPACE)); written = getTotals(new MetricDataQuery(new MetricDataQuery(query, sliceByTags), "system.collect.events", AggregationFunction.SUM)); } else {
public JsonElement executeQuery(MetricDataQuery query) throws Exception { if (query.getResolution() != Integer.MAX_VALUE) { TimeSeriesResponse.Builder builder = TimeSeriesResponse.builder(query.getStartTs(), query.getEndTs()); if (query.getMetrics().containsKey("system.process.busyness")) { computeProcessBusyness(query, builder); } else { PeekingIterator<TimeValue> timeValueItor = Iterators.peekingIterator(queryTimeSeries(query)); long resultTimeStamp = (query.getStartTs() / query.getResolution()) * query.getResolution(); for (int i = 0; i < query.getLimit(); i++) { if (timeValueItor.hasNext() && timeValueItor.peek().getTimestamp() == resultTimeStamp) { builder.addData(resultTimeStamp, timeValueItor.next().getValue()); resultTimeStamp += query.getResolution(); if (query.getMetrics().containsKey("system.process.events.pending")) { resultObj = computeFlowletPending(query); } else {
/**
 * Copy constructor that replaces only the slice-by tag values; all other fields
 * (time range, resolution, limit, metrics, group-by tags, interpolator) are
 * taken from {@code query} unchanged.
 *
 * @param query            the query to copy from
 * @param sliceByTagValues the new slice-by tag name/value pairs
 */
public MetricDataQuery(MetricDataQuery query, Map<String, String> sliceByTagValues) {
  this(query.startTs, query.endTs, query.resolution, query.limit, query.metrics,
       sliceByTagValues, query.groupByTags, query.getInterpolator());
}
/**
 * Copy constructor that replaces only the group-by tags; all other fields
 * (time range, resolution, limit, metrics, slice-by tags, interpolator) are
 * taken from {@code query} unchanged.
 *
 * @param query       the query to copy from
 * @param groupByTags the new list of tag names to group results by
 */
public MetricDataQuery(MetricDataQuery query, List<String> groupByTags) {
  this(query.startTs, query.endTs, query.resolution, query.limit, query.metrics,
       query.sliceByTagValues, groupByTags, query.getInterpolator());
}
/**
 * Creates an all-time totals query for one counter metric within a tag context.
 *
 * @param context    tag name/value pairs to slice the counter by
 * @param metricName the counter metric to sum
 * @return query summing the metric over all time, with no group-by tags
 */
private MetricDataQuery getTotalCounterQuery(Map<String, String> context, String metricName) {
  // The (0, 0, MAX_VALUE) time range/resolution triple requests a single
  // aggregate value covering everything ever recorded for this metric.
  List<String> noGroupBy = new ArrayList<String>();
  return new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM,
                             context, noGroupBy);
}
/**
 * Translates a {@link MetricDataQuery} into the equivalent {@link CubeQuery},
 * resolving the aggregation via {@link #getAggregation(MetricDataQuery)} (which
 * may yield {@code null}, meaning the cube auto-selects one).
 *
 * @param query the metric query to translate
 * @return a cube query carrying the same time range, resolution, limit,
 *         metrics, slice/group tags, and interpolator
 */
private CubeQuery buildCubeQuery(MetricDataQuery query) {
  return new CubeQuery(getAggregation(query),
                       query.getStartTs(), query.getEndTs(),
                       query.getResolution(), query.getLimit(),
                       query.getMetrics(),
                       query.getSliceByTags(), query.getGroupByTags(),
                       query.getInterpolator());
}
/**
 * Copy constructor that replaces both the slice-by tag values and the group-by
 * tags; the time range, resolution, limit, metrics and interpolator are taken
 * from {@code query} unchanged.
 *
 * @param query            the query to copy from
 * @param sliceByTagValues the new slice-by tag name/value pairs
 * @param groupByTags      the new list of tag names to group results by
 */
public MetricDataQuery(MetricDataQuery query, Map<String, String> sliceByTagValues, List<String> groupByTags) {
  this(query.startTs, query.endTs, query.resolution, query.limit, query.metrics,
       sliceByTagValues, groupByTags, query.getInterpolator());
}
/**
 * Builds the {@link MetricDataQuery} from the accumulated builder state.
 * The single metric is keyed as {@code scope + "." + metricName} and is
 * always aggregated with SUM; no group-by tags are set.
 *
 * @return the assembled query
 */
public MetricDataQuery build() {
  String qualifiedName = scope + "." + metricName;
  return new MetricDataQuery(startTs, endTs, resolution, limit,
                             ImmutableMap.of(qualifiedName, AggregationFunction.SUM),
                             sliceByTagValues, new ArrayList<String>(), interpolator);
}
/**
 * Copy constructor that replaces the metrics with a single (metricName, func)
 * pair; the time range, resolution, limit, slice-by tags, group-by tags and
 * interpolator are taken from {@code query} unchanged.
 *
 * @param query      the query to copy from
 * @param metricName the single metric to query
 * @param func       the aggregation function to apply to that metric
 */
public MetricDataQuery(MetricDataQuery query, String metricName, AggregationFunction func) {
  this(query.startTs, query.endTs, query.resolution, query.limit,
       ImmutableMap.of(metricName, func),
       query.sliceByTagValues, query.groupByTags, query.getInterpolator());
}
/**
 * Assembles the {@link MetricDataQuery} described by this builder: one metric,
 * named {@code scope + "." + metricName}, aggregated with SUM and with an empty
 * group-by tag list.
 *
 * @return the assembled query
 */
public MetricDataQuery build() {
  Map<String, AggregationFunction> singleMetric =
    ImmutableMap.of(scope + "." + metricName, AggregationFunction.SUM);
  List<String> noGroupBy = new ArrayList<String>();
  return new MetricDataQuery(startTs, endTs, resolution, limit,
                             singleMetric, sliceByTagValues, noGroupBy, interpolator);
}
private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters, Map<String, Long> result) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.<String>of()); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); // initialize elements to zero for (String counterName : metricsToCounters.values()) { result.put(counterName, 0L); } for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); result.put(metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } }
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics, Map<String, String> metricsToCounters) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID)); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID); allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } } }
private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters, Map<String, Long> result) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.<String>of()); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); // initialize elements to zero for (String counterName : metricsToCounters.values()) { result.put(counterName, 0L); } for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); result.put(metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } }
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics, Map<String, String> metricsToCounters) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID)); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID); allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } } }
MetricDataQuery metricDataQuery = new MetricDataQuery( 0L, TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()), Integer.MAX_VALUE, "system.collect.bytes",
/**
 * Collects the all-time totals of every metric emitted by one run of a Spark
 * program.
 * <p>
 * Discovers the metric names recorded under the (namespace, app, spark, run)
 * tag context, then issues an all-time SUM query for each and returns
 * metricName -> total.
 *
 * @param sparkProgram the Spark program whose metrics to fetch
 * @param runId        the run to fetch metrics for
 * @return map of metric name to its summed value for the run; metrics with no
 *         data points are omitted
 */
private Map<String, Long> getSparkDetails(ProgramId sparkProgram, String runId) {
  Map<String, String> context = new HashMap<>();
  context.put(Constants.Metrics.Tag.NAMESPACE, sparkProgram.getNamespace());
  context.put(Constants.Metrics.Tag.APP, sparkProgram.getApplication());
  context.put(Constants.Metrics.Tag.SPARK, sparkProgram.getProgram());
  context.put(Constants.Metrics.Tag.RUN_ID, runId);

  List<TagValue> tags = new ArrayList<>();
  for (Map.Entry<String, String> entry : context.entrySet()) {
    tags.add(new TagValue(entry.getKey(), entry.getValue()));
  }

  // First find which metrics exist for this run's tag context.
  MetricSearchQuery metricSearchQuery = new MetricSearchQuery(0, 0, Integer.MAX_VALUE, tags);
  Collection<String> metricNames = metricStore.findMetricNames(metricSearchQuery);

  Map<String, Long> overallResult = new HashMap<>();
  for (String metricName : metricNames) {
    // (0, 0, MAX_VALUE) requests a single aggregate over all time.
    Collection<MetricTimeSeries> resultPerQuery = metricStore.query(
      new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM,
                          context, new ArrayList<String>()));
    for (MetricTimeSeries metricTimeSeries : resultPerQuery) {
      List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
      // Guard against a series with no data points; blindly calling get(0)
      // would throw IndexOutOfBoundsException for such metrics.
      if (!timeValues.isEmpty()) {
        overallResult.put(metricTimeSeries.getMetricName(), timeValues.get(0).getValue());
      }
    }
  }
  return overallResult;
}