Refine search
/**
 * Records the latency reported by the given provider as a CloudWatch metric
 * datum, but only when the provider's metric type is among the predefined
 * metrics currently enabled via {@link AwsSdkMetrics}.
 *
 * @param provider source of the service metric type and the measured duration
 */
@Override
public void collectLatency(ServiceLatencyProvider provider) {
    final ServiceMetricType metricType = provider.getServiceMetricType();
    // Ignore latency types that are not enabled as predefined metrics.
    if (!AwsSdkMetrics.getPredefinedMetrics().contains(metricType)) {
        return;
    }
    final Dimension typeDimension = new Dimension()
            .withName(Dimensions.MetricType.name())
            .withValue(metricType.name());
    final MetricDatum latencyDatum = new MetricDatum()
            .withMetricName(metricType.getServiceName())
            .withDimensions(typeDimension)
            .withUnit(StandardUnit.Milliseconds)
            .withValue(provider.getDurationMilli());
    safeAddMetricsToQueue(latencyDatum);
}
@Override public void addValue(String metric, double value) { assert metric != null; final long timeInSeconds = System.currentTimeMillis() / 1000; if (lastTime != timeInSeconds) { lastTimestamp = new Date(); lastTime = timeInSeconds; } // http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html // In theory, large values are rejected, but the maximum is so large that we don't bother to verify. final MetricDatum metricDatum = new MetricDatum().withMetricName(prefix + metric) .withDimensions(dimensions).withTimestamp(lastTimestamp).withValue(value); //.withUnit("None") synchronized (buffer) { buffer.add(metricDatum); } }
private MetricDatum processResult(Result result) { // Sometimes the attribute name and the key of the value are the same MetricDatum metricDatum = new MetricDatum(); if (result.getValuePath().isEmpty()) { metricDatum.setMetricName(result.getAttributeName()); } else { metricDatum.setMetricName(result.getAttributeName() + "_" + KeyUtils.getValuePathString(result)); } metricDatum.setDimensions(dimensions); // Converts the Objects to Double-values for CloudWatch metricDatum.setValue(toDoubleConverter.apply(result.getValue())); metricDatum.setTimestamp(new Date()); return metricDatum; }
/**
 * Returns a metric datum cloned from the given one.
 * Made package private only for testing purposes.
 *
 * <p>The clone is shallow except for the dimension list: {@code withDimensions}
 * copies the supplied collection into a fresh list, while the statistic-set,
 * timestamp and unit references are shared with the source datum.
 *
 * <p>NOTE(review): only the six fields below are copied; if this SDK version's
 * {@code MetricDatum} carries additional fields (e.g. values/counts or storage
 * resolution in newer releases) they are silently dropped — confirm intentional.
 */
final MetricDatum cloneMetricDatum(MetricDatum md) {
    return new MetricDatum()
        .withDimensions(md.getDimensions()) // a new collection is created
        .withMetricName(md.getMetricName())
        .withStatisticValues(md.getStatisticValues())
        .withTimestamp(md.getTimestamp())
        .withUnit(md.getUnit())
        .withValue(md.getValue());
}
// NOTE(review): fragment — the enclosing method signature and several closing
// braces are not visible in this excerpt; braces do not balance here.
Double value = datum.getValue();
// Data points without a scalar value cannot be folded into a statistic set.
if (value == null) {
    return;
// NOTE(review): the closing brace of this guard is elided from the excerpt.
List<Dimension> dims = datum.getDimensions();
// Sort dimensions so the lookup key below is order-independent.
Collections.sort(dims, DimensionComparator.INSTANCE);
String metricName = datum.getMetricName();
// Unique-metric key: metric name plus its sorted dimensions rendered as JSON.
String key = metricName + Jackson.toJsonString(dims);
MetricDatum statDatum = uniqueMetrics.get(key);
if (statDatum == null) {
    // First sighting of this metric: seed a statistic-set datum. Sample count
    // and sum start at zero because they are incremented below for every
    // datum, including this first one.
    statDatum = new MetricDatum()
        .withDimensions(datum.getDimensions())
        .withMetricName(metricName)
        .withUnit(datum.getUnit())
        .withStatisticValues(new StatisticSet()
            .withMaximum(value)
            .withMinimum(value)
            .withSampleCount(0.0)
            .withSum(0.0))
// NOTE(review): the statement terminator and (presumably) the put into
// uniqueMetrics are elided from this excerpt — confirm against the full file.
StatisticSet stat = statDatum.getStatisticValues();
// Fold the current value into the running aggregate.
stat.setSampleCount(stat.getSampleCount() + 1.0);
stat.setSum(stat.getSum() + value);
// NOTE(review): fragment — the method body is truncated; "stat" is presumably
// the loop variable of an elided iteration over "stats", and the datum built
// here is presumably queued/sent in elided code. Confirm against the full file.
@Override
public void write(ArrayList<Stat> stats, long invokeTimeMs, Set<Tag> tags) {
    // Use the caller-supplied invocation time as the datum timestamp.
    Date dt = new Date();
    dt.setTime(invokeTimeMs);
    // NOTE(review): if metricDims is an instance field, appending parentDims on
    // every call accumulates duplicates across invocations — confirm it is
    // reset or rebuilt elsewhere.
    metricDims.addAll(parentDims);
    MetricDatum metric = new MetricDatum();
    metric.setMetricName(stat.getName());
    metric.setUnit(StandardUnit.None);
    metric.setTimestamp(dt);
    metric.setDimensions(metricDims);
    metric.setValue((double) stat.getValue());
// NOTE(review): fragment — the leading "return;" belongs to an elided guard,
// and "timestamp", "unit", "dimensions" and "propertyName" are defined in
// elided code. Braces do not balance within this excerpt.
    return;
MetricDatum datum = new MetricDatum();
datum.setMetricName(context.getProperty(METRIC_NAME).evaluateAttributeExpressions(flowFile).getValue());
final String valueString = context.getProperty(VALUE).evaluateAttributeExpressions(flowFile).getValue();
if (valueString != null) {
    // A single scalar value was supplied for this flow file.
    datum.setValue(Double.parseDouble(valueString));
} else {
    // No scalar value: publish an aggregated statistic set instead.
    // NOTE(review): Double.parseDouble throws NumberFormatException on
    // malformed or missing input — confirm the elided caller handles it.
    StatisticSet statisticSet = new StatisticSet();
    statisticSet.setMaximum(Double.parseDouble(context.getProperty(MAXIMUM).evaluateAttributeExpressions(flowFile).getValue()));
    statisticSet.setMinimum(Double.parseDouble(context.getProperty(MINIMUM).evaluateAttributeExpressions(flowFile).getValue()));
    statisticSet.setSampleCount(Double.parseDouble(context.getProperty(SAMPLECOUNT).evaluateAttributeExpressions(flowFile).getValue()));
    statisticSet.setSum(Double.parseDouble(context.getProperty(SUM).evaluateAttributeExpressions(flowFile).getValue()));
    datum.setStatisticValues(statisticSet);
// NOTE(review): the closing brace of the else-branch is elided in this excerpt.
datum.setTimestamp(new Date(Long.parseLong(timestamp)));
datum.setUnit(unit);
// Dynamic (user-defined) properties become CloudWatch dimensions when non-blank.
final String propertyValue = context.getProperty(propertyName).evaluateAttributeExpressions(flowFile).getValue();
if (StringUtils.isNotBlank(propertyValue)) {
    dimensions.add(new Dimension().withName(propertyName).withValue(propertyValue));
datum.withDimensions(dimensions);
// NOTE(review): fragment — "requests", "request", "dimensions", "name" and
// "value" are defined in elided code; "value" is presumably a StatisticSet,
// since it is passed to withStatisticValues — confirm against the full file.
requests = new ArrayList<>();
Date now = new Date();
// Attach one datum, timestamped now, carrying the aggregated statistics.
request.withMetricData(new MetricDatum()
    .withUnit(StandardUnit.None)
    .withTimestamp(now)
    .withDimensions(dimensions)
    .withMetricName(name)
    .withStatisticValues(value)
);
// NOTE(review): fragment — the loop body is not closed within this excerpt.
// Stamp every pending datum with a single shared timestamp so that all data
// in this batch carries the same time.
Date now = new Date();
for (MetricDatum datum : nonEmptyData) {
    datum.withTimestamp(now);
// NOTE(review): fragment of a generated StAX unmarshaller — the surrounding
// event loop and the element-name tests that guard each "continue" have been
// elided; the bare "continue" statements and unbalanced braces are artifacts
// of that elision, not real top-level code.
public MetricDatum unmarshall(StaxUnmarshallerContext context) throws Exception {
    MetricDatum metricDatum = new MetricDatum();
    int originalDepth = context.getCurrentDepth();
    int targetDepth = originalDepth + 1;
    // Each branch below parses one XML element into the corresponding field.
    metricDatum.setMetricName(StringStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
    // Dimensions: first branch seeds an empty list, second appends per element.
    metricDatum.withDimensions(new ArrayList<Dimension>());
    continue;
    metricDatum.withDimensions(DimensionStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
    metricDatum.setTimestamp(DateStaxUnmarshallerFactory.getInstance("iso8601").unmarshall(context));
    continue;
    metricDatum.setValue(DoubleStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
    metricDatum.setStatisticValues(StatisticSetStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
    // Values/Counts lists (distribution data): seed then append, as above.
    metricDatum.withValues(new ArrayList<Double>());
    continue;
    metricDatum.withValues(DoubleStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
    metricDatum.withCounts(new ArrayList<Double>());
// NOTE(review): fragment of a generated StAX unmarshaller — the event loop
// and the element-name guards before each "continue" are elided; note the
// bare ".setTimestamp(...)" line whose receiver ("metricDatum") was also lost
// in the elision.
public MetricDatum unmarshall(StaxUnmarshallerContext context) throws Exception {
    MetricDatum metricDatum = new MetricDatum();
    // Each branch below parses one XML element into the corresponding field.
    metricDatum.setMetricName(StringStaxUnmarshaller.getInstance().unmarshall(
            context));
    continue;
    metricDatum.withDimensions(DimensionStaxUnmarshaller.getInstance().unmarshall(
            context));
    continue;
    .setTimestamp(DateStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
    metricDatum.setValue(DoubleStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
    metricDatum.setStatisticValues(StatisticSetStaxUnmarshaller.getInstance()
            .unmarshall(context));
    continue;
    metricDatum.setUnit(StringStaxUnmarshaller.getInstance().unmarshall(context));
    continue;
// NOTE(review): fragment — the openings of the first and third
// putMetricDataAsync(...) calls (including their namespaces) are elided, so
// the leading ".withMetricData(" chains have no visible receiver here.
    .withMetricData(new MetricDatum()
        .withMetricName("request-error")
        .withValue(Double.valueOf(numberOfRecords))));
// Count successfully handled records under the "rakam-webhook" namespace.
cloudWatchClient.putMetricDataAsync(new PutMetricDataRequest()
    .withNamespace("rakam-webhook")
    .withMetricData(new MetricDatum()
        .withMetricName("request-success")
        .withValue(Double.valueOf(numberOfRecords))));
// Latency of the outbound call: response-received minus request-sent millis.
    .withMetricData(new MetricDatum()
        .withMetricName("request-latency")
        .withValue(Double.valueOf(execute.receivedResponseAtMillis() - execute.sentRequestAtMillis()))));
// Count failed records.
cloudWatchClient.putMetricDataAsync(new PutMetricDataRequest()
    .withNamespace("rakam-webhook")
    .withMetricData(new MetricDatum()
        .withMetricName("request-error")
        .withValue(Double.valueOf(numberOfRecords))));
/**
 * Publish relevant CloudWatch metrics.
 *
 * <p>Sends a single datum holding {@code getProcessedRecords()} through the
 * configured CloudWatch client; a no-op when no client is configured.
 *
 * <p>NOTE(review): the datum has no metric name and the request has no
 * namespace — the CloudWatch PutMetricData API requires both, so this call
 * will be rejected by the service unless they are supplied elsewhere.
 * Confirm and fix before relying on these metrics.
 */
protected void emitCloudWatchMetrics() {
    if (null != getCloudwatch()) {
        // TODO Emit CloudWatch metrics about the size of the queue of writes
        MetricDatum recordsProcessedDatum = new MetricDatum().withValue(getProcessedRecords());
        PutMetricDataRequest request = new PutMetricDataRequest().withMetricData(recordsProcessedDatum);
        getCloudwatch().putMetricData(request);
    }
}
private PutMetricDataRequest newPutMetricDataRequest( Collection<MetricDatum> data, final String namespace, final Dimension... extraDims) { if (extraDims != null) { // Need to add some extra dimensions. // To do so, we copy the metric data to avoid mutability problems. Collection<MetricDatum> newData = new ArrayList<MetricDatum>(data.size()); for (MetricDatum md: data) { MetricDatum newMD = cloneMetricDatum(md); for (Dimension dim: extraDims) newMD.withDimensions(dim); // add the extra dimensions to the new metric datum newData.add(newMD); } data = newData; } return new PutMetricDataRequest() .withNamespace(namespace) .withMetricData(data) .withRequestMetricCollector(RequestMetricCollector.NONE) ; }
/**
 * <p>
 * The unit of the metric.
 * </p>
 *
 * @param unit
 *        The unit of the metric.
 * @see StandardUnit
 */
public void setUnit(StandardUnit unit) {
    // Delegates to the fluent setter so the enum-to-state conversion lives in
    // exactly one place; the returned reference is intentionally discarded.
    withUnit(unit);
}
// NOTE(review): fragment — this apply(...) override belongs to an anonymous
// class inside an elided method call (hence the trailing "}));").
@Override
public MetricDatum apply(MetricDatum datum) {
    // Overwrite the datum's value with the captured numeric "value".
    return datum.withValue(value.doubleValue());
}
}));
/**
 * <p>
 * The dimensions associated with the metric.
 * </p>
 *
 * @param dimensions
 *        The dimensions associated with the metric.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public MetricDatum withDimensions(java.util.Collection<Dimension> dimensions) {
    // Fluent variant: delegate to the plain setter, then return this for chaining.
    setDimensions(dimensions);
    return this;
}
// NOTE(review): fragment — the method body is truncated; "stat" is presumably
// the loop variable of an elided iteration over "stats", and the datum built
// here is presumably queued/sent in elided code. Confirm against the full file.
@Override
public void write(ArrayList<Stat> stats, long invokeTimeMs, Set<Tag> tags) {
    // Use the caller-supplied invocation time as the datum timestamp.
    Date dt = new Date();
    dt.setTime(invokeTimeMs);
    // NOTE(review): if metricDims is an instance field, appending parentDims on
    // every call accumulates duplicates across invocations — confirm it is
    // reset or rebuilt elsewhere.
    metricDims.addAll(parentDims);
    MetricDatum metric = new MetricDatum();
    metric.setMetricName(stat.getName());
    metric.setUnit(StandardUnit.None);
    metric.setTimestamp(dt);
    metric.setDimensions(metricDims);
    metric.setValue((double) stat.getValue());
// NOTE(review): fragment — the leading "return;" belongs to an elided guard,
// and "timestamp", "unit", "dimensions" and "propertyName" are defined in
// elided code. Braces do not balance within this excerpt.
    return;
MetricDatum datum = new MetricDatum();
datum.setMetricName(context.getProperty(METRIC_NAME).evaluateAttributeExpressions(flowFile).getValue());
final String valueString = context.getProperty(VALUE).evaluateAttributeExpressions(flowFile).getValue();
if (valueString != null) {
    // A single scalar value was supplied for this flow file.
    datum.setValue(Double.parseDouble(valueString));
} else {
    // No scalar value: publish an aggregated statistic set instead.
    // NOTE(review): Double.parseDouble throws NumberFormatException on
    // malformed or missing input — confirm the elided caller handles it.
    StatisticSet statisticSet = new StatisticSet();
    statisticSet.setMaximum(Double.parseDouble(context.getProperty(MAXIMUM).evaluateAttributeExpressions(flowFile).getValue()));
    statisticSet.setMinimum(Double.parseDouble(context.getProperty(MINIMUM).evaluateAttributeExpressions(flowFile).getValue()));
    statisticSet.setSampleCount(Double.parseDouble(context.getProperty(SAMPLECOUNT).evaluateAttributeExpressions(flowFile).getValue()));
    statisticSet.setSum(Double.parseDouble(context.getProperty(SUM).evaluateAttributeExpressions(flowFile).getValue()));
    datum.setStatisticValues(statisticSet);
// NOTE(review): the closing brace of the else-branch is elided in this excerpt.
datum.setTimestamp(new Date(Long.parseLong(timestamp)));
datum.setUnit(unit);
// Dynamic (user-defined) properties become CloudWatch dimensions when non-blank.
final String propertyValue = context.getProperty(propertyName).evaluateAttributeExpressions(flowFile).getValue();
if (StringUtils.isNotBlank(propertyValue)) {
    dimensions.add(new Dimension().withName(propertyName).withValue(propertyValue));
datum.withDimensions(dimensions);