/**
 * Forwards a metric increment to the wrapped {@code delegate} collector.
 *
 * @param metricName name of the metric to increment
 * @param delta amount to add to the metric's current value
 */
@Override
public void count(String metricName, int delta) {
  delegate.count(metricName, delta);
}
/**
 * Delegates the metric update untouched to {@code delegate}.
 *
 * @param metricName metric to bump
 * @param delta increment amount
 */
@Override
public void count(String metricName, int delta) {
  delegate.count(metricName, delta);
}
/**
 * Pass-through implementation: records {@code delta} against {@code metricName}
 * on the underlying {@code delegate}.
 *
 * @param metricName metric to increment
 * @param delta amount to add
 */
@Override
public void count(String metricName, int delta) {
  delegate.count(metricName, delta);
}
// Multiplies each input by 10; emits the "more.than.30" metric for any result
// exceeding 30. The trailing `}).collect();` closes an enclosing transform
// expression that starts outside this view.
@Override public Integer call(Integer val) throws Exception { int newVal = val * 10; if (newVal > 30) { metrics.count("more.than.30", 1); } return newVal; } }).collect();
/**
 * Routes the metric increment through {@code getMetrics()} rather than a
 * cached field, so the current metrics collector is always used.
 *
 * @param metricName metric to increment
 * @param delta amount to add
 */
@Override
public void count(String metricName, int delta) {
  getMetrics().count(metricName, delta);
}
/**
 * Increments {@code metricName} by {@code delta} on the collector returned by
 * {@code getMetrics()}.
 *
 * @param metricName metric to bump
 * @param delta increment amount
 */
@Override
public void count(String metricName, int delta) {
  getMetrics().count(metricName, delta);
}
/**
 * Delegates the count operation to the metrics collector obtained via
 * {@code getMetrics()}.
 *
 * @param metricName name of the metric
 * @param delta amount to add to it
 */
@Override
public void count(String metricName, int delta) {
  getMetrics().count(metricName, delta);
}
// Converts each Text record to a String, counts it via the "num.lines" metric,
// and replaces every comma with a space. The trailing `}).collect();` closes an
// enclosing transform expression that starts outside this view.
@Override public String call(Text input) throws Exception { String line = input.toString(); metrics.count("num.lines", 1); return line.replaceAll(",", " "); } }).collect();
// GET /greet handler: reads bytes from the `whom` dataset at the empty key
// (presumably the stored name to greet — confirm against the writer side),
// falls back to "World" when absent, emits a metric only for "Jane Doe", and
// responds with "Hello <name>!". Trailing `}` closes the enclosing class.
@Path("greet") @GET public void greet(HttpServiceRequest request, HttpServiceResponder responder) { byte[] name = whom.read(""); String toGreet = name != null ? new String(name, Charsets.UTF_8) : "World"; if (toGreet.equals("Jane Doe")) { metrics.count("greetings.count.jane_doe", 1); } responder.sendString(String.format("Hello %s!", toGreet)); } }
// Word-count reducer: sums the per-word counts for `key`, adds the sum to the
// "num.words" metric, and writes (word, total) as byte arrays. Trailing `}`
// closes the enclosing class.
public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { int sum = 0; for (IntWritable val : values) { sum += val.get(); } result.set(sum); metrics.count("num.words", sum); context.write(Bytes.toBytes(key.toString()), Bytes.toBytes(String.valueOf(result.get()))); } }
/**
 * Mapper: for every tag on the incoming time-series entry, emits
 * (tag, entry value) so the reducer can aggregate per tag. Also bumps the
 * "in.map" metric (once with delta 1 on entry, once with delta 2 on exit) and
 * two record counters per invocation.
 *
 * <p>A value of 55 is treated as deliberately bad input and aborts the task
 * with a RuntimeException — the test harness relies on this failure path.
 */
public void map(byte[] key, TimeseriesTable.Entry value, Context context)
    throws IOException, InterruptedException {
  metrics.count("in.map", 1);
  for (byte[] tag : value.getTags()) {
    // Decoded per tag, matching the original behavior: entries without tags
    // never decode the value and never trigger the poison-value exception.
    long entryValue = Bytes.toLong(value.getValue());
    if (55L == entryValue) {
      throw new RuntimeException("Intentional exception: someone on purpose added bad data as input");
    }
    context.write(new BytesWritable(tag), new LongWritable(entryValue));
  }
  counters.increment(new Increment("mapper", "records", 1L));
  countersFromContext.increment(new Increment("mapper", "records", 1L));
  metrics.count("in.map", 2);
}
/**
 * Reducer: totals all values observed for a tag and stores the total as a new
 * time-series entry keyed by that tag. Bumps the "in.reduce" metric by 1 at
 * both entry and exit, and two record counters per value reduced.
 */
public void reduce(BytesWritable key, Iterable<LongWritable> values, Context context)
    throws IOException, InterruptedException {
  metrics.count("in.reduce", 1);
  long total = 0;
  for (LongWritable v : values) {
    total += v.get();
    counters.increment(new Increment("reducer", "records", 1L));
    countersFromContext.increment(new Increment("reducer", "records", 1L));
  }
  byte[] groupTag = key.copyBytes();
  context.write(groupTag,
      new TimeseriesTable.Entry(BY_TAGS, Bytes.toBytes(total), System.currentTimeMillis(), groupTag));
  metrics.count("in.reduce", 1);
}
/**
 * Pre-submission setup for the MapReduce job: configures the Hadoop job,
 * derives the input splits from runtime arguments ("metric", "startTs",
 * "stopTs", "tag"), records a "beforeSubmit" marker row, optionally enables
 * frequent flushing, and wires the "timeSeries" dataset as both input and
 * output. The "beforeSubmit" metric is intentionally counted twice (once at
 * the start, once near the end), so tests can assert a total of 2.
 */
@Override
public void initialize() throws Exception {
  MapReduceContext context = getContext();
  metrics.count("beforeSubmit", 1);
  Job hadoopJob = context.getHadoopJob();
  AggregateMetricsByTag.configureJob(hadoopJob);
  String metricName = context.getRuntimeArguments().get("metric");
  // Use primitive parsing instead of boxed Long.valueOf(...): the values are
  // consumed as longs, so boxing is pure overhead. Exception behavior on
  // missing/malformed arguments (NumberFormatException) is unchanged.
  long startTs = Long.parseLong(context.getRuntimeArguments().get("startTs"));
  long stopTs = Long.parseLong(context.getRuntimeArguments().get("stopTs"));
  String tag = context.getRuntimeArguments().get("tag");
  context.addInput(Input.ofDataset("timeSeries",
      table.getInputSplits(2, Bytes.toBytes(metricName), startTs, stopTs, Bytes.toBytes(tag))));
  beforeSubmitTable.write(Bytes.toBytes("beforeSubmit"), Bytes.toBytes("beforeSubmit:done"));
  String frequentFlushing = context.getRuntimeArguments().get("frequentFlushing");
  if (frequentFlushing != null) {
    // Flush after every record in both phases to exercise frequent-flush paths.
    hadoopJob.getConfiguration().setInt("c.mapper.flush.freq", 1);
    hadoopJob.getConfiguration().setInt("c.reducer.flush.freq", 1);
  }
  metrics.count("beforeSubmit", 1);
  context.addOutput(Output.ofDataset("timeSeries"));
}