@Override public String aggregateStats(String counterGrpName, String statType) { long value = 0; if (counters != null) { // In case of counters, aggregation is done by JobTracker / MR AM itself // so no need to aggregate, simply return the counter value for requested stat. value = counters.getGroup(counterGrpName).getCounter(statType); } return String.valueOf(value); }
// Expected per-store record counts — presumably produced by the job run
// earlier in this test (enclosing method not visible here; TODO confirm):
// 4 "text" records and 2/4/2 records for sequence stores A/B/C.
assertEquals(4, counters.getCounter("text"));
assertEquals(2, counters.getCounter("sequence_A"));
assertEquals(4, counters.getCounter("sequence_B"));
assertEquals(2, counters.getCounter("sequence_C"));
Counter counter = ctrItr.next(); String name = counter.getDisplayName(); String mapValue = decimal.format(mapGroup.getCounter(name)); String reduceValue = decimal.format(reduceGroup.getCounter(name)); String totalValue = decimal.format(counter.getValue()); buff.append(
for (Counters.Counter counter : totalGroup) { String name = counter.getDisplayName(); String mapValue = decimal.format(mapGroup.getCounter(name)); String reduceValue = decimal.format(reduceGroup.getCounter(name)); String totalValue = decimal.format(counter.getCounter());
long inputBytes = tip.getCounters() .getGroup("org.apache.hadoop.mapred.Task$Counter") .getCounter("Map input bytes"); switch (level) { case 0: jobCounters.incrCounter(Counter.LOCAL_MAP_INPUT_BYTES,
// Expected per-store record counts — presumably produced by the job run
// earlier in this test (enclosing method not visible here; TODO confirm):
// 4 "text" records and 2/4/2 records for sequence stores A/B/C.
assertEquals(4, counters.getCounter("text"));
assertEquals(2, counters.getCounter("sequence_A"));
assertEquals(4, counters.getCounter("sequence_B"));
assertEquals(2, counters.getCounter("sequence_C"));
JobStats jobStats = iter.next(); long evalRecords = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_RECORDS); LOG.info("Total valid eval records is : {}", evalRecords); .getCounter(Constants.COUNTER_POSTAGS); long pigNegTags = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_NEGTAGS); double pigPosWeightTags = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_WPOSTAGS) / (Constants.EVAL_COUNTER_WEIGHT_SCALE * 1.0d); double pigNegWeightTags = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_WNEGTAGS) / (Constants.EVAL_COUNTER_WEIGHT_SCALE * 1.0d); .getCounter(Constants.TOTAL_MODEL_RUNTIME);
// Exactly 2 "text" records expected for this scenario
// (enclosing test method not visible here; TODO confirm source of counter).
assertEquals(2, counters.getCounter("text"));
&& jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) != null) { long totalValidCount = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter("TOTAL_VALID_COUNT"); .getCounter("INVALID_TAG");
JobStats jobStats = iter.next(); long evalRecords = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_RECORDS); LOG.info("Total valid eval records is : {}", evalRecords); .getCounter(Constants.COUNTER_POSTAGS); long pigNegTags = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_NEGTAGS); double pigPosWeightTags = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_WPOSTAGS) / (Constants.EVAL_COUNTER_WEIGHT_SCALE * 1.0d); double pigNegWeightTags = jobStats.getHadoopCounters().getGroup(Constants.SHIFU_GROUP_COUNTER) .getCounter(Constants.COUNTER_WNEGTAGS) / (Constants.EVAL_COUNTER_WEIGHT_SCALE * 1.0d); .getCounter("BAD_META_SCORE");
getCounter("COMBINE_INPUT_RECORDS"); long combinerOutputRecords = counters.getGroup( "org.apache.hadoop.mapreduce.TaskCounter"). getCounter("COMBINE_OUTPUT_RECORDS"); Assert.assertTrue(combinerInputRecords > 0); Assert.assertTrue(combinerInputRecords > combinerOutputRecords);
/**
 * Decides whether the launcher's main ran successfully.
 *
 * <p>A job that Hadoop reports as failed is never a success. A successful
 * job is additionally required to have a zero launcher-error counter,
 * when counters are available.
 *
 * @param runningJob the launcher job to inspect
 * @return {@code true} iff the job succeeded and reported no launcher error
 * @throws IOException if counters cannot be retrieved
 */
public static boolean isMainSuccessful(RunningJob runningJob) throws IOException {
    if (!runningJob.isSuccessful()) {
        return false;
    }
    Counters counters = runningJob.getCounters();
    if (counters == null) {
        // No counters available — fall back to the job status alone.
        return true;
    }
    Counters.Group group = counters.getGroup(LauncherAMUtils.COUNTER_GROUP);
    if (group == null) {
        return true;
    }
    return group.getCounter(LauncherAMUtils.COUNTER_LAUNCHER_ERROR) == 0;
}
/**
 * Reads a single counter value from a flow step's statistics.
 *
 * <p>For Hadoop-backed steps the counter is fetched from the running job's
 * counters; for all other step types the lookup is delegated to
 * {@link FlowStepStats#getCounterValue}.
 *
 * @param step  the flow step whose stats are queried
 * @param group counter group name
 * @param value counter name within the group
 * @return the counter value, or {@code 0L} when the group is absent
 * @throws IOException if the job's counters cannot be retrieved
 */
private static Long get(FlowStepStats step, String group, String value) throws IOException {
    if (step instanceof HadoopStepStats) {
        HadoopStepStats hadoopStep = (HadoopStepStats) step;
        // NOTE(review): getCounters() can return null for a finished/retired
        // job in some Hadoop versions — TODO confirm this cannot NPE here.
        org.apache.hadoop.mapred.Counters.Group counterGroup =
            hadoopStep.getRunningJob().getCounters().getGroup(group);
        if (counterGroup != null) {
            return counterGroup.getCounter(value);
        }
        LOG.info("Counter " + group + ":" + value + " not set.");
        // Fix: '0l' (lowercase ell) is easily misread as '01' — use uppercase L.
        return 0L;
    } else {
        return step.getCounterValue(group, value);
    }
}
/**
 * Reports whether the launcher job completed without a launcher error.
 *
 * <p>The Hadoop success flag is the primary signal; when counters exist,
 * the {@code COUNTER_LAUNCHER_ERROR} counter must also be zero.
 *
 * @param runningJob the launcher job to check
 * @return {@code true} when the job succeeded and no launcher error was counted
 * @throws IOException if counters cannot be retrieved
 */
public static boolean isMainSuccessful(RunningJob runningJob) throws IOException {
    boolean succeeded = runningJob.isSuccessful();
    if (!succeeded) {
        return false;
    }
    Counters counters = runningJob.getCounters();
    Counters.Group group = (counters == null)
            ? null
            : counters.getGroup(LauncherAMUtils.COUNTER_GROUP);
    if (group != null) {
        succeeded = group.getCounter(LauncherAMUtils.COUNTER_LAUNCHER_ERROR) == 0;
    }
    return succeeded;
}
/**
 * Asserts that both the enum-based and the string-based map-record
 * counters of the given job equal the expected count.
 *
 * @param runningJob job whose counters are checked
 * @param expected   expected number of map records
 * @throws IOException if counters cannot be retrieved
 */
public static void verifyCounters(RunningJob runningJob, int expected) throws IOException {
    // The enum counter and its string-named twin must agree.
    assertEquals(expected,
            runningJob.getCounters().getCounter(EnumCounter.MAP_RECORDS));
    assertEquals(expected,
            runningJob.getCounters()
                    .getGroup("StringCounter")
                    .getCounter("MapRecords"));
}
/**
 * Checks the job's map-record count via both counter styles:
 * the typed {@code EnumCounter.MAP_RECORDS} and the
 * {@code "StringCounter"/"MapRecords"} string-named counter.
 *
 * @param runningJob job under test
 * @param expected   expected map-record count
 * @throws IOException if counters cannot be retrieved
 */
public static void verifyCounters(RunningJob runningJob, int expected) throws IOException {
    assertEquals(expected, runningJob.getCounters().getCounter(EnumCounter.MAP_RECORDS));
    assertEquals(expected,
            runningJob.getCounters().getGroup("StringCounter").getCounter("MapRecords"));
}
/**
 * Reports whether the job produced any output data, as recorded in the
 * {@code COUNTER_OUTPUT_DATA} counter.
 *
 * @param runningJob job whose counters are inspected
 * @return {@code true} when the output-data counter is positive;
 *         {@code false} when it is zero or counters are unavailable
 * @throws IOException if counters cannot be retrieved
 */
public static boolean hasCredentials(RunningJob runningJob) throws IOException {
    Counters counters = runningJob.getCounters();
    if (counters == null) {
        return false;
    }
    Counters.Group group = counters.getGroup(COUNTER_GROUP);
    return group != null && group.getCounter(COUNTER_OUTPUT_DATA) > 0;
}
}