private static Counters createDeprecatedCounters() { Counters counters = new Counters(); // Deprecated counter: make sure it is only printed once counters.findCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS").setValue(1); counters.findCounter("File System Counters", "FILE: Number of bytes read").setValue(1); return counters; }
public Counters getJobCounters(JobID jobId) throws IOException, InterruptedException { // FIXME needs counters support from DAG // with a translation layer on client side Counters empty = new Counters(); return empty; }
/**
 * Constructs a driver wrapping the given mapper and reducer, starting with an
 * empty counter set.
 *
 * @param m the mapper under test
 * @param r the reducer under test
 */
public MapReduceDriver(final Mapper<K1, V1, K2, V2> m, final Reducer<K2, V2, K3, V3> r) {
  // Assignments are independent of one another; fresh counters by default.
  counters = new Counters();
  myMapper = m;
  myReducer = r;
}
/** Discards all accumulated counters by installing a fresh, empty {@link Counters}. */
public static void clearCounters() {
  COUNTERS = new Counters();
}
}
/**
 * Builds a {@link Counters} fixture with three counters spread over two
 * groups: group1 holds counter1=5 and counter2=10, group2 holds counter1=15.
 */
private static Counters createCounters() {
  final Counters fixture = new Counters();
  // Each counter is distinct, so the population order does not matter.
  fixture.findCounter("group2", "counter1").setValue(15);
  fixture.findCounter("group1", "counter2").setValue(10);
  fixture.findCounter("group1", "counter1").setValue(5);
  return fixture;
}
/**
 * Creates a driver with neither a combiner nor a reducer configured. Both can
 * be supplied later through the corresponding set methods; at minimum the
 * reducer must be set before the driver is run.
 */
public MultipleInputsMapReduceDriver() {
  setCounters(new Counters());
}
// Returns a Counter resolved against a brand-new Counters instance, so the
// returned counter is detached from any stored state and updates to it are
// not retained anywhere. NOTE(review): presumably this is a stub/no-op
// context implementation where counter updates are intentionally discarded —
// confirm against callers before relying on counter values.
public Counter getCounter(Enum<?> name) {
  return new Counters().findCounter(name);
}

public Counter getCounter(String group, String name) {
/**
 * Looks up the tracked job for the given new-API job id and returns its
 * current counters wrapped in the new-API {@code Counters} type.
 */
public org.apache.hadoop.mapreduce.Counters getJobCounters(
    org.apache.hadoop.mapreduce.JobID id) {
  final Job tracked = jobs.get(JobID.downgrade(id));
  return new org.apache.hadoop.mapreduce.Counters(tracked.getCurrentCounters());
}
/**
 * Looks up the tracked job for the given new-API job id and returns its
 * current counters wrapped in the new-API {@code Counters} type.
 */
public org.apache.hadoop.mapreduce.Counters getJobCounters(
    org.apache.hadoop.mapreduce.JobID id) {
  final Job tracked = jobs.get(JobID.downgrade(id));
  return new org.apache.hadoop.mapreduce.Counters(tracked.getCurrentCounters());
}
/**
 * Looks up the tracked job for the given new-API job id and returns its
 * current counters wrapped in the new-API {@code Counters} type.
 */
public org.apache.hadoop.mapreduce.Counters getJobCounters(
    org.apache.hadoop.mapreduce.JobID id) {
  final Job tracked = jobs.get(JobID.downgrade(id));
  return new org.apache.hadoop.mapreduce.Counters(tracked.getCurrentCounters());
}
/**
 * Looks up the tracked job for the given new-API job id and returns its
 * current counters wrapped in the new-API {@code Counters} type.
 */
public org.apache.hadoop.mapreduce.Counters getJobCounters(
    org.apache.hadoop.mapreduce.JobID id) {
  final Job tracked = jobs.get(JobID.downgrade(id));
  return new org.apache.hadoop.mapreduce.Counters(tracked.getCurrentCounters());
}
// Deserializes the next Counters record from its raw bytes and pairs it with
// its identifier as an immutable map entry.
@Override
public Entry<String,Counters> next() {
  Counters cntrs = new Counters();
  // ByteArrayInputStream requires no close; closing it is a no-op anyway.
  ByteArrayInputStream input = new ByteArrayInputStream(getNextCounterData());
  DataInputStream dataInput = new DataInputStream(input);
  try {
    cntrs.readFields(dataInput);
  } catch (IOException e) {
    // Iterator.next() cannot declare a checked exception; surface as
    // unchecked while preserving the cause.
    throw new RuntimeException(e);
  }
  // NOTE(review): getNextCounterData() is called before getNextIdentifier();
  // presumably both advance a shared cursor, so do not reorder — confirm.
  return Maps.immutableEntry(getNextIdentifier(), cntrs);
}
/**
 * Looks up the tracked job for the given new-API job id and returns its
 * current counters wrapped in the new-API {@code Counters} type.
 */
public org.apache.hadoop.mapreduce.Counters getJobCounters(
    org.apache.hadoop.mapreduce.JobID id) {
  final Job tracked = jobs.get(JobID.downgrade(id));
  return new org.apache.hadoop.mapreduce.Counters(tracked.getCurrentCounters());
}
/**
 * Materializes the nested group/counter value map held by {@code counters}
 * into a Hadoop {@link Counters} object, one {@link CounterGroup} per
 * top-level key.
 */
private Counters getCounters() {
  final Counters result = new Counters();
  for (Map.Entry<String, Map<String, Long>> group : counters.value().entrySet()) {
    final CounterGroup target = result.getGroup(group.getKey());
    for (Map.Entry<String, Long> counter : group.getValue().entrySet()) {
      target.findCounter(counter.getKey()).setValue(counter.getValue());
    }
  }
  return result;
}
/**
 * Materializes the nested group/counter value map held by {@code counters}
 * into a Hadoop {@link Counters} object, one {@link CounterGroup} per
 * top-level key.
 */
private Counters getCounters() {
  final Counters result = new Counters();
  for (Map.Entry<String, Map<String, Long>> group : counters.value().entrySet()) {
    final CounterGroup target = result.getGroup(group.getKey());
    for (Map.Entry<String, Long> counter : group.getValue().entrySet()) {
      target.findCounter(counter.getKey()).setValue(counter.getValue());
    }
  }
  return result;
}
/**
 * Aggregates counters across all tasks that have counters available.
 *
 * @param tasks tasks to aggregate over; tasks whose {@code getCounters()}
 *     returns {@code null} are skipped
 * @return a fresh {@link Counters} holding the accumulated task counters
 */
public static Counters getCounters(Collection<Task> tasks) {
  // Only tasks that actually expose counters participate in the aggregation.
  final List<Task> withCounters = new ArrayList<Task>(tasks.size());
  for (Task task : tasks) {
    if (task.getCounters() != null) {
      withCounters.add(task);
    }
  }
  return JobImpl.incrTaskCounters(new Counters(), withCounters);
}
/**
 * Builds a Mockito mock of {@link Job} named "mockjob" with an empty counter
 * set and 10 total maps / 10 total reduces.
 */
private Job mockJob() {
  final Job job = mock(Job.class);
  // Stubbings are independent; order is irrelevant.
  when(job.getName()).thenReturn("mockjob");
  when(job.getTotalMaps()).thenReturn(10);
  when(job.getTotalReduces()).thenReturn(10);
  when(job.getAllCounters()).thenReturn(new Counters());
  return job;
}
/**
 * Returns this job's counters converted from the YARN report, or {@code null}
 * when the job has failed tasks.
 */
@Override
public Counters getCounters() {
  return hasFailedTasks
      ? null
      : new Counters(TypeConverter.fromYarn(report.getCounters()));
}
/**
 * Returns this job's counters converted from the YARN report, or {@code null}
 * when either the report or its counters are unavailable.
 */
@Override
public Counters getCounters() {
  // Guard clause: bail out early when there is nothing to convert.
  if (report == null || report.getCounters() == null) {
    return null;
  }
  return new Counters(TypeConverter.fromYarn(report.getCounters()));
}