/**
 * Returns the counters for this job.
 *
 * @return the job's counters converted to the old API type, or
 *         {@code null} if no counters are available
 * @throws IOException if the counters cannot be retrieved
 */
public Counters getCounters() throws IOException {
  org.apache.hadoop.mapreduce.Counters raw = job.getCounters();
  // Preserve the null-means-unavailable contract of the old API.
  return (raw == null) ? null : Counters.downgrade(raw);
}
/**
 * Returns the counters for this job.
 *
 * @return the job's counters converted to the old API type
 * @throws IOException if retrieval fails, or wrapping the cause if the
 *         calling thread was interrupted while fetching counters
 */
public Counters getCounters() throws IOException {
  try {
    return Counters.downgrade(job.getCounters());
  } catch (InterruptedException ie) {
    // Restore the interrupt status before translating to the old API's
    // checked exception, so callers can still observe the interruption.
    Thread.currentThread().interrupt();
    throw new IOException(ie);
  }
}
/** @return the task's counters converted to the old API type. */
public Counters getCounters() {
  org.apache.hadoop.mapreduce.Counters taskCounters = super.getTaskCounters();
  return Counters.downgrade(taskCounters);
}
/** @return this task's counters, downgraded to the old API representation. */
public Counters getCounters() {
  org.apache.hadoop.mapreduce.Counters newApiCounters = super.getTaskCounters();
  return Counters.downgrade(newApiCounters);
}
/**
 * Returns the counters for this job.
 *
 * @return the counters as the old API type, or {@code null} when the
 *         underlying job reports none
 * @throws IOException if the counters cannot be retrieved
 */
public Counters getCounters() throws IOException {
  org.apache.hadoop.mapreduce.Counters fetched = job.getCounters();
  if (fetched == null) {
    return null;  // no counters available yet for this job
  }
  return Counters.downgrade(fetched);
}
/**
 * Returns the counters for this job.
 *
 * @return the old-API view of the job's counters, or {@code null} if
 *         none are available
 * @throws IOException if the counters cannot be retrieved
 */
public Counters getCounters() throws IOException {
  org.apache.hadoop.mapreduce.Counters current = job.getCounters();
  return (current != null) ? Counters.downgrade(current) : null;
}
/**
 * Returns the counters for this job.
 *
 * @return the job's counters downgraded to the old API type; {@code null}
 *         when the job has no counters to report
 * @throws IOException if the counters cannot be retrieved
 */
public Counters getCounters() throws IOException {
  Counters downgraded = null;
  org.apache.hadoop.mapreduce.Counters source = job.getCounters();
  if (source != null) {
    downgraded = Counters.downgrade(source);
  }
  return downgraded;
}
/**
 * @deprecated Use
 * {@link #getJobCounters(org.apache.hadoop.mapreduce.JobID)} instead
 */
@Deprecated
public Counters getJobCounters(JobID jobid) {
  org.apache.hadoop.mapreduce.Counters newApiCounters;
  try {
    newApiCounters =
        getJobCounters((org.apache.hadoop.mapreduce.JobID) jobid);
  } catch (AccessControlException e) {
    // Deliberately best-effort: old-API callers get null on failure.
    return null;
  } catch (IOException e) {
    return null;
  }
  return Counters.downgrade(newApiCounters);
}
/** @return the counters of this task, in the old API's {@code Counters} form. */
public Counters getCounters() {
  org.apache.hadoop.mapreduce.Counters fromSuper = super.getTaskCounters();
  return Counters.downgrade(fromSuper);
}
/**
 * Looks up a single counter value from a new-API counters object.
 *
 * @param cntrs the new-API counters to search
 * @param counterGroupName the group containing the counter
 * @param counterName the counter's name within the group
 * @return the counter's current value
 * @throws IOException declared for subclass overrides that may fail on I/O
 */
protected long getCounter(org.apache.hadoop.mapreduce.Counters cntrs,
    String counterGroupName, String counterName) throws IOException {
  return Counters.downgrade(cntrs)
      .findCounter(counterGroupName, counterName).getValue();
}
/**
 * Fetches the value of one counter out of a new-API counters object.
 *
 * @param cntrs new-API counters
 * @param counterGroupName counter group to look in
 * @param counterName counter to read
 * @return the counter's value
 * @throws IOException declared for overriding implementations
 */
protected long getCounter(org.apache.hadoop.mapreduce.Counters cntrs,
    String counterGroupName, String counterName) throws IOException {
  Counters oldApiView = Counters.downgrade(cntrs);
  Counters.Counter counter =
      oldApiView.findCounter(counterGroupName, counterName);
  return counter.getValue();
}
/**
 * Reads a named counter's value via the old-API counters view.
 *
 * @param cntrs the new-API counters
 * @param counterGroupName group of the counter
 * @param counterName name of the counter
 * @return the counter's long value
 * @throws IOException declared so overrides may perform I/O
 */
protected long getCounter(org.apache.hadoop.mapreduce.Counters cntrs,
    String counterGroupName, String counterName) throws IOException {
  final Counters downgraded = Counters.downgrade(cntrs);
  return downgraded.findCounter(counterGroupName, counterName).getValue();
}
/**
 * Resolves a counter by group and name and returns its value.
 *
 * @param cntrs counters in the new API's representation
 * @param counterGroupName the counter group
 * @param counterName the counter name
 * @return the resolved counter's value
 * @throws IOException declared for subclasses whose lookup may fail
 */
protected long getCounter(org.apache.hadoop.mapreduce.Counters cntrs,
    String counterGroupName, String counterName) throws IOException {
  return Counters.downgrade(cntrs)
      .findCounter(counterGroupName, counterName)
      .getValue();
}
/**
 * Returns the value of the counter identified by group and name.
 *
 * @param cntrs new-API counters to convert and search
 * @param counterGroupName counter group name
 * @param counterName counter name
 * @return the counter value
 * @throws IOException declared for overriding implementations
 */
protected long getCounter(org.apache.hadoop.mapreduce.Counters cntrs,
    String counterGroupName, String counterName) throws IOException {
  Counters asOldApi = Counters.downgrade(cntrs);
  return asOldApi.findCounter(counterGroupName, counterName).getValue();
}
@Test
public void testNewCounterB() throws Exception {
  // Configure the job with a small sort factor to force multi-pass merging.
  final Job job = createJob();
  final Configuration conf = job.getConfiguration();
  conf.setInt(JobContext.IO_SORT_FACTOR, 2);

  // Arrange exactly the input files this scenario expects.
  createWordsFile(new Path(IN_DIR, "input5_2k_4"), conf);
  removeWordsFile(new Path(IN_DIR, "input5_2k_5"), conf);

  org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(
      job, IN_DIR);
  org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.setOutputPath(
      job, new Path(OUT_DIR, "outputN1"));

  assertTrue(job.waitForCompletion(true));

  // Verify the framework counters against the expected byte counts.
  final Counters counters = Counters.downgrade(job.getCounters());
  validateCounters(counters, 131072, 20480, 81920);
}
/**
 * Converts a new-API {@code org.apache.hadoop.mapreduce.TaskReport} into
 * the old-API {@code TaskReport}, downgrading the task ID and counters.
 */
static TaskReport downgrade(
    org.apache.hadoop.mapreduce.TaskReport report) {
  TaskID oldTaskId = TaskID.downgrade(report.getTaskID());
  Counters oldCounters = Counters.downgrade(report.getTaskCounters());
  return new TaskReport(oldTaskId, report.getProgress(), report.getState(),
      report.getDiagnostics(), report.getCurrentStatus(),
      report.getStartTime(), report.getFinishTime(), oldCounters);
}
/**
 * Builds an old-API {@code TaskReport} from a new-API one, translating
 * the task ID and counters to their old-API equivalents.
 */
static TaskReport downgrade(
    org.apache.hadoop.mapreduce.TaskReport report) {
  final TaskID downgradedId = TaskID.downgrade(report.getTaskID());
  final Counters downgradedCounters =
      Counters.downgrade(report.getTaskCounters());
  return new TaskReport(downgradedId, report.getProgress(),
      report.getState(), report.getDiagnostics(), report.getCurrentStatus(),
      report.getStartTime(), report.getFinishTime(), downgradedCounters);
}
/**
 * Adapts a new-API task report into the old API's {@code TaskReport},
 * copying all fields and downgrading the typed ones (ID, counters).
 */
static TaskReport downgrade(
    org.apache.hadoop.mapreduce.TaskReport report) {
  TaskID id = TaskID.downgrade(report.getTaskID());
  Counters counters = Counters.downgrade(report.getTaskCounters());
  return new TaskReport(id, report.getProgress(), report.getState(),
      report.getDiagnostics(), report.getCurrentStatus(),
      report.getStartTime(), report.getFinishTime(), counters);
}
/**
 * Creates the old-API equivalent of a new-API task report; the task ID
 * and counters are converted, all other fields are carried over as-is.
 */
static TaskReport downgrade(
    org.apache.hadoop.mapreduce.TaskReport report) {
  final TaskID legacyId = TaskID.downgrade(report.getTaskID());
  final Counters legacyCounters =
      Counters.downgrade(report.getTaskCounters());
  return new TaskReport(legacyId, report.getProgress(), report.getState(),
      report.getDiagnostics(), report.getCurrentStatus(),
      report.getStartTime(), report.getFinishTime(), legacyCounters);
}
/**
 * Converts a new-API task report to the old-API {@code TaskReport}.
 * Note this variant reads the ID via {@code getTaskId()} (older accessor
 * spelling) — keep as-is to match the new-API class in this source tree.
 */
static TaskReport downgrade(
    org.apache.hadoop.mapreduce.TaskReport report) {
  TaskID oldId = TaskID.downgrade(report.getTaskId());
  Counters oldCounters = Counters.downgrade(report.getTaskCounters());
  return new TaskReport(oldId, report.getProgress(), report.getState(),
      report.getDiagnostics(), report.getCurrentStatus(),
      report.getStartTime(), report.getFinishTime(), oldCounters);
}