/**
 * Returns the counters for the given job.
 *
 * Looks the job up in the in-memory {@code jobs} map first; if the job is no
 * longer tracked in memory, falls back to the persisted completed-job status
 * store.
 *
 * NOTE(review): this variant holds the JobTracker monitor for the whole call,
 * including {@code job.getCounters()} — presumably fine for short-lived
 * counter aggregation, but confirm it cannot contend with heartbeat
 * processing under load.
 *
 * @param jobid id of the job whose counters are requested
 * @return the job's counters, or whatever the completed-job store returns
 *         (possibly null — TODO confirm readCounters' contract) if the job
 *         is not in memory
 */
public synchronized Counters getJobCounters(JobID jobid) {
  JobInProgress job = jobs.get(jobid);
  if (job != null) {
    // Job still tracked in memory: report its live counters.
    return job.getCounters();
  } else {
    // Job retired from memory: read counters back from the persisted store.
    return completedJobStatusStore.readCounters(jobid);
  }
}

public synchronized TaskReport[] getMapTaskReports(JobID jobid) {
/**
 * Returns the counters for the given job.
 *
 * Only the in-memory lookup is performed under the JobTracker monitor; the
 * counter aggregation itself runs outside the lock. A job that is present
 * but not yet initialized reports {@code EMPTY_COUNTERS}; a job no longer
 * in memory is served from the persisted completed-job status store.
 *
 * @param jobid id of the job whose counters are requested
 * @return live counters, {@code EMPTY_COUNTERS} for an uninitialized job,
 *         or the persisted counters for a retired job
 */
public Counters getJobCounters(JobID jobid) {
  final JobInProgress job;
  // Narrow lock scope: only the map lookup needs the JobTracker monitor.
  synchronized (this) {
    job = jobs.get(jobid);
  }
  if (job == null) {
    // Retired from memory: fall back to the persisted store.
    return completedJobStatusStore.readCounters(jobid);
  }
  // Uninitialized jobs have no meaningful counters yet.
  return isJobInited(job) ? job.getCounters() : EMPTY_COUNTERS;
}
Counters counters = job.getCounters(); if (counters != null) { return new org.apache.hadoop.mapreduce.Counters(counters);
job.getCounters().write(dataOut);
job.getCounters().write(dataOut);
job.getCounters().write(dataOut);
/** * The job is done since all it's component tasks are either * successful or have failed. * * @param metrics job-tracker metrics */ private void jobComplete(JobTrackerInstrumentation metrics) { // // All tasks are complete, then the job is done! // if (this.status.getRunState() == JobStatus.RUNNING ) { this.status.setRunState(JobStatus.SUCCEEDED); this.status.setCleanupProgress(1.0f); this.finishTime = System.currentTimeMillis(); LOG.info("Job " + this.status.getJobID() + " has completed successfully."); JobHistory.JobInfo.logFinished(this.status.getJobID(), finishTime, this.finishedMapTasks, this.finishedReduceTasks, failedMapTasks, failedReduceTasks, getCounters()); // Note that finalize will close the job history handles which garbage collect // might try to finalize garbageCollect(); metrics.completeJob(this.conf, this.status.getJobID()); } }
/**
 * Called periodically by JobTrackerMetrics to update the metrics for
 * this job.
 *
 * Snapshots the job's counters and emits one metrics record per counter,
 * tagged with its group and counter display names.
 */
public void updateMetrics() {
  Counters snapshot = getCounters();
  for (Counters.Group grp : snapshot) {
    jobMetrics.setTag("group", grp.getDisplayName());
    for (Counters.Counter ctr : grp) {
      jobMetrics.setTag("counter", ctr.getDisplayName());
      jobMetrics.setMetric("value", (float) ctr.getCounter());
      // Flush one record per counter so each value carries its own tags.
      jobMetrics.update();
    }
  }
}
/**
 * Called periodically by JobTrackerMetrics to update the metrics for
 * this job.
 *
 * Walks every counter group of this job's counters and publishes each
 * counter's value as a tagged metrics record.
 */
public void updateMetrics() {
  Counters current = getCounters();
  for (Counters.Group group : current) {
    jobMetrics.setTag("group", group.getDisplayName());
    for (Counters.Counter counter : group) {
      jobMetrics.setTag("counter", counter.getDisplayName());
      jobMetrics.setMetric("value", (float) counter.getCounter());
      // Emit immediately: tags are shared state on jobMetrics, so each
      // counter must be flushed before the tags change.
      jobMetrics.update();
    }
  }
}
JobHistory.JobInfo.logFinished(profile.getJobID(), this.finishTime, 0, 0, 0, 0, getCounters());
/**
 * Folds the given job's counters into the tracker-wide metrics counters.
 *
 * Accumulates every {@code JobInProgress.Counter} and {@code Task.Counter}
 * value, plus all counters in the filesystem counter group. A job id that
 * is no longer tracked is silently ignored.
 *
 * @param id id of the job whose counters should be accumulated
 */
private synchronized void collectJobCounters(JobID id) {
  JobInProgress job = tracker.jobs.get(id);
  if (job == null) {
    // Job already retired from the tracker: nothing to accumulate.
    return;
  }
  Counters source = job.getCounters();
  // Fold in the per-job framework counters.
  for (JobInProgress.Counter key : JobInProgress.Counter.values()) {
    countersToMetrics.findCounter(key)
        .increment(source.findCounter(key).getValue());
  }
  // Fold in the per-task framework counters.
  for (Task.Counter key : Task.Counter.values()) {
    countersToMetrics.findCounter(key)
        .increment(source.findCounter(key).getValue());
  }
  // Filesystem counters are keyed by name within their group.
  for (Counter fsCounter : source.getGroup(Task.FILESYSTEM_COUNTER_GROUP)) {
    countersToMetrics.incrCounter(Task.FILESYSTEM_COUNTER_GROUP,
                                  fsCounter.getName(),
                                  fsCounter.getValue());
  }
}
/*
Counters c = new Counters(jip.getCounters()); Counters jiCounters = jobInfo.getTotalCounters(); assertTrue("Counters of job obtained from history file did not " +
Counters counters = getCounters();
Counters totalCounters = job.getCounters();
new org.apache.hadoop.mapreduce.Counters(getMapCounters()), new org.apache.hadoop.mapreduce.Counters(getReduceCounters()), new org.apache.hadoop.mapreduce.Counters(getCounters()));
this.status.setCleanupProgress(1.0f); Counters counters = getCounters(); if (jobTerminationState == JobStatus.FAILED) { changeStateTo(JobStatus.FAILED);