@Override
public GetCountersResponse getCounters(GetCountersRequest request)
    throws IOException {
  JobId jobId = request.getJobId();
  Job job = verifyAndGetJob(jobId, true);
  GetCountersResponse response =
      recordFactory.newRecordInstance(GetCountersResponse.class);
  response.setCounters(TypeConverter.toYarn(job.getAllCounters()));
  return response;
}
@Override
public TaskCheckpointID getCheckpointID(TaskID taskId) {
  TaskId tid = TypeConverter.toYarn(taskId);
  return preemptionPolicy.getCheckpointID(tid);
}
@Override
public void setCheckpointID(TaskID taskId, TaskCheckpointID cid) {
  TaskId tid = TypeConverter.toYarn(taskId);
  preemptionPolicy.setCheckpointID(tid, cid);
}
public static TaskAttemptId toYarn(TezTaskAttemptID taskAttemptId) {
  TaskAttemptID mrTaskAttemptId =
      IDConverter.toMRTaskAttemptId(taskAttemptId);
  TaskAttemptId mrv2TaskAttemptId = TypeConverter.toYarn(mrTaskAttemptId);
  return mrv2TaskAttemptId;
}
@Override
public void setJobPriority(JobID arg0, String arg1)
    throws IOException, InterruptedException {
  ApplicationId appId = TypeConverter.toYarn(arg0).getAppId();
  try {
    resMgrDelegate.updateApplicationPriority(appId,
        Priority.newInstance(Integer.parseInt(arg1)));
  } catch (YarnException e) {
    throw new IOException(e);
  }
}
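// Hedged caller-side sketch (not from the source): `client` stands for any
// ClientProtocol implementation carrying the setJobPriority override above.
// The priority string must parse as an integer, since the method feeds it
// to Integer.parseInt before wrapping it in a YARN Priority record.
static void examplePriorityUpdate(ClientProtocol client) throws Exception {
  JobID jobId = JobID.forName("job_1464971545386_0001"); // illustrative id
  client.setJobPriority(jobId, "5"); // becomes Priority.newInstance(5) on the RM
}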
public JobHistoryCopyService(ApplicationAttemptId applicationAttemptId,
    EventHandler handler) {
  super("JobHistoryCopyService");
  this.applicationAttemptId = applicationAttemptId;
  this.jobId = TypeConverter.toYarn(
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId()));
  this.handler = handler;
}
@Override
public void fsError(TaskAttemptID taskAttemptID, String message)
    throws IOException {
  // This happens only in Child.
  LOG.fatal("Task: " + taskAttemptID + " - failed due to FSError: " + message);
  reportDiagnosticInfo(taskAttemptID, "FSError: " + message);
  org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
      TypeConverter.toYarn(taskAttemptID);
  context.getEventHandler().handle(
      new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_FAILMSG));
}
@Override
public void done(TaskAttemptID taskAttemptID) throws IOException {
  LOG.info("Done acknowledgment from " + taskAttemptID.toString());
  org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
      TypeConverter.toYarn(taskAttemptID);
  taskHeartbeatHandler.progressing(attemptID);
  context.getEventHandler().handle(
      new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE));
}
public org.apache.hadoop.mapreduce.Counters getJobCounters(JobID arg0)
    throws IOException, InterruptedException {
  org.apache.hadoop.mapreduce.v2.api.records.JobId jobID =
      TypeConverter.toYarn(arg0);
  GetCountersRequest request =
      recordFactory.newRecordInstance(GetCountersRequest.class);
  request.setJobId(jobID);
  Counters cnt = ((GetCountersResponse) invoke("getCounters",
      GetCountersRequest.class, request)).getCounters();
  return TypeConverter.fromYarn(cnt);
}
public static TaskAttemptId toYarn(org.apache.hadoop.mapred.TaskAttemptID id) {
  TaskAttemptId taskAttemptId =
      recordFactory.newRecordInstance(TaskAttemptId.class);
  taskAttemptId.setTaskId(toYarn(id.getTaskID()));
  taskAttemptId.setId(id.getId());
  return taskAttemptId;
}
public static TaskAttemptId toYarn(org.apache.hadoop.mapreduce.TaskAttemptID id) {
  TaskAttemptId taskAttemptId =
      recordFactory.newRecordInstance(TaskAttemptId.class);
  taskAttemptId.setTaskId(toYarn(id.getTaskID()));
  taskAttemptId.setId(id.getId());
  return taskAttemptId;
}
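// Hedged usage sketch (not from the source) for the TaskAttemptID converters
// above: build a classic mapreduce TaskAttemptID and convert it to the MRv2
// record. The concrete id values are made up for illustration.
static TaskAttemptId exampleToYarnConversion() {
  org.apache.hadoop.mapreduce.TaskAttemptID mrId =
      new org.apache.hadoop.mapreduce.TaskAttemptID(
          "1464971545386", 1, org.apache.hadoop.mapreduce.TaskType.MAP, 3, 0);
  TaskAttemptId yarnId = TypeConverter.toYarn(mrId);
  // The MRv2 record carries the same numeric ids as the classic ID.
  assert yarnId.getId() == 0 && yarnId.getTaskId().getId() == 3;
  return yarnId;
}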
@Override
protected void serviceStart() throws Exception {
  scheduler = createSchedulerProxy();
  JobID id = TypeConverter.fromYarn(this.applicationId);
  JobId jobId = TypeConverter.toYarn(id);
  job = context.getJob(jobId);
  register();
  startAllocatorThread();
  super.serviceStart();
}
@Test
public void testCountersOverRawCounters() {
  TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
  org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters();
  Counters altCounters = TypeConverter.toYarn(rCounters);
  report.setRawCounters(rCounters);
  report.setCounters(altCounters);
  Counters counters = report.getCounters();
  assertNotEquals(null, counters);
  assertNotEquals(rCounters, altCounters);
  assertEquals(counters, altCounters);
}
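// Hedged round-trip sketch (not from the source) for the counter conversions
// used in getCounters/getJobCounters above: classic Counters -> MRv2 record
// -> classic again. TaskCounter.MAP_INPUT_RECORDS is just an example key.
static void exampleCountersRoundTrip() {
  org.apache.hadoop.mapreduce.Counters raw =
      new org.apache.hadoop.mapreduce.Counters();
  raw.findCounter(org.apache.hadoop.mapreduce.TaskCounter.MAP_INPUT_RECORDS)
      .increment(42);
  Counters yarnForm = TypeConverter.toYarn(raw); // MRv2 record form
  org.apache.hadoop.mapreduce.Counters back = TypeConverter.fromYarn(yarnForm);
  assert back.findCounter(
      org.apache.hadoop.mapreduce.TaskCounter.MAP_INPUT_RECORDS).getValue() == 42;
}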