private static TaskID parseTaskIdFromTaskAttemptId(String taskAttemptId) {
    // Tez in particular uses an incorrect String task1244XXX instead of task_1244,
    // which makes the parsing fail; this method tries to cope with such issues
    // and looks at the numbers if possible
    if (taskAttemptId.startsWith("task")) {
        taskAttemptId = taskAttemptId.substring(4);
    }
    if (taskAttemptId.startsWith("_")) {
        taskAttemptId = taskAttemptId.substring(1);
    }

    List<String> tokenize = StringUtils.tokenize(taskAttemptId, "_");
    // need at least 4 entries from 123123123123_0001_r_0000_4
    if (tokenize.size() < 4) {
        LogFactory.getLog(HadoopCfgUtils.class).warn("Cannot parse task attempt (too little arguments) " + taskAttemptId);
        return null;
    }
    // we parse straight away - in case of an exception we can catch the new format
    try {
        return new TaskID(tokenize.get(0), Integer.parseInt(tokenize.get(1)),
            tokenize.get(2).startsWith("m"), Integer.parseInt(tokenize.get(3)));
    } catch (Exception ex) {
        LogFactory.getLog(HadoopCfgUtils.class).warn("Cannot parse task attempt " + taskAttemptId);
        return null;
    }
}
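// Usage sketch for the parser above; the input strings are illustrative, not
// taken from a real job. Both the well-formed prefix and the Tez-mangled one
// (no "_" after "task") tokenize to the same parts, so both yield the same TaskID.
TaskID wellFormed = parseTaskIdFromTaskAttemptId("task_1423345156234_0001_m_000003_2");
TaskID tezMangled = parseTaskIdFromTaskAttemptId("task1423345156234_0001_m_000003_2");
// Both resolve to task_1423345156234_0001_m_000003; anything with fewer than
// four "_"-separated parts logs a warning and returns null instead.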
outputFormat.checkOutputSpecs(dfs, jobConf);

JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);

jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
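// For reference (hypothetical timestamp): with the synthetic IDs built above,
// the two properties end up in the classic MapReduce wire format, e.g.
//   mapred.tip.id  = task_202401010000_0000_m_000000
//   mapred.task.id = attempt_202401010000_0000_m_000000_0
// which is enough for output committers that only need syntactically valid IDs.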
/**
 * Creates Hadoop attempt ID.
 *
 * @return Attempt ID.
 */
public TaskAttemptID attemptId() {
    TaskID tid = new TaskID(jobCtx.getJobID(), taskType(taskInfo().type()), taskInfo().taskNumber());

    return new TaskAttemptID(tid, taskInfo().attempt());
}
/**
 * Constructs a TaskAttemptID object from given parts.
 * @param jtIdentifier jobTracker identifier
 * @param jobId job number
 * @param type the TaskType
 * @param taskId taskId number
 * @param id the task attempt number
 */
public TaskAttemptID(String jtIdentifier, int jobId, TaskType type,
                     int taskId, int id) {
    this(new TaskID(jtIdentifier, jobId, type, taskId), id);
}
/**
 * Constructs a TaskAttemptID object from given parts.
 * @param jtIdentifier jobTracker identifier
 * @param jobId job number
 * @param isMap whether the tip is a map
 * @param taskId taskId number
 * @param id the task attempt number
 */
public TaskAttemptID(String jtIdentifier, int jobId, boolean isMap,
                     int taskId, int id) {
    this(new TaskID(jtIdentifier, jobId, isMap, taskId), id);
}
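// Usage sketch for the two constructors above (all values hypothetical). The
// TaskType overload is the current form; the boolean overload is the
// deprecated equivalent, where true selects a map task.
TaskAttemptID byType = new TaskAttemptID("200707121733", 3, TaskType.MAP, 5, 0);
TaskAttemptID byFlag = new TaskAttemptID("200707121733", 3, true, 5, 0);
// Both print as attempt_200707121733_0003_m_000005_0.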
private static void addTaskInfo(JobHistoryParser.JobInfo job, TaskType type, int id,
        TaskStatus.State status) {
    JobHistoryParser.TaskInfo task = new JobHistoryParser.TaskInfo();
    task.taskId = new TaskID(job.getJobId(), type, id);
    task.startTime = job.getLaunchTime() + id * 1000;
    task.finishTime = task.startTime + id * 1000;
    task.taskType = type;
    task.counters = createCounters();
    task.status = status.name();
    task.attemptsMap = new HashMap<>();
    addTaskAttemptInfo(task, 1);
    job.tasksMap.put(task.getTaskId(), task);
}
/**
 * test deprecated methods of TaskID
 * @throws IOException
 */
@SuppressWarnings("deprecation")
@Test (timeout=5000)
public void testDepricatedMethods() throws IOException {
    JobID jid = new JobID();
    TaskID test = new TaskID(jid, true, 1);
    assertEquals(test.getTaskType(), TaskType.MAP);
    test = new TaskID(jid, false, 1);
    assertEquals(test.getTaskType(), TaskType.REDUCE);
    test = new TaskID("001", 1, false, 1);
    assertEquals(test.getTaskType(), TaskType.REDUCE);
    test = new TaskID("001", 1, true, 1);
    assertEquals(test.getTaskType(), TaskType.MAP);

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    test.write(new DataOutputStream(out));
    TaskID ti = TaskID.read(new DataInputStream(new ByteArrayInputStream(out.toByteArray())));
    assertEquals(ti.toString(), test.toString());

    assertEquals("task_001_0001_m_000002",
        TaskID.getTaskIDsPattern("001", 1, true, 2));
    assertEquals("task_003_0001_m_000004",
        TaskID.getTaskIDsPattern("003", 1, TaskType.MAP, 4));
    assertEquals("003_0001_m_000004",
        TaskID.getTaskIDsPatternWOPrefix("003", 1, TaskType.MAP, 4).toString());
}
new org.apache.hadoop.mapred.TaskID( new JobID("test", 0), TaskType.MAP, 0), 0); TaskAttemptID succeedAttempt2ID = new TaskAttemptID( new org.apache.hadoop.mapred.TaskID( new JobID("test", 0), TaskType.MAP, 0), 1); TaskAttemptID succeedAttempt3ID = new TaskAttemptID( new org.apache.hadoop.mapred.TaskID( new JobID("test", 0), TaskType.MAP, 1), 0);
new org.apache.hadoop.mapred.TaskID( new JobID("test",0), TaskType.MAP, 0), 0); new org.apache.hadoop.mapred.TaskID( new JobID("test",0), TaskType.MAP, 1), 1);
public static org.apache.hadoop.mapred.TaskAttemptID createMockTaskAttemptID(
        long clusterId, int vertexIndex, int appId, int taskIndex,
        int taskAttemptNumber, boolean isMap) {
    return new org.apache.hadoop.mapred.TaskAttemptID(
        new org.apache.hadoop.mapred.TaskID(
            String.valueOf(clusterId) + String.valueOf(vertexIndex),
            appId, isMap ? TaskType.MAP : TaskType.REDUCE, taskIndex),
        taskAttemptNumber);
}
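// Hypothetical call to the mock factory above. Note how clusterId and
// vertexIndex are concatenated into the jtIdentifier part of the resulting ID.
org.apache.hadoop.mapred.TaskAttemptID mockId =
    createMockTaskAttemptID(1244L, 0, 1, 3, 0, true);
// mockId.toString() -> attempt_12440_0001_m_000003_0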
context.getReduceShuffleBytes(), context.getFailedShuffleCounter());

TaskAttemptID attemptID0 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 0), 0);
TaskAttemptID attemptID1 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 1), 1);
TaskAttemptID attemptID2 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 2), 2);
TaskAttemptID attemptID3 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 3), 3);
TaskAttemptID attemptID4 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 4), 4);
TaskAttemptID attemptID5 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 5), 5);
TaskAttemptID attemptID6 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 6), 6);
TaskAttemptID attemptID7 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 7), 7);
TaskAttemptID attemptID8 = new TaskAttemptID(
    new org.apache.hadoop.mapred.TaskID(
        new JobID("test", 0), TaskType.MAP, 8), 8);
@Deprecated
public static TaskID read(DataInput in) throws IOException {
    TaskID tipId = new TaskID();
    tipId.readFields(in);
    return tipId;
}
public static TaskID read(DataInput in) throws IOException {
    TaskID tipId = new TaskID();
    tipId.readFields(in);
    return tipId;
}
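// Round-trip sketch for read(): serialize a TaskID with write() and read it
// back (hypothetical values). This is the same pattern the deprecated-methods
// test above exercises.
TaskID original = new TaskID(new JobID("200707121733", 3), TaskType.MAP, 5);
ByteArrayOutputStream buf = new ByteArrayOutputStream();
original.write(new DataOutputStream(buf));
TaskID restored = TaskID.read(
    new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
// restored.equals(original) -> true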
public static org.apache.hadoop.mapred.TaskID createMockTaskAttemptIDFromTezTaskId(
        TezTaskID tezTaId, boolean isMap) {
    TezVertexID vId = tezTaId.getVertexID();
    ApplicationId appId = vId.getDAGId().getApplicationId();
    return new org.apache.hadoop.mapred.TaskID(
        String.valueOf(appId.getClusterTimestamp()) + String.valueOf(vId.getId()),
        appId.getId(), isMap ? TaskType.MAP : TaskType.REDUCE, tezTaId.getId());
}
public static org.apache.hadoop.mapred.TaskAttemptID createMockTaskAttemptIDFromTezTaskAttemptId(
        TezTaskAttemptID tezTaId, boolean isMap) {
    TezVertexID vId = tezTaId.getTaskID().getVertexID();
    ApplicationId appId = vId.getDAGId().getApplicationId();
    return new org.apache.hadoop.mapred.TaskAttemptID(
        new org.apache.hadoop.mapred.TaskID(
            String.valueOf(appId.getClusterTimestamp()) + String.valueOf(vId.getId()),
            appId.getId(), isMap ? TaskType.MAP : TaskType.REDUCE,
            tezTaId.getTaskID().getId()),
        tezTaId.getId());
}
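// Hedged sketch of driving the two Tez converters above, with hypothetical
// IDs. The factory methods (ApplicationId.newInstance and the getInstance
// methods on TezDAGID/TezVertexID/TezTaskID/TezTaskAttemptID) are assumed
// from the YARN and Tez APIs.
ApplicationId appId = ApplicationId.newInstance(1244L, 1);
TezVertexID vertexId = TezVertexID.getInstance(TezDAGID.getInstance(appId, 1), 0);
TezTaskAttemptID tezAttempt =
    TezTaskAttemptID.getInstance(TezTaskID.getInstance(vertexId, 3), 0);
org.apache.hadoop.mapred.TaskAttemptID mrAttempt =
    createMockTaskAttemptIDFromTezTaskAttemptId(tezAttempt, true);
// mrAttempt.toString() -> attempt_12440_0001_m_000003_0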
public static TaskID toMRTaskId(TezTaskID taskid) {
    return new TaskID(
        toMRJobId(taskid.getVertexID().getDAGId()),
        taskid.getVertexID().getId() == 0 ? TaskType.MAP : TaskType.REDUCE,
        taskid.getId());
}
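// The converter above assumes an MR-style Tez DAG in which vertex 0 is the map
// stage and any later vertex is treated as reduce; toMRJobId is the companion
// converter from the same class (not shown here). Hypothetical values:
TezDAGID dagId = TezDAGID.getInstance(ApplicationId.newInstance(1244L, 1), 1);
TaskID mapSide = toMRTaskId(TezTaskID.getInstance(TezVertexID.getInstance(dagId, 0), 7));
TaskID reduceSide = toMRTaskId(TezTaskID.getInstance(TezVertexID.getInstance(dagId, 1), 7));
// mapSide.getTaskType() == TaskType.MAP; reduceSide.getTaskType() == TaskType.REDUCE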