/**
 * @param jobConf Job configuration.
 * @param taskCtx Task context.
 * @param directWrite Direct write flag.
 * @param fileName File name.
 * @param attempt Task attempt ID.
 * @throws IOException In case of IO exception.
 */
HadoopV1OutputCollector(JobConf jobConf, HadoopTaskContext taskCtx, boolean directWrite,
    @Nullable String fileName, TaskAttemptID attempt) throws IOException {
    this.jobConf = jobConf;
    this.taskCtx = taskCtx;
    this.attempt = attempt;

    if (directWrite) {
        jobConf.set("mapreduce.task.attempt.id", attempt.toString());

        OutputFormat outFormat = jobConf.getOutputFormat();

        // The FileSystem argument is unused by FileOutputFormat-style implementations,
        // so null is passed here.
        writer = outFormat.getRecordWriter(null, jobConf, fileName, Reporter.NULL);
    }
    else
        writer = null;
}
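For context, a minimal standalone sketch of the attempt ID a caller might pass to this constructor; the ID string below is hypothetical, but its layout is the standard Hadoop one.

import org.apache.hadoop.mapred.TaskAttemptID;

public class CollectorAttemptIdDemo {
    public static void main(String[] args) {
        // Hypothetical attempt ID: job 0003 of a 2007-07-12 JobTracker, map task 5, attempt 0.
        TaskAttemptID attempt = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");

        // With directWrite enabled, this exact string lands in "mapreduce.task.attempt.id".
        System.out.println(attempt); // attempt_200707121733_0003_m_000005_0
    }
}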
/**
 * Create the temporary output file for the Hadoop RecordWriter.
 * @param taskNumber The number of the parallel instance.
 * @param numTasks The number of parallel tasks.
 * @throws java.io.IOException Thrown if the record writer could not be opened.
 */
@Override
public void open(int taskNumber, int numTasks) throws IOException {
    // enforce sequential open() calls
    synchronized (OPEN_MUTEX) {
        if (Integer.toString(taskNumber + 1).length() > 6) {
            throw new IOException("Task id too large.");
        }

        // Zero-pad the one-based task number to six digits. Using %06d also avoids
        // the zero-width format error the old manual padding hit when the task
        // number already had exactly six digits.
        TaskAttemptID taskAttemptID = TaskAttemptID.forName(
                "attempt__0000_r_" + String.format("%06d", taskNumber + 1) + "_0");

        this.jobConf.set("mapred.task.id", taskAttemptID.toString());
        this.jobConf.setInt("mapred.task.partition", taskNumber + 1);
        // for hadoop 2.2
        this.jobConf.set("mapreduce.task.attempt.id", taskAttemptID.toString());
        this.jobConf.setInt("mapreduce.task.partition", taskNumber + 1);

        this.context = new TaskAttemptContextImpl(this.jobConf, taskAttemptID);
        this.outputCommitter = this.jobConf.getOutputCommitter();

        JobContext jobContext = new JobContextImpl(this.jobConf, new JobID());
        this.outputCommitter.setupJob(jobContext);

        this.recordWriter = this.mapredOutputFormat.getRecordWriter(null, this.jobConf,
                Integer.toString(taskNumber + 1), new HadoopDummyProgressable());
    }
}
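A quick standalone check of the attempt IDs the padding above produces (the task number is made up):

import org.apache.hadoop.mapred.TaskAttemptID;

public class AttemptIdPaddingDemo {
    public static void main(String[] args) {
        int taskNumber = 41; // hypothetical parallel instance number

        // Same construction as in open(): one-based, zero-padded to six digits.
        TaskAttemptID id = TaskAttemptID.forName(
                "attempt__0000_r_" + String.format("%06d", taskNumber + 1) + "_0");
        System.out.println(id); // attempt__0000_r_000042_0
    }
}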
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());

RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
/**
 * Test behavior without TASK_LOG_DIR set.
 *
 * @throws IOException on failure
 */
@Test(timeout = 50000)
public void testTaskLogWithoutTaskLogDir() throws IOException {
    System.clearProperty(YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR);

    // With no container log dir set, the MRv2 log dir must be null.
    assertNull(TaskLog.getMRv2LogDir());

    TaskAttemptID taid = mock(TaskAttemptID.class);
    JobID jid = new JobID("job", 1);
    when(taid.getJobID()).thenReturn(jid);
    when(taid.toString()).thenReturn("JobId");

    File f = TaskLog.getTaskLogFile(taid, true, LogName.STDOUT);
    assertTrue(f.getAbsolutePath().endsWith("stdout"));
}
@Override
public boolean ping(TaskAttemptID taskAttemptID) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Ping from " + taskAttemptID.toString());
    }
    return true;
}
/**
 * Returns the task attempt id as a string.
 * @return task attempt id
 * @deprecated use {@link #getTaskAttemptId()} instead.
 */
@Deprecated
public String getTaskId() {
    return getTaskAttemptId().toString();
}
private String getTaskCacheDirectory(TaskControllerContext context) {
    // Under JVM reuse, the task-specific directory differs from what is set in
    // env.workDir, so it is rebuilt from the task id every time. workDir is
    // <jobCacheDir>/<taskId>/work, so its grandparent is the per-job cache dir.
    String taskId = context.task.getTaskID().toString();
    File cacheDirForJob = context.env.workDir.getParentFile().getParentFile();

    if (context.task.isTaskCleanupTask()) {
        taskId = taskId + TaskTracker.TASK_CLEANUP_SUFFIX;
    }

    return new File(cacheDirForJob, taskId).getAbsolutePath();
}
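A hedged illustration of the path arithmetic above, assuming the Hadoop 1.x jobcache layout <local>/jobcache/<jobid>/<attemptid>/work (all paths and IDs below are hypothetical):

import java.io.File;

public class CacheDirDemo {
    public static void main(String[] args) {
        // Hypothetical workDir as the TaskTracker would set it.
        File workDir = new File("/tmp/mapred/local/jobcache/job_200707121733_0003"
                + "/attempt_200707121733_0003_m_000005_0/work");

        // Grandparent of workDir is the per-job cache directory.
        File cacheDirForJob = workDir.getParentFile().getParentFile();

        String taskId = "attempt_200707121733_0003_m_000005_0";
        System.out.println(new File(cacheDirForJob, taskId).getPath());
        // /tmp/mapred/local/jobcache/job_200707121733_0003/attempt_200707121733_0003_m_000005_0
    }
}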
/**
 * Return the path to a local map output index file created earlier.
 * @param mapTaskId a map task id
 * @return path to the index file
 */
public Path getOutputIndexFile(TaskAttemptID mapTaskId) throws IOException {
    return lDirAlloc.getLocalPathToRead(
        TaskTracker.getIntermediateOutputDir(jobId.toString(), mapTaskId.toString())
            + "/file.out.index", conf);
}

/**
 * Return a local map spill index file created earlier.
 * @param mapTaskId a map task id
 * @param spillNumber the spill number
 * @return path to the spill index file
 */
public Path getSpillIndexFile(TaskAttemptID mapTaskId, int spillNumber) throws IOException {
    return lDirAlloc.getLocalPathToRead(
        TaskTracker.getIntermediateOutputDir(jobId.toString(), mapTaskId.toString())
            + "/spill" + spillNumber + ".out.index", conf);
}

/**
 * Create a local map output index file name.
 * @param mapTaskId a map task id
 * @param size the size of the file
 * @return writable path for the index file
 */
public Path getOutputIndexFileForWrite(TaskAttemptID mapTaskId, long size) throws IOException {
    return lDirAlloc.getLocalPathForWrite(
        TaskTracker.getIntermediateOutputDir(jobId.toString(), mapTaskId.toString())
            + "/file.out.index", size, conf);
}

/**
 * Return the path to a local map output file created earlier.
 * @param mapTaskId a map task id
 * @return path to the output file
 */
public Path getOutputFile(TaskAttemptID mapTaskId) throws IOException {
    return lDirAlloc.getLocalPathToRead(
        TaskTracker.getIntermediateOutputDir(jobId.toString(), mapTaskId.toString())
            + "/file.out", conf);
}

/**
 * Create a local map output file name.
 * @param mapTaskId a map task id
 * @param size the size of the file
 * @return writable path for the output file
 */
public Path getOutputFileForWrite(TaskAttemptID mapTaskId, long size) throws IOException {
    return lDirAlloc.getLocalPathForWrite(
        TaskTracker.getIntermediateOutputDir(jobId.toString(), mapTaskId.toString())
            + "/file.out", size, conf);
}
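The helpers above all key the intermediate directory off the job and attempt ID strings. A standalone sketch of the two components handed to TaskTracker.getIntermediateOutputDir (IDs hypothetical):

import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.TaskAttemptID;

public class IntermediatePathDemo {
    public static void main(String[] args) {
        TaskAttemptID mapId = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
        JobID jobId = mapId.getJobID();

        System.out.println(jobId); // job_200707121733_0003
        System.out.println(mapId); // attempt_200707121733_0003_m_000005_0
        // The files then live under that directory as file.out, file.out.index,
        // and spill<N>.out.index.
    }
}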
/** Creates the working directory pathname for a task attempt. */
static File formWorkDir(LocalDirAllocator lDirAlloc, TaskAttemptID task,
                        boolean isCleanup, JobConf conf) throws IOException {
    Path workDir = lDirAlloc.getLocalPathToRead(
        TaskTracker.getTaskWorkDir(conf.getUser(), task.getJobID().toString(),
            task.toString(), isCleanup), conf);

    return new File(workDir.toString());
}
/**
 * Localize the given JobConf to be specific for this task.
 */
public void localizeConfiguration(JobConf conf) throws IOException {
    conf.set(JobContext.TASK_ID, taskId.getTaskID().toString());
    conf.set(JobContext.TASK_ATTEMPT_ID, taskId.toString());
    conf.setBoolean(JobContext.TASK_ISMAP, isMapTask());
    conf.setInt(JobContext.TASK_PARTITION, partition);
    conf.set(JobContext.ID, taskId.getJobID().toString());
}
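A sketch of the consumer side: reading the identity back out of the localized JobConf. The helper name dumpTaskIdentity and the printed example values are hypothetical.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobContext;

class TaskIdentityDump {
    /** Hypothetical helper: print what localizeConfiguration(conf) wrote. */
    static void dumpTaskIdentity(JobConf conf) {
        System.out.println(conf.get(JobContext.TASK_ID));                  // task_..._m_000005
        System.out.println(conf.get(JobContext.TASK_ATTEMPT_ID));          // attempt_..._m_000005_0
        System.out.println(conf.getBoolean(JobContext.TASK_ISMAP, false)); // true for a map task
        System.out.println(conf.getInt(JobContext.TASK_PARTITION, -1));    // partition number
        System.out.println(conf.get(JobContext.ID));                       // job_...
    }
}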
@Override
public void done(TaskAttemptID taskAttemptID) throws IOException {
    LOG.info("Done acknowledgment from " + taskAttemptID.toString());

    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
        TypeConverter.toYarn(taskAttemptID);

    taskHeartbeatHandler.progressing(attemptID);
    context.getEventHandler().handle(
        new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE));
}
private String getJobId(TaskControllerContext context) {
    // The task id string is a full attempt id; parse it and extract the job id.
    String taskId = context.task.getTaskID().toString();
    TaskAttemptID tId = TaskAttemptID.forName(taskId);
    return tId.getJobID().toString();
}
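Round-trip check of the conversion above (the ID is hypothetical): a well-formed attempt string parses back and yields its embedded job ID.

import org.apache.hadoop.mapred.TaskAttemptID;

public class JobIdFromAttemptDemo {
    public static void main(String[] args) {
        TaskAttemptID tId = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
        System.out.println(tId.getJobID()); // job_200707121733_0003
    }
}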