/**
 * Bridges a new-API (mapreduce) task attempt context to an old-API (mapred)
 * {@code TaskAttemptContext}, carrying over the configuration and attempt id.
 *
 * @param context the mapreduce-API context to convert
 * @return an equivalent mapred-API context backed by a {@code NULL} reporter
 */
public static TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext context) {
    JobConf jobConf = new JobConf(context.getConfiguration());
    // round-trip the attempt id through its string form to cross the old/new API boundary
    org.apache.hadoop.mapred.TaskAttemptID attemptId =
            org.apache.hadoop.mapred.TaskAttemptID.forName(context.getTaskAttemptID().toString());
    return createTaskAttemptContext(jobConf, attemptId, Reporter.NULL);
}
/** * create the temporary output file for hadoop RecordWriter. * @param taskNumber The number of the parallel instance. * @param numTasks The number of parallel tasks. * @throws java.io.IOException */ @Override public void open(int taskNumber, int numTasks) throws IOException { // enforce sequential open() calls synchronized (OPEN_MUTEX) { if (Integer.toString(taskNumber + 1).length() > 6) { throw new IOException("Task id too large."); } TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_" + String.format("%" + (6 - Integer.toString(taskNumber + 1).length()) + "s", " ").replace(" ", "0") + Integer.toString(taskNumber + 1) + "_0"); this.jobConf.set("mapred.task.id", taskAttemptID.toString()); this.jobConf.setInt("mapred.task.partition", taskNumber + 1); // for hadoop 2.2 this.jobConf.set("mapreduce.task.attempt.id", taskAttemptID.toString()); this.jobConf.setInt("mapreduce.task.partition", taskNumber + 1); this.context = new TaskAttemptContextImpl(this.jobConf, taskAttemptID); this.outputCommitter = this.jobConf.getOutputCommitter(); JobContext jobContext = new JobContextImpl(this.jobConf, new JobID()); this.outputCommitter.setupJob(jobContext); this.recordWriter = this.mapredOutputFormat.getRecordWriter(null, this.jobConf, Integer.toString(taskNumber + 1), new HadoopDummyProgressable()); } }
// Resolve this task's attempt id from the job configuration and locate its
// log files. NOTE(review): fragment — 'job' is declared outside this view.
TaskAttemptID taskid = TaskAttemptID.forName(job.get("mapred.task.id"));
// second argument presumably flags a cleanup attempt — TODO confirm against TaskLog API
File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
public static TaskID getTaskID(Configuration cfg) { // first try with the attempt since some Hadoop versions mix the two String taskAttemptId = HadoopCfgUtils.getTaskAttemptId(cfg); if (StringUtils.hasText(taskAttemptId)) { try { return TaskAttemptID.forName(taskAttemptId).getTaskID(); } catch (IllegalArgumentException ex) { // the task attempt is invalid (Tez in particular uses the wrong string - see #346) // try to fallback to task id return parseTaskIdFromTaskAttemptId(taskAttemptId); } } String taskIdProp = HadoopCfgUtils.getTaskId(cfg); // double-check task id bug in Hadoop 2.5.x if (StringUtils.hasText(taskIdProp) && !taskIdProp.contains("attempt")) { return TaskID.forName(taskIdProp); } return null; }
/**
 * Sets the task attempt id by parsing its string form.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated use {@link #setTaskID(TaskAttemptID)} instead.
 */
@Deprecated
public void setTaskId(String taskId) {
    this.taskId = TaskAttemptID.forName(taskId);
}
/**
 * Sets the task attempt id by parsing its string form.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated use {@link #setTaskID(TaskAttemptID)} instead.
 */
@Deprecated
public void setTaskId(String taskId) {
    this.taskId = TaskAttemptID.forName(taskId);
}
/**
 * Sets the task attempt id by parsing its string form and delegating to the
 * typed setter.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated use {@link #setTaskAttemptId(TaskAttemptID)} instead.
 */
@Deprecated
public void setTaskId(String taskId) {
    this.setTaskAttemptId(TaskAttemptID.forName(taskId));
}
/**
 * Sets the task attempt id by parsing its string form and delegating to the
 * typed setter.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated use {@link #setTaskAttemptId(TaskAttemptID)} instead.
 */
@Deprecated
public void setTaskId(String taskId) {
    this.setTaskAttemptId(TaskAttemptID.forName(taskId));
}
/**
 * Kills (or fails) the task attempt identified by the given string id.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @param shouldFail if true the attempt is marked failed rather than killed
 * @throws IOException if the underlying kill operation fails
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated Applications should rather use {@link #killTask(TaskAttemptID, boolean)}
 */
@Deprecated
public synchronized void killTask(String taskId, boolean shouldFail) throws IOException {
    killTask(TaskAttemptID.forName(taskId), shouldFail);
}
/**
 * Kills (or fails) the task attempt identified by the given string id.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @param shouldFail if true the attempt is marked failed rather than killed
 * @throws IOException if the underlying kill operation fails
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated Applications should rather use {@link #killTask(TaskAttemptID, boolean)}
 */
@Deprecated
public synchronized void killTask(String taskId, boolean shouldFail) throws IOException {
    killTask(TaskAttemptID.forName(taskId), shouldFail);
}
/**
 * Sets the task attempt id by parsing its string form and delegating to the
 * typed setter.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated use {@link #setTaskAttemptId(TaskAttemptID)} instead.
 */
@Deprecated
public void setTaskId(String taskId) {
    this.setTaskAttemptId(TaskAttemptID.forName(taskId));
}
/**
 * Kills (or fails) the task attempt identified by the given string id.
 *
 * @param taskId string form of a {@link TaskAttemptID}
 * @param shouldFail if true the attempt is marked failed rather than killed
 * @throws IOException if the underlying kill operation fails
 * @throws IllegalArgumentException if the string cannot be parsed as an attempt id
 * @deprecated Applications should rather use {@link #killTask(TaskAttemptID, boolean)}
 */
@Deprecated
public synchronized void killTask(String taskId, boolean shouldFail) throws IOException {
    killTask(TaskAttemptID.forName(taskId), shouldFail);
}
/**
 * Test mapper that deliberately crashes the JVM on the first attempt of the
 * task selected by {@code taskWithCleanup}, so the framework's failure and
 * cleanup handling can be exercised.
 */
public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
    System.err.println(taskid);
    this.reporter = reporter;
    boolean isTargetTask = TaskAttemptID.forName(taskid).getTaskID().getId() == taskWithCleanup;
    boolean isFirstAttempt = taskid.endsWith("_0");
    // hard-kill the JVM — intentional for the test scenario
    if (isTargetTask && isFirstAttempt) {
        System.exit(-1);
    }
}
/**
 * Bridges a new-API (mapreduce) task attempt context to an old-API (mapred)
 * {@code TaskAttemptContext}, carrying over the configuration and attempt id.
 *
 * @param context the mapreduce-API context to convert
 * @return an equivalent mapred-API context backed by a {@code NULL} reporter
 */
public static TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext context) {
    JobConf jobConf = new JobConf(context.getConfiguration());
    // round-trip the attempt id through its string form to cross the old/new API boundary
    org.apache.hadoop.mapred.TaskAttemptID attemptId =
            org.apache.hadoop.mapred.TaskAttemptID.forName(context.getTaskAttemptID().toString());
    return createTaskAttemptContext(jobConf, attemptId, Reporter.NULL);
}
/**
 * Bridges a new-API (mapreduce) task attempt context to an old-API (mapred)
 * {@code TaskAttemptContext}, carrying over the configuration and attempt id.
 *
 * @param context the mapreduce-API context to convert
 * @return an equivalent mapred-API context backed by a {@code NULL} reporter
 */
public static TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext context) {
    JobConf jobConf = new JobConf(context.getConfiguration());
    // round-trip the attempt id through its string form to cross the old/new API boundary
    org.apache.hadoop.mapred.TaskAttemptID attemptId =
            org.apache.hadoop.mapred.TaskAttemptID.forName(context.getTaskAttemptID().toString());
    return createTaskAttemptContext(jobConf, attemptId, Reporter.NULL);
}
/**
 * Decides whether this instance is the designated failing one: only attempt
 * #0 of task #0 sets {@code shouldFail}.
 */
@Override
public void configure(JobConf conf) {
    TaskAttemptID attempt = TaskAttemptID.forName(conf.get("mapred.task.id"));
    // fail only the first attempt of the first task
    shouldFail = (attempt.getId() == 0) && (attempt.getTaskID().getId() == 0);
}
}
/**
 * Initializes the appender: allocates the bounded tail buffer when event
 * capping is configured, points the appender at this task attempt's syslog
 * file in append mode, then completes base-class initialization.
 */
@Override
public void activateOptions() {
    synchronized (this) {
        if (maxEvents > 0) {
            tail = new LinkedList<LoggingEvent>();
        }
        String syslogPath =
                TaskLog.getTaskLogFile(TaskAttemptID.forName(taskId), TaskLog.LogName.SYSLOG).toString();
        setFile(syslogPath);
        setAppend(true);
        super.activateOptions();
    }
}
/**
 * Initializes the appender: allocates the bounded tail buffer when event
 * capping is configured, points the appender at this task attempt's syslog
 * file in append mode, then completes base-class initialization.
 */
@Override
public void activateOptions() {
    synchronized (this) {
        if (maxEvents > 0) {
            tail = new LinkedList<LoggingEvent>();
        }
        String syslogPath =
                TaskLog.getTaskLogFile(TaskAttemptID.forName(taskId), TaskLog.LogName.SYSLOG).toString();
        setFile(syslogPath);
        setAppend(true);
        super.activateOptions();
    }
}
/**
 * Derives the job id string from the task attempt id carried by the given
 * controller context.
 *
 * @param context controller context holding the current task
 * @return the owning job's id in string form
 */
private String getJobId(TaskControllerContext context) {
    String attemptIdString = context.task.getTaskID().toString();
    return TaskAttemptID.forName(attemptIdString).getJobID().toString();
}
/**
 * Reads the full contents of the current task attempt's stdout log file.
 *
 * @param conf job configuration carrying the attempt id
 * @return the stdout log contents
 * @throws Exception if the log file cannot be read
 */
private String readStdOut(JobConf conf) throws Exception {
    TaskAttemptID attempt = TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID));
    // second argument presumably flags a cleanup attempt — TODO confirm against TaskLog API
    File stdOutFile = TaskLog.getTaskLogFile(attempt, false, TaskLog.LogName.STDOUT);
    return readFile(stdOutFile);
}