/**
 * Convenience constructor that fabricates a mock MR {@code TaskAttemptID} from the
 * supplied Tez coordinates and delegates to the main constructor.
 *
 * @param conf              job configuration
 * @param tezCounters       counters to expose through the context
 * @param clusterId         cluster timestamp used in the synthetic attempt id
 * @param vertexIndex       Tez vertex index of the task
 * @param appId             application id component of the synthetic attempt id
 * @param taskIndex         index of the task within the vertex
 * @param taskAttemptNumber attempt number of the task
 * @param isMap             whether the synthetic id is typed as a map task
 * @param reporter          reporter forwarded to the delegate constructor
 */
public TaskAttemptContextImpl(Configuration conf, TezCounters tezCounters, long clusterId,
    int vertexIndex, int appId, int taskIndex, int taskAttemptNumber, boolean isMap,
    Reporter reporter) {
  // TODO NEWTEZ Can the jt Identifier string be taskContext.getUniqueId ?
  this(conf,
      createMockTaskAttemptID(clusterId, vertexIndex, appId, taskIndex, taskAttemptNumber, isMap),
      tezCounters, reporter);
}
/** * Set the current status of the task to the given string. */ @Override public void setStatus(String status) { setStatusString(status); // Nothing to do until InputContext supports some kind of custom string // diagnostics. } }
/**
 * Creates a reader over the given split using the mapreduce (new) API.
 * Builds a mock task-attempt context from the Tez coordinates, resolves and
 * instantiates the configured {@code InputFormat}, and — when a split is
 * already available — sets up the record reader immediately.
 *
 * @throws IOException if the configured InputFormat class cannot be resolved
 */
public MRReaderMapReduce(JobConf jobConf, InputSplit inputSplit, TezCounters tezCounters,
    TezCounter inputRecordCounter, long clusterId, int vertexIndex, int appId, int taskIndex,
    int taskAttemptNumber, InputContext context) throws IOException {
  super(context);
  this.inputRecordCounter = inputRecordCounter;
  // isMap is hard-coded to true here — this reader drives the input (map) side.
  this.taskAttemptContext = new TaskAttemptContextImpl(jobConf, tezCounters, clusterId,
      vertexIndex, appId, taskIndex, taskAttemptNumber, true, null);

  Class<? extends org.apache.hadoop.mapreduce.InputFormat<?, ?>> formatClass;
  try {
    formatClass = taskAttemptContext.getInputFormatClass();
  } catch (ClassNotFoundException e) {
    throw new IOException("Unable to instantiate InputFormat class", e);
  }
  inputFormat = ReflectionUtils.newInstance(formatClass, jobConf);

  if (inputSplit != null) {
    this.inputSplit = inputSplit;
    setupNewRecordReader();
  }
}
/**
 * Converts a Tez task id into the MR {@code TaskID} used for output naming.
 * The second flag passed to the factory is whether the task belongs to
 * vertex 0 (presumably the map/isMap flag — confirm against the factory).
 */
public static TaskID toMRTaskIdForOutput(TezTaskID taskid) {
  boolean vertexZero = taskid.getVertexID().getId() == 0;
  return org.apache.tez.mapreduce.hadoop.mapreduce.TaskAttemptContextImpl
      .createMockTaskAttemptIDFromTezTaskId(taskid, vertexZero);
}
/**
 * Builds a task-attempt context for the given attempt id, reusing this
 * instance's job configuration, counters, and map/reduce flag.
 */
private TaskAttemptContext createTaskAttemptContext(TaskAttemptID attemptId) {
  // No Reporter is needed here, hence the trailing null.
  return new TaskAttemptContextImpl(
      this.jobConf, attemptId, getContext().getCounters(), isMapperOutput, null);
}
/**
 * Publishes MapReduce task-identity properties into the given configuration,
 * derived from the Tez processor context: attempt id, task id, map flag, and
 * task partition.
 *
 * @param context        Tez processor context supplying the app/task coordinates
 * @param config         configuration to populate
 * @param isMapperOutput whether the synthetic attempt id is typed as a map task
 */
public static void setMRProperties(ProcessorContext context, Configuration config,
    boolean isMapperOutput) {
  // Fabricate an MR attempt id from the Tez application/task coordinates.
  TaskAttemptID taskAttemptId =
      org.apache.tez.mapreduce.hadoop.mapreduce.TaskAttemptContextImpl.createMockTaskAttemptID(
          context.getApplicationId().getClusterTimestamp(),
          context.getTaskVertexIndex(),
          context.getApplicationId().getId(),
          context.getTaskIndex(),
          context.getTaskAttemptNumber(),
          isMapperOutput);
  config.set(JobContext.TASK_ATTEMPT_ID, taskAttemptId.toString());
  config.set(JobContext.TASK_ID, taskAttemptId.getTaskID().toString());
  config.setBoolean(JobContext.TASK_ISMAP, isMapperOutput);
  config.setInt(JobContext.TASK_PARTITION, taskAttemptId.getTaskID().getId());
}
}
/**
 * Publishes MapReduce task-identity properties into the given configuration,
 * derived from the Tez processor context: attempt id, task id, map flag, and
 * task partition.
 *
 * @param context        Tez processor context supplying the app/task coordinates
 * @param config         configuration to populate
 * @param isMapperOutput whether the synthetic attempt id is typed as a map task
 */
public static void setMRProperties(ProcessorContext context, Configuration config,
    boolean isMapperOutput) {
  // Fabricate an MR attempt id from the Tez application/task coordinates.
  TaskAttemptID taskAttemptId =
      org.apache.tez.mapreduce.hadoop.mapreduce.TaskAttemptContextImpl.createMockTaskAttemptID(
          context.getApplicationId().getClusterTimestamp(),
          context.getTaskVertexIndex(),
          context.getApplicationId().getId(),
          context.getTaskIndex(),
          context.getTaskAttemptNumber(),
          isMapperOutput);
  config.set(JobContext.TASK_ATTEMPT_ID, taskAttemptId.toString());
  config.set(JobContext.TASK_ID, taskAttemptId.getTaskID().toString());
  config.setBoolean(JobContext.TASK_ISMAP, isMapperOutput);
  config.setInt(JobContext.TASK_PARTITION, taskAttemptId.getTaskID().getId());
}
}
getContext().getDAGAttemptNumber()); TaskAttemptID taskAttemptId = org.apache.tez.mapreduce.hadoop.mapreduce.TaskAttemptContextImpl .createMockTaskAttemptID(getContext().getApplicationId().getClusterTimestamp(), getContext().getTaskVertexIndex(), getContext().getApplicationId().getId(), getContext().getTaskIndex(), getContext().getTaskAttemptNumber(), isMapperOutput);
.createMockTaskAttemptID(getContext().getApplicationId().getClusterTimestamp(), getContext().getTaskVertexIndex(), getContext().getApplicationId().getId(), getContext().getTaskIndex(), getContext().getTaskAttemptNumber(), isMapperOutput);