/**
 * Determines the id of the task instance this code is running in.
 *
 * @param conf Hadoop job configuration the task id is read from
 * @return the numeric task id, or {@code NO_TASK_ID} when it cannot be determined
 */
private int detectCurrentInstance(Configuration conf) {
    TaskID taskID = HadoopCfgUtils.getTaskID(conf);
    if (taskID == null) {
        // fix: String.format with no format arguments was redundant (and would
        // misbehave if the message ever contained a '%'); log the literal directly
        log.warn("Cannot determine task id - redirecting writes in a random fashion");
        return NO_TASK_ID;
    }
    return taskID.getId();
}
/**
 * Determines the id of the task instance this code is running in.
 *
 * @param conf Hadoop job configuration the task id is read from
 * @return the numeric task id, or {@code NO_TASK_ID} when it cannot be determined
 */
private int detectCurrentInstance(Configuration conf) {
    TaskID taskID = HadoopCfgUtils.getTaskID(conf);
    if (taskID == null) {
        // fix: String.format with no format arguments was redundant (and would
        // misbehave if the message ever contained a '%'); log the literal directly
        log.warn("Cannot determine task id - redirecting writes in a random fashion");
        return NO_TASK_ID;
    }
    return taskID.getId();
}
/**
 * Determines the id of the task instance this code is running in.
 *
 * @param conf Hadoop job configuration the task id is read from
 * @return the numeric task id, or {@code NO_TASK_ID} when it cannot be determined
 */
private int detectCurrentInstance(Configuration conf) {
    TaskID taskID = HadoopCfgUtils.getTaskID(conf);
    if (taskID == null) {
        // fix: String.format with no format arguments was redundant (and would
        // misbehave if the message ever contained a '%'); log the literal directly
        log.warn("Cannot determine task id - redirecting writes in a random fashion");
        return NO_TASK_ID;
    }
    return taskID.getId();
}
/**
 * Determines the id of the task instance this code is running in.
 *
 * @param conf Hadoop job configuration the task id is read from
 * @return the numeric task id, or {@code NO_TASK_ID} when it cannot be determined
 */
private int detectCurrentInstance(Configuration conf) {
    TaskID taskID = HadoopCfgUtils.getTaskID(conf);
    if (taskID == null) {
        // fix: String.format with no format arguments was redundant (and would
        // misbehave if the message ever contained a '%'); log the literal directly
        log.warn("Cannot determine task id - redirecting writes in a random fashion");
        return NO_TASK_ID;
    }
    return taskID.getId();
}
/**
 * Determines the id of the task instance this code is running in.
 *
 * @param conf Hadoop job configuration the task id is read from
 * @return the numeric task id, or {@code NO_TASK_ID} when it cannot be determined
 */
private int detectCurrentInstance(Configuration conf) {
    TaskID taskID = HadoopCfgUtils.getTaskID(conf);
    if (taskID == null) {
        // fix: String.format with no format arguments was redundant (and would
        // misbehave if the message ever contained a '%'); log the literal directly
        log.warn("Cannot determine task id - redirecting writes in a random fashion");
        return NO_TASK_ID;
    }
    return taskID.getId();
}
/** Returns the numeric id (within its job) of the task behind this report. */
public int getTaskIDNum() {
    return taskReport
            .getTaskID()
            .getId();
}
/** Returns the numeric id (within its job) of the task behind this report. */
public int getTaskIDNum() {
    return taskReport
            .getTaskID()
            .getId();
}
/**
 * Test mapper: kills the JVM with a non-zero exit code, but only on the
 * very first attempt ("_0") of the task whose id matches taskWithCleanup,
 * to simulate a task failure that needs cleanup.
 */
public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output,
        Reporter reporter) throws IOException {
    System.err.println(taskid);
    this.reporter = reporter;
    // resolve this attempt's task id and compare against the designated task
    boolean isCleanupTask = TaskAttemptID.forName(taskid).getTaskID().getId() == taskWithCleanup;
    if (isCleanupTask && taskid.endsWith("_0")) {
        System.exit(-1);
    }
}
/** Builds the "part-NNNNN" output file name for this task attempt. */
@Private
public synchronized String getOutputName() {
    String partitionSuffix = NUMBER_FORMAT.format(taskAttemptId.getTaskID().getId());
    return "part-" + partitionSuffix;
}
@Override public String generateFileName(JobContext context, String topic, String brokerId, int partitionId, int count, long offset, String encodedPartition) { //adding taskId to filename to avoid name collision int taskId = ((TaskAttemptContext) context).getTaskAttemptID().getTaskID().getId(); return super.generateFileName(context, topic, String.valueOf(System.currentTimeMillis() / 1000), taskId, count, 0, encodedPartition); }
/**
 * Drains the map and reduce cleanup queues, folding each pending
 * attempt's status back into its owning TaskInProgress.
 */
private void clearUncleanTasks() {
    while (!mapCleanupTasks.isEmpty()) {
        TaskAttemptID attempt = mapCleanupTasks.remove(0);
        TaskInProgress tip = maps[attempt.getTaskID().getId()];
        updateTaskStatus(tip, tip.getTaskStatus(attempt));
    }
    while (!reduceCleanupTasks.isEmpty()) {
        TaskAttemptID attempt = reduceCleanupTasks.remove(0);
        TaskInProgress tip = reduces[attempt.getTaskID().getId()];
        updateTaskStatus(tip, tip.getTaskStatus(attempt));
    }
}
@Override public String generateFileName(JobContext context, String topic, String brokerId, int partitionId, int count, long offset, String encodedPartition) { //adding taskId to filename to avoid name collision int taskId = ((TaskAttemptContext) context).getTaskAttemptID().getTaskID().getId(); return super.generateFileName(context, topic, String.valueOf(System.currentTimeMillis() / 1000), taskId, count, 0, encodedPartition); }
/**
 * Drains the map and reduce cleanup queues, folding each pending
 * attempt's status back into its owning TaskInProgress.
 */
private void clearUncleanTasks() {
    while (!mapCleanupTasks.isEmpty()) {
        TaskAttemptID attempt = mapCleanupTasks.remove(0);
        TaskInProgress tip = maps[attempt.getTaskID().getId()];
        updateTaskStatus(tip, tip.getTaskStatus(attempt));
    }
    while (!reduceCleanupTasks.isEmpty()) {
        TaskAttemptID attempt = reduceCleanupTasks.remove(0);
        TaskInProgress tip = reduces[attempt.getTaskID().getId()];
        updateTaskStatus(tip, tip.getTaskStatus(attempt));
    }
}
/**
 * Drains the map and reduce cleanup queues, folding each pending
 * attempt's status back into its owning TaskInProgress.
 */
private void clearUncleanTasks() {
    while (!mapCleanupTasks.isEmpty()) {
        TaskAttemptID attempt = mapCleanupTasks.remove(0);
        TaskInProgress tip = maps[attempt.getTaskID().getId()];
        // third argument (tracker info) intentionally absent here
        updateTaskStatus(tip, tip.getTaskStatus(attempt), null);
    }
    while (!reduceCleanupTasks.isEmpty()) {
        TaskAttemptID attempt = reduceCleanupTasks.remove(0);
        TaskInProgress tip = reduces[attempt.getTaskID().getId()];
        updateTaskStatus(tip, tip.getTaskStatus(attempt), null);
    }
}
/**
 * Arms the failure flag exactly once: only attempt 0 of task 0
 * is marked to fail.
 */
@Override
public void configure(JobConf conf) {
    TaskAttemptID id = TaskAttemptID.forName(conf.get("mapred.task.id"));
    boolean firstAttempt = id.getId() == 0;
    // short-circuit preserved: task id is only inspected for attempt 0
    shouldFail = firstAttempt && id.getTaskID().getId() == 0;
}
}
@Override public String getWorkingFileName(JobContext context, String topic, String brokerId, int partitionId, String encodedPartition) { //adding taskId to filename to avoid name collision int taskId = ((TaskAttemptContext) context).getTaskAttemptID().getTaskID().getId(); return super.getWorkingFileName(context, topic, "0", taskId, encodedPartition); }
@Override public String getWorkingFileName(JobContext context, String topic, String brokerId, int partitionId, String encodedPartition) { //adding taskId to filename to avoid name collision int taskId = ((TaskAttemptContext) context).getTaskAttemptID().getTaskID().getId(); return super.getWorkingFileName(context, topic, "0", taskId, encodedPartition); }
/** Create a local reduce input file name. * @param mapTaskId a map task id * @param reduceTaskId a reduce task id * @param size the size of the file */ public Path getInputFileForWrite(TaskID mapId, TaskAttemptID reduceTaskId, long size) throws IOException { // TODO *oom* should use a format here return lDirAlloc.getLocalPathForWrite(TaskTracker.getIntermediateOutputDir( jobId.toString(), reduceTaskId.toString()) + "/map_" + mapId.getId() + ".out", size, conf); }
/**
 * Populates the MapReduce task-identity properties (attempt id, task id,
 * map flag, partition) on the given configuration, derived from a Tez
 * processor context via a mock task attempt id.
 *
 * @param context Tez processor context supplying the identity components
 * @param config configuration to populate
 * @param isMapperOutput whether the properties describe a map-side task
 */
public static void setMRProperties(ProcessorContext context, Configuration config,
        boolean isMapperOutput) {
    // NOTE(review): argument order assumed to be (clusterTimestamp, vertexIndex,
    // appId, taskIndex, attemptNumber, isMap) — confirm against the Tez API
    TaskAttemptID taskAttemptId =
        org.apache.tez.mapreduce.hadoop.mapreduce.TaskAttemptContextImpl.createMockTaskAttemptID(
            context.getApplicationId().getClusterTimestamp(),
            context.getTaskVertexIndex(),
            context.getApplicationId().getId(),
            context.getTaskIndex(),
            context.getTaskAttemptNumber(),
            isMapperOutput);
    config.set(JobContext.TASK_ATTEMPT_ID, taskAttemptId.toString());
    config.set(JobContext.TASK_ID, taskAttemptId.getTaskID().toString());
    config.setBoolean(JobContext.TASK_ISMAP, isMapperOutput);
    config.setInt(JobContext.TASK_PARTITION, taskAttemptId.getTaskID().getId());
}
}
/**
 * Populates the MapReduce task-identity properties (attempt id, task id,
 * map flag, partition) on the given configuration, derived from a Tez
 * processor context via a mock task attempt id.
 *
 * @param context Tez processor context supplying the identity components
 * @param config configuration to populate
 * @param isMapperOutput whether the properties describe a map-side task
 */
public static void setMRProperties(ProcessorContext context, Configuration config,
        boolean isMapperOutput) {
    // NOTE(review): argument order assumed to be (clusterTimestamp, vertexIndex,
    // appId, taskIndex, attemptNumber, isMap) — confirm against the Tez API
    TaskAttemptID taskAttemptId =
        org.apache.tez.mapreduce.hadoop.mapreduce.TaskAttemptContextImpl.createMockTaskAttemptID(
            context.getApplicationId().getClusterTimestamp(),
            context.getTaskVertexIndex(),
            context.getApplicationId().getId(),
            context.getTaskIndex(),
            context.getTaskAttemptNumber(),
            isMapperOutput);
    config.set(JobContext.TASK_ATTEMPT_ID, taskAttemptId.toString());
    config.set(JobContext.TASK_ID, taskAttemptId.getTaskID().toString());
    config.setBoolean(JobContext.TASK_ISMAP, isMapperOutput);
    config.setInt(JobContext.TASK_PARTITION, taskAttemptId.getTaskID().getId());
}
}