/** Returns {@code true} when {@code context} belongs to a reduce-task attempt. */
public static boolean isReduce(TaskAttemptContext context) {
    return context.getTaskAttemptID().getTaskType() == TaskType.REDUCE;
}
/** Returns {@code true} when {@code context} belongs to a map-task attempt. */
public static boolean isMap(TaskAttemptContext context) {
    return context.getTaskAttemptID().getTaskType() == TaskType.MAP;
}
/**
 * Collects the counters tracked for this task: the built-in output-record
 * counter matching the task type (map vs. reduce) plus the user counter
 * {@code name} in {@code COUNTER_GROUP}.
 *
 * @param name user counter name within {@code COUNTER_GROUP}; must not be null
 * @return the two counters, or an empty list if counter lookup fails
 */
private List<Counter> getCounters(String name) {
    assert name != null;
    try {
        // Map tasks report MAP_OUTPUT_RECORDS; everything else falls back to
        // REDUCE_OUTPUT_RECORDS, matching the original if/else.
        TaskCounter outputCounter =
            context.getTaskAttemptID().getTaskType() == TaskType.MAP
                ? TaskCounter.MAP_OUTPUT_RECORDS
                : TaskCounter.REDUCE_OUTPUT_RECORDS;
        List<Counter> counters = new ArrayList<>();
        counters.add(context.getCounter(outputCounter));
        counters.add(context.getCounter(COUNTER_GROUP, name));
        return counters;
    } catch (RuntimeException e) {
        // Counter creation is best-effort: log and degrade to an empty list.
        LOG.warn("Failed to create counters", e);
        return Collections.emptyList();
    }
}
/**
 * Resolves the HDFS directory that should hold this task's log output.
 *
 * <p>When the {@code SINGLEFILE} setting is absent, logs go to the
 * {@code _log} subdirectory of the configured output directory; otherwise
 * they go to a sibling directory of the single output file, named
 * {@code _log_<basename>}.
 *
 * @param context task attempt context supplying the job {@link Configuration}
 * @return the log directory as an {@link HDFSPath}
 */
public static HDFSPath getLogDir(TaskAttemptContext context) {
    // Fix: dropped the unused taskAttemptID/taskType locals the original
    // computed and never read (getTaskAttemptID is a plain getter).
    Configuration conf = context.getConfiguration();
    String filename = conf.get(SINGLEFILE);
    if (filename == null) {
        return new HDFSPath(conf, conf.get(OUTPUTDIR)).getSubdir("_log");
    }
    // Basename of the single output file, i.e. everything after the last '/'.
    String baseName = filename.substring(filename.lastIndexOf('/') + 1);
    return new HDFSPath(conf, filename).getParent().getSubdir("_log_" + baseName);
}
/**
 * Maps the running task's type to the name of the matching built-in
 * output-record counter.
 *
 * @param context the task's input/output context
 * @return {@code MAP_OUTPUT_RECORDS} for mappers, {@code REDUCE_OUTPUT_RECORDS} for reducers
 * @throws IllegalArgumentException for any other task type
 */
private String getCounterName(TaskInputOutputContext context) {
    MapReduceMetrics.TaskType taskType =
        MapReduceMetrics.TaskType.from(context.getTaskAttemptID().getTaskType());
    if (taskType == MapReduceMetrics.TaskType.Mapper) {
        return TaskCounter.MAP_OUTPUT_RECORDS.name();
    }
    if (taskType == MapReduceMetrics.TaskType.Reducer) {
        return TaskCounter.REDUCE_OUTPUT_RECORDS.name();
    }
    throw new IllegalArgumentException("Illegal task type: " + taskType);
} }
/**
 * Resolves which built-in output-record counter name applies to the
 * current task.
 *
 * @param context the task's input/output context
 * @return the counter name for mapper or reducer output records
 * @throws IllegalArgumentException for any other task type
 */
private String getCounterName(TaskInputOutputContext context) {
    MapReduceMetrics.TaskType taskType =
        MapReduceMetrics.TaskType.from(context.getTaskAttemptID().getTaskType());
    if (taskType == MapReduceMetrics.TaskType.Mapper) {
        return TaskCounter.MAP_OUTPUT_RECORDS.name();
    }
    if (taskType == MapReduceMetrics.TaskType.Reducer) {
        return TaskCounter.REDUCE_OUTPUT_RECORDS.name();
    }
    throw new IllegalArgumentException("Illegal task type: " + taskType);
} }
/**
 * Returns the numeric task index of this attempt (e.g. {@code 3} for
 * attempt {@code ..._m_000003_0}).
 *
 * @param context task attempt context to read the ID from
 * @return the task's integer index within the job
 */
public static int getTaskID(TaskAttemptContext context) {
    // Fix: dropped the unused taskType local (and the taskAttemptID temp)
    // the original computed without ever reading.
    return context.getTaskAttemptID().getTaskID().getId();
}
/**
 * Builds the per-task log file name: {@code map.NNNNN} or
 * {@code reduce.NNNNN} using the zero-padded task index, or the literal
 * {@code "other"} for any other task type.
 *
 * @param context task attempt context identifying the task
 * @return the log file name for this task
 */
public static String getLogFilename(TaskAttemptContext context) {
    // Fix: removed the unused `attempt` local and its dead
    // ContextTools.getAttemptID(context) call (a plain getter — presumably
    // side-effect free; confirm against ContextTools if in doubt).
    TaskType taskType = context.getTaskAttemptID().getTaskType();
    int task = ContextTools.getTaskID(context);
    if (taskType == TaskType.MAP) {
        return sprintf("map.%05d", task);
    }
    if (taskType == TaskType.REDUCE) {
        return sprintf("reduce.%05d", task);
    }
    return "other";
}
public void map(Object key, Text value, Context context) throws IOException, InterruptedException { // Make one mapper slower for speculative execution TaskAttemptID taid = context.getTaskAttemptID(); long sleepTime = 100; Configuration conf = context.getConfiguration(); boolean test_speculate_map = conf.getBoolean(MRJobConfig.MAP_SPECULATIVE, false); // IF TESTING MAPPER SPECULATIVE EXECUTION: // Make the "*_m_000000_0" attempt take much longer than the others. // When speculative execution is enabled, this should cause the attempt // to be killed and restarted. At that point, the attempt ID will be // "*_m_000000_1", so sleepTime will still remain 100ms. if ( (taid.getTaskType() == TaskType.MAP) && test_speculate_map && (taid.getTaskID().getId() == 0) && (taid.getId() == 0)) { sleepTime = 10000; } try{ Thread.sleep(sleepTime); } catch(InterruptedException ie) { // Ignore } context.write(value, new IntWritable(1)); } }
/**
 * Mask the job ID part in a {@link TaskAttemptID}.
 *
 * @param attemptId raw {@link TaskAttemptID} read from trace
 * @return masked {@link TaskAttemptID} with empty {@link JobID}.
 */
private TaskAttemptID maskAttemptID(TaskAttemptID attemptId) {
    // A default-constructed JobID supplies the blank identifier/id pair.
    JobID blankJob = new JobID();
    return new TaskAttemptID(
        blankJob.getJtIdentifier(),
        blankJob.getId(),
        attemptId.getTaskType(),
        attemptId.getTaskID().getId(),
        attemptId.getId());
}
/**
 * Mask the job ID part in a {@link TaskAttemptID}.
 *
 * @param attemptId raw {@link TaskAttemptID} read from trace
 * @return masked {@link TaskAttemptID} with empty {@link JobID}.
 */
private TaskAttemptID maskAttemptID(TaskAttemptID attemptId) {
    // Rebuild the attempt ID around a default (empty) JobID, keeping the
    // task type, task index, and attempt index from the original.
    JobID emptyJob = new JobID();
    TaskID original = attemptId.getTaskID();
    return new TaskAttemptID(
        emptyJob.getJtIdentifier(),
        emptyJob.getId(),
        attemptId.getTaskType(),
        original.getId(),
        attemptId.getId());
}
public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { // Make one reducer slower for speculative execution TaskAttemptID taid = context.getTaskAttemptID(); long sleepTime = 100; Configuration conf = context.getConfiguration(); boolean test_speculate_reduce = conf.getBoolean(MRJobConfig.REDUCE_SPECULATIVE, false); // IF TESTING REDUCE SPECULATIVE EXECUTION: // Make the "*_r_000000_0" attempt take much longer than the others. // When speculative execution is enabled, this should cause the attempt // to be killed and restarted. At that point, the attempt ID will be // "*_r_000000_1", so sleepTime will still remain 100ms. if ( (taid.getTaskType() == TaskType.REDUCE) && test_speculate_reduce && (taid.getTaskID().getId() == 0) && (taid.getId() == 0)) { sleepTime = 10000; } try{ Thread.sleep(sleepTime); } catch(InterruptedException ie) { // Ignore } context.write(key,new IntWritable(0)); } }
if (MapReduceMetrics.TaskType.hasType(taskAttemptId.getTaskType())) { taskType = MapReduceMetrics.TaskType.from(taskAttemptId.getTaskType());
if (MapReduceMetrics.TaskType.hasType(taskAttemptId.getTaskType())) { taskType = MapReduceMetrics.TaskType.from(taskAttemptId.getTaskType());
final Path stdoutPath = new Path(dirEntry.getValue(), TaskLog.LogName.STDOUT.toString()); if (useDefault || tid.getTaskType() == TaskType.MAP) { if (tid.getTaskID().getId() == PROFILED_TASK_ID) {
if (superExtraExplanatoryHappyPartitionerMode && TaskType.REDUCE == ctx.getTaskAttemptID().getTaskType()) { int reducerId = ctx.getTaskAttemptID().getTaskID().getId();