/**
 * Builds the temporary output path for a task attempt's index zip.
 *
 * @param basePath          base directory the temporary file lives under
 * @param fs                filesystem used to qualify a scheme-less base path
 * @param segmentTemplate   segment whose index path name is embedded in the file name
 * @param taskAttemptID     attempt whose numeric id suffixes the file name
 * @param dataSegmentPusher pusher that knows how to name the index path
 * @return path of the form {@code <base>/./<indexPathName>.<attemptId>}
 */
public static Path makeTmpPath( final Path basePath, final FileSystem fs, final DataSegment segmentTemplate, final TaskAttemptID taskAttemptID, DataSegmentPusher dataSegmentPusher ) {
  // Qualify the base path with the filesystem's scheme when it has none.
  final Path qualifiedBase = prependFSIfNullScheme(fs, basePath);
  // NOTE(review): the leading "./" appears intentional (relative child name) — confirm
  // against the consumer of this path before changing the format.
  final String childName = StringUtils.format(
      "./%s.%d",
      dataSegmentPusher.makeIndexPathName(segmentTemplate, JobHelper.INDEX_ZIP),
      taskAttemptID.getId());
  return new Path(qualifiedBase, childName);
}
// NOTE(review): uses the attempt number (0 for the first attempt, 1 for the first retry, ...)
// as the injected-failure count — presumably so each retry injects a different number of
// failures; confirm against the owning test harness.
testing.injectedFailureCount = context.getTaskAttemptID().getId();
// NOTE(review): this snippet is truncated — the LOG.debug string concatenation continues
// beyond this view, so the statement is incomplete here.
// Reads the attempt number (the index of this attempt within its task) from the attempt id.
attemptNum = taskAttemptId.getId(); if (LOG.isDebugEnabled()) { LOG.debug("Setting attempt number to " + attemptNum + " from task attempt ID in conf: " +
/**
 * Converts a MapReduce {@code TaskAttemptID} into its Tez equivalent.
 *
 * @param taskAttemptId the MapReduce attempt id to convert
 * @return the corresponding {@code TezTaskAttemptID}
 */
public static TezTaskAttemptID fromMRTaskAttemptId( org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptId) {
  // The attempt number carries over verbatim; only the task-id portion is translated.
  final int attemptNumber = taskAttemptId.getId();
  return TezTaskAttemptID.getInstance(fromMRTaskId(taskAttemptId.getTaskID()), attemptNumber);
}
/**
 * Downgrade a new-API TaskAttemptID to the old-API type.
 *
 * @param old the new-API id
 * @return {@code old} itself when it already is an old-API id; otherwise a
 *         freshly built old-API {@link TaskAttemptID} carrying the same
 *         (downgraded) task id and the same attempt number
 */
public static TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID old) {
  // Fast path: already the old type, no conversion needed.
  return (old instanceof TaskAttemptID)
      ? (TaskAttemptID) old
      : new TaskAttemptID(TaskID.downgrade(old.getTaskID()), old.getId());
}
/**
 * Downgrade a new-API TaskAttemptID to the old-API type.
 *
 * @param old the new-API id
 * @return {@code old} itself when it already is an old-API id; otherwise a
 *         freshly built old-API {@link TaskAttemptID} carrying the same
 *         (downgraded) task id and the same attempt number
 */
public static TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID old) {
  // Fast path: already the old type, no conversion needed.
  return (old instanceof TaskAttemptID)
      ? (TaskAttemptID) old
      : new TaskAttemptID(TaskID.downgrade(old.getTaskID()), old.getId());
}
/**
 * Renders this context as {@code TaskContext{jobId=..., taskId=..., attemptId=...}},
 * where the attempt id is the numeric attempt number from the wrapped context.
 */
@Override public String toString() {
  final StringBuilder sb = new StringBuilder("TaskContext{");
  sb.append("jobId=").append(getJobId());
  sb.append(", taskId=").append(getTaskId());
  sb.append(", attemptId=").append(taskAttemptContext.getTaskAttemptID().getId());
  return sb.append('}').toString();
}
}
/**
 * Downgrade a new-API TaskAttemptID to the old-API type.
 *
 * @param old the new-API id
 * @return {@code old} itself when it already is an old-API id; otherwise a
 *         freshly built old-API {@link TaskAttemptID} carrying the same
 *         (downgraded) task id and the same attempt number
 */
public static TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID old) {
  // Fast path: already the old type, no conversion needed.
  return (old instanceof TaskAttemptID)
      ? (TaskAttemptID) old
      : new TaskAttemptID(TaskID.downgrade(old.getTaskID()), old.getId());
}
/**
 * Downgrade a new-API TaskAttemptID to the old-API type.
 *
 * @param old the new-API id
 * @return {@code old} itself when it already is an old-API id; otherwise a
 *         freshly built old-API {@link TaskAttemptID} carrying the same
 *         (downgraded) task id and the same attempt number
 */
public static TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID old) {
  // Fast path: already the old type, no conversion needed.
  return (old instanceof TaskAttemptID)
      ? (TaskAttemptID) old
      : new TaskAttemptID(TaskID.downgrade(old.getTaskID()), old.getId());
}
/**
 * Downgrade a new-API TaskAttemptID to the old-API type.
 *
 * @param old the new-API id
 * @return {@code old} itself when it already is an old-API id; otherwise a
 *         freshly built old-API {@link TaskAttemptID} carrying the same
 *         (downgraded) task id and the same attempt number
 */
public static TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID old) {
  // Fast path: already the old type, no conversion needed.
  return (old instanceof TaskAttemptID)
      ? (TaskAttemptID) old
      : new TaskAttemptID(TaskID.downgrade(old.getTaskID()), old.getId());
}
/**
 * Downgrade a new-API TaskAttemptID to the old-API type.
 *
 * @param old the new-API id
 * @return {@code old} itself when it already is an old-API id; otherwise a
 *         freshly built old-API {@link TaskAttemptID} carrying the same
 *         (downgraded) task id and the same attempt number
 */
public static TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID old) {
  // Fast path: already the old type, no conversion needed.
  return (old instanceof TaskAttemptID)
      ? (TaskAttemptID) old
      : new TaskAttemptID(TaskID.downgrade(old.getTaskID()), old.getId());
}
private Random createRandom(Context context) { long taskId = 0; if (context.getTaskAttemptID() != null) { // MRUnit returns null LOGGER.debug("context.getTaskAttemptID().getId(): {}", context.getTaskAttemptID().getId()); LOGGER.debug("context.getTaskAttemptID().getTaskID().getId(): {}", context.getTaskAttemptID().getTaskID().getId()); taskId = context.getTaskAttemptID().getTaskID().getId(); // taskId = 0, 1, ..., N } // create a good random seed, yet ensure deterministic PRNG sequence for easy reproducability return new Random(421439783L * (taskId + 1)); }
private Random createRandom(Context context) { long taskId = 0; if (context.getTaskAttemptID() != null) { // MRUnit returns null LOGGER.debug("context.getTaskAttemptID().getId(): {}", context.getTaskAttemptID().getId()); LOGGER.debug("context.getTaskAttemptID().getTaskID().getId(): {}", context.getTaskAttemptID().getTaskID().getId()); taskId = context.getTaskAttemptID().getTaskID().getId(); // taskId = 0, 1, ..., N } // create a good random seed, yet ensure deterministic PRNG sequence for easy reproducability return new Random(421439783L * (taskId + 1)); }
/**
 * Converts a MapReduce {@code TaskAttemptID} into its YARN record form, copying the
 * task id and the attempt number.
 *
 * @param id the MapReduce attempt id to convert
 * @return a new {@code TaskAttemptId} record mirroring {@code id}
 */
public static TaskAttemptId toYarn( org.apache.hadoop.mapreduce.TaskAttemptID id) {
  final TaskAttemptId converted = recordFactory.newRecordInstance(TaskAttemptId.class);
  converted.setTaskId(toYarn(id.getTaskID()));
  converted.setId(id.getId());
  return converted;
}
/**
 * Converts a MapReduce {@code TaskAttemptID} into its YARN record form, copying the
 * task id and the attempt number.
 *
 * @param id the MapReduce attempt id to convert
 * @return a new {@code TaskAttemptId} record mirroring {@code id}
 */
public static TaskAttemptId toYarn( org.apache.hadoop.mapreduce.TaskAttemptID id) {
  final TaskAttemptId converted = recordFactory.newRecordInstance(TaskAttemptId.class);
  converted.setTaskId(toYarn(id.getTaskID()));
  converted.setId(id.getId());
  return converted;
}
/**
 * Converts a MapReduce {@code TaskAttemptID} into its YARN record form, copying the
 * task id and the attempt number.
 *
 * @param id the MapReduce attempt id to convert
 * @return a new {@code TaskAttemptId} record mirroring {@code id}
 */
public static TaskAttemptId toYarn( org.apache.hadoop.mapreduce.TaskAttemptID id) {
  final TaskAttemptId converted = recordFactory.newRecordInstance(TaskAttemptId.class);
  converted.setTaskId(toYarn(id.getTaskID()));
  converted.setId(id.getId());
  return converted;
}
/**
 * Converts a MapReduce {@code TaskAttemptID} into its YARN record form, copying the
 * task id and the attempt number.
 *
 * @param id the MapReduce attempt id to convert
 * @return a new {@code TaskAttemptId} record mirroring {@code id}
 */
public static TaskAttemptId toYarn( org.apache.hadoop.mapreduce.TaskAttemptID id) {
  final TaskAttemptId converted = recordFactory.newRecordInstance(TaskAttemptId.class);
  converted.setTaskId(toYarn(id.getTaskID()));
  converted.setId(id.getId());
  return converted;
}
private static TaskAttemptContext getTaskContext(TaskAttemptContext baseContext, Job job) { org.apache.hadoop.mapreduce.TaskAttemptID baseTaskId = baseContext.getTaskAttemptID(); // Create a task ID context with our specialized job ID. org.apache.hadoop.mapreduce.TaskAttemptID taskId; taskId = new org.apache.hadoop.mapreduce.TaskAttemptID(job.getJobID().getJtIdentifier(), job.getJobID().getId(), baseTaskId.isMap(), baseTaskId.getTaskID().getId(), baseTaskId.getId()); return new TaskAttemptContextWrapper(baseContext, job.getConfiguration(), taskId); }
/**
 * Mask the job ID part in a {@link TaskAttemptID}.
 *
 * @param attemptId raw {@link TaskAttemptID} read from trace
 * @return masked {@link TaskAttemptID} with an empty {@link JobID}
 */
private TaskAttemptID maskAttemptID(TaskAttemptID attemptId) {
  // A default-constructed JobID wipes the job identity; task type, task number and
  // attempt number are copied straight from the input id.
  final JobID blankJob = new JobID();
  return new TaskAttemptID(
      blankJob.getJtIdentifier(),
      blankJob.getId(),
      attemptId.getTaskType(),
      attemptId.getTaskID().getId(),
      attemptId.getId());
}
/**
 * Acquiring the attempt-id lock for the same task repeatedly must leave a lock file
 * for every acquired attempt id.
 */
@Test public void testTaskAttemptIdAcquire() {
  final int acquisitions = 100;
  final int taskId = 25;
  for (int attempt = 0; attempt < acquisitions; attempt++) {
    final TaskAttemptID taskAttemptID = tested.acquireTaskAttemptIdLock(configuration, taskId);
    assertTrue(isFileExists(getTaskAttemptIdPath(taskId, taskAttemptID.getId())));
  }
}