/**
 * Creates a setup task for the given job, unless the job is already active locally.
 *
 * @param jobId Job ID.
 * @return Singleton collection containing the setup task, or {@code null} if the job
 *      is already tracked in {@code activeJobs}.
 */
@Nullable private Collection<HadoopTaskInfo> setupTask(HadoopJobId jobId) {
    // A job that already has local state needs no setup task.
    if (activeJobs.containsKey(jobId))
        return null;

    initState(jobId);

    return Collections.singleton(new HadoopTaskInfo(SETUP, jobId, 0, 0, null));
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Read order: task info first, then status — presumably mirrors writeExternal.
    HadoopTaskInfo info = new HadoopTaskInfo();

    info.readExternal(in);

    taskInfo = info;

    HadoopTaskStatus st = new HadoopTaskStatus();

    st.readExternal(in);

    status = st;
}
}
/**
 * Creates reducer tasks based on job information.
 *
 * @param reducers Reducers (may be {@code null}).
 * @param job Job instance.
 * @return Collection of task infos, or {@code null} if there were no reducers
 *      to submit on this node.
 */
private Collection<HadoopTaskInfo> reducerTasks(int[] reducers, HadoopJobEx job) {
    if (reducers == null)
        return null;

    HadoopJobId jobId = job.id();

    JobLocalState state = activeJobs.get(jobId);

    if (state == null)
        state = initState(job.id());

    UUID locNodeId = ctx.localNodeId();

    Collection<HadoopTaskInfo> res = null;

    for (int rdc : reducers) {
        // Skip reducers this node has already registered.
        if (!state.addReducer(rdc))
            continue;

        if (log.isDebugEnabled())
            log.debug("Submitting REDUCE task for execution [locNodeId=" + locNodeId +
                ", rdc=" + rdc + ']');

        if (res == null)
            res = new ArrayList<>();

        res.add(new HadoopTaskInfo(REDUCE, jobId, rdc, 0, null));
    }

    return res;
}
", split=" + split + ']'); HadoopTaskInfo taskInfo = new HadoopTaskInfo(MAP, jobId, meta.taskNumber(split), 0, split);
/**
 * Generates input data for a reduce-like operation into mock context input and runs the operation.
 *
 * @param gridJob Job to create the reduce task from.
 * @param taskType Type of task - combine or reduce.
 * @param taskNum Number of task in job.
 * @param words Pairs of words and their counts.
 * @return Context with mock output.
 * @throws IgniteCheckedException If fails.
 */
private HadoopTestTaskContext runTaskWithInput(HadoopJobEx gridJob, HadoopTaskType taskType,
    int taskNum, String... words) throws IgniteCheckedException {
    HadoopTestTaskContext taskCtx = new HadoopTestTaskContext(
        new HadoopTaskInfo(taskType, gridJob.id(), taskNum, 0, null), gridJob);

    // 'words' comes in (word, count) pairs: emit 'count' ones for each word.
    for (int idx = 0; idx < words.length; idx += 2) {
        int cnt = Integer.parseInt(words[idx + 1]);

        List<IntWritable> vals = new ArrayList<>(cnt);

        for (int j = 0; j < cnt; j++)
            vals.add(new IntWritable(1));

        taskCtx.mockInput().put(new Text(words[idx]), vals);
    }

    taskCtx.run();

    return taskCtx;
}
HadoopTaskContext taskCtx = job.getTaskContext(new HadoopTaskInfo(HadoopTaskType.MAP, null, 0, 0, null));
/** * Runs chain of map-combine task on file block. * * @param fileBlock block of input file to be processed. * @param gridJob Hadoop job implementation. * @return Context of combine task with mock output. * @throws IgniteCheckedException If fails. */ private HadoopTestTaskContext runMapCombineTask(HadoopFileBlock fileBlock, HadoopJobEx gridJob) throws IgniteCheckedException { HadoopTaskInfo taskInfo = new HadoopTaskInfo(HadoopTaskType.MAP, gridJob.id(), 0, 0, fileBlock); HadoopTestTaskContext mapCtx = new HadoopTestTaskContext(taskInfo, gridJob); mapCtx.run(); //Prepare input for combine taskInfo = new HadoopTaskInfo(HadoopTaskType.COMBINE, gridJob.id(), 0, 0, null); HadoopTestTaskContext combineCtx = new HadoopTestTaskContext(taskInfo, gridJob); combineCtx.makeTreeOfWritables(mapCtx.mockOutput()); combineCtx.run(); return combineCtx; }
HadoopTaskInfo taskInfo = new HadoopTaskInfo(HadoopTaskType.REDUCE, job.id(), rdc, 0, null);
HadoopTaskInfo info = new HadoopTaskInfo(COMMIT, jobId, 0, 0, null); HadoopTaskInfo info = new HadoopTaskInfo(ABORT, jobId, 0, 0, null);
HadoopTaskInfo taskInfo = new HadoopTaskInfo(HadoopTaskType.MAP, gridJob.id(), 0, 0, fileBlock1); ctx.taskInfo(new HadoopTaskInfo(HadoopTaskType.MAP, gridJob.id(), 0, 0, fileBlock2));
HadoopTaskInfo taskInfo = new HadoopTaskInfo(HadoopTaskType.REDUCE, gridJob.id(), 0, 0, null); reduceCtx.taskInfo(new HadoopTaskInfo(HadoopTaskType.COMMIT, gridJob.id(), 0, 0, null));
HadoopTaskInfo combineTaskInfo = new HadoopTaskInfo(COMBINE, info.jobId(), info.taskNumber(), info.attempt(), null);