/**
 * @param taskCtx Task context.
 * @return Input.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskInput input(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    switch (taskCtx.taskInfo().type()) {
        case REDUCE:
            int reducer = taskCtx.taskInfo().taskNumber();

            HadoopMultimap m = locMaps.get(reducer);

            if (m != null)
                return m.input(taskCtx);

            return new HadoopTaskInput() { // Empty input.
                @Override public boolean next() {
                    return false;
                }

                @Override public Object key() {
                    throw new IllegalStateException();
                }

                @Override public Iterator<?> values() {
                    throw new IllegalStateException();
                }

                @Override public void close() {
                    // No-op.
                }
            };

        default:
            throw new IllegalStateException("Illegal type: " + taskCtx.taskInfo().type());
    }
}
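// A hedged, self-contained sketch of how a caller might drain an input that follows
// the next()/key()/values()/close() contract shown above. KeyedInput and the demo data
// are hypothetical stand-ins for HadoopTaskInput; only the iteration pattern (advance
// to a key, then iterate that key's values) is taken from the snippet itself.
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class KeyedInputDemo {
    /** Hypothetical mirror of the HadoopTaskInput contract. */
    interface KeyedInput extends AutoCloseable {
        boolean next();
        Object key();
        Iterator<?> values();
        @Override void close();
    }

    /** Adapts a sorted multimap to the contract: one next() step per distinct key. */
    static KeyedInput input(TreeMap<String, List<Integer>> data) {
        Iterator<Map.Entry<String, List<Integer>>> it = data.entrySet().iterator();

        return new KeyedInput() {
            private Map.Entry<String, List<Integer>> cur;

            @Override public boolean next() {
                cur = it.hasNext() ? it.next() : null;

                return cur != null;
            }

            @Override public Object key() {
                if (cur == null)
                    throw new IllegalStateException();

                return cur.getKey();
            }

            @Override public Iterator<?> values() {
                if (cur == null)
                    throw new IllegalStateException();

                return cur.getValue().iterator();
            }

            @Override public void close() {
                // No-op: nothing to release in this in-memory stand-in.
            }
        };
    }

    public static void main(String[] args) {
        TreeMap<String, List<Integer>> data = new TreeMap<>();

        data.put("a", List.of(1, 2));
        data.put("b", List.of(3));

        try (KeyedInput in = input(data)) {
            while (in.next()) {                 // Advance to the next key.
                Iterator<?> vals = in.values(); // Values for the current key.

                while (vals.hasNext())
                    System.out.println(in.key() + " -> " + vals.next());
            }
        }
    }
}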
/**
 * @param taskCtx Task context.
 * @return Output.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskOutput output(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    switch (taskCtx.taskInfo().type()) {
        case MAP:
            assert !job.info().hasCombiner() : "Map task output is created directly only when no combiner is defined.";

            // Fall through: without a combiner the map writes to the partitioned output as well.
        case COMBINE:
            return new PartitionedOutput(taskCtx);

        default:
            throw new IllegalStateException("Illegal type: " + taskCtx.taskInfo().type());
    }
}
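// A minimal, self-contained illustration of the intentional switch fall-through used
// above: the MAP case has no break or return of its own, so it shares the COMBINE
// branch. All names here (TaskType, outputKind) are hypothetical.
public class FallThroughDemo {
    enum TaskType { MAP, COMBINE, REDUCE }

    static String outputKind(TaskType type) {
        switch (type) {
            case MAP:
                // Fall through.
            case COMBINE:
                return "partitioned";

            default:
                throw new IllegalStateException("Illegal type: " + type);
        }
    }

    public static void main(String[] args) {
        System.out.println(outputKind(TaskType.MAP));     // partitioned
        System.out.println(outputKind(TaskType.COMBINE)); // partitioned
    }
}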
/**
 * @param taskCtx Task context.
 * @return Output.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskOutput output(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    return job(taskCtx.taskInfo().jobId()).output(taskCtx);
}
/**
 * @param taskCtx Task context.
 * @return Input.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskInput input(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    return job(taskCtx.taskInfo().jobId()).input(taskCtx);
}
/** {@inheritDoc} */
@Override public InputSplit getInputSplit() {
    if (inputSplit == null) {
        HadoopInputSplit split = ctx.taskInfo().inputSplit();

        if (split == null)
            return null;

        if (split instanceof HadoopFileBlock) {
            HadoopFileBlock fileBlock = (HadoopFileBlock)split;

            inputSplit = new FileSplit(new Path(fileBlock.file()), fileBlock.start(), fileBlock.length(), null);
        }
        else {
            try {
                inputSplit = (InputSplit)((HadoopV2TaskContext)ctx).getNativeSplit(split);
            }
            catch (IgniteCheckedException e) {
                // getInputSplit() cannot declare a checked exception, so wrap it.
                throw new IllegalStateException(e);
            }
        }
    }

    return inputSplit;
}
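// A self-contained sketch of the wrapping pattern used in the catch block above: when
// an overridden method cannot declare a checked exception, the checked failure is
// wrapped in an unchecked one. CheckedFailure, SplitSource, and loadNativeSplit() are
// hypothetical stand-ins for IgniteCheckedException and the native-split lookup.
public class WrapCheckedDemo {
    /** Hypothetical checked exception standing in for IgniteCheckedException. */
    static class CheckedFailure extends Exception {
        CheckedFailure(String msg) { super(msg); }
    }

    /** Like getInputSplit(), this interface method declares no checked exceptions. */
    interface SplitSource {
        String getSplit();
    }

    static String loadNativeSplit() throws CheckedFailure {
        throw new CheckedFailure("native split deserialization failed");
    }

    public static void main(String[] args) {
        SplitSource src = () -> {
            try {
                return loadNativeSplit();
            }
            catch (CheckedFailure e) {
                // The override cannot declare the checked exception, so wrap it.
                throw new IllegalStateException(e);
            }
        };

        try {
            src.getSplit();
        }
        catch (IllegalStateException e) {
            System.out.println("cause: " + e.getCause().getMessage());
        }
    }
}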
/**
 * @param ctx Task context.
 * @return Task input.
 * @throws IgniteCheckedException If failed.
 */
private HadoopTaskInput createInputInternal(HadoopTaskContext ctx) throws IgniteCheckedException {
    switch (ctx.taskInfo().type()) {
        case SETUP:
        case MAP:
        case COMMIT:
        case ABORT:
            return null;

        case COMBINE:
            assert combinerInput != null;

            return combinerInput.input(ctx);

        default:
            return createInput(ctx);
    }
}
/**
 * @param perfCntr Performance counter.
 * @throws IgniteCheckedException If failed.
 */
private void runTask(HadoopPerformanceCounter perfCntr) throws IgniteCheckedException {
    if (cancelled)
        throw new HadoopTaskCancelledException("Task cancelled.");

    try (HadoopTaskOutput out = createOutputInternal(ctx);
         HadoopTaskInput in = createInputInternal(ctx)) {
        ctx.input(in);
        ctx.output(out);

        perfCntr.onTaskStart(ctx.taskInfo(), U.currentTimeMillis());

        ctx.run();
    }
}
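// runTask() leans on two try-with-resources guarantees worth making explicit: resources
// close in reverse declaration order (the input closes before the output), and a null
// resource is simply skipped, which matters because createInputInternal() and
// createOutputInternal() return null for several task types. A minimal sketch; the
// resource names are hypothetical.
public class CloseOrderDemo {
    static AutoCloseable resource(String name) {
        System.out.println("open " + name);

        return () -> System.out.println("close " + name);
    }

    public static void main(String[] args) throws Exception {
        try (AutoCloseable out = resource("out");
             AutoCloseable in = resource("in");
             AutoCloseable skipped = null) { // Null resources are tolerated: close() is not called.
            System.out.println("run task");
        }
        // Prints: open out, open in, run task, close in, close out.
    }
}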
/**
 * @param ctx Task context.
 * @return Task output.
 * @throws IgniteCheckedException If failed.
 */
private HadoopTaskOutput createOutputInternal(HadoopTaskContext ctx) throws IgniteCheckedException {
    switch (ctx.taskInfo().type()) {
        case SETUP:
        case REDUCE:
        case COMMIT:
        case ABORT:
            return null;

        case MAP:
            if (job.info().hasCombiner()) {
                assert combinerInput == null;

                combinerInput = get(job.info(), SHUFFLE_COMBINER_NO_SORTING, false) ?
                    new HadoopHashMultimap(job.info(), mem, get(job.info(), COMBINER_HASHMAP_SIZE, 8 * 1024)) :
                    new HadoopSkipList(job.info(), mem); // TODO replace with red-black tree

                return combinerInput.startAdding(ctx);
            }

            // Fall through: a map task without a combiner uses the regular output.
        default:
            return createOutput(ctx);
    }
}
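// The MAP branch above picks the combiner's backing structure from a job property:
// an unsorted hash multimap when SHUFFLE_COMBINER_NO_SORTING is set (cheaper inserts,
// no key ordering), otherwise a sorted skip list. A hedged sketch of the same
// config-driven selection using standard collections; the property name and
// createStore() are hypothetical, not Ignite API.
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class CombinerStoreDemo {
    /** Hash-based store when sorting is not required, sorted store otherwise. */
    static Map<String, Integer> createStore(Map<String, String> jobProps) {
        boolean noSorting = Boolean.parseBoolean(
            jobProps.getOrDefault("shuffle.combiner.no.sorting", "false"));

        return noSorting ? new HashMap<>() : new TreeMap<>();
    }

    public static void main(String[] args) {
        Map<String, Integer> store = createStore(Map.of("shuffle.combiner.no.sorting", "true"));

        store.merge("word", 1, Integer::sum); // Combine values for the same key.
        store.merge("word", 1, Integer::sum);

        System.out.println(store); // {word=2}
    }
}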