/**
 * Creates the setup task for a job, unless the job is already active.
 *
 * @param jobId Job ID.
 * @return Setup task wrapped in a singleton collection, or {@code null} if the job is already active.
 */
@Nullable private Collection<HadoopTaskInfo> setupTask(HadoopJobId jobId) {
    // A job that is already tracked needs no setup task.
    if (activeJobs.containsKey(jobId))
        return null;

    initState(jobId);

    return Collections.singleton(new HadoopTaskInfo(SETUP, jobId, 0, 0, null));
}
/**
 * Resolves the per-task subdirectory inside the job's local working directory.
 *
 * @param workDir Work directory.
 * @param locNodeId Local node ID.
 * @param info Task info.
 * @return Working directory for the task, named {@code <type>_<taskNumber>_<attempt>}.
 * @throws IgniteCheckedException If failed.
 */
public static File taskLocalDir(String workDir, UUID locNodeId, HadoopTaskInfo info) throws IgniteCheckedException {
    String taskDirName = info.type() + "_" + info.taskNumber() + "_" + info.attempt();

    return new File(jobLocalDir(workDir, locNodeId, info.jobId()), taskDirName);
}
// Completion callback: captures any failure from the finished future and folds it
// into job metadata by removing this task's mapper entry (with the error, if any).
// NOTE(review): the trailing "};" closes an anonymous class declared outside this chunk.
@Override public void applyx(IgniteInternalFuture<?> f) { Throwable err = null; if (f != null) { try { f.get(); } catch (IgniteCheckedException e) { err = e; } } transform(jobId, new RemoveMappersProcessor(prev, taskInfo.inputSplit(), err)); } };
/**
 * Gets the name of the class loader for a task.
 *
 * @param info The task info.
 * @param prefix If {@code true}, return only the job-wide prefix (without task type and number).
 * @return The class loader name.
 */
public static String nameForTask(HadoopTaskInfo info, boolean prefix) {
    StringBuilder sb = new StringBuilder("hadoop-task-").append(info.jobId()).append('-');

    if (!prefix)
        sb.append(info.type()).append('-').append(info.taskNumber());

    return sb.toString();
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Fields are read back in the same order they are written by writeExternal().
    HadoopTaskInfo info = new HadoopTaskInfo();

    info.readExternal(in);

    taskInfo = info;

    HadoopTaskStatus stat = new HadoopTaskStatus();

    stat.readExternal(in);

    status = stat;
}
}
// For a map task whose job defines a combiner, build the matching combine task
// (same job / task number / attempt, no input split).
// NOTE(review): fragment — the opening brace is closed outside this chunk.
if (info.type() == MAP && job.info().hasCombiner()) { HadoopTaskInfo combineTaskInfo = new HadoopTaskInfo(COMBINE, info.jobId(), info.taskNumber(), info.attempt(), null);
// Propagate the mapper index so the combiner observes the same mapper slot.
if (ctx.taskInfo().hasMapperIndex()) combineTaskInfo.mapperIndex(ctx.taskInfo().mapperIndex());
/**
 * Generates a name that carries event information.
 *
 * @param info Task info.
 * @param evtType The type of the event.
 * @return String containing the necessary event information.
 */
private String eventName(HadoopTaskInfo info, String evtType) {
    String taskType = info.type().toString();

    return eventName(taskType, info.taskNumber(), evtType);
}
/**
 * Creates the Hadoop attempt ID for the current task.
 *
 * @return Attempt ID.
 */
public TaskAttemptID attemptId() {
    // Build the task ID first, then qualify it with the attempt number.
    TaskID taskId = new TaskID(jobCtx.getJobID(), taskType(taskInfo().type()), taskInfo().taskNumber());

    return new TaskAttemptID(taskId, taskInfo().attempt());
}
/**
 * Creates an output for the given task context.
 *
 * @param taskCtx Task context.
 * @return Output.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskOutput output(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    switch (taskCtx.taskInfo().type()) {
        case MAP:
            // A mapper may write directly only when no combiner consumes its output.
            assert !job.info().hasCombiner() : "The output creation is allowed if combiner has not been defined.";

            return new PartitionedOutput(taskCtx);

        case COMBINE:
            return new PartitionedOutput(taskCtx);

        default:
            throw new IllegalStateException("Illegal type: " + taskCtx.taskInfo().type());
    }
}
log.debug("Received task finished callback [info=" + info + ", status=" + status + ']');

JobLocalState state = activeJobs.get(info.jobId());

// Restore the assertion that was truncated to a bare string expression (a bare string
// literal is not a legal Java statement): a FAILED task must carry its failure cause.
assert status.state() != FAILED || status.failCause() != null :
    "Invalid task status [info=" + info + ", status=" + status + ']';

assert state != null || (ctx.jobUpdateLeader() && (info.type() == COMMIT || info.type() == ABORT)) :
    "Missing local state for finished task [info=" + info + ", status=" + status + ']';

// NOTE(review): 'incrCntrs' is assumed to be declared earlier in the enclosing method
// (outside this chunk) — confirm; otherwise a declaration is needed here.
incrCntrs = new IncrementCountersProcessor(null, status.counters());

switch (info.type()) {
    case SETUP: {
        state.onSetupFinished(info, status, incrCntrs);

        IgniteInternalCache<HadoopJobId, HadoopJobMetadata> cache = finishedJobMetaCache();

        // Flip the job to the completed phase; failures are only logged.
        cache.invokeAsync(info.jobId(), new UpdatePhaseProcessor(incrCntrs, PHASE_COMPLETE)).
            listen(failsLog);
/** * @param taskInfo Task info. * @param status Task status. * @param prev Previous closure. */ private void onReduceFinished(HadoopTaskInfo taskInfo, HadoopTaskStatus status, StackedProcessor prev) { HadoopJobId jobId = taskInfo.jobId(); if (status.state() == FAILED || status.state() == CRASHED) // Fail the whole job. transform(jobId, new RemoveReducerProcessor(prev, taskInfo.taskNumber(), status.failCause())); else transform(jobId, new RemoveReducerProcessor(prev, taskInfo.taskNumber())); }
/**
 * Delegates output creation to the job that owns the given task.
 *
 * @param taskCtx Task context.
 * @return Output.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskOutput output(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    return job(taskCtx.taskInfo().jobId()).output(taskCtx);
}
// NOTE(review): fragment — the first line closes a message/log concatenation started outside this chunk.
", split=" + split + ']');
// Create a map task for this split (attempt 0) and tag it with the next mapper index.
HadoopTaskInfo taskInfo = new HadoopTaskInfo(MAP, jobId, meta.taskNumber(split), 0, split);
taskInfo.mapperIndex(mapperIdx++);
// Publish the mapper index for map tasks that carry one; otherwise reset any stale value.
if (reduce || !taskCtx.taskInfo().hasMapperIndex())
    HadoopMapperUtils.clearMapperIndex();
else
    HadoopMapperUtils.mapperIndex(taskCtx.taskInfo().mapperIndex());
// Remove this task's mapper entry from job metadata, recording the failure cause.
// 'jobId' stays declared — it may be referenced later in the enclosing (unseen) scope.
final HadoopJobId jobId = taskInfo.jobId();
transform(jobId, new RemoveMappersProcessor(prev, taskInfo.inputSplit(), status.failCause()));
// Expose the mapper index to user code for tasks that carry one; clear any stale value otherwise.
if (taskCtx.taskInfo().hasMapperIndex())
    HadoopMapperUtils.mapperIndex(taskCtx.taskInfo().mapperIndex());
else
    HadoopMapperUtils.clearMapperIndex();
// Input split driving this task; used by the enclosing (unseen) logic below.
HadoopInputSplit split = info().inputSplit();
/**
 * Gets the file name for this task's result — "part-" followed by the task
 * number zero-padded to at least five digits.
 *
 * @return File name.
 */
public String fileName() {
    NumberFormat fmt = NumberFormat.getInstance();

    // No thousands separators; pad to a fixed minimum width.
    fmt.setGroupingUsed(false);
    fmt.setMinimumIntegerDigits(5);

    return "part-" + fmt.format(info().taskNumber());
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
    // Write order must match the read order in readExternal().
    taskInfo.writeExternal(out);
    status.writeExternal(out);
}
/**
 * Gets the name of the task class loader.
 *
 * @param info The task info.
 * @param prefix If {@code true}, return only the prefix (without task type and number).
 * @return The class loader name.
 */
public static String nameForTask(HadoopTaskInfo info, boolean prefix) {
    String base = "hadoop-task-" + info.jobId() + "-";

    return prefix ? base : base + info.type() + "-" + info.taskNumber();
}
/** {@inheritDoc} */
@Override public void cleanupTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
    // Detach the task context and recycle its class for later reuse.
    // NOTE(review): remove() returning null (unknown task) would NPE here — confirm callers
    // only clean up tasks that were previously registered.
    HadoopTaskContext taskCtx = ctxs.remove(new T2<>(info.type(), info.taskNumber())).get();

    taskCtxClsPool.add(taskCtx.getClass());

    // Wipe the task's scratch directory, if it was ever created.
    File taskDir = taskLocalDir(igniteWorkDirectory(), locNodeId, info);

    if (taskDir.exists())
        U.delete(taskDir);
}