/**
 * Gets the name for the task class loader.
 *
 * @param info The task info.
 * @param prefix Whether to get only the prefix (without task type and number).
 * @return The class loader name.
 */
public static String nameForTask(HadoopTaskInfo info, boolean prefix) {
    if (prefix)
        return "hadoop-task-" + info.jobId() + "-";
    else
        return "hadoop-task-" + info.jobId() + "-" + info.type() + "-" + info.taskNumber();
}
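For illustration, a minimal, self-contained sketch of how the two naming modes differ. The jobId/type/taskNum parameters below are hypothetical stand-ins for the HadoopTaskInfo accessors used above:

// Self-contained sketch of the two naming modes; the string/int parameters are
// hypothetical stand-ins for HadoopTaskInfo's jobId(), type() and taskNumber().
public class NameForTaskSketch {
    static String nameForTask(String jobId, String type, int taskNum, boolean prefix) {
        if (prefix)
            return "hadoop-task-" + jobId + "-";
        else
            return "hadoop-task-" + jobId + "-" + type + "-" + taskNum;
    }

    public static void main(String[] args) {
        // Prefix form matches every task class loader belonging to the job.
        System.out.println(nameForTask("job_1", "MAP", 3, true));  // hadoop-task-job_1-

        // Full form identifies one concrete task.
        System.out.println(nameForTask("job_1", "MAP", 3, false)); // hadoop-task-job_1-MAP-3
    }
}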
/**
 * @param taskCtx Task context.
 * @return Output.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskOutput output(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    return job(taskCtx.taskInfo().jobId()).output(taskCtx);
}
/**
 * @param taskCtx Task context.
 * @return Input.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskInput input(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    return job(taskCtx.taskInfo().jobId()).input(taskCtx);
}
/**
 * Returns the subdirectory of the job working directory used for task execution.
 *
 * @param workDir Work directory.
 * @param locNodeId Local node ID.
 * @param info Task info.
 * @return Working directory for the task.
 * @throws IgniteCheckedException If failed.
 */
public static File taskLocalDir(String workDir, UUID locNodeId, HadoopTaskInfo info) throws IgniteCheckedException {
    File jobLocDir = jobLocalDir(workDir, locNodeId, info.jobId());

    return new File(jobLocDir, info.type() + "_" + info.taskNumber() + "_" + info.attempt());
}
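The resulting layout is easiest to see with a small sketch. jobLocalDir() is not shown in this section, so the sketch only mimics the task-level suffix that taskLocalDir() appends; the base path is hypothetical:

// Illustrative sketch of the directory layout produced by taskLocalDir():
// <job-local-dir>/<TYPE>_<taskNumber>_<attempt>. The base path is made up.
import java.io.File;

public class TaskLocalDirLayout {
    public static void main(String[] args) {
        File jobLocDir = new File("/tmp/ignite-work/hadoop/job_1"); // hypothetical job dir

        // A REDUCE task #5 on its second attempt would get:
        // /tmp/ignite-work/hadoop/job_1/REDUCE_5_1
        File taskDir = new File(jobLocDir, "REDUCE" + "_" + 5 + "_" + 1);

        System.out.println(taskDir.getAbsolutePath());
    }
}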
/**
 * @param taskInfo Task info.
 * @param status Task status.
 * @param prev Previous closure.
 */
private void onSetupFinished(final HadoopTaskInfo taskInfo, HadoopTaskStatus status, StackedProcessor prev) {
    final HadoopJobId jobId = taskInfo.jobId();

    if (status.state() == FAILED || status.state() == CRASHED)
        // Setup failure is fatal: cancel the whole job.
        transform(jobId, new CancelJobProcessor(prev, status.failCause()));
    else
        // Setup succeeded: move the job to the map phase.
        transform(jobId, new UpdatePhaseProcessor(prev, PHASE_MAP));
}
/**
 * @param task Task.
 */
private void startThread(final Callable<?> task) {
    String workerName;

    if (task instanceof HadoopRunnableTask) {
        final HadoopTaskInfo i = ((HadoopRunnableTask)task).taskInfo();

        // Name the worker after the task so it is easy to identify in thread dumps.
        workerName = "Hadoop-task-" + i.jobId() + "-" + i.type() + "-" + i.taskNumber() + "-" + i.attempt();
    }
    else
        workerName = task.toString();

    GridWorker w = new GridWorker(igniteInstanceName, workerName, log, lsnr) {
        @Override protected void body() {
            try {
                task.call();
            }
            catch (Exception e) {
                log.error("Failed to execute task: " + task, e);
            }
        }
    };

    workers.add(w);

    if (shutdown)
        w.cancel();

    new IgniteThread(w).start();
}
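GridWorker and IgniteThread are Ignite internals; the same wrapping idiom can be sketched with the plain JDK. This is an analogue, not the actual implementation:

// Plain-JDK analogue of startThread(): give the thread a descriptive per-task
// name and log failures rather than letting them escape silently.
import java.util.concurrent.Callable;

public class TaskThreadSketch {
    public static void startThread(Callable<?> task, String jobId, String type, int taskNum, int attempt) {
        String workerName = "Hadoop-task-" + jobId + "-" + type + "-" + taskNum + "-" + attempt;

        Thread t = new Thread(() -> {
            try {
                task.call();
            }
            catch (Exception e) {
                // Mirror startThread(): log instead of rethrowing so one failed
                // task does not kill the worker thread without a trace.
                System.err.println("Failed to execute task: " + task + ": " + e);
            }
        }, workerName);

        t.start();
    }
}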
/**
 * @param taskInfo Task info.
 * @param status Task status.
 * @param prev Previous closure.
 */
private void onReduceFinished(HadoopTaskInfo taskInfo, HadoopTaskStatus status, StackedProcessor prev) {
    HadoopJobId jobId = taskInfo.jobId();

    if (status.state() == FAILED || status.state() == CRASHED)
        // Fail the whole job.
        transform(jobId, new RemoveReducerProcessor(prev, taskInfo.taskNumber(), status.failCause()));
    else
        transform(jobId, new RemoveReducerProcessor(prev, taskInfo.taskNumber()));
}
final HadoopJobId jobId = taskInfo.jobId();
/** {@inheritDoc} */
@Override public void prepareTaskEnvironment() throws IgniteCheckedException {
    File locDir;

    switch (taskInfo().type()) {
        case MAP:
        case REDUCE:
            job().prepareTaskEnvironment(taskInfo());

            locDir = taskLocalDir(job.igniteWorkDirectory(), locNodeId, taskInfo());

            break;

        default:
            locDir = jobLocalDir(job.igniteWorkDirectory(), locNodeId, taskInfo().jobId());
    }

    ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf().getClassLoader());

    try {
        // Touch the default file system (the returned instance is not used here).
        FileSystem.get(jobConf());

        LocalFileSystem locFs = FileSystem.getLocal(jobConf());

        locFs.setWorkingDirectory(new Path(locDir.getAbsolutePath()));
    }
    catch (Throwable e) {
        if (e instanceof Error)
            throw (Error)e;

        throw transformException(e);
    }
    finally {
        HadoopCommonUtils.restoreContextClassLoader(oldLdr);
    }
}
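The method above relies on a set-then-restore idiom for the thread's context class loader: swap in the job's loader, do the file-system work, and always restore the previous loader in a finally block. A plain-JDK sketch of that idiom (the helper name is made up):

// Sketch of the context-class-loader swap used by prepareTaskEnvironment().
// The restore happens in finally so the loader is reset even on failure.
public class ContextClassLoaderSwap {
    public static void withClassLoader(ClassLoader ldr, Runnable action) {
        Thread cur = Thread.currentThread();

        ClassLoader oldLdr = cur.getContextClassLoader();

        cur.setContextClassLoader(ldr);

        try {
            action.run();
        }
        finally {
            // Restore the previous loader, mirroring restoreContextClassLoader().
            cur.setContextClassLoader(oldLdr);
        }
    }
}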
/**
 * @param taskInfo Task info.
 * @param status Task status.
 * @param prev Previous closure.
 */
private void onCombineFinished(HadoopTaskInfo taskInfo, HadoopTaskStatus status, final StackedProcessor prev) {
    final HadoopJobId jobId = taskInfo.jobId();

    if (status.state() == FAILED || status.state() == CRASHED)
        // Fail the whole job.
        transform(jobId, new RemoveMappersProcessor(prev, currMappers, status.failCause()));
    else {
        // Flush the shuffle output before removing mappers from the job metadata.
        ctx.shuffle().flush(jobId).listen(new CIX1<IgniteInternalFuture<?>>() {
            @Override public void applyx(IgniteInternalFuture<?> f) {
                Throwable err = null;

                if (f != null) {
                    try {
                        f.get();
                    }
                    catch (IgniteCheckedException e) {
                        err = e;
                    }
                }

                transform(jobId, new RemoveMappersProcessor(prev, currMappers, err));
            }
        });
    }
}
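The success branch above is a "flush, then continue regardless of outcome" chain: the metadata update runs either way, but the failure cause is captured and passed along. A CompletableFuture analogue of the same pattern, with made-up names:

// CompletableFuture analogue of the flush-then-transform chain in
// onCombineFinished(): the follow-up always runs, and a failure cause is
// carried into it instead of being swallowed.
import java.util.concurrent.CompletableFuture;

public class FlushThenTransformSketch {
    public static void main(String[] args) {
        CompletableFuture<Void> flush = CompletableFuture.runAsync(() -> {
            // ... push remaining shuffle output to reducers ...
        });

        flush.whenComplete((res, err) -> {
            // err is null on success; onCombineFinished() hands the captured
            // Throwable to RemoveMappersProcessor the same way.
            System.out.println("Mappers removed, error = " + err);
        });
    }
}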
log.debug("Received task finished callback [info=" + info + ", status=" + status + ']');

JobLocalState state = activeJobs.get(info.jobId());

IgniteInternalCache<HadoopJobId, HadoopJobMetadata> cache = finishedJobMetaCache();

cache.invokeAsync(info.jobId(), new UpdatePhaseProcessor(incrCntrs, PHASE_COMPLETE)).listen(failsLog);
HadoopTaskInfo combineTaskInfo = new HadoopTaskInfo(COMBINE, info.jobId(), info.taskNumber(), info.attempt(), null);