/**
 * Gets file name for that task result.
 * <p>
 * The task number is zero-padded to five digits (e.g. {@code part-00007}),
 * matching the conventional Hadoop output part-file naming.
 *
 * @return File name.
 */
public String fileName() {
    // %05d yields the same zero-padded, ungrouped decimal as the previous
    // per-call NumberFormat (minimumIntegerDigits=5, grouping off), without
    // allocating and configuring a formatter on every invocation.
    return String.format("part-%05d", info().taskNumber());
}
/** {@inheritDoc} */
@Override public void cleanupTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
    // Detach the task context keyed by (task type, task number) and resolve it.
    // NOTE(review): if no context was ever registered for this key, remove()
    // returns null and the chained get() throws an NPE — confirm the invariant
    // that cleanup is only called for tasks that went through setup.
    HadoopTaskContext ctx = ctxs.remove(new T2<>(info.type(), info.taskNumber())).get();

    // Return the context class to the pool so it can be reused by later tasks.
    taskCtxClsPool.add(ctx.getClass());

    // Remove the task's local working directory, if it was created.
    File locDir = taskLocalDir(igniteWorkDirectory(), locNodeId, info);

    if (locDir.exists())
        U.delete(locDir);
}
/**
 * Generate name that consists of some event information.
 *
 * @param info Task info.
 * @param evtType The type of the event.
 * @return String contains necessary event information.
 */
private String eventName(HadoopTaskInfo info, String evtType) {
    // Delegate to the string-based overload with this task's type and number.
    String taskType = info.type().toString();
    int taskNum = info.taskNumber();

    return eventName(taskType, taskNum, evtType);
}
/**
 * Gets name for the task class loader.
 *
 * @param info The task info.
 * @param prefix If {@code true}, return only the prefix (without task type and number).
 * @return The class loader name.
 */
public static String nameForTask(HadoopTaskInfo info, boolean prefix) {
    String base = "hadoop-task-" + info.jobId() + "-";

    // The prefix form matches any task of the job; the full form identifies
    // one concrete task by its type and number.
    return prefix ? base : base + info.type() + "-" + info.taskNumber();
}
/**
 * Returns subdirectory of job working directory for task execution.
 *
 * @param workDir Work directory.
 * @param locNodeId Local node ID.
 * @param info Task info.
 * @return Working directory for task.
 * @throws IgniteCheckedException If Failed.
 */
public static File taskLocalDir(String workDir, UUID locNodeId, HadoopTaskInfo info) throws IgniteCheckedException {
    // Task subdirectory name inside the job's local dir: <type>_<number>_<attempt>.
    String taskSubDir = info.type() + "_" + info.taskNumber() + "_" + info.attempt();

    return new File(jobLocalDir(workDir, locNodeId, info.jobId()), taskSubDir);
}
/** * @param taskInfo Task info. * @param status Task status. * @param prev Previous closure. */ private void onReduceFinished(HadoopTaskInfo taskInfo, HadoopTaskStatus status, StackedProcessor prev) { HadoopJobId jobId = taskInfo.jobId(); if (status.state() == FAILED || status.state() == CRASHED) // Fail the whole job. transform(jobId, new RemoveReducerProcessor(prev, taskInfo.taskNumber(), status.failCause())); else transform(jobId, new RemoveReducerProcessor(prev, taskInfo.taskNumber())); }
// Composite local task key: (task type, task number).
T2<HadoopTaskType, Integer> locTaskId = new T2<>(info.type(), info.taskNumber());
/** * @param taskCtx Task context. * @return Input. * @throws IgniteCheckedException If failed. */ public HadoopTaskInput input(HadoopTaskContext taskCtx) throws IgniteCheckedException { switch (taskCtx.taskInfo().type()) { case REDUCE: int reducer = taskCtx.taskInfo().taskNumber(); HadoopMultimap m = locMaps.get(reducer); if (m != null) return m.input(taskCtx); return new HadoopTaskInput() { // Empty input. @Override public boolean next() { return false; } @Override public Object key() { throw new IllegalStateException(); } @Override public Iterator<?> values() { throw new IllegalStateException(); } @Override public void close() { // No-op. } }; default: throw new IllegalStateException("Illegal type: " + taskCtx.taskInfo().type()); } }
/**
 * @param task Task.
 */
private void startThread(final Callable<?> task) {
    final String workerName;

    if (task instanceof HadoopRunnableTask) {
        HadoopTaskInfo info = ((HadoopRunnableTask)task).taskInfo();

        workerName = "Hadoop-task-" + info.jobId() + "-" + info.type() + "-" + info.taskNumber()
            + "-" + info.attempt();
    }
    else
        workerName = task.toString();

    GridWorker worker = new GridWorker(igniteInstanceName, workerName, log, lsnr) {
        @Override protected void body() {
            try {
                task.call();
            }
            catch (Exception e) {
                log.error("Failed to execute task: " + task, e);
            }
        }
    };

    workers.add(worker);

    // Shutdown already requested: cancel the worker before its thread starts.
    if (shutdown)
        worker.cancel();

    new IgniteThread(worker).start();
}
/**
 * Creates Hadoop attempt ID.
 *
 * @return Attempt ID.
 */
public TaskAttemptID attemptId() {
    HadoopTaskInfo info = taskInfo();

    TaskID taskId = new TaskID(jobCtx.getJobID(), taskType(info.type()), info.taskNumber());

    return new TaskAttemptID(taskId, info.attempt());
}
// Combine task reuses the source task's job ID, task number and attempt.
// NOTE(review): the last ctor argument is null here — presumably the input
// split, which a combiner does not need; confirm against HadoopTaskInfo.
HadoopTaskInfo combineTaskInfo = new HadoopTaskInfo(COMBINE, info.jobId(), info.taskNumber(), info.attempt(), null);
/**
 * Gets name for the task class loader.
 *
 * @param info The task info.
 * @param prefix If {@code true}, return only the prefix (without task type and number).
 * @return The class loader name.
 */
public static String nameForTask(HadoopTaskInfo info, boolean prefix) {
    String base = "hadoop-task-" + info.jobId() + "-";

    // The prefix form matches any task of the job; the full form identifies
    // one concrete task by its type and number.
    return prefix ? base : base + info.type() + "-" + info.taskNumber();
}