/** {@inheritDoc} */
@Override public boolean hasCombiner() {
    return info().hasCombiner();
}

/** {@inheritDoc} */
@Override public boolean hasReducer() {
    return info().hasReducer();
}

/** {@inheritDoc} */
@Override public String jobName() {
    return info().jobName();
}
if (job.info().credentials() != null) {
    // Delegated credentials are present: build a UGI for the job owner.
    String user = job.info().user();

    UserGroupInformation ugi = HadoopUtils.createUGI(user, job.info().credentials());
/**
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static boolean get(HadoopJobInfo jobInfo, HadoopJobProperty pty, boolean dflt) {
    String res = jobInfo.property(pty.propertyName());

    return res == null ? dflt : Boolean.parseBoolean(res);
}
}
outputFormat = jobInfo.hasCombiner() || jobInfo.hasReducer() ? null : prepareWriter(jobCtx);
/**
 * Convert Hadoop job metadata to job status.
 *
 * @param meta Metadata.
 * @return Status.
 */
public static HadoopJobStatus status(HadoopJobMetadata meta) {
    HadoopJobInfo jobInfo = meta.jobInfo();

    return new HadoopJobStatus(
        meta.jobId(),
        jobInfo.jobName(),
        jobInfo.user(),
        meta.pendingSplits() != null ? meta.pendingSplits().size() : 0,
        meta.pendingReducers() != null ? meta.pendingReducers().size() : 0,
        meta.mapReducePlan().mappers(),
        meta.mapReducePlan().reducers(),
        meta.phase(),
        meta.failCause() != null,
        meta.version()
    );
}
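// Usage sketch (hedged): derive map-phase progress from the status built above.
// The accessor names totalMapperCnt(), pendingMapperCnt(), jobName() and user()
// are assumptions mirroring the constructor arguments, not confirmed
// HadoopJobStatus API.
HadoopJobStatus st = status(meta);

int total = st.totalMapperCnt();
int pending = st.pendingMapperCnt();

// Fraction of mappers that have already completed.
double mapProgress = total == 0 ? 1. : (double)(total - pending) / total;

System.out.printf("Job %s (user %s): %.0f%% of mappers done%n", st.jobName(), st.user(), mapProgress * 100);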
/** {@inheritDoc} */
@Override public String user() {
    return info().user();
}
}

/** {@inheritDoc} */
@Override public int reducers() {
    return info().reducers();
}
job = jobInfo.createJob(jobCls, jobId, log, ctx.configuration().getNativeLibraryNames(), ctx.kernalContext().hadoopHelper());
/**
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static String get(HadoopJobInfo jobInfo, HadoopJobProperty pty, @Nullable String dflt) {
    String res = jobInfo.property(pty.propertyName());

    return res == null ? dflt : res;
}
collector = collector(jobConf, taskCtx0, !job.info().hasCombiner() && !job.info().hasReducer(), fileName(), taskCtx0.attemptId());
if (jobInfo.credentials() == null)
    rsrcMgr.prepareJobEnvironment(!external, jobLocalDir(igniteWorkDirectory(), locNodeId, jobId));
else {
    UserGroupInformation ugi = HadoopUtils.createUGI(jobInfo.user(), jobInfo.credentials());
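// HadoopUtils.createUGI is Ignite-internal; below is a minimal sketch of what
// such a helper might look like, assuming only the stock Hadoop security API
// (UserGroupInformation, Credentials). Illustrative, not the actual implementation.
import java.io.IOException;

import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

/** Builds a UGI for the job owner and attaches any delegated credentials. */
public static UserGroupInformation createUGI(String user, Credentials credentials) throws IOException {
    // Resolve a UGI for the user; no Kerberos ticket cache path is supplied here.
    UserGroupInformation ugi = UserGroupInformation.getBestUGI(null, user);

    if (credentials != null)
        ugi.addCredentials(credentials);

    return ugi;
}

// The elided else-branch presumably continues with ugi.doAs(...) so that the
// job environment preparation above runs as the job owner.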
job = req.jobInfo().createJob(jobCls, req.jobId(), log, null, new HadoopHelperImpl());
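// Hedged sketch of a createJob factory matching the two call sites above: the
// concrete HadoopJobEx class is instantiated reflectively. The constructor
// shape is assumed from the arguments passed at those call sites, not confirmed.
public HadoopJobEx createJob(Class<? extends HadoopJobEx> jobCls, HadoopJobId jobId, IgniteLogger log,
    @Nullable String[] libNames, HadoopHelper helper) throws IgniteCheckedException {
    try {
        // Look up a constructor taking (jobId, jobInfo, log, libNames, helper).
        Constructor<? extends HadoopJobEx> ctor = jobCls.getConstructor(HadoopJobId.class,
            HadoopDefaultJobInfo.class, IgniteLogger.class, String[].class, HadoopHelper.class);

        return ctor.newInstance(jobId, this, log, libNames, helper);
    }
    catch (ReflectiveOperationException e) {
        throw new IgniteCheckedException(e);
    }
}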
/**
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static int get(HadoopJobInfo jobInfo, HadoopJobProperty pty, int dflt) {
    String res = jobInfo.property(pty.propertyName());

    return res == null ? dflt : Integer.parseInt(res);
}
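// Illustrative usage of the typed getters above. SHUFFLE_REDUCER_NO_SORTING and
// SHUFFLE_MSG_SIZE are used as example HadoopJobProperty entries and the default
// values are arbitrary; substitute whatever property the call site needs.
boolean noSort = get(jobInfo, HadoopJobProperty.SHUFFLE_REDUCER_NO_SORTING, false);

int msgSize = get(jobInfo, HadoopJobProperty.SHUFFLE_MSG_SIZE, 128 * 1024);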
/**
 * @param taskCtx Task context.
 * @return Output.
 * @throws IgniteCheckedException If failed.
 */
public HadoopTaskOutput output(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    switch (taskCtx.taskInfo().type()) {
        case MAP:
            assert !job.info().hasCombiner() : "Map task can create output only when no combiner is defined.";

            // Intentional fall-through: a combiner-less map task writes partitioned output directly.
        case COMBINE:
            return new PartitionedOutput(taskCtx);

        default:
            throw new IllegalStateException("Illegal type: " + taskCtx.taskInfo().type());
    }
}
outputFormat = reduce || !taskCtx.job().info().hasReducer() ? prepareWriter(jobCtx) : null;
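// Hedged summary of the writer conditions at the two call sites above (this
// helper is illustrative, not Ignite API): a task writes through the real
// OutputFormat only when it is the last stage to touch the data, otherwise
// its records go to the in-memory shuffle instead.
static boolean writesToOutputFormat(HadoopTaskType type, HadoopJobInfo info) {
    switch (type) {
        case MAP:
            // Map output is final only in a map-only job with no combiner.
            return !info.hasCombiner() && !info.hasReducer();

        case REDUCE:
            return true;

        default:
            // Combine (and service) tasks never write final output.
            return false;
    }
}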