/**
 * Reads an integer-valued job property, falling back to the supplied default.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value, or {@code dflt} if the property is not set.
 */
public static int get(HadoopJobInfo jobInfo, HadoopJobProperty pty, int dflt) {
    String val = jobInfo.property(pty.propertyName());

    if (val == null)
        return dflt;

    return Integer.parseInt(val);
}
/**
 * Reads a boolean-valued job property, falling back to the supplied default.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value, or {@code dflt} if the property is not set.
 */
public static boolean get(HadoopJobInfo jobInfo, HadoopJobProperty pty, boolean dflt) {
    String strVal = jobInfo.property(pty.propertyName());

    return strVal != null ? Boolean.parseBoolean(strVal) : dflt;
}
}
/**
 * Reads a string-valued job property, falling back to the supplied default.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value (may be {@code null}).
 * @return Property value, or {@code dflt} if the property is not set.
 */
public static String get(HadoopJobInfo jobInfo, HadoopJobProperty pty, @Nullable String dflt) {
    String val = jobInfo.property(pty.propertyName());

    return val != null ? val : dflt;
}
/**
 * Tries to initialize a partially raw comparator for the job.
 * <p>
 * If the user did not configure one explicitly, walks up the map output key class
 * hierarchy and installs the first comparator registered for a matching class name
 * in {@code PARTIAL_COMPARATORS}.
 *
 * @param conf Configuration.
 */
private void initializePartiallyRawComparator(JobConf conf) {
    String clsName = conf.get(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), null);

    // Explicitly configured by the user: nothing to do.
    if (clsName != null)
        return;

    // Use a wildcard Class<?> instead of the raw type; walk superclasses until a match is found.
    for (Class<?> keyCls = conf.getMapOutputKeyClass(); keyCls != null; keyCls = keyCls.getSuperclass()) {
        clsName = PARTIAL_COMPARATORS.get(keyCls.getName());

        if (clsName != null) {
            conf.set(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), clsName);

            break;
        }
    }
}
}
/**
 * {@inheritDoc}
 * <p>
 * Resolves the configured partially raw comparator class (if any) and instantiates it.
 * If the instance does not already implement {@link PartiallyOffheapRawComparatorEx},
 * it is wrapped in a delegating adapter.
 */
@SuppressWarnings("unchecked")
@Override public PartiallyOffheapRawComparatorEx<Object> partialRawSortComparator() {
    // Use Class<?> instead of the raw Class type.
    Class<?> cls = jobCtx.getJobConf().getClass(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), null);

    // No comparator configured for this job.
    if (cls == null)
        return null;

    Object res = ReflectionUtils.newInstance(cls, jobConf());

    if (res instanceof PartiallyOffheapRawComparatorEx)
        return (PartiallyOffheapRawComparatorEx)res;

    // Adapt the plain comparator to the offheap-aware interface.
    return new HadoopV2DelegatingPartiallyOffheapRawComparator<>((PartiallyRawComparator)res);
}
/**
 * {@inheritDoc}
 * <p>
 * Queries the job tracker for the status of the given job, optionally passing a
 * poll delay when one is configured via {@code JOB_STATUS_POLL_DELAY}.
 *
 * @throws IOException If the tracker has no information about the job or the query fails.
 */
@Override public JobStatus getJobStatus(JobID jobId) throws IOException, InterruptedException {
    try {
        // Use a primitive long: Configuration.getLong() returns long, so boxing into
        // Long here was pure overhead.
        long delay = conf.getLong(HadoopJobProperty.JOB_STATUS_POLL_DELAY.propertyName(), -1);

        HadoopJobStatus status;

        if (delay >= 0)
            status = execute(HadoopProtocolJobStatusTask.class, jobId.getJtIdentifier(), jobId.getId(), delay);
        else
            status = execute(HadoopProtocolJobStatusTask.class, jobId.getJtIdentifier(), jobId.getId());

        if (status == null)
            throw new IOException("Job tracker doesn't have any information about the job: " + jobId);

        return processStatus(status);
    }
    catch (GridClientException e) {
        // Preserve the cause so callers can diagnose the underlying client failure.
        throw new IOException("Failed to get job status: " + jobId, e);
    }
}
/** * Does actual test TeraSort job Through Ignite API * * @param gzip Whether to use GZIP. */ protected final void teraSort(boolean gzip) throws Exception { System.out.println("TeraSort ==============================================================="); getFileSystem().delete(new Path(sortOutDir), true); final JobConf jobConf = new JobConf(); jobConf.setUser(getUser()); jobConf.set("fs.defaultFS", getFsBase()); log().info("Desired number of reduces: " + numReduces()); jobConf.set("mapreduce.job.reduces", String.valueOf(numReduces())); log().info("Desired number of maps: " + numMaps()); final long splitSize = dataSizeBytes() / numMaps(); log().info("Desired split size: " + splitSize); // Force the split to be of the desired size: jobConf.set("mapred.min.split.size", String.valueOf(splitSize)); jobConf.set("mapred.max.split.size", String.valueOf(splitSize)); jobConf.setBoolean(HadoopJobProperty.SHUFFLE_MAPPER_STRIPED_OUTPUT.propertyName(), true); jobConf.setInt(HadoopJobProperty.SHUFFLE_MSG_SIZE.propertyName(), 4096); if (gzip) jobConf.setBoolean(HadoopJobProperty.SHUFFLE_MSG_GZIP.propertyName(), true); jobConf.set(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), TextPartiallyRawComparator.class.getName()); Job job = setupConfig(jobConf); HadoopJobId jobId = new HadoopJobId(UUID.randomUUID(), 1); IgniteInternalFuture<?> fut = grid(0).hadoop().submit(jobId, createJobInfo(job.getConfiguration(), null)); fut.get(); }
// Shared classloader mode reuses task classes across jobs, so mutable static state can
// leak between tasks — warn the user to disable it in that case.
// NOTE(review): statement fragment; the enclosing method is not visible in this chunk.
U.warn(log, JOB_SHARED_CLASSLOADER.propertyName() + " job property is set to true; please disable " +
    "it if job tasks rely on mutable static state.");
    // Propagate the striped-output flag into the job configuration as a string.
    // NOTE(review): fragment — the governing 'if' condition is outside this view.
    jobConf.set(HadoopJobProperty.SHUFFLE_MAPPER_STRIPED_OUTPUT.propertyName(), "true");
else
    jobConf.set(HadoopJobProperty.SHUFFLE_MAPPER_STRIPED_OUTPUT.propertyName(), "false");
/**
 * Looks up a string job property by name.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value (may be {@code null}).
 * @return Property value, or {@code dflt} when absent.
 */
public static String get(HadoopJobInfo jobInfo, HadoopJobProperty pty, @Nullable String dflt) {
    String propVal = jobInfo.property(pty.propertyName());

    if (propVal == null)
        return dflt;

    return propVal;
}
/**
 * Looks up an integer job property by name.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Parsed property value, or {@code dflt} when absent.
 */
public static int get(HadoopJobInfo jobInfo, HadoopJobProperty pty, int dflt) {
    String propVal = jobInfo.property(pty.propertyName());

    return propVal != null ? Integer.parseInt(propVal) : dflt;
}
/**
 * Looks up a boolean job property by name.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Parsed property value, or {@code dflt} when absent.
 */
public static boolean get(HadoopJobInfo jobInfo, HadoopJobProperty pty, boolean dflt) {
    String propVal = jobInfo.property(pty.propertyName());

    if (propVal == null)
        return dflt;

    return Boolean.parseBoolean(propVal);
}
}