/**
 * Reads an integer job property, falling back to a default when the property is absent.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static int get(HadoopJobInfo jobInfo, HadoopJobProperty pty, int dflt) {
    String val = jobInfo.property(pty.propertyName());

    if (val == null)
        return dflt;

    return Integer.parseInt(val);
}
/**
 * Creates a multimap base backed by the given offheap memory allocator.
 *
 * @param jobInfo Job info.
 * @param mem Memory.
 */
protected HadoopMultimapBase(HadoopJobInfo jobInfo, GridUnsafeMemory mem) {
    assert jobInfo != null;
    assert mem != null;

    this.mem = mem;

    // Page size is configurable per job; fall back to the module default.
    this.pageSize = get(jobInfo, SHUFFLE_OFFHEAP_PAGE_SIZE, DFLT_OFFHEAP_PAGE_SIZE);
}
// A shared class loader makes mutable static state visible across tasks of
// different jobs, which can corrupt jobs that rely on per-job statics — warn
// the user when this mode is enabled (it defaults to true here).
if (HadoopJobProperty.get(jobInfo, JOB_SHARED_CLASSLOADER, true)) {
    U.warn(log, JOB_SHARED_CLASSLOADER.propertyName() + " job property is set to true; please disable " +
        "it if job tasks rely on mutable static state.");
/**
 * Reads a boolean job property, falling back to a default when the property is absent.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static boolean get(HadoopJobInfo jobInfo, HadoopJobProperty pty, boolean dflt) {
    String val = jobInfo.property(pty.propertyName());

    return val != null ? Boolean.parseBoolean(val) : dflt;
}
}
/** * @param maps Maps. * @param idx Index. * @return Map. */ private HadoopMultimap getOrCreateMap(AtomicReferenceArray<HadoopMultimap> maps, int idx) { HadoopMultimap map = maps.get(idx); if (map == null) { // Create new map. map = get(job.info(), SHUFFLE_REDUCER_NO_SORTING, false) ? new HadoopConcurrentHashMultimap(job.info(), mem, get(job.info(), PARTITION_HASHMAP_SIZE, 8 * 1024)): new HadoopSkipList(job.info(), mem); if (!maps.compareAndSet(idx, null, map)) { map.close(); return maps.get(idx); } } return map; }
/**
 * Reads a string job property, falling back to a default when the property is absent.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static String get(HadoopJobInfo jobInfo, HadoopJobProperty pty, @Nullable String dflt) {
    String val = jobInfo.property(pty.propertyName());

    if (val != null)
        return val;

    return dflt;
}
this.embedded = embedded;

// Striped mapper output is enabled by default; the effective flag is resolved below.
boolean stripeMappers0 = get(job.info(), SHUFFLE_MAPPER_STRIPED_OUTPUT, true);

// Shuffle message tuning: batch size and optional GZIP compression of messages.
msgSize = get(job.info(), SHUFFLE_MSG_SIZE, DFLT_SHUFFLE_MSG_SIZE);
msgGzip = get(job.info(), SHUFFLE_MSG_GZIP, DFLT_SHUFFLE_MSG_GZIP);

// One pending message slot per remote map.
msgs = new HadoopShuffleMessage[rmtMapsSize];

// 0 disables shuffle throttling.
throttle = get(job.info(), SHUFFLE_JOB_THROTTLE, 0);
/**
 * Try initializing partially raw comparator for job.
 * <p>
 * If the user has not configured one explicitly, walks the map output key class
 * hierarchy and installs the first comparator registered for a matching class name.
 *
 * @param conf Configuration.
 */
private void initializePartiallyRawComparator(JobConf conf) {
    String clsName = conf.get(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), null);

    // Explicitly configured comparator wins — nothing to do.
    if (clsName != null)
        return;

    // Search the key class and its superclasses for a registered comparator.
    for (Class keyCls = conf.getMapOutputKeyClass(); keyCls != null; keyCls = keyCls.getSuperclass()) {
        String candidate = PARTIAL_COMPARATORS.get(keyCls.getName());

        if (candidate != null) {
            conf.set(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), candidate);

            break;
        }
    }
}
}
/**
 * Creates task output for the given task context, or {@code null} for task types
 * that produce no shuffle output.
 *
 * @param ctx Task info.
 * @return Task output.
 * @throws IgniteCheckedException If failed.
 */
private HadoopTaskOutput createOutputInternal(HadoopTaskContext ctx) throws IgniteCheckedException {
    switch (ctx.taskInfo().type()) {
        case SETUP:
        case REDUCE:
        case COMMIT:
        case ABORT:
            // These task types never write shuffle output.
            return null;

        case MAP:
            if (job.info().hasCombiner()) {
                // Combiner input multimap must be created at most once per task.
                assert combinerInput == null;

                // Hash multimap avoids sorting when the combiner tolerates unsorted input.
                combinerInput = get(job.info(), SHUFFLE_COMBINER_NO_SORTING, false) ?
                    new HadoopHashMultimap(job.info(), mem, get(job.info(), COMBINER_HASHMAP_SIZE, 8 * 1024)):
                    new HadoopSkipList(job.info(), mem); // TODO replace with red-black tree

                return combinerInput.startAdding(ctx);
            }

            // NOTE: intentional fall-through — a MAP task without a combiner
            // uses the regular output path below.

        default:
            return createOutput(ctx);
    }
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public PartiallyOffheapRawComparatorEx<Object> partialRawSortComparator() {
    Class cls = jobCtx.getJobConf().getClass(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), null);

    if (cls == null)
        return null;

    Object cmp = ReflectionUtils.newInstance(cls, jobConf());

    // Extended offheap comparators are used directly; plain partially-raw
    // comparators are wrapped into a delegating adapter.
    if (cmp instanceof PartiallyOffheapRawComparatorEx)
        return (PartiallyOffheapRawComparatorEx)cmp;

    return new HadoopV2DelegatingPartiallyOffheapRawComparator<>((PartiallyRawComparator)cmp);
}
/** {@inheritDoc} */
@Override public JobStatus getJobStatus(JobID jobId) throws IOException, InterruptedException {
    try {
        // Primitive long: Configuration.getLong() returns long, so boxing it into
        // a Long only to unbox at the comparison below was pointless overhead.
        // -1 means "no poll delay configured".
        long delay = conf.getLong(HadoopJobProperty.JOB_STATUS_POLL_DELAY.propertyName(), -1);

        HadoopJobStatus status;

        // With a configured poll delay, the status task waits for a job state change.
        if (delay >= 0)
            status = execute(HadoopProtocolJobStatusTask.class, jobId.getJtIdentifier(), jobId.getId(), delay);
        else
            status = execute(HadoopProtocolJobStatusTask.class, jobId.getJtIdentifier(), jobId.getId());

        if (status == null)
            throw new IOException("Job tracker doesn't have any information about the job: " + jobId);

        return processStatus(status);
    }
    catch (GridClientException e) {
        // Preserve the original cause for diagnostics.
        throw new IOException("Failed to get job status: " + jobId, e);
    }
}
/**
 * Does actual test TeraSort job Through Ignite API.
 *
 * @param gzip Whether to use GZIP for shuffle messages.
 * @throws Exception If the job fails or the output directory cannot be cleaned.
 */
protected final void teraSort(boolean gzip) throws Exception {
    System.out.println("TeraSort ===============================================================");

    // Clean the output directory from any previous run.
    getFileSystem().delete(new Path(sortOutDir), true);

    final JobConf jobConf = new JobConf();

    jobConf.setUser(getUser());
    jobConf.set("fs.defaultFS", getFsBase());

    log().info("Desired number of reduces: " + numReduces());

    jobConf.set("mapreduce.job.reduces", String.valueOf(numReduces()));

    log().info("Desired number of maps: " + numMaps());

    final long splitSize = dataSizeBytes() / numMaps();

    log().info("Desired split size: " + splitSize);

    // Force the split to be of the desired size:
    jobConf.set("mapred.min.split.size", String.valueOf(splitSize));
    jobConf.set("mapred.max.split.size", String.valueOf(splitSize));

    // Tune Ignite shuffle: striped mapper output and a fixed message size.
    jobConf.setBoolean(HadoopJobProperty.SHUFFLE_MAPPER_STRIPED_OUTPUT.propertyName(), true);
    jobConf.setInt(HadoopJobProperty.SHUFFLE_MSG_SIZE.propertyName(), 4096);

    if (gzip)
        jobConf.setBoolean(HadoopJobProperty.SHUFFLE_MSG_GZIP.propertyName(), true);

    // Use the optimized partially-raw comparator for Text keys.
    jobConf.set(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(),
        TextPartiallyRawComparator.class.getName());

    Job job = setupConfig(jobConf);

    HadoopJobId jobId = new HadoopJobId(UUID.randomUUID(), 1);

    // Submit through the Ignite Hadoop API and block until completion.
    IgniteInternalFuture<?> fut = grid(0).hadoop().submit(jobId, createJobInfo(job.getConfiguration(), null));

    fut.get();
}
    // Property is stored as a string; values must remain exactly "true"/"false".
    jobConf.set(HadoopJobProperty.SHUFFLE_MAPPER_STRIPED_OUTPUT.propertyName(), "true");
else
    jobConf.set(HadoopJobProperty.SHUFFLE_MAPPER_STRIPED_OUTPUT.propertyName(), "false");
/**
 * Looks up a string job property, returning the supplied default when it is not set.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static String get(HadoopJobInfo jobInfo, HadoopJobProperty pty, @Nullable String dflt) {
    String val = jobInfo.property(pty.propertyName());

    return val != null ? val : dflt;
}
/**
 * Looks up an integer job property, returning the supplied default when it is not set.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static int get(HadoopJobInfo jobInfo, HadoopJobProperty pty, int dflt) {
    String val = jobInfo.property(pty.propertyName());

    return val != null ? Integer.parseInt(val) : dflt;
}
/**
 * Looks up a boolean job property, returning the supplied default when it is not set.
 *
 * @param jobInfo Job info.
 * @param pty Property.
 * @param dflt Default value.
 * @return Property value.
 */
public static boolean get(HadoopJobInfo jobInfo, HadoopJobProperty pty, boolean dflt) {
    String val = jobInfo.property(pty.propertyName());

    if (val == null)
        return dflt;

    return Boolean.parseBoolean(val);
}
}