/**
 * Publishes a full MapReduce plan (map side plus optional reduce side) into the
 * given configuration, serializing each half under the scratch directory.
 *
 * @param conf           configuration that receives the serialized plan
 * @param w              the MapredWork holding the map work and (possibly null) reduce work
 * @param hiveScratchDir scratch directory used when serializing each plan half
 */
public static void setMapRedWork(Configuration conf, MapredWork w, Path hiveScratchDir) {
  String inputName = conf.get(INPUT_NAME);
  if (inputName == null) {
    // Default input name embeds the scratch dir path.
    inputName = "mapreduce:" + hiveScratchDir;
  }
  conf.set(INPUT_NAME, inputName);
  setMapWork(conf, w.getMapWork(), hiveScratchDir, true);
  if (w.getReduceWork() != null) {
    // INPUT_NAME is re-applied before the reduce side — presumably the map-side
    // setup can overwrite it; ordering preserved from the original.
    conf.set(INPUT_NAME, inputName);
    setReduceWork(conf, w.getReduceWork(), hiveScratchDir, true);
  }
}
/**
 * Serializes the map-side plan, and the reduce-side plan when present, into the
 * configuration, using {@code hiveScratchDir} as the serialization target.
 *
 * @param conf           configuration that receives the serialized plan
 * @param w              combined MapReduce work; reduce work may be null
 * @param hiveScratchDir scratch directory for plan serialization
 */
public static void setMapRedWork(Configuration conf, MapredWork w, Path hiveScratchDir) {
  String planName = conf.get(INPUT_NAME);
  if (planName == null) {
    planName = "mapreduce";
  }
  conf.set(INPUT_NAME, planName);
  setMapWork(conf, w.getMapWork(), hiveScratchDir, true);
  if (w.getReduceWork() != null) {
    // Re-set INPUT_NAME before handling the reduce side — presumably the
    // map-side call may change it; ordering kept as in the original.
    conf.set(INPUT_NAME, planName);
    setReduceWork(conf, w.getReduceWork(), hiveScratchDir, true);
  }
}
@Override public void processRow(Object key, Object value) throws IOException { // reset the execContext for each new row execContext.resetRow(); try { // Since there is no concept of a group, we don't invoke // startGroup/endGroup for a mapper mo.process((Writable) value); if (isLogInfoEnabled) { logMemoryInfo(); } } catch (Throwable e) { abort = true; Utilities.setMapWork(jc, null); if (e instanceof OutOfMemoryError) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { String msg = "Error processing row: " + e; LOG.error(msg, e); throw new RuntimeException(msg, e); } } }
@Override public void processRow(Object key, Object value) throws IOException { if (!anyRow) { OperatorUtils.setChildrenCollector(mo.getChildOperators(), oc); anyRow = true; } // reset the execContext for each new row execContext.resetRow(); try { // Since there is no concept of a group, we don't invoke // startGroup/endGroup for a mapper mo.process((Writable) value); incrementRowNumber(); } catch (Throwable e) { abort = true; Utilities.setMapWork(jc, null); if (e instanceof OutOfMemoryError) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { String msg = "Error processing row: " + e; LOG.error(msg, e); throw new RuntimeException(msg, e); } } }
// Fragment — enclosing method not fully visible. Continues an argument list from the
// previous (unseen) line, then: sets input paths on the cloned conf, serializes the
// MapWork plan into it, creates the work's tmp dirs, and branches on MergeFileWork.
scratchDir, context, false); Utilities.setInputPaths(cloned, inputPaths); Utilities.setMapWork(cloned, (MapWork) work, scratchDir, false); Utilities.createTmpDirs(cloned, (MapWork) work); if (work instanceof MergeFileWork) {
// Publish the map plan into the job conf (fragment; enclosing method not visible).
Utilities.setMapWork(jconf, mapWork);
// Fragment — enclosing method not fully visible. Continues an argument list from the
// previous (unseen) line, then: sets input paths, serializes mapWork into the cloned
// conf, creates tmp dirs for it, and branches on MergeFileWork.
scratchDir, context, false); Utilities.setInputPaths(cloned, inputPaths); Utilities.setMapWork(cloned, mapWork, scratchDir, false); Utilities.createTmpDirs(cloned, mapWork); if (work instanceof MergeFileWork) {
// Publish the map plan into the job conf (fragment; enclosing method not visible).
Utilities.setMapWork(jconf, mapWork);
// Fragment — enclosing method not visible. Publishes the map plan into the job conf,
// then opens a try block whose body continues past this line.
Utilities.setMapWork(jobConf, work); try { boolean sendSerializedEvents =
// Fragment — enclosing method not visible. Publishes the map plan into the job conf,
// then opens a try block whose body continues past this line.
Utilities.setMapWork(jobConf, work); try { boolean sendSerializedEvents =
// NOTE(review): Utilities.setMapWork is invoked twice here with identical arguments —
// this looks like an accidental duplication; confirm whether the second call is needed.
Utilities.setMapWork(conf, mapWork, mrScratchDir, false); Utilities.setMapWork(conf, mapWork, mrScratchDir, false);
/**
 * Builds a minimal vectorized MapWork — vector mode enabled, with a row-batch
 * context initialized from the conf-derived struct object inspector and no
 * scratch columns — and publishes it into the configuration.
 *
 * @param conf configuration used to build the object inspector and to receive the map work
 * @throws HiveException if row-batch context initialization fails
 */
protected static void initialVectorizedRowBatchCtx(Configuration conf) throws HiveException {
  VectorizedRowBatchCtx batchCtx = new VectorizedRowBatchCtx();
  // Empty String[] => no scratch column type names.
  batchCtx.init(createStructObjectInspector(conf), new String[0]);
  MapWork mapWork = new MapWork();
  mapWork.setVectorMode(true);
  mapWork.setVectorizedRowBatchCtx(batchCtx);
  Utilities.setMapWork(conf, mapWork);
}
// Publish the map plan into the job conf (fragment; enclosing method not visible).
Utilities.setMapWork(jconf, mapWork);
// NOTE(review): Utilities.setMapWork is invoked twice here with identical arguments —
// this looks like an accidental duplication; confirm whether the second call is needed.
Utilities.setMapWork(conf, mapWork, mrScratchDir, false); Utilities.setMapWork(conf, mapWork, mrScratchDir, false);
// Publish the map plan into the job conf (fragment; enclosing method not visible).
Utilities.setMapWork(jconf, mapWork);
// Serialize the map plan into the job conf using the context's MR tmp path
// (fragment; enclosing method not visible).
Utilities.setMapWork(job, work, ctx.getMRTmpPath(), true);
// Serialize the map plan into the job conf using the context's MR tmp path
// (fragment; enclosing method not visible).
Utilities.setMapWork(job, work, ctx.getMRTmpPath(), true);
// Fragment — enclosing method not visible. Attaches the row-batch context, points the
// PLAN conf var at "//tmp", and publishes the map work into the configuration.
mapWork.setVectorizedRowBatchCtx(vrbContext); HiveConf.setVar(conf, HiveConf.ConfVars.PLAN, "//tmp"); Utilities.setMapWork(conf, mapWork);
// Fragment — enclosing method not visible. Attaches the row-batch context, points the
// PLAN conf var at "//tmp", and publishes the map work into the configuration.
mapWork.setVectorizedRowBatchCtx(vrbContext); HiveConf.setVar(conf, HiveConf.ConfVars.PLAN, "//tmp"); Utilities.setMapWork(conf, mapWork);
/**
 * Stores a complete MapReduce plan in the configuration: the map side always,
 * the reduce side only when one exists.
 *
 * @param conf           target configuration for the serialized plan
 * @param w              MapredWork whose map and (optional) reduce halves are serialized
 * @param hiveScratchDir scratch directory used by the serialization helpers
 */
public static void setMapRedWork(Configuration conf, MapredWork w, Path hiveScratchDir) {
  String name = conf.get(INPUT_NAME);
  if (name == null) {
    name = "mapreduce";
  }
  conf.set(INPUT_NAME, name);
  setMapWork(conf, w.getMapWork(), hiveScratchDir, true);
  if (w.getReduceWork() != null) {
    // INPUT_NAME is written again before the reduce half — presumably the
    // map-side serialization can alter it; original ordering retained.
    conf.set(INPUT_NAME, name);
    setReduceWork(conf, w.getReduceWork(), hiveScratchDir, true);
  }
}