@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  String database;
  if (context != null) {
    // Running inside a task: read the database recorded in the job conf.
    database = context.getJobConf().get("hive.current.database");
  } else {
    // No MapredContext yet (compile time): ask the session state.
    database = SessionState.get().getCurrentDatabase();
  }
  return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, new Text(database));
}
public <K, V> void init(JobConf job, OutputCollector<K, V> output, Reporter reporter)
    throws Exception {
  jc = job;
  MapredContext.init(false, new JobConf(jc)); // reduce side: isMap == false
  MapredContext.get().setReporter(reporter);
  oc = output;
  rp = reporter;
  LOG.info("maximum memory = " + memoryMXBean.getHeapMemoryUsage().getMax());
  try {
    LOG.info("conf classpath = "
        + Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs()));
    LOG.info("thread classpath = "
        + Arrays.asList(((URLClassLoader) Thread.currentThread()
            .getContextClassLoader()).getURLs()));
  } catch (Exception e) {
    LOG.info("cannot get classpath: " + e.getMessage());
  }
}
public static MapredContext init(boolean isMap, JobConf jobConf) {
  // Pick the engine-specific subclass and install it in the thread-local.
  MapredContext context =
      HiveConf.getVar(jobConf, ConfVars.HIVE_EXECUTION_ENGINE).equals("tez") ?
          new TezContext(isMap, jobConf) : new MapredContext(isMap, jobConf);
  contexts.set(context);
  if (logger.isDebugEnabled()) {
    logger.debug("MapredContext initialized.");
  }
  return context;
}
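Since init stores the context in the thread-local contexts, any code running later on the same task thread can reach it through MapredContext.get(). A minimal sketch of the lifecycle, assuming a hand-built JobConf; the surrounding runTask harness is hypothetical:

// Hypothetical task harness showing the thread-local lifecycle:
// init() installs the context, get() retrieves it on the same thread,
// close() tears it down.
void runTask(JobConf job) {
  MapredContext.init(false, job); // reduce side; a "tez" engine would yield a TezContext
  try {
    MapredContext context = MapredContext.get(); // same thread, so same context
    Reporter reporter = context.getReporter();   // null until setReporter() is called
    // ... task work ...
  } finally {
    MapredContext.close(); // runs registered closeables, clears the thread-local
  }
}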
public void setup(GenericUDF genericUDF) {
  if (needConfigure(genericUDF)) {
    genericUDF.configure(this);
  }
  if (needClose(genericUDF)) {
    registerCloseable(genericUDF);
  }
}
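For setup(GenericUDF) to do anything, the UDF has to opt in: override configure(MapredContext) so needConfigure detects it, and provide a close() so needClose registers it for teardown. A minimal sketch of such a UDF; the class name and counter group are made up for illustration:

import java.io.Closeable;
import java.io.IOException;
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.mapred.Reporter;

// Hypothetical UDF exercising both hooks that setup(GenericUDF) checks for.
public class ContextAwareUDF extends GenericUDF implements Closeable {
  private transient Reporter reporter;

  @Override
  public void configure(MapredContext context) {
    // needConfigure() detects this override; called once at task-side setup.
    reporter = context.getReporter();
  }

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) {
    return PrimitiveObjectInspectorFactory.javaLongObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (reporter != null) {
      reporter.incrCounter("ContextAwareUDF", "rows", 1L); // hypothetical counter
    }
    return 1L;
  }

  public void close() throws IOException {
    // Registered via registerCloseable(); runs when MapredContext.close()
    // invokes closeAll() at task teardown.
  }

  @Override
  public String getDisplayString(String[] children) {
    return "context_aware_udf()";
  }
}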
public void ensureOutputInitialize(OutputCollector output, Reporter reporter) {
  if (oc == null) {
    oc = output;
    rp = reporter;
    OperatorUtils.setChildrenCollector(mo.getChildOperators(), output);
    mo.setReporter(rp);
    MapredContext.get().setReporter(reporter);
  }
}
@Override
public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
  deferredChildren = new GenericUDF.DeferredObject[children.length];
  List<GenericUDF.DeferredObject> childrenNeedingPrepare =
      new ArrayList<GenericUDF.DeferredObject>(children.length);
  for (int i = 0; i < deferredChildren.length; i++) {
    DeferredExprObject deferredExprObject = new DeferredExprObject(children[i], isEager);
    deferredChildren[i] = deferredExprObject;
    if (deferredExprObject.needsPrepare()) {
      childrenNeedingPrepare.add(deferredExprObject);
    }
  }
  this.childrenNeedingPrepare = childrenNeedingPrepare.toArray(
      new GenericUDF.DeferredObject[childrenNeedingPrepare.size()]);
  // Initialize all children first
  ObjectInspector[] childrenOIs = new ObjectInspector[children.length];
  for (int i = 0; i < children.length; i++) {
    childrenOIs[i] = children[i].initialize(rowInspector);
  }
  MapredContext context = MapredContext.get();
  if (context != null) {
    context.setup(genericUDF);
  }
  outputOI = genericUDF.initializeAndFoldConstants(childrenOIs);
  isConstant = ObjectInspectorUtils.isConstantObjectInspector(outputOI)
      && isConsistentWithinQuery();
  return outputOI;
}
@Override
public void configure(JobConf job) {
  jc = job;
  MapredContext.init(true, new JobConf(jc)); // map side: isMap == true
  statsAggKeyPrefix = HiveConf.getVar(job, HiveConf.ConfVars.HIVE_STATS_KEY_PREFIX);
}
@Override
public void cleanUpInputFileChangedOp() throws HiveException {
  loadHashTable(getExecContext(), MapredContext.get());
}
@Override
public void close() throws IOException {
  try {
    // Skip publishing if an earlier failure already flagged this task.
    if (!exception) {
      publishStats();
    }
  } catch (HiveException e) {
    this.exception = true;
    throw new RuntimeException(e);
  } finally {
    MapredContext.close();
  }
}
@Override
public boolean connect(Configuration hconf) {
  MapredContext context = MapredContext.get();
  if (context == null || context.getReporter() == null) {
    return false;
  }
  reporter = context.getReporter();
  return true;
}
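Once connect() has grabbed the reporter, Hadoop counters are the only channel this publisher has. A hedged sketch of a matching publishStat, assuming the publishStat(String, Map<String, String>) shape of Hive's StatsPublisher interface; the error handling is illustrative:

public boolean publishStat(String fileID, Map<String, String> stats) {
  for (Map.Entry<String, String> entry : stats.entrySet()) {
    try {
      // One Hadoop counter per stat, grouped by file/partition ID.
      reporter.incrCounter(fileID, entry.getKey(), Long.parseLong(entry.getValue()));
    } catch (NumberFormatException e) {
      return false; // counters only carry longs; bail on non-numeric values
    }
  }
  return true;
}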
protected static MapredContext instantiateMapredContext() {
  Configuration conf = UDFContext.getUDFContext().getJobConf();
  boolean isMap = conf.getBoolean(MRConfiguration.TASK_IS_MAP, false);
  if (conf.get("exectype").startsWith("TEZ")) {
    isMap = true;
    HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_ENGINE, "tez");
  }
  MapredContext context =
      MapredContext.init(isMap, new JobConf(UDFContext.getUDFContext().getJobConf()));
  context.setReporter(new HiveReporter(PigStatusReporter.getInstance()));
  return context;
}
@Nullable
protected final Reporter getReporter() {
  if (mapredContext == null) {
    return null;
  }
  return mapredContext.getReporter();
}
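The null check matters because there is no context, and hence no reporter, outside a running task (e.g. at query compile time). A small hedged usage sketch; the enum and counter names are made up:

// Hypothetical per-UDF counters; enum-based counters group cleanly in the job UI.
enum UdfCounter { ROWS_PROCESSED, MALFORMED_ROWS }

void countRow() {
  Reporter reporter = getReporter();
  if (reporter != null) { // null when no MapredContext is attached
    reporter.incrCounter(UdfCounter.ROWS_PROCESSED, 1L);
  }
}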
public static void close() {
  MapredContext context = contexts.get();
  if (context != null) {
    context.closeAll();
  }
  contexts.remove();
}
void setup(GenericUDTF genericUDTF) {
  if (needConfigure(genericUDTF)) {
    genericUDTF.configure(this);
  }
  // close is called by UDTFOperator
}
void setup(GenericUDAFEvaluator genericUDAF) {
  if (needConfigure(genericUDAF)) {
    genericUDAF.configure(this);
  }
  if (needClose(genericUDAF)) {
    registerCloseable(genericUDAF);
  }
}
// Variant of the initialize() shown earlier: this version gates constant
// folding on isDeterministic() rather than isConsistentWithinQuery().
@Override
public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
  deferredChildren = new GenericUDF.DeferredObject[children.length];
  List<GenericUDF.DeferredObject> childrenNeedingPrepare =
      new ArrayList<GenericUDF.DeferredObject>(children.length);
  for (int i = 0; i < deferredChildren.length; i++) {
    DeferredExprObject deferredExprObject = new DeferredExprObject(children[i], isEager);
    deferredChildren[i] = deferredExprObject;
    if (deferredExprObject.needsPrepare()) {
      childrenNeedingPrepare.add(deferredExprObject);
    }
  }
  this.childrenNeedingPrepare = childrenNeedingPrepare.toArray(
      new GenericUDF.DeferredObject[childrenNeedingPrepare.size()]);
  // Initialize all children first
  ObjectInspector[] childrenOIs = new ObjectInspector[children.length];
  for (int i = 0; i < children.length; i++) {
    childrenOIs[i] = children[i].initialize(rowInspector);
  }
  MapredContext context = MapredContext.get();
  if (context != null) {
    context.setup(genericUDF);
  }
  outputOI = genericUDF.initializeAndFoldConstants(childrenOIs);
  isConstant = ObjectInspectorUtils.isConstantObjectInspector(outputOI)
      && isDeterministic();
  return outputOI;
}
MapredContext.close();
Utilities.clearWorkMap(jc);