public <K, V> void init(JobConf job, OutputCollector<K, V> output, Reporter reporter)
    throws Exception {
  jc = job;
  // Bind a fresh MapredContext to this task thread and attach the reporter.
  MapredContext.init(false, new JobConf(jc));
  MapredContext.get().setReporter(reporter);
  oc = output;
  rp = reporter;
  LOG.info("maximum memory = " + memoryMXBean.getHeapMemoryUsage().getMax());
  MemoryInfoLogger memoryInfoLogger = new MemoryInfoLogger();
  memoryInfoLogger.run();
  try {
    LOG.info("conf classpath = "
        + Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs()));
    LOG.info("thread classpath = "
        + Arrays.asList(((URLClassLoader) Thread.currentThread()
            .getContextClassLoader()).getURLs()));
  } catch (Exception e) {
    LOG.info("cannot get classpath: " + e.getMessage());
  }
}
@Override
public void cleanUpInputFileChangedOp() throws HiveException {
  loadHashTable(getExecContext(), MapredContext.get());
}
@Override
public void initializeLocalWork(Configuration hconf) throws HiveException {
  Operator<? extends OperatorDesc> parent = null;
  for (Operator<? extends OperatorDesc> parentOp : parentOperators) {
    if (parentOp != null) {
      parent = parentOp;
      break;
    }
  }
  if (parent == null) {
    throw new HiveException("No valid parents.");
  }
  if (parentOperators.size() == 1) {
    // With only one real parent, wire in any still-unattached Tez dummy-store
    // operators as additional parents of this operator.
    Map<Integer, DummyStoreOperator> dummyOps =
        ((TezContext) (MapredContext.get())).getDummyOpsMap();
    for (Entry<Integer, DummyStoreOperator> connectOp : dummyOps.entrySet()) {
      if (connectOp.getValue().getChildOperators() == null
          || connectOp.getValue().getChildOperators().isEmpty()) {
        parentOperators.add(connectOp.getKey(), connectOp.getValue());
        connectOp.getValue().getChildOperators().add(this);
      }
    }
  }
  super.initializeLocalWork(hconf);
}
public void ensureOutputInitialize(OutputCollector output, Reporter reporter) {
  if (oc == null) {
    oc = output;
    rp = reporter;
    OperatorUtils.setChildrenCollector(mo.getChildOperators(), output);
    mo.setReporter(rp);
    MapredContext.get().setReporter(reporter);
  }
}
@Override
public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
  deferredChildren = new GenericUDF.DeferredObject[children.length];
  List<GenericUDF.DeferredObject> childrenNeedingPrepare =
      new ArrayList<GenericUDF.DeferredObject>(children.length);
  for (int i = 0; i < deferredChildren.length; i++) {
    DeferredExprObject deferredExprObject = new DeferredExprObject(children[i], isEager);
    deferredChildren[i] = deferredExprObject;
    if (deferredExprObject.needsPrepare()) {
      childrenNeedingPrepare.add(deferredExprObject);
    }
  }
  this.childrenNeedingPrepare = childrenNeedingPrepare.toArray(
      new GenericUDF.DeferredObject[childrenNeedingPrepare.size()]);
  // Initialize all children first
  ObjectInspector[] childrenOIs = new ObjectInspector[children.length];
  for (int i = 0; i < children.length; i++) {
    childrenOIs[i] = children[i].initialize(rowInspector);
  }
  MapredContext context = MapredContext.get();
  if (context != null) {
    context.setup(genericUDF);
  }
  outputOI = genericUDF.initializeAndFoldConstants(childrenOIs);
  isConstant = ObjectInspectorUtils.isConstantObjectInspector(outputOI)
      && isConsistentWithinQuery();
  return outputOI;
}
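// Several snippets above call context.setup(genericUDF). On the UDF side that
// surfaces as a callback to GenericUDF.configure(MapredContext), which runs at
// task initialization after MapredContext.init(...). A minimal sketch follows;
// the class name, property key, and returned value are illustrative, not from
// the source above.
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ContextAwareUDF extends GenericUDF {
  // Hypothetical task-side setting read from the JobConf during configure().
  private transient String taskSideSetting = "default";

  @Override
  public void configure(MapredContext context) {
    // context.getJobConf() exposes the task's JobConf; "my.custom.key" is a
    // made-up property name for illustration.
    if (context.getJobConf() != null) {
      taskSideSetting = context.getJobConf().get("my.custom.key", "default");
    }
  }

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    return taskSideSetting;
  }

  @Override
  public String getDisplayString(String[] children) {
    return "context_aware_udf()";
  }
}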
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  genericUDTF = conf.getGenericUDTF();
  collector = new UDTFCollector(this);
  genericUDTF.setCollector(collector);
  udtfInputOI = (StructObjectInspector) inputObjInspectors[0];
  objToSendToUDTF = new Object[udtfInputOI.getAllStructFieldRefs().size()];
  MapredContext context = MapredContext.get();
  if (context != null) {
    context.setup(genericUDTF);
  }
  StructObjectInspector udtfOutputOI = genericUDTF.initialize(udtfInputOI);
  if (conf.isOuterLV()) {
    outerObj = Arrays.asList(new Object[udtfOutputOI.getAllStructFieldRefs().size()]);
  }
  // Since we're passing the object output by the UDTF directly to the next
  // operator, we can use the same OI.
  outputObjInspector = udtfOutputOI;
  // Set up periodic progress reporting in case the UDTF doesn't output rows
  // for a while
  if (HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVEUDTFAUTOPROGRESS)) {
    autoProgressor = new AutoProgressor(this.getClass().getName(), reporter,
        Utilities.getDefaultNotificationInterval(hconf),
        HiveConf.getTimeVar(hconf, HiveConf.ConfVars.HIVES_AUTO_PROGRESS_TIMEOUT,
            TimeUnit.MILLISECONDS));
    autoProgressor.go();
  }
}
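// The UDTF path mirrors the UDF one: context.setup(genericUDTF) in the
// initializeOp above triggers GenericUDTF.configure(MapredContext), letting
// the UDTF capture the Reporter registered via setReporter(). A minimal
// sketch under that assumption; the class name and the counter group/name
// are illustrative, not from the source above.
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.mapred.Reporter;

public class ReportingUDTF extends GenericUDTF {
  private transient Reporter reporter;

  @Override
  public void configure(MapredContext mapredContext) {
    // Keep the Reporter that the operator tree attached to the context.
    reporter = mapredContext.getReporter();
  }

  @Override
  public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    // Emit a single string column per input row.
    List<String> fieldNames = Collections.singletonList("col");
    List<ObjectInspector> fieldOIs = Collections.<ObjectInspector>singletonList(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
  }

  @Override
  public void process(Object[] args) throws HiveException {
    if (reporter != null) {
      reporter.incrCounter("ReportingUDTF", "rows.emitted", 1L);
    }
    forward(new Object[] { args[0] == null ? null : args[0].toString() });
  }

  @Override
  public void close() throws HiveException {
    // No buffered rows to flush in this sketch.
  }
}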
private void initializeMapRecordSources() throws Exception {
  int size = mergeMapOpList.size() + 1; // the +1 is for the main map operator itself
  sources = new MapRecordSource[size];
  position = mapOp.getConf().getTag();
  sources[position] = new MapRecordSource();
  KeyValueReader reader = null;
  if (mainWorkMultiMRInput != null) {
    reader = getKeyValueReader(mainWorkMultiMRInput.getKeyValueReaders(), mapOp);
  } else {
    reader = legacyMRInput.getReader();
  }
  sources[position].init(jconf, mapOp, reader);
  for (AbstractMapOperator mapOp : mergeMapOpList) {
    int tag = mapOp.getConf().getTag();
    sources[tag] = new MapRecordSource();
    String inputName = mapOp.getConf().getName();
    MultiMRInput multiMRInput = multiMRInputMap.get(inputName);
    Collection<KeyValueReader> kvReaders = multiMRInput.getKeyValueReaders();
    l4j.debug("There are " + kvReaders.size() + " key-value readers for input " + inputName);
    if (kvReaders.size() > 0) {
      reader = getKeyValueReader(kvReaders, mapOp);
      sources[tag].init(jconf, mapOp, reader);
    }
  }
  ((TezContext) MapredContext.get()).setRecordSources(sources);
}
public void init() throws HiveException, UDFArgumentException {
  genericUDF = expr.getGenericUDF();
  deferredChildren = new GenericUDF.DeferredObject[expr.getChildren().size()];
  childrenOIs = new ObjectInspector[expr.getChildren().size()];
  writers = VectorExpressionWriterFactory.getExpressionWriters(expr.getChildren());
  for (int i = 0; i < childrenOIs.length; i++) {
    childrenOIs[i] = writers[i].getObjectInspector();
  }
  MapredContext context = MapredContext.get();
  if (context != null) {
    context.setup(genericUDF);
  }
  outputTypeInfo = expr.getTypeInfo();
  outputVectorAssignRow = new VectorAssignRow();
  outputVectorAssignRow.init(outputTypeInfo, outputColumnNum);
  genericUDF.initialize(childrenOIs);
  // Initialize constant arguments
  for (int i = 0; i < argDescs.length; i++) {
    if (argDescs[i].isConstant()) {
      argDescs[i].prepareConstant();
    }
  }
}