/**
 * Map-side initialization: runs the base-class setup, then initializes the
 * single root operator with the table's standard struct object inspector
 * as its only input.
 *
 * @param hconf job configuration passed down the operator tree
 * @throws HiveException if operator initialization fails
 */
@Override
public void initializeMapOperator(Configuration hconf) throws HiveException {
  super.initializeMapOperator(hconf);
  ObjectInspector[] rootInputOIs = {tableStandardStructObjectInspector};
  oneRootOperator.initialize(hconf, rootInputOIs);
}
/** * Collects all the parent's output object inspectors and calls actual * initialization method. * * @param hconf * @param inputOI * OI of the row that this parent will pass to this op * @param parentId * parent operator id * @throws HiveException */ protected void initialize(Configuration hconf, ObjectInspector inputOI, int parentId) throws HiveException { LOG.debug("Initializing Child : {}", this); if (parentId >= inputObjInspectors.length) { // Determine the next power of 2 larger than the requested index int newLength = 2; while (parentId >= newLength) { newLength <<= 1; } inputObjInspectors = Arrays.copyOf(inputObjInspectors, newLength); } inputObjInspectors[parentId] = inputOI; // call the actual operator initialization function initialize(hconf, null); }
/**
 * Initializes this operator for the map side.
 *
 * @param hconf job configuration
 * @throws HiveException if initialization of the root operator fails
 */
@Override
public void initializeMapOperator(Configuration hconf) throws HiveException {
  // Base-class setup first; then hand the table's standard struct OI
  // to the lone root operator of the tree.
  super.initializeMapOperator(hconf);
  final ObjectInspector[] inspectors =
      new ObjectInspector[] {tableStandardStructObjectInspector};
  oneRootOperator.initialize(hconf, inspectors);
}
/**
 * Calls initialize on each of the children with outputObjetInspector as the
 * output row format.
 *
 * @param hconf job configuration forwarded to each child
 * @throws HiveException if a child fails to initialize
 */
protected void initializeChildren(Configuration hconf) throws HiveException {
  state = State.INIT;
  LOG.debug("Operator Initialized: {}", this);
  if (CollectionUtils.isEmpty(childOperators)) {
    return;
  }
  LOG.debug("Initializing Children: {}", this);
  final int childCount = childOperatorsArray.length;
  for (int idx = 0; idx < childCount; ++idx) {
    Operator<? extends OperatorDesc> child = childOperatorsArray[idx];
    // Each child is told which tag (parent slot) this operator occupies.
    child.initialize(hconf, outputObjInspector, childOperatorsTag[idx]);
    if (reporter != null) {
      child.setReporter(reporter);
    }
  }
}
/**
 * Map-side initialization: resets the row counter, reads the logging
 * interval from the configuration, and initializes every child operator
 * with the object inspector recorded for it.
 *
 * @param hconf job configuration
 * @throws HiveException if a child operator fails to initialize
 */
public void initializeMapOperator(Configuration hconf) throws HiveException {
  super.initializeMapOperator(hconf);
  cntr = 1;
  logEveryNRows = HiveConf.getLongVar(hconf, HiveConf.ConfVars.HIVE_LOG_N_RECORDS);
  // Each child receives exactly one input OI: the one mapped to it.
  for (Entry<Operator<?>, StructObjectInspector> mapping : childrenOpToOI.entrySet()) {
    ObjectInspector[] childOIs = new ObjectInspector[] {mapping.getValue()};
    mapping.getKey().initialize(hconf, childOIs);
  }
}
/**
 * Initializes the map-side pipeline for this operator.
 *
 * @param hconf job configuration
 * @throws HiveException if a child operator fails to initialize
 */
public void initializeMapOperator(Configuration hconf) throws HiveException {
  super.initializeMapOperator(hconf);
  // Reset the processed-row counter and refresh the log interval.
  cntr = 1;
  logEveryNRows = HiveConf.getLongVar(hconf, HiveConf.ConfVars.HIVE_LOG_N_RECORDS);
  for (Entry<Operator<?>, StructObjectInspector> e : childrenOpToOI.entrySet()) {
    // Initialize the child with its dedicated struct object inspector.
    e.getKey().initialize(hconf, new ObjectInspector[] {e.getValue()});
  }
}
/** * Collects all the parent's output object inspectors and calls actual * initialization method. * * @param hconf * @param inputOI * OI of the row that this parent will pass to this op * @param parentId * parent operator id * @throws HiveException */ protected void initialize(Configuration hconf, ObjectInspector inputOI, int parentId) throws HiveException { if (isLogDebugEnabled) { LOG.debug("Initializing child " + id + " " + getName()); } // Double the size of the array if needed if (parentId >= inputObjInspectors.length) { int newLength = inputObjInspectors.length * 2; while (parentId >= newLength) { newLength *= 2; } inputObjInspectors = Arrays.copyOf(inputObjInspectors, newLength); } inputObjInspectors[parentId] = inputOI; // call the actual operator initialization function initialize(hconf, null); }
/**
 * Calls initialize on each of the children with outputObjetInspector as the
 * output row format. On this path only the single child at index 0 is
 * initialized.
 *
 * @param hconf job configuration
 * @throws HiveException if the child fails to initialize
 */
@Override
protected void initializeChildren(Configuration hconf) throws HiveException {
  state = State.INIT;
  if (LOG.isInfoEnabled()) {
    LOG.info("Operator " + id + " " + getName() + " initialized");
  }
  if (childOperators == null || childOperators.isEmpty()) {
    return;
  }
  if (LOG.isInfoEnabled()) {
    LOG.info("Initializing children of " + id + " " + getName());
  }
  Operator<? extends OperatorDesc> onlyChild = childOperatorsArray[0];
  onlyChild.initialize(hconf, outputObjectInspectors);
  if (reporter != null) {
    onlyChild.setReporter(reporter);
  }
}
/**
 * Calls initialize on each of the children with outputObjetInspector as the
 * output row format.
 *
 * @param hconf job configuration forwarded to every child
 * @throws HiveException if any child fails to initialize
 */
protected void initializeChildren(Configuration hconf) throws HiveException {
  state = State.INIT;
  if (isLogDebugEnabled) {
    LOG.debug("Operator " + id + " " + getName() + " initialized");
  }
  if (childOperators == null || childOperators.isEmpty()) {
    return;
  }
  if (isLogDebugEnabled) {
    LOG.debug("Initializing children of " + id + " " + getName());
  }
  final int numChildren = childOperatorsArray.length;
  for (int child = 0; child < numChildren; child++) {
    // Pass along the tag identifying which parent slot this operator fills.
    childOperatorsArray[child].initialize(hconf, outputObjInspector, childOperatorsTag[child]);
    if (reporter != null) {
      childOperatorsArray[child].setReporter(reporter);
    }
  }
}
/**
 * Calls initialize on each of the children with outputObjetInspector as the
 * output row format. Only the first (index 0) child is initialized here.
 *
 * @param hconf job configuration
 * @throws HiveException if the child fails to initialize
 */
@Override
protected void initializeChildren(Configuration hconf) throws HiveException {
  state = State.INIT;
  if (isLogInfoEnabled) {
    LOG.info("Operator " + id + " " + getName() + " initialized");
  }
  // Nothing to do when there are no children at all.
  if (childOperators == null || childOperators.isEmpty()) {
    return;
  }
  if (isLogInfoEnabled) {
    LOG.info("Initializing children of " + id + " " + getName());
  }
  final Operator<? extends OperatorDesc> firstChild = childOperatorsArray[0];
  firstChild.initialize(hconf, outputObjectInspectors);
  if (reporter != null) {
    firstChild.setReporter(reporter);
  }
}
// Walk the candidate output object inspectors for child i and initialize
// the child with each non-null one.
for (int j = 0; j < ois.length; j++) {
  if (ois[j] != null) {
    // NOTE(review): child i is initialized twice — once per-parent with
    // (ois[j], j) and once with the full childInputObjInspectors[i] array.
    // Confirm both calls are intended; can't tell from this fragment.
    childOperatorsArray[i].initialize(hconf, ois[j], j);
    childOperatorsArray[i].initialize(hconf, childInputObjInspectors[i]);
  } else {
    continue;
// Initialize the forward operator with the single object inspector for the
// rows this fetch operator will produce, then record the fact in the log.
forwardOp.initialize(jobConf, new ObjectInspector[] {objectInspector});
l4j.info("fetchoperator for " + entry.getKey() + " initialized");
// For each non-null inspector produced for child i, run initialization.
for (int j = 0; j < ois.length; j++) {
  if (ois[j] != null) {
    // NOTE(review): two initialize calls on the same child — the second uses
    // the whole childInputObjInspectors[i] array. Verify this double
    // initialization is deliberate; the fragment doesn't show the contract.
    childOperatorsArray[i].initialize(hconf, ois[j], j);
    childOperatorsArray[i].initialize(hconf, childInputObjInspectors[i]);
  } else {
    continue;
// Initialize the reduce-side operator tree with the row object inspector.
reducer.initialize(jc, rowObjectInspector);
} catch (Throwable e) {
  // Flag the task as aborted; surrounding code (not visible here) is
  // expected to act on this flag.
  abort = true;
// Propagate the execution context to each dummy operator and initialize it
// with a null input OI array (dummy operators take no real input rows —
// presumably; confirm against the operator contract).
for (Operator<? extends OperatorDesc> dummyOp : dummyOps){
  dummyOp.passExecContext(execContext);
  dummyOp.initialize(jc, null);
// Initialize the source operator tree with the fetch operator's output OI.
source.initialize(conf, new ObjectInspector[]{fetch.getOutputObjectInspector()});
totalRows = 0;
// Clear the mapper's global "done" flag before (re)starting the fetch loop.
ExecMapper.setDone(false);
// Wire the fetch operator's output object inspector into the source tree.
source.initialize(conf, new ObjectInspector[]{fetch.getOutputObjectInspector()});
// Reset row accounting and the shared mapper completion flag.
totalRows = 0;
ExecMapper.setDone(false);
// Give each dummy operator the execution context and initialize it with no
// input object inspectors (null array).
for (Operator<? extends OperatorDesc> dummyOp : dummyOps) {
  dummyOp.setExecContext(execContext);
  dummyOp.initialize(jc, null);
/**
 * This method drives the test. It takes the data from getBaseTable() and
 * feeds it through a SELECT operator with a COLLECT operator after. Each
 * row that is produced by the collect operator is compared to
 * getExpectedResult() and if every row is the expected result the method
 * completes without asserting.
 *
 * @throws HiveException if operator setup or evaluation fails
 */
public final void testUdf() throws HiveException {
  InspectableObject[] data = getBaseTable();
  List<ExprNodeDesc> expressionList = getExpressionList();
  // Build the SELECT operator over the expression list.
  SelectDesc selectCtx =
      new SelectDesc(expressionList, OperatorTestUtils.createOutputColumnNames(expressionList));
  Operator<SelectDesc> selectOp = OperatorFactory.get(new CompilationOpContext(), SelectDesc.class);
  selectOp.setConf(selectCtx);
  // Attach a COLLECT operator so the produced rows can be inspected.
  CollectDesc collectDesc = new CollectDesc(Integer.valueOf(10));
  CollectOperator collectOp =
      (CollectOperator) OperatorFactory.getAndMakeChild(collectDesc, selectOp);
  selectOp.initialize(new JobConf(OperatorTestUtils.class), new ObjectInspector[] {data[0].oi});
  OperatorTestUtils.assertResults(selectOp, collectOp, data, getExpectedResult());
}
// Attach a COLLECT operator beneath the script operator so emitted rows can
// be captured for assertions.
CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, sop);
// NOTE(review): the child was attached to `sop`, yet `op` is the operator
// being initialized here — confirm `op` is the tree root and this is the
// intended entry point; the fragment doesn't show how op/sop relate.
op.initialize(new JobConf(TestOperators.class), new ObjectInspector[]{r[0].oi});