/**
 * Entry point invoked by GroupByOperator for each new input row.
 *
 * <p>In the map-side modes (PARTIAL1, COMPLETE) the raw row is fed to
 * {@link #iterate}; in the reduce-side modes the single serialized partial
 * is handed to {@link #merge}.
 *
 * @param agg
 *          The object to store the aggregation result.
 * @param parameters
 *          The row, can be inspected by the OIs passed in init().
 */
public void aggregate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  boolean consumesOriginalRows = (mode == Mode.PARTIAL1) || (mode == Mode.COMPLETE);
  if (consumesOriginalRows) {
    iterate(agg, parameters);
    return;
  }
  // PARTIAL2 / FINAL: exactly one partial-aggregation object is expected.
  assert (parameters.length == 1);
  merge(agg, parameters[0]);
}
/**
 * This function will be called by GroupByOperator when it sees a new input
 * row.
 *
 * @param agg
 *          The object to store the aggregation result.
 * @param parameters
 *          The row, can be inspected by the OIs passed in init().
 */
public void aggregate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  // Map-side modes consume original rows; reduce-side modes consume partials.
  if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
    iterate(agg, parameters);
  } else {
    // PARTIAL2 / FINAL: the single parameter is a partial aggregation produced
    // by terminatePartial() on another instance of this evaluator.
    assert (parameters.length == 1);
    merge(agg, parameters[0]);
  }
}
wrappedEval.iterate(fb, parameters);
wrappedEval.iterate(fb, parameters);
@Override
public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  SumAvgStreamingState ss = (SumAvgStreamingState) agg;
  // Always feed the row into the wrapped (non-streaming) evaluator's buffer.
  wrappedEval.iterate(ss.wrappedBuf, parameters);

  // We need to insert 'null' before processing first row for the case: X preceding and y preceding.
  // A negative end offset means the window ends before the current row, so the
  // first |offset| rows can never have a complete frame.
  if (ss.numRows == 0) {
    for (int i = wFrameDef.getEnd().getRelativeOffset(); i < 0; i++) {
      ss.results.add(null);
    }
  }
  // Generate the result for the windowing ending at the current row
  if (ss.hasResultReady()) {
    ss.results.add(getNextResult(ss));
  }
  // For bounded starts, remember the running intermediate value so rows sliding
  // out of the frame can later be subtracted out.
  if (!wFrameDef.isStartUnbounded()
      && ss.numRows + 1 >= wFrameDef.getStart().getRelativeOffset()) {
    ss.intermediateVals.add(getCurrentIntermediateResult(ss));
  }
  ss.numRows++;
}
@Override
public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  SumAvgStreamingState ss = (SumAvgStreamingState) agg;
  // Delegate the actual sum/avg accumulation to the wrapped evaluator.
  wrappedEval.iterate(ss.wrappedBuf, parameters);

  // We need to insert 'null' before processing first row for the case: X preceding and y preceding
  if (ss.numRows == 0) {
    // End offset < 0 => the frame ends strictly before the current row; pad
    // one null result per row that precedes any complete frame.
    for (int i = wFrameDef.getEnd().getRelativeOffset(); i < 0; i++) {
      ss.results.add(null);
    }
  }
  // Generate the result for the windowing ending at the current row
  if (ss.hasResultReady()) {
    ss.results.add(getNextResult(ss));
  }
  // Track intermediate values only when the frame start is bounded, so old
  // contributions can be removed as the window slides.
  if (!wFrameDef.isStartUnbounded()
      && ss.numRows + 1 >= wFrameDef.getStart().getRelativeOffset()) {
    ss.intermediateVals.add(getCurrentIntermediateResult(ss));
  }
  ss.numRows++;
}
/**
 * Runs the evaluator in PARTIAL1 mode over {@code values}, producing one
 * partial-aggregation object per batch. Batch sizes double on every round
 * (0, 1, 3, 7, ... rows consumed per pass), and a fresh evaluator/buffer is
 * created for each batch.
 *
 * @param values the input rows to aggregate
 * @return the list of terminatePartial() results, one per batch
 * @throws Exception if evaluator creation, init, or iteration fails
 */
private List<Object> runPartial1(List<Object[]> values) throws Exception {
  List<Object> ret = new ArrayList<>();
  int batchSize = 1;
  Iterator<Object[]> iter = values.iterator();
  do {
    GenericUDAFEvaluator eval = evaluatorFactory.getEvaluator(info);
    eval.init(GenericUDAFEvaluator.Mode.PARTIAL1, info.getParameterObjectInspectors());
    AggregationBuffer buf = eval.getNewAggregationBuffer();
    // First pass runs zero iterations (batchSize == 1), deliberately producing
    // an empty partial; later passes consume batchSize-1 rows each.
    for (int i = 0; i < batchSize - 1 && iter.hasNext(); i++) {
      eval.iterate(buf, iter.next());
    }
    batchSize <<= 1; // double the batch size for the next round
    ret.add(eval.terminatePartial(buf));

    // back-check to force at least 1 output; and this should have a partial which is empty
  } while (iter.hasNext());
  return ret;
}
wrappedEval.iterate(lb, parameters);
wrappedEval.iterate(lb, parameters);
eval1.iterate(buffer1, new Object[]{100d, 200d}); eval1.iterate(buffer1, new Object[]{150d, 210d}); eval1.iterate(buffer1, new Object[]{200d, 220d}); Object object1 = eval1.terminatePartial(buffer1); eval2.iterate(buffer2, new Object[]{250d, 230d}); eval2.iterate(buffer2, new Object[]{250d, 240d}); eval2.iterate(buffer2, new Object[]{300d, 250d}); eval2.iterate(buffer2, new Object[]{350d, 260d}); Object object2 = eval2.terminatePartial(buffer2);
/**
 * Runs the evaluator in COMPLETE mode: every row is iterated into a single
 * aggregation buffer and the final result is returned.
 *
 * @param values the input rows to aggregate
 * @return the result of terminate() over all rows
 * @throws SemanticException if the evaluator cannot be obtained
 * @throws HiveException if init, iterate, or terminate fails
 */
private Object runComplete(List<Object[]> values) throws SemanticException, HiveException {
  GenericUDAFEvaluator completeEval = evaluatorFactory.getEvaluator(info);
  completeEval.init(GenericUDAFEvaluator.Mode.COMPLETE, info.getParameterObjectInspectors());
  AggregationBuffer buffer = completeEval.getNewAggregationBuffer();
  for (int row = 0; row < values.size(); row++) {
    completeEval.iterate(buffer, values.get(row));
  }
  return completeEval.terminate(buffer);
}
/**
 * Called by GroupByOperator once per incoming row.
 *
 * <p>Raw-row modes (PARTIAL1 and COMPLETE) route to {@link #iterate};
 * all other modes merge a single partial aggregation via {@link #merge}.
 *
 * @param agg
 *          The object to store the aggregation result.
 * @param parameters
 *          The row, can be inspected by the OIs passed in init().
 */
public void aggregate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  final boolean rawInput = mode == Mode.PARTIAL1 || mode == Mode.COMPLETE;
  if (rawInput) {
    iterate(agg, parameters);
  } else {
    // Merge path: expect exactly one partial-aggregation object.
    assert (parameters.length == 1);
    merge(agg, parameters[0]);
  }
}
/**
 * This function will be called by GroupByOperator when it sees a new input
 * row.
 *
 * @param agg
 *          The object to store the aggregation result.
 * @param parameters
 *          The row, can be inspected by the OIs passed in init().
 */
public void aggregate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  // PARTIAL1 / COMPLETE see original rows -> iterate; other modes see partials -> merge.
  if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
    iterate(agg, parameters);
  } else {
    // A merge call carries exactly one partial-aggregation object.
    assert (parameters.length == 1);
    merge(agg, parameters[0]);
  }
}
@Override
public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  SumAvgStreamingState ss = (SumAvgStreamingState) agg;
  // Accumulate the row into the wrapped evaluator's buffer.
  wrappedEval.iterate(ss.wrappedBuf, parameters);

  // A window result is ready once numFollowing more rows have been seen
  // past the row the result belongs to.
  if (ss.numRows >= ss.numFollowing) {
    ss.results.add(getNextResult(ss));
  }
  // With a bounded preceding edge, remember intermediate values so
  // contributions of rows leaving the frame can later be removed.
  if (ss.numPreceding != BoundarySpec.UNBOUNDED_AMOUNT) {
    ss.intermediateVals.add(getCurrentIntermediateResult(ss));
  }
  ss.numRows++;
}
@Override
public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
  State s = (State) agg;
  LastValueBuffer lb = (LastValueBuffer) s.wrappedBuf;

  /*
   * on firstRow invoke underlying evaluator to initialize skipNulls flag.
   */
  if (lb.firstRow) {
    wrappedEval.iterate(lb, parameters);
  }

  // Copy the first argument to a standard writable so it outlives the row.
  Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI(),
      ObjectInspectorCopyOption.WRITABLE);

  if (!lb.skipNulls || o != null) {
    // Non-null value (or nulls are allowed): this row becomes the new "last value".
    s.lastValue = o;
    s.lastIdx = s.numRows;
  } else if (lb.skipNulls && s.lastIdx != -1) {
    // Null while skipping nulls: invalidate the remembered value once it has
    // slid entirely out of the (bounded) window.
    if (s.numPreceding != BoundarySpec.UNBOUNDED_AMOUNT
        && s.numRows > s.lastIdx + s.numPreceding + s.numFollowing) {
      s.lastValue = null;
      s.lastIdx = -1;
    }
  }

  // Emit one result per row once numFollowing rows of lookahead are available.
  if (s.numRows >= (s.numFollowing)) {
    s.results.add(s.lastValue);
  }
  s.numRows++;
}
wrappedEval.iterate(fb, parameters);
getPartial1Evaluator().iterate(acc, params); } catch (HiveException e) { throw new RuntimeException(e);
/**
 * Pig entry point: aggregates every tuple in the input bag through the Hive
 * UDAF evaluator (COMPLETE mode) and converts the final result to a Pig value.
 *
 * @param input a tuple whose first field is the DataBag of rows to aggregate
 * @return the terminated aggregation result, converted for Pig
 * @throws IOException wrapping any failure from init, iteration, or conversion
 */
@Override
public Object exec(Tuple input) throws IOException {
  try {
    // Lazily bind the evaluator to the input schema on first call.
    if (!inited) {
      schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName), Mode.COMPLETE,
          constantsInfo);
      inited = true;
    }
    AggregationBuffer buffer = schemaAndEvaluatorInfo.evaluator.getNewAggregationBuffer();
    DataBag rows = (DataBag) input.get(0);
    Iterator<Tuple> rowIter = rows.iterator();
    while (rowIter.hasNext()) {
      Tuple row = rowIter.next();
      // Unpack the tuple's fields into the Object[] shape iterate() expects.
      List fields = schemaAndEvaluatorInfo.inputObjectInspector.getStructFieldsDataAsList(row);
      schemaAndEvaluatorInfo.evaluator.iterate(buffer, fields.toArray());
    }
    Object terminated = schemaAndEvaluatorInfo.evaluator.terminate(buffer);
    return HiveUtils.convertHiveToPig(terminated, schemaAndEvaluatorInfo.outputObjectInspector,
        null);
  } catch (Exception e) {
    throw new IOException(e);
  }
}
/**
 * Pig entry point for the map-side partial aggregation: feeds every tuple in
 * the bag to the Hive evaluator in PARTIAL1 mode and wraps the
 * terminatePartial() result in a single-field Pig tuple.
 *
 * @param input a tuple whose first field is the DataBag of rows to aggregate
 * @return a one-element tuple holding the converted partial aggregation
 * @throws IOException wrapping any failure from init, iteration, or conversion
 */
@Override
public Tuple exec(Tuple input) throws IOException {
  try {
    // Lazily bind the evaluator to the input schema on first call.
    if (!inited) {
      schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName), Mode.PARTIAL1,
          constantsInfo);
      inited = true;
    }
    DataBag b = (DataBag) input.get(0);
    AggregationBuffer agg = schemaAndEvaluatorInfo.evaluator.getNewAggregationBuffer();
    for (Iterator<Tuple> it = b.iterator(); it.hasNext();) {
      Tuple t = it.next();
      // Unpack the tuple's fields into the Object[] shape iterate() expects.
      List inputs = schemaAndEvaluatorInfo.inputObjectInspector.getStructFieldsDataAsList(t);
      schemaAndEvaluatorInfo.evaluator.iterate(agg, inputs.toArray());
    }
    Object returnValue = schemaAndEvaluatorInfo.evaluator.terminatePartial(agg);
    Tuple result = tf.newTuple();
    result.append(HiveUtils.convertHiveToPig(returnValue,
        schemaAndEvaluatorInfo.intermediateOutputObjectInspector, null));
    return result;
  } catch (Exception e) {
    throw new IOException(e);
  }
}
// NOTE(review): this brace closes the enclosing class, whose declaration is
// outside this chunk.
}
evaluator.iterate(buffer, inputArray);