/**
 * Called by GroupByOperator when it sees a new input row; dispatches to the
 * partial or final termination path based on the current evaluator mode.
 *
 * @param agg
 *          The object that stores the aggregation result.
 * @return the partial aggregation in PARTIAL1/PARTIAL2 mode, otherwise the
 *         final aggregation result.
 * @throws HiveException if the underlying terminate call fails.
 */
public Object evaluate(AggregationBuffer agg) throws HiveException {
  boolean emitPartial = (mode == Mode.PARTIAL1) || (mode == Mode.PARTIAL2);
  return emitPartial ? terminatePartial(agg) : terminate(agg);
}
@Override public Object terminate(AggregationBuffer agg) throws HiveException { SumAvgStreamingState ss = (SumAvgStreamingState) agg; Object o = wrappedEval.terminate(ss.wrappedBuf); // After all the rows are processed, continue to generate results for the rows that results haven't generated. // For the case: X following and Y following, process first Y-X results and then insert X nulls. // For the case X preceding and Y following, process Y results. for (int i = Math.max(0, wFrameDef.getStart().getRelativeOffset()); i < wFrameDef.getEnd().getRelativeOffset(); i++) { if (ss.hasResultReady()) { ss.results.add(getNextResult(ss)); } ss.numRows++; } for (int i = 0; i < wFrameDef.getStart().getRelativeOffset(); i++) { if (ss.hasResultReady()) { ss.results.add(null); } ss.numRows++; } return o; }
/**
 * Called by GroupByOperator when it sees a new input row; dispatches to the
 * partial or final termination path based on the current evaluator mode.
 *
 * @param agg
 *          The object that stores the aggregation result.
 * @return the partial aggregation in PARTIAL1/PARTIAL2 mode, otherwise the
 *         final aggregation result.
 * @throws HiveException if the underlying terminate call fails.
 */
public Object evaluate(AggregationBuffer agg) throws HiveException {
  boolean emitPartial = (mode == Mode.PARTIAL1) || (mode == Mode.PARTIAL2);
  return emitPartial ? terminatePartial(agg) : terminate(agg);
}
@Override public Object terminate(AggregationBuffer agg) throws HiveException { SumAvgStreamingState ss = (SumAvgStreamingState) agg; Object o = wrappedEval.terminate(ss.wrappedBuf); // After all the rows are processed, continue to generate results for the rows that results haven't generated. // For the case: X following and Y following, process first Y-X results and then insert X nulls. // For the case X preceding and Y following, process Y results. for (int i = Math.max(0, wFrameDef.getStart().getRelativeOffset()); i < wFrameDef.getEnd().getRelativeOffset(); i++) { if (ss.hasResultReady()) { ss.results.add(getNextResult(ss)); } ss.numRows++; } for (int i = 0; i < wFrameDef.getStart().getRelativeOffset(); i++) { if (ss.hasResultReady()) { ss.results.add(null); } ss.numRows++; } return o; }
result = evaluator.terminatePartial(aggregationBuffer); } else { result = evaluator.terminate(aggregationBuffer);
if (fnEval != null && fnEval instanceof ISupportStreamingModeForWindowing) { fnEval.terminate(streamingState.aggBuffers[i]);
eval2.merge(buffer3, object2); Object result = eval2.terminate(buffer3); assertEquals("0.987829161147262", String.valueOf(result));
/**
 * Drives the evaluator end-to-end in COMPLETE mode: iterates every input row
 * into a fresh aggregation buffer and returns the final aggregate.
 *
 * @param values input rows, one parameter array per row
 * @return the final aggregation result
 */
private Object runComplete(List<Object[]> values) throws SemanticException, HiveException {
  GenericUDAFEvaluator evaluator = evaluatorFactory.getEvaluator(info);
  evaluator.init(GenericUDAFEvaluator.Mode.COMPLETE, info.getParameterObjectInspectors());
  AggregationBuffer buffer = evaluator.getNewAggregationBuffer();
  for (int row = 0; row < values.size(); row++) {
    evaluator.iterate(buffer, values.get(row));
  }
  return evaluator.terminate(buffer);
}
/** * This function will be called by GroupByOperator when it sees a new input * row. * * @param agg * The object to store the aggregation result. */ public Object evaluate(AggregationBuffer agg) throws HiveException { if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) { return terminatePartial(agg); } else { return terminate(agg); } }
/**
 * Drives the two-stage PARTIAL1 -> FINAL aggregation path: merges every
 * partial result produced by {@code runPartial1} into a FINAL-mode buffer and
 * returns the final aggregate.
 *
 * @param values input rows, one parameter array per row
 * @return the final aggregation result
 */
private Object runPartialFinal(List<Object[]> values) throws Exception {
  GenericUDAFEvaluator evaluator = evaluatorFactory.getEvaluator(info);
  evaluator.init(GenericUDAFEvaluator.Mode.FINAL, partialOIs);
  AggregationBuffer buffer = evaluator.getNewAggregationBuffer();
  // Fold each stage-one partial into the final buffer.
  for (Object partial : runPartial1(values)) {
    evaluator.merge(buffer, partial);
  }
  return evaluator.terminate(buffer);
}
/** * This function will be called by GroupByOperator when it sees a new input * row. * * @param agg * The object to store the aggregation result. */ public Object evaluate(AggregationBuffer agg) throws HiveException { if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) { return terminatePartial(agg); } else { return terminate(agg); } }
if (fnEval != null && fnEval instanceof ISupportStreamingModeForWindowing) { fnEval.terminate(streamingState.aggBuffers[i]);
/**
 * Drives the three-stage PARTIAL1 -> PARTIAL2 -> FINAL aggregation path:
 * merges every stage-two partial result into a FINAL-mode buffer and returns
 * the final aggregate.
 *
 * @param values input rows, one parameter array per row
 * @return the final aggregation result
 */
private Object runPartial2Final(List<Object[]> values) throws Exception {
  GenericUDAFEvaluator evaluator = evaluatorFactory.getEvaluator(info);
  evaluator.init(GenericUDAFEvaluator.Mode.FINAL, partialOIs);
  AggregationBuffer buffer = evaluator.getNewAggregationBuffer();
  // Fold each stage-two partial into the final buffer.
  for (Object partial : runPartial2(runPartial1(values))) {
    evaluator.merge(buffer, partial);
  }
  return evaluator.terminate(buffer);
}
fn.terminate(agg);
/**
 * Finishes the streaming aggregation: delegates to the wrapped evaluator for
 * the aggregate value, then emits one pending result for each trailing row
 * still covered by a "following" window.
 */
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
  SumAvgStreamingState state = (SumAvgStreamingState) agg;
  Object wrappedResult = wrappedEval.terminate(state.wrappedBuf);
  // Flush the results that could not be produced until all input was seen.
  int pending = state.numFollowing;
  while (pending-- > 0) {
    state.results.add(getNextResult(state));
    state.numRows++;
  }
  return wrappedResult;
}
/**
 * Extracts the final aggregate from the Hive accumulator and wraps it in a
 * single-field Flink row.
 *
 * @param accumulator the Hive aggregation buffer to finalize
 * @return a one-column {@code GenericRow} holding the unwrapped result
 */
@Override
public BaseRow getValue(GenericUDAFEvaluator.AggregationBuffer accumulator) {
  try {
    Object hiveResult = getFinalEvaluator().terminate(accumulator);
    GenericRow row = new GenericRow(1);
    // Convert from Hive's object-inspector representation to a Flink value.
    row.update(0, HiveInspectors.unwrap(hiveResult, returnInspector));
    return row;
  } catch (HiveException e) {
    // getValue cannot declare checked exceptions; surface as unchecked.
    throw new RuntimeException(e);
  }
}
}
/**
 * Pig entry point: runs the wrapped Hive UDAF in COMPLETE mode over the bag
 * in the first tuple field and converts the result back to a Pig value.
 *
 * @param input a tuple whose first field is a {@code DataBag} of input rows
 * @return the aggregate converted to Pig's representation
 * @throws IOException wrapping any failure from the Hive evaluator
 */
@Override
public Object exec(Tuple input) throws IOException {
  try {
    // Lazy one-time initialization of the evaluator for COMPLETE mode.
    if (!inited) {
      schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName),
          Mode.COMPLETE, constantsInfo);
      inited = true;
    }
    AggregationBuffer agg = schemaAndEvaluatorInfo.evaluator.getNewAggregationBuffer();
    DataBag bag = (DataBag) input.get(0);
    for (Tuple row : bag) {
      Object[] args = schemaAndEvaluatorInfo.inputObjectInspector
          .getStructFieldsDataAsList(row).toArray();
      schemaAndEvaluatorInfo.evaluator.iterate(agg, args);
    }
    Object hiveValue = schemaAndEvaluatorInfo.evaluator.terminate(agg);
    return HiveUtils.convertHiveToPig(hiveValue,
        schemaAndEvaluatorInfo.outputObjectInspector, null);
  } catch (Exception e) {
    throw new IOException(e);
  }
}
/**
 * Pig entry point for the FINAL stage: merges the partial results contained
 * in the bag in the first tuple field and converts the final aggregate back
 * to a Pig value.
 *
 * @param input a tuple whose first field is a {@code DataBag} of partials
 * @return the final aggregate converted to Pig's representation
 * @throws IOException wrapping any failure from the Hive evaluator
 */
@Override
public Object exec(Tuple input) throws IOException {
  try {
    // Lazy one-time initialization of the evaluator for FINAL mode.
    if (!inited) {
      schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName),
          Mode.FINAL, constantsInfo);
      schemaAndEvaluatorInfo.evaluator.configure(instantiateMapredContext());
      inited = true;
    }
    DataBag bag = (DataBag) input.get(0);
    AggregationBuffer agg = schemaAndEvaluatorInfo.evaluator.getNewAggregationBuffer();
    // Each tuple in the bag carries one partial result in field 0.
    for (Tuple partial : bag) {
      schemaAndEvaluatorInfo.evaluator.merge(agg, partial.get(0));
    }
    Object hiveValue = schemaAndEvaluatorInfo.evaluator.terminate(agg);
    return HiveUtils.convertHiveToPig(hiveValue,
        schemaAndEvaluatorInfo.outputObjectInspector, null);
  } catch (Exception e) {
    throw new IOException(e);
  }
}
}
fnEval.terminate(streamingState.aggBuffers[i]);
return converter.convert(evaluator.terminate(buffer));