// NOTE(review): this line appears to be a corrupted extraction of hash/sort-mode group-by
// aggregation code (mismatched braces, calls cut mid-argument-list); it is not compilable
// as-is. Recover the original source before making any code change here.
if (hashAggr) { if (newEntryForHashAggr) { aggregationEvaluators[ai].aggregate(aggs[ai], o); aggregationParameterObjectInspectors[ai], lastInvoke[ai], aggregationParameterStandardObjectInspectors[ai]) != 0) { aggregationEvaluators[ai].aggregate(aggs[ai], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[ai][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[ai].aggregate(aggs[ai], o); o[pi] = aggregationParameterFields[pos][pi].evaluate(row); aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[i].aggregate(aggs[i], o); lastInvoke[i], aggregationParameterStandardObjectInspectors[i]) != 0) { aggregationEvaluators[i].aggregate(aggs[i], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[i][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[ai].aggregate(aggs[ai], o);
// NOTE(review): corrupted extraction (duplicate of an earlier garbled line) — mismatched
// braces and statements cut mid-expression make this non-compilable. Recover the original
// source before editing.
if (hashAggr) { if (newEntryForHashAggr) { aggregationEvaluators[ai].aggregate(aggs[ai], o); aggregationParameterObjectInspectors[ai], lastInvoke[ai], aggregationParameterStandardObjectInspectors[ai]) != 0) { aggregationEvaluators[ai].aggregate(aggs[ai], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[ai][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[ai].aggregate(aggs[ai], o); o[pi] = aggregationParameterFields[pos][pi].evaluate(row); aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[i].aggregate(aggs[i], o); lastInvoke[i], aggregationParameterStandardObjectInspectors[i]) != 0) { aggregationEvaluators[i].aggregate(aggs[i], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[i][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[ai].aggregate(aggs[ai], o);
/** * Calculate the partial result sum + count giving a parition range * @return a 2-element Object array of [count long, sum ResultType] */ private Object[] calcPartialResult(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo) throws HiveException { // To handle the case like SUM(LAG(f)) over(), aggregation function includes // LAG/LEAD call PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr); AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer(); Object[] argValues = new Object[parameters == null ? 0 : parameters.size()]; while(pItr.hasNext()) { Object row = pItr.next(); int i = 0; if ( parameters != null ) { for(PTFExpressionDef param : parameters) { argValues[i++] = param.getExprEvaluator().evaluate(row); } } wrappedEvaluator.aggregate(aggBuffer, argValues); } // The object [count LongWritable, sum ResultType] is reused during evaluating Object[] partial = (Object[])wrappedEvaluator.terminatePartial(aggBuffer); return new Object[] {((LongWritable)partial[0]).get(), ObjectInspectorUtils.copyToStandardObject(partial[1], outputOI)}; }
/**
 * Calculate the partial result sum + count given a partition range.
 *
 * @param pItr        iterator over the rows of the partition range
 * @param leadLagInfo LAG/LEAD expressions that must be bound to this partition
 * @return a 2-element Object array of [count long, sum ResultType]
 * @throws HiveException if expression evaluation or aggregation fails
 */
private Object[] calcPartialResult(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo)
    throws HiveException {
  // To handle the case like SUM(LAG(f)) over(), aggregation function includes
  // LAG/LEAD call
  PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr);
  AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer();
  // One slot per declared parameter; zero-length when the function takes no arguments.
  Object[] argValues = new Object[parameters == null ? 0 : parameters.size()];
  while (pItr.hasNext()) {
    Object row = pItr.next();
    int i = 0;
    if (parameters != null) {
      for (PTFExpressionDef param : parameters) {
        argValues[i++] = param.getExprEvaluator().evaluate(row);
      }
    }
    wrappedEvaluator.aggregate(aggBuffer, argValues);
  }
  // The object [count LongWritable, sum ResultType] is reused during evaluating,
  // so unwrap the count and deep-copy the sum before handing it back.
  Object[] partial = (Object[]) wrappedEvaluator.terminatePartial(aggBuffer);
  return new Object[] {((LongWritable) partial[0]).get(),
      ObjectInspectorUtils.copyToStandardObject(partial[1], outputOI)};
}
// Streaming-mode window function: fold the current row's argument values into the j-th
// function's aggregation buffer, then poll for a row result that may have become
// available (the cast shows the evaluator implements ISupportStreamingModeForWindowing).
// NOTE(review): fragment — the enclosing loop and declarations are outside this view.
wFn.getWFnEval().aggregate(aggBuffers[j], args[j]);
Object out = ((ISupportStreamingModeForWindowing) wFn.getWFnEval())
    .getNextResult(aggBuffers[j]);
// The pi-th argument slot is cleared to null before aggregating — presumably this row
// carries no value for that parameter on this code path; confirm against the enclosing
// loop, which is outside this view.
o[pi] = null;
aggregationEvaluators[ai].aggregate(aggregations[ai], o);
// Streaming evaluation: fold the pre-computed argument row for function i into its
// buffer, then poll the streaming-capable evaluator for a newly available result.
// NOTE(review): fragment — surrounding loop/state declarations are outside this view.
fnEval.aggregate(streamingState.aggBuffers[i], streamingState.funcArgs[i]);
Object out = ((ISupportStreamingModeForWindowing) fnEval)
    .getNextResult(streamingState.aggBuffers[i]);
// Streaming evaluation (duplicate of an earlier fragment): aggregate function i's
// argument row into its buffer, then ask the streaming evaluator for the next result.
// NOTE(review): fragment — surrounding loop/state declarations are outside this view.
fnEval.aggregate(streamingState.aggBuffers[i], streamingState.funcArgs[i]);
Object out = ((ISupportStreamingModeForWindowing) fnEval)
    .getNextResult(streamingState.aggBuffers[i]);
/** * Given a partition iterator, calculate the function value * @param pItr the partition pointer * @return the function value * @throws HiveException */ protected Object calcFunctionValue(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo) throws HiveException { // To handle the case like SUM(LAG(f)) over(), aggregation function includes // LAG/LEAD call PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr); AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer(); Object[] argValues = new Object[parameters == null ? 0 : parameters.size()]; while(pItr.hasNext()) { Object row = pItr.next(); int i = 0; if ( parameters != null ) { for(PTFExpressionDef param : parameters) { argValues[i++] = param.getExprEvaluator().evaluate(row); } } wrappedEvaluator.aggregate(aggBuffer, argValues); } // The object is reused during evaluating, make a copy here return ObjectInspectorUtils.copyToStandardObject(wrappedEvaluator.evaluate(aggBuffer), outputOI); }
// Streaming-mode window function (duplicate of an earlier fragment): aggregate the
// current row's args into the j-th buffer, then poll the streaming evaluator for any
// result row that is now ready. NOTE(review): fragment — enclosing loop not in view.
wFn.getWFnEval().aggregate(aggBuffers[j], args[j]);
Object out = ((ISupportStreamingModeForWindowing) wFn.getWFnEval())
    .getNextResult(aggBuffers[j]);
/**
 * Given a partition iterator, calculate the function value.
 *
 * @param pItr        the partition pointer
 * @param leadLagInfo LAG/LEAD expressions that must be bound to this partition
 * @return the function value
 * @throws HiveException if expression evaluation or aggregation fails
 */
protected Object calcFunctionValue(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo)
    throws HiveException {
  // To handle the case like SUM(LAG(f)) over(), aggregation function includes
  // LAG/LEAD call
  PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr);
  AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer();
  // One slot per declared parameter; zero-length when the function takes no arguments.
  Object[] argValues = new Object[parameters == null ? 0 : parameters.size()];
  while (pItr.hasNext()) {
    Object row = pItr.next();
    int i = 0;
    if (parameters != null) {
      for (PTFExpressionDef param : parameters) {
        argValues[i++] = param.getExprEvaluator().evaluate(row);
      }
    }
    wrappedEvaluator.aggregate(aggBuffer, argValues);
  }
  // The object is reused during evaluating, make a copy here
  return ObjectInspectorUtils.copyToStandardObject(wrappedEvaluator.evaluate(aggBuffer),
      outputOI);
}
// Drains the input values one at a time: each value is copied into the single-element
// argument array via typeHandler, aggregated into `agg`, and the streaming evaluator is
// polled for an emitted result. NOTE(review): fragment — the `if (out != null) {` body
// and the loop's closing braces are outside this view.
while (inVals.hasNext()) {
  typeHandler.set(inVals.next(), in[0]);
  fn.aggregate(agg, in);
  Object out = oS.getNextResult(agg);
  if (out != null) {
// Uses element 1 of the row as the single aggregation argument and folds it into the
// buffer. NOTE(review): fragment — the enclosing loop/method and the meaning of row[1]
// are outside this view; confirm against the caller.
parameterArray[0] = row[1];
evaluator.aggregate(aggregationBuffer, parameterArray);
// NOTE(review): corrupted extraction (same garbled text as earlier lines) — mismatched
// braces and truncated argument lists make this non-compilable. Recover the original
// source before editing.
if (hashAggr) { if (newEntryForHashAggr) { aggregationEvaluators[ai].aggregate(aggs[ai], o); aggregationParameterObjectInspectors[ai], lastInvoke[ai], aggregationParameterStandardObjectInspectors[ai]) != 0) { aggregationEvaluators[ai].aggregate(aggs[ai], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[ai][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[ai].aggregate(aggs[ai], o); o[pi] = aggregationParameterFields[pos][pi].evaluate(row); aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[i].aggregate(aggs[i], o); lastInvoke[i], aggregationParameterStandardObjectInspectors[i]) != 0) { aggregationEvaluators[i].aggregate(aggs[i], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[i][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[ai].aggregate(aggs[ai], o);
// NOTE(review): corrupted extraction (same garbled text as earlier lines) — not
// compilable as-is; recover the original source before editing.
if (hashAggr) { if (newEntryForHashAggr) { aggregationEvaluators[ai].aggregate(aggs[ai], o); aggregationParameterObjectInspectors[ai], lastInvoke[ai], aggregationParameterStandardObjectInspectors[ai]) != 0) { aggregationEvaluators[ai].aggregate(aggs[ai], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[ai][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[ai].aggregate(aggs[ai], o); o[pi] = aggregationParameterFields[pos][pi].evaluate(row); aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[i].aggregate(aggs[i], o); lastInvoke[i], aggregationParameterStandardObjectInspectors[i]) != 0) { aggregationEvaluators[i].aggregate(aggs[i], o); for (int pi = 0; pi < o.length; pi++) { lastInvoke[i][pi] = ObjectInspectorUtils.copyToStandardObject( aggregationEvaluators[pos].aggregate(aggs[pos], o); aggregationEvaluators[ai].aggregate(aggs[ai], o);
// Clears the pi-th argument slot to null before aggregating (duplicate of an earlier
// fragment) — presumably this row has no value for that parameter on this code path;
// confirm against the enclosing loop, which is outside this view.
o[pi] = null;
aggregationEvaluators[ai].aggregate(aggregations[ai], o);
// Clears the pi-th argument slot to null before aggregating (duplicate of an earlier
// fragment); the surrounding loop and the reason for the null are outside this view.
o[pi] = null;
aggregationEvaluators[ai].aggregate(aggregations[ai], o);
// Streaming-mode window function (duplicate of an earlier fragment): aggregate the row's
// args into buffer j, then poll the ISupportStreamingModeForWindowing evaluator for a
// ready result. NOTE(review): fragment — enclosing loop not in view.
wFn.getWFnEval().aggregate(aggBuffers[j], args[j]);
Object out = ((ISupportStreamingModeForWindowing) wFn.getWFnEval())
    .getNextResult(aggBuffers[j]);
// Streaming evaluation (duplicate of an earlier fragment): aggregate function i's
// pre-computed args into its buffer, then poll for the next available result.
// NOTE(review): fragment — surrounding loop/state declarations are outside this view.
fnEval.aggregate(streamingState.aggBuffers[i], streamingState.funcArgs[i]);
Object out = ((ISupportStreamingModeForWindowing) fnEval)
    .getNextResult(streamingState.aggBuffers[i]);
/**
 * Evaluates a window function over all rows of a partition and returns its final value.
 *
 * @param wFn  the window function definition (evaluator, arguments, output inspector)
 * @param pItr iterator over the rows of the partition
 * @return the function's result, copied into a standard object representation
 * @throws HiveException if argument evaluation or aggregation fails
 */
Object evaluateWindowFunction(WindowFunctionDef wFn, PTFPartitionIterator<Object> pItr)
    throws HiveException {
  GenericUDAFEvaluator evaluator = wFn.getWFnEval();
  // One slot per declared argument; zero-length when the function takes none.
  Object[] argValues = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()];
  AggregationBuffer buffer = evaluator.getNewAggregationBuffer();
  while (pItr.hasNext()) {
    Object row = pItr.next();
    if (wFn.getArgs() != null) {
      int idx = 0;
      for (PTFExpressionDef argDef : wFn.getArgs()) {
        argValues[idx++] = argDef.getExprEvaluator().evaluate(row);
      }
    }
    evaluator.aggregate(buffer, argValues);
  }
  // evaluate() may return an object the evaluator reuses; copy it into a standard object.
  return ObjectInspectorUtils.copyToStandardObject(evaluator.evaluate(buffer), wFn.getOI());
}