/**
 * Forwards one output record made up of the grouping keys followed by the
 * finalized aggregation results.
 *
 * @param keys the key values for this record
 * @param aggs the aggregation buffers to finalize and emit
 * @throws HiveException if finalizing an aggregation or forwarding the row fails
 */
private void forward(Object[] keys, AggregationBuffer[] aggs) throws HiveException {
  // Lazily allocate the reusable output row: key columns first, then one
  // slot per aggregation.
  if (forwardCache == null) {
    forwardCache = new Object[outputKeyLength + aggs.length];
  }
  // Copy the key columns into the front of the row.
  System.arraycopy(keys, 0, forwardCache, 0, outputKeyLength);
  // Finalize each aggregation buffer into its output slot.
  for (int a = 0; a < aggs.length; a++) {
    forwardCache[outputKeyLength + a] = aggregationEvaluators[a].evaluate(aggs[a]);
  }
  forward(forwardCache, outputObjInspector);
}
/**
 * Forwards a single record: the grouping keys followed by the finalized
 * aggregation values.
 *
 * @param keys the key values for this record
 * @param aggs the aggregation buffers whose final values are emitted
 * @throws HiveException if evaluating an aggregation or forwarding fails
 */
private void forward(Object[] keys, AggregationBuffer[] aggs) throws HiveException {
  final int numAggs = aggs.length;
  // The output row is allocated once and reused for every forwarded record.
  if (forwardCache == null) {
    forwardCache = new Object[outputKeyLength + numAggs];
  }
  // Keys occupy the leading positions of the row.
  for (int k = 0; k < outputKeyLength; ++k) {
    forwardCache[k] = keys[k];
  }
  // Aggregation results fill the remaining positions.
  for (int a = 0; a < numAggs; ++a) {
    forwardCache[outputKeyLength + a] = aggregationEvaluators[a].evaluate(aggs[a]);
  }
  forward(forwardCache, outputObjInspector);
}
/** * Given a partition iterator, calculate the function value * @param pItr the partition pointer * @return the function value * @throws HiveException */ protected Object calcFunctionValue(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo) throws HiveException { // To handle the case like SUM(LAG(f)) over(), aggregation function includes // LAG/LEAD call PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr); AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer(); Object[] argValues = new Object[parameters == null ? 0 : parameters.size()]; while(pItr.hasNext()) { Object row = pItr.next(); int i = 0; if ( parameters != null ) { for(PTFExpressionDef param : parameters) { argValues[i++] = param.getExprEvaluator().evaluate(row); } } wrappedEvaluator.aggregate(aggBuffer, argValues); } // The object is reused during evaluating, make a copy here return ObjectInspectorUtils.copyToStandardObject(wrappedEvaluator.evaluate(aggBuffer), outputOI); }
/** * Given a partition iterator, calculate the function value * @param pItr the partition pointer * @return the function value * @throws HiveException */ protected Object calcFunctionValue(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo) throws HiveException { // To handle the case like SUM(LAG(f)) over(), aggregation function includes // LAG/LEAD call PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr); AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer(); Object[] argValues = new Object[parameters == null ? 0 : parameters.size()]; while(pItr.hasNext()) { Object row = pItr.next(); int i = 0; if ( parameters != null ) { for(PTFExpressionDef param : parameters) { argValues[i++] = param.getExprEvaluator().evaluate(row); } } wrappedEvaluator.aggregate(aggBuffer, argValues); } // The object is reused during evaluating, make a copy here return ObjectInspectorUtils.copyToStandardObject(wrappedEvaluator.evaluate(aggBuffer), outputOI); }
/**
 * Emits one record containing the grouping keys and the final aggregation
 * results, in that order.
 *
 * @param keys the key values for this record
 * @param aggs the aggregation buffers to finalize and emit
 * @throws HiveException if an aggregation cannot be finalized or the row
 *         cannot be forwarded
 */
private void forward(Object[] keys, AggregationBuffer[] aggs) throws HiveException {
  // One reusable row buffer for all forwarded records.
  if (forwardCache == null) {
    forwardCache = new Object[outputKeyLength + aggs.length];
  }
  int pos = 0;
  // Fill key columns first...
  while (pos < outputKeyLength) {
    forwardCache[pos] = keys[pos];
    pos++;
  }
  // ...then the finalized aggregation values.
  for (int j = 0; j < aggs.length; j++) {
    forwardCache[pos++] = aggregationEvaluators[j].evaluate(aggs[j]);
  }
  forward(forwardCache, outputObjInspector);
}
/**
 * Forwards a record consisting of the key columns followed by the finalized
 * aggregation results.
 *
 * @param keys the key values for this record
 * @param aggs the aggregation buffers to finalize and emit
 * @throws HiveException if finalizing an aggregation or forwarding fails
 */
protected void forward(Object[] keys, AggregationBuffer[] aggs) throws HiveException {
  final int keyCount = keys.length;
  // Allocate the reusable output row on first use: keys + one slot per agg.
  if (forwardCache == null) {
    forwardCache = new Object[keyCount + aggs.length];
  }
  // Key columns go at the front of the row.
  System.arraycopy(keys, 0, forwardCache, 0, keyCount);
  // Finalize each aggregation into the slot after the keys.
  for (int j = 0; j < aggs.length; j++) {
    forwardCache[keyCount + j] = aggregationEvaluators[j].evaluate(aggs[j]);
  }
  forward(forwardCache, outputObjInspector);
}
/**
 * Evaluates a window function over a whole partition by aggregating every row
 * and finalizing the result.
 *
 * @param wFn the window function definition (evaluator, arguments, output OI)
 * @param pItr iterator over the partition's rows
 * @return the function's value, copied into a standard object
 * @throws HiveException if argument evaluation or aggregation fails
 */
Object evaluateWindowFunction(WindowFunctionDef wFn, PTFPartitionIterator<Object> pItr)
    throws HiveException {
  GenericUDAFEvaluator evaluator = wFn.getWFnEval();
  int nArgs = (wFn.getArgs() == null) ? 0 : wFn.getArgs().size();
  Object[] argVals = new Object[nArgs];
  AggregationBuffer buf = evaluator.getNewAggregationBuffer();

  while (pItr.hasNext()) {
    Object row = pItr.next();
    if (wFn.getArgs() != null) {
      int p = 0;
      for (PTFExpressionDef arg : wFn.getArgs()) {
        argVals[p++] = arg.getExprEvaluator().evaluate(row);
      }
    }
    evaluator.aggregate(buf, argVals);
  }

  // The evaluator may reuse its result object, so return a standard-object copy.
  return ObjectInspectorUtils.copyToStandardObject(evaluator.evaluate(buf), wFn.getOI());
}