protected AggregationBuffer[] newAggregations() throws HiveException { AggregationBuffer[] aggs = new AggregationBuffer[aggregationEvaluators.length]; for (int i = 0; i < aggregationEvaluators.length; i++) { aggs[i] = aggregationEvaluators[i].getNewAggregationBuffer(); // aggregationClasses[i].reset(aggs[i]); } return aggs; }
protected AggregationBuffer[] newAggregations() throws HiveException { AggregationBuffer[] aggs = new AggregationBuffer[aggregationEvaluators.length]; for (int i = 0; i < aggregationEvaluators.length; i++) { aggs[i] = aggregationEvaluators[i].getNewAggregationBuffer(); // aggregationClasses[i].reset(aggs[i]); } return aggs; }
/** Creates a streaming sum/avg state wrapping a fresh buffer from the underlying evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  // Delegate buffer creation, then wrap it with streaming bookkeeping.
  return new SumAvgStreamingState(wrappedEval.getNewAggregationBuffer());
}
/** Creates a streaming State wrapping a fresh buffer obtained from the wrapped evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  // Delegate buffer creation, then wrap it with streaming bookkeeping.
  return new State(wrappedEval.getNewAggregationBuffer());
}
/** Creates a streaming State around a new buffer from the delegate evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  AggregationBuffer delegateBuffer = wrappedEval.getNewAggregationBuffer();
  return new State(delegateBuffer);
}
/** Creates a streaming State wrapping a fresh buffer obtained from the wrapped evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  // Delegate buffer creation, then wrap it with streaming bookkeeping.
  return new State(wrappedEval.getNewAggregationBuffer());
}
/** Creates a streaming State around a new buffer from the delegate evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  AggregationBuffer delegateBuffer = wrappedEval.getNewAggregationBuffer();
  return new State(delegateBuffer);
}
/** Creates a streaming State wrapping a fresh buffer obtained from the wrapped evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  // Delegate buffer creation, then wrap it with streaming bookkeeping.
  return new State(wrappedEval.getNewAggregationBuffer());
}
/** Creates a streaming sum/avg state around a new buffer from the delegate evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  AggregationBuffer delegateBuffer = wrappedEval.getNewAggregationBuffer();
  return new SumAvgStreamingState(delegateBuffer);
}
/** Creates a streaming State around a new buffer from the delegate evaluator. */
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
  AggregationBuffer delegateBuffer = wrappedEval.getNewAggregationBuffer();
  return new State(delegateBuffer);
}
/**
 * Resets all per-partition state (rolling window, per-function outputs,
 * aggregation buffers, rank limit) so a new partition can be processed.
 */
void reset(WindowTableFunctionDef tabDef) throws HiveException {
  rollingPart.reset();
  for (List<?> fnOut : fnOutputs) {
    fnOut.clear();
  }
  // Aggregation buffers cannot be recycled across partitions; create fresh ones.
  int numFns = tabDef.getWindowFunctions().size();
  for (int fnIdx = 0; fnIdx < numFns; fnIdx++) {
    WindowFunctionDef wFn = tabDef.getWindowFunctions().get(fnIdx);
    aggBuffers[fnIdx] = wFn.getWFnEval().getNewAggregationBuffer();
  }
  if (rnkLimit != null) {
    rnkLimit.reset();
  }
}
/**
 * Resets all per-partition state (rolling window, per-function outputs,
 * aggregation buffers, rank limit) so a new partition can be processed.
 */
void reset(WindowTableFunctionDef tabDef) throws HiveException {
  rollingPart.reset();
  for (List<?> fnOut : fnOutputs) {
    fnOut.clear();
  }
  // Aggregation buffers cannot be recycled across partitions; create fresh ones.
  int numFns = tabDef.getWindowFunctions().size();
  for (int fnIdx = 0; fnIdx < numFns; fnIdx++) {
    WindowFunctionDef wFn = tabDef.getWindowFunctions().get(fnIdx);
    aggBuffers[fnIdx] = wFn.getWFnEval().getNewAggregationBuffer();
  }
  if (rnkLimit != null) {
    rnkLimit.reset();
  }
}
WindowingIterator(PTFPartition iPart, ArrayList<Object> output, List<?>[] outputFromPivotFunctions, int[] wFnsToProcess) { this.iPart = iPart; this.output = output; this.outputFromPivotFunctions = outputFromPivotFunctions; this.wFnsToProcess = wFnsToProcess; this.currIdx = 0; wTFnDef = (WindowTableFunctionDef) getTableDef(); ptfDesc = getQueryDef(); inputOI = iPart.getOutputOI(); aggBuffers = new AggregationBuffer[wTFnDef.getWindowFunctions().size()]; args = new Object[wTFnDef.getWindowFunctions().size()][]; try { for (int j : wFnsToProcess) { WindowFunctionDef wFn = wTFnDef.getWindowFunctions().get(j); aggBuffers[j] = wFn.getWFnEval().getNewAggregationBuffer(); args[j] = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()]; } } catch (HiveException he) { throw new RuntimeException(he); } if ( WindowingTableFunction.this.rnkLimitDef != null ) { rnkLimit = new RankLimit(WindowingTableFunction.this.rnkLimitDef); } }
private List<Object> runPartial2(List<Object> values) throws Exception { List<Object> ret = new ArrayList<>(); int batchSize = 1; Iterator<Object> iter = values.iterator(); do { GenericUDAFEvaluator eval = evaluatorFactory.getEvaluator(info); eval.init(GenericUDAFEvaluator.Mode.PARTIAL2, partialOIs); AggregationBuffer buf = eval.getNewAggregationBuffer(); for (int i = 0; i < batchSize - 1 && iter.hasNext(); i++) { eval.merge(buf, iter.next()); } batchSize <<= 1; ret.add(eval.terminatePartial(buf)); // back-check to force at least 1 output; and this should have a partial which is empty } while (iter.hasNext()); return ret; } }
private List<Object> runPartial1(List<Object[]> values) throws Exception { List<Object> ret = new ArrayList<>(); int batchSize = 1; Iterator<Object[]> iter = values.iterator(); do { GenericUDAFEvaluator eval = evaluatorFactory.getEvaluator(info); eval.init(GenericUDAFEvaluator.Mode.PARTIAL1, info.getParameterObjectInspectors()); AggregationBuffer buf = eval.getNewAggregationBuffer(); for (int i = 0; i < batchSize - 1 && iter.hasNext(); i++) { eval.iterate(buf, iter.next()); } batchSize <<= 1; ret.add(eval.terminatePartial(buf)); // back-check to force at least 1 output; and this should have a partial which is empty } while (iter.hasNext()); return ret; }
/** * Calculate the partial result sum + count giving a parition range * @return a 2-element Object array of [count long, sum ResultType] */ private Object[] calcPartialResult(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo) throws HiveException { // To handle the case like SUM(LAG(f)) over(), aggregation function includes // LAG/LEAD call PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr); AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer(); Object[] argValues = new Object[parameters == null ? 0 : parameters.size()]; while(pItr.hasNext()) { Object row = pItr.next(); int i = 0; if ( parameters != null ) { for(PTFExpressionDef param : parameters) { argValues[i++] = param.getExprEvaluator().evaluate(row); } } wrappedEvaluator.aggregate(aggBuffer, argValues); } // The object [count LongWritable, sum ResultType] is reused during evaluating Object[] partial = (Object[])wrappedEvaluator.terminatePartial(aggBuffer); return new Object[] {((LongWritable)partial[0]).get(), ObjectInspectorUtils.copyToStandardObject(partial[1], outputOI)}; }
/**
 * Per-partition streaming state: a rolling partition window plus, for each
 * window function, an output list, an aggregation buffer, and an argument
 * scratch array.
 */
@SuppressWarnings("unchecked")
StreamingState(Configuration cfg, StructObjectInspector inputOI, boolean isMapSide,
    WindowTableFunctionDef tabDef, int precedingSpan, int followingSpan) throws HiveException {
  // Map side reads the input shape; reduce side reads the raw-input shape.
  AbstractSerDe serde;
  StructObjectInspector outputOI;
  if (isMapSide) {
    serde = tabDef.getInput().getOutputShape().getSerde();
    outputOI = tabDef.getInput().getOutputShape().getOI();
  } else {
    serde = tabDef.getRawInputShape().getSerde();
    outputOI = tabDef.getRawInputShape().getOI();
  }
  rollingPart = PTFPartition.createRolling(cfg, serde, inputOI, outputOI,
      precedingSpan, followingSpan);
  int numFns = tabDef.getWindowFunctions().size();
  fnOutputs = new ArrayList[numFns];
  aggBuffers = new AggregationBuffer[numFns];
  funcArgs = new Object[numFns][];
  for (int fnIdx = 0; fnIdx < numFns; fnIdx++) {
    WindowFunctionDef wFn = tabDef.getWindowFunctions().get(fnIdx);
    fnOutputs[fnIdx] = new ArrayList<Object>();
    List<?> wFnArgs = wFn.getArgs();
    funcArgs[fnIdx] = new Object[wFnArgs == null ? 0 : wFnArgs.size()];
    aggBuffers[fnIdx] = wFn.getWFnEval().getNewAggregationBuffer();
  }
  if (WindowingTableFunction.this.rnkLimitDef != null) {
    rnkLimit = new RankLimit(WindowingTableFunction.this.rnkLimitDef);
  }
}
/**
 * Runs the evaluator in COMPLETE mode: iterates every raw-parameter row into a
 * single buffer, then terminates to produce the final result.
 */
private Object runComplete(List<Object[]> values) throws SemanticException, HiveException {
  GenericUDAFEvaluator eval = evaluatorFactory.getEvaluator(info);
  eval.init(GenericUDAFEvaluator.Mode.COMPLETE, info.getParameterObjectInspectors());
  AggregationBuffer agg = eval.getNewAggregationBuffer();
  for (Object[] row : values) {
    eval.iterate(agg, row);
  }
  return eval.terminate(agg);
}
/**
 * Runs a PARTIAL1 pass over the raw rows, then merges the resulting partials
 * in FINAL mode to produce the final aggregation result.
 */
private Object runPartialFinal(List<Object[]> values) throws Exception {
  GenericUDAFEvaluator eval = evaluatorFactory.getEvaluator(info);
  eval.init(GenericUDAFEvaluator.Mode.FINAL, partialOIs);
  AggregationBuffer buf = eval.getNewAggregationBuffer();
  for (Object partial : runPartial1(values)) {
    eval.merge(buf, partial);
  }
  return eval.terminate(buf);
}
/**
 * Runs a PARTIAL1 pass, re-aggregates those partials with a PARTIAL2 pass,
 * then merges in FINAL mode to produce the final aggregation result.
 */
private Object runPartial2Final(List<Object[]> values) throws Exception {
  GenericUDAFEvaluator eval = evaluatorFactory.getEvaluator(info);
  eval.init(GenericUDAFEvaluator.Mode.FINAL, partialOIs);
  AggregationBuffer buf = eval.getNewAggregationBuffer();
  for (Object partial : runPartial2(runPartial1(values))) {
    eval.merge(buf, partial);
  }
  return eval.terminate(buf);
}