/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Initializes this vectorized aggregate from its descriptor.
 *
 * <p>Builds the expression writer used to emit the aggregation result; the
 * writer's output type is derived from the aggregate's first (and only
 * consulted) parameter expression.
 *
 * @param desc the aggregation descriptor supplying the parameter expression
 * @throws HiveException if the result writer cannot be constructed
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  this.resultWriter =
      VectorExpressionWriterFactory.genVectorExpressionWritable(desc.getParameters().get(0));
}
/**
 * Checks whether an aggregation is acceptable: all of its parameter
 * expressions must pass {@code checkExpressions} and its UDAF must resolve
 * to a native function in the registry.
 *
 * <p>Note: the registry lookup runs unconditionally (it may throw
 * {@link SemanticException}), regardless of the parameter-check result.
 *
 * @param agg the aggregation to examine
 * @return {@code true} if the aggregation is supported
 * @throws SemanticException if the UDAF cannot be resolved
 */
private boolean checkAggregator(AggregationDesc agg) throws SemanticException {
  if (LOG.isDebugEnabled()) {
    LOG.debug(String.format("Checking '%s'", agg.getExprString()));
  }
  boolean supported = checkExpressions(agg.getParameters());
  FunctionInfo info = FunctionRegistry.getFunctionInfo(agg.getGenericUDAFName());
  supported = supported && (info != null) && info.isNative();
  if (!supported) {
    LOG.info("Aggregator is not native: " + agg.getExprString());
  }
  return supported;
}
/**
 * Initializes this decimal aggregate from its descriptor.
 *
 * <p>Records the precision and scale of the single decimal input expression,
 * then sets up the partial-result object inspector, which depends on them.
 *
 * @param desc the aggregation descriptor; its first parameter must carry a
 *             {@code DecimalTypeInfo}
 * @throws HiveException if initialization fails
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
  ExprNodeDesc firstParam = desc.getParameters().get(0);
  DecimalTypeInfo decimalType = (DecimalTypeInfo) firstParam.getTypeInfo();
  this.inputPrecision = (short) decimalType.precision();
  this.inputScale = (short) decimalType.scale();
  initPartialResultInspector();
}
/**
 * Checks whether an aggregation is acceptable: all of its parameter
 * expressions must pass {@code checkExpressions} and its UDAF must resolve
 * to a native function in the registry.
 *
 * <p>Note: the registry lookup runs unconditionally (it may throw
 * {@link SemanticException}), regardless of the parameter-check result.
 *
 * @param agg the aggregation to examine
 * @return {@code true} if the aggregation is supported
 * @throws SemanticException if the UDAF cannot be resolved
 */
private boolean checkAggregator(AggregationDesc agg) throws SemanticException {
  if (LOG.isDebugEnabled()) {
    LOG.debug(String.format("Checking '%s'", agg.getExprString()));
  }
  boolean supported = checkExpressions(agg.getParameters());
  FunctionInfo info = FunctionRegistry.getFunctionInfo(agg.getGenericUDAFName());
  supported = supported && (info != null) && info.isNative();
  if (!supported) {
    LOG.info("Aggregator is not native: " + agg.getExprString());
  }
  return supported;
}
private boolean validateAggregationDesc(AggregationDesc aggDesc, GroupByDesc.Mode groupByMode, boolean hasKeys) { String udfName = aggDesc.getGenericUDAFName().toLowerCase(); if (!supportedAggregationUdfs.contains(udfName)) { setExpressionIssue("Aggregation Function", "UDF " + udfName + " not supported"); return false; } // The planner seems to pull this one out. if (aggDesc.getDistinct()) { setExpressionIssue("Aggregation Function", "DISTINCT not supported"); return false; } ArrayList<ExprNodeDesc> parameters = aggDesc.getParameters(); if (parameters != null && !validateExprNodeDesc(parameters, "Aggregation Function UDF " + udfName + " parameter")) { return false; } return true; }
/**
 * Collects the distinct column names referenced by this operator's group-by
 * keys and by the parameters of all of its aggregators.
 *
 * @param opParseCtx operator parse contexts (accepted for interface
 *                   compatibility; not consulted here)
 * @return the merged, duplicate-free list of referenced column names
 */
public List<String> genColLists(
    HashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx) {
  List<String> referenced = new ArrayList<String>();
  // Columns used by the group-by keys.
  for (ExprNodeDesc keyExpr : conf.getKeys()) {
    referenced = Utilities.mergeUniqElems(referenced, keyExpr.getCols());
  }
  // Columns used by each aggregator's parameter expressions.
  for (AggregationDesc aggDesc : conf.getAggregators()) {
    for (ExprNodeDesc paramExpr : aggDesc.getParameters()) {
      referenced = Utilities.mergeUniqElems(referenced, paramExpr.getCols());
    }
  }
  return referenced;
}
/**
 * Collects the distinct column names referenced by this operator's group-by
 * keys and by the parameters of all of its aggregators.
 *
 * @param opParseCtx operator parse contexts (accepted for interface
 *                   compatibility; not consulted here)
 * @return the merged, duplicate-free list of referenced column names
 */
public List<String> genColLists(
    HashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx) {
  List<String> referenced = new ArrayList<String>();
  // Columns used by the group-by keys.
  for (ExprNodeDesc keyExpr : conf.getKeys()) {
    referenced = Utilities.mergeUniqElems(referenced, keyExpr.getCols());
  }
  // Columns used by each aggregator's parameter expressions.
  for (AggregationDesc aggDesc : conf.getAggregators()) {
    for (ExprNodeDesc paramExpr : aggDesc.getParameters()) {
      referenced = Utilities.mergeUniqElems(referenced, paramExpr.getCols());
    }
  }
  return referenced;
}
/**
 * Converts a plan tail of the shape {@code GBY -> RS -> GBY [-> SEL] -> FS}
 * into a fetch task when the reduce sink is a global (unkeyed, single-reducer)
 * aggregation. Mutates the parse context and detaches the converted operators.
 *
 * @return always {@code null}
 * @throws SemanticException on expression backtracking failure
 */
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  FileSinkOperator fileSink = (FileSinkOperator) nd;
  // A SELECT may sit between the child group-by and the file sink; adjust
  // the stack offsets accordingly.
  int shift = stack.get(stack.size() - 2) instanceof SelectOperator ? 0 : 1;
  GroupByOperator childGroupBy = (GroupByOperator) stack.get(stack.size() - 3 + shift);
  ReduceSinkOperator reduceSink = (ReduceSinkOperator) stack.get(stack.size() - 4 + shift);
  // Only a single-reducer, key-less reduce sink (a global aggregation) qualifies.
  if (reduceSink.getConf().getNumReducers() != 1
      || !reduceSink.getConf().getKeyCols().isEmpty()) {
    return null;
  }
  GroupByOperator parentGroupBy = (GroupByOperator) stack.get(stack.size() - 5 + shift);
  Path fileName = fileSink.getConf().getFinalDirName();
  TableDesc tsDesc = createIntermediateFS(parentGroupBy, fileName);
  // Rewrite each aggregator's parameters in terms of the reduce sink's output.
  for (AggregationDesc aggDesc : childGroupBy.getConf().getAggregators()) {
    List<ExprNodeDesc> params = aggDesc.getParameters();
    aggDesc.setParameters(ExprNodeDescUtils.backtrack(params, childGroupBy, reduceSink));
  }
  pctx.setFetchTabledesc(tsDesc);
  pctx.setFetchSource(childGroupBy);
  pctx.setFetchSink(SimpleFetchOptimizer.replaceFSwithLS(fileSink, "NULL"));
  // Detach the converted tail from the operator tree.
  reduceSink.setParentOperators(null);
  reduceSink.setChildOperators(null);
  childGroupBy.setParentOperators(null);
  return null;
}
/**
 * Converts a plan tail of the shape {@code GBY -> RS -> GBY [-> SEL] -> FS}
 * into a fetch task when the reduce sink is a global (unkeyed, single-reducer)
 * aggregation. Mutates the parse context and detaches the converted operators.
 *
 * @return always {@code null}
 * @throws SemanticException on expression backtracking failure
 */
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  FileSinkOperator fileSink = (FileSinkOperator) nd;
  // A SELECT may sit between the child group-by and the file sink; adjust
  // the stack offsets accordingly.
  int shift = stack.get(stack.size() - 2) instanceof SelectOperator ? 0 : 1;
  GroupByOperator childGroupBy = (GroupByOperator) stack.get(stack.size() - 3 + shift);
  ReduceSinkOperator reduceSink = (ReduceSinkOperator) stack.get(stack.size() - 4 + shift);
  // Only a single-reducer, key-less reduce sink (a global aggregation) qualifies.
  if (reduceSink.getConf().getNumReducers() != 1
      || !reduceSink.getConf().getKeyCols().isEmpty()) {
    return null;
  }
  GroupByOperator parentGroupBy = (GroupByOperator) stack.get(stack.size() - 5 + shift);
  Path fileName = fileSink.getConf().getFinalDirName();
  TableDesc tsDesc = createIntermediateFS(parentGroupBy, fileName);
  // Rewrite each aggregator's parameters in terms of the reduce sink's output.
  for (AggregationDesc aggDesc : childGroupBy.getConf().getAggregators()) {
    List<ExprNodeDesc> params = aggDesc.getParameters();
    aggDesc.setParameters(ExprNodeDescUtils.backtrack(params, childGroupBy, reduceSink));
  }
  pctx.setFetchTabledesc(tsDesc);
  pctx.setFetchSource(childGroupBy);
  pctx.setFetchSink(SimpleFetchOptimizer.replaceFSwithLS(fileSink, "NULL"));
  // Detach the converted tail from the operator tree.
  reduceSink.setParentOperators(null);
  reduceSink.setChildOperators(null);
  childGroupBy.setParentOperators(null);
  return null;
}
private static ImmutablePair<VectorAggregationDesc,String> getVectorAggregationDesc( AggregationDesc aggrDesc, VectorizationContext vContext) throws HiveException { String aggregateName = aggrDesc.getGenericUDAFName(); List<ExprNodeDesc> parameterList = aggrDesc.getParameters(); final int parameterCount = parameterList.size(); final GenericUDAFEvaluator.Mode udafEvaluatorMode = aggrDesc.getMode(); /* * Look at evaluator to get output type info. */ GenericUDAFEvaluator evaluator = aggrDesc.getGenericUDAFEvaluator(); ObjectInspector[] parameterObjectInspectors = new ObjectInspector[parameterCount]; for (int i = 0; i < parameterCount; i++) { TypeInfo typeInfo = parameterList.get(i).getTypeInfo(); parameterObjectInspectors[i] = TypeInfoUtils .getStandardWritableObjectInspectorFromTypeInfo(typeInfo); } // The only way to get the return object inspector (and its return type) is to // initialize it... ObjectInspector returnOI = evaluator.init( aggrDesc.getMode(), parameterObjectInspectors); final TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(returnOI.getTypeName()); return getVectorAggregationDesc( aggregateName, parameterList, evaluator, outputTypeInfo, udafEvaluatorMode, vContext); }
// Rewrite the aggregator's parameter expressions by backtracking them through
// the operator chain. NOTE(review): fragment of a larger method not visible
// here — presumably cGBYr is the child group-by and cRS the reduce sink it
// follows; confirm against the enclosing loop.
aggr.setParameters(ExprNodeDescUtils.backtrack(aggr.getParameters(), cGBYr, cRS));