private static boolean validGBParent(RelNode gbNode, RelNode parent) { boolean validParent = true; // TOODO: Verify GB having is not a seperate filter (if so we shouldn't // introduce derived table) if (parent instanceof Join || parent instanceof SetOp || parent instanceof Aggregate || (parent instanceof Filter && ((Aggregate) gbNode).getGroupSet().isEmpty())) { validParent = false; } if (parent instanceof Project) { for (RexNode child : parent.getChildExps()) { if (child instanceof RexOver || child instanceof RexWinAggCall) { // Hive can't handle select rank() over(order by sum(c1)/sum(c2)) from t1 group by c3 // but can handle select rank() over (order by c4) from // (select sum(c1)/sum(c2) as c4 from t1 group by c3) t2; // so introduce a project on top of this gby. return false; } } } return validParent; }
public static List<ExprNodeDesc> getExprNodes(List<Integer> inputRefs, RelNode inputRel, String inputTabAlias) { List<ExprNodeDesc> exprNodes = new ArrayList<ExprNodeDesc>(); List<RexNode> rexInputRefs = getInputRef(inputRefs, inputRel); List<RexNode> exprs = inputRel.getChildExps(); // TODO: Change ExprNodeConverter to be independent of Partition Expr ExprNodeConverter exprConv = new ExprNodeConverter(inputTabAlias, inputRel.getRowType(), new HashSet<Integer>(), inputRel.getCluster().getTypeFactory()); for (int index = 0; index < rexInputRefs.size(); index++) { // The following check is only a guard against failures. // TODO: Knowing which expr is constant in GBY's aggregation function // arguments could be better done using Metadata provider of Calcite. //check the corresponding expression in exprs to see if it is literal if (exprs != null && index < exprs.size() && exprs.get(inputRefs.get(index)) instanceof RexLiteral) { //because rexInputRefs represent ref expr corresponding to value in inputRefs it is used to get // corresponding index ExprNodeDesc exprNodeDesc = exprConv.visitLiteral((RexLiteral) exprs.get(inputRefs.get(index))); exprNodes.add(exprNodeDesc); } else { RexNode iRef = rexInputRefs.get(index); exprNodes.add(iRef.accept(exprConv)); } } return exprNodes; }
/**
 * Decides whether {@code gbNode} (a group-by Aggregate) may remain directly
 * under {@code parent}; returns false when a derived table should be
 * introduced between them instead.
 */
private static boolean validGBParent(RelNode gbNode, RelNode parent) {
  boolean validParent = true;
  // TODO: Verify GB having is not a separate filter (if so we shouldn't
  // introduce derived table)
  // A Filter parent is only disqualifying when the group-by has no grouping
  // keys (scalar aggregate); note gbNode is cast to Aggregate here, so
  // callers must pass an Aggregate.
  if (parent instanceof Join || parent instanceof SetOp
      || parent instanceof Aggregate
      || (parent instanceof Filter && ((Aggregate) gbNode).getGroupSet().isEmpty())) {
    validParent = false;
  }
  if (parent instanceof Project) {
    for (RexNode child : parent.getChildExps()) {
      if (child instanceof RexOver || child instanceof RexWinAggCall) {
        // Hive can't handle select rank() over(order by sum(c1)/sum(c2)) from t1 group by c3
        // but can handle select rank() over (order by c4) from
        // (select sum(c1)/sum(c2) as c4 from t1 group by c3) t2;
        // so introduce a project on top of this gby.
        return false;
      }
    }
  }
  return validParent;
}
public static List<ExprNodeDesc> getExprNodes(List<Integer> inputRefs, RelNode inputRel, String inputTabAlias) { List<ExprNodeDesc> exprNodes = new ArrayList<ExprNodeDesc>(); List<RexNode> rexInputRefs = getInputRef(inputRefs, inputRel); List<RexNode> exprs = inputRel.getChildExps(); // TODO: Change ExprNodeConverter to be independent of Partition Expr ExprNodeConverter exprConv = new ExprNodeConverter(inputTabAlias, inputRel.getRowType(), new HashSet<Integer>(), inputRel.getCluster().getTypeFactory()); for (int index = 0; index < rexInputRefs.size(); index++) { // The following check is only a guard against failures. // TODO: Knowing which expr is constant in GBY's aggregation function // arguments could be better done using Metadata provider of Calcite. //check the corresponding expression in exprs to see if it is literal if (exprs != null && index < exprs.size() && exprs.get(inputRefs.get(index)) instanceof RexLiteral) { //because rexInputRefs represent ref expr corresponding to value in inputRefs it is used to get // corresponding index ExprNodeDesc exprNodeDesc = exprConv.visitLiteral((RexLiteral) exprs.get(inputRefs.get(index))); exprNodes.add(exprNodeDesc); } else { RexNode iRef = rexInputRefs.get(index); exprNodes.add(iRef.accept(exprConv)); } } return exprNodes; }
/**
 * Replaces an aggregate under a constants-only parent with a dummy
 * {@code count} aggregation wrapped in a derived table, then rewires the
 * parent's first input to point at the new subtree.
 *
 * Fails fast (RuntimeException) if the parent projects anything that is not
 * a constant, since this rewrite is only valid in that case.
 */
private static void replaceEmptyGroupAggr(final RelNode rel, RelNode parent) {
  // If this function is called, the parent should only include constant
  List<RexNode> exps = parent.getChildExps();
  for (RexNode rexNode : exps) {
    if (!rexNode.accept(new HiveCalciteUtil.ConstantFinder())) {
      throw new RuntimeException("We expect " + parent.toString()
          + " to contain only constants. However, " + rexNode.toString() + " is "
          + rexNode.getKind());
    }
  }
  HiveAggregate oldAggRel = (HiveAggregate) rel;
  RelDataTypeFactory typeFactory = oldAggRel.getCluster().getTypeFactory();
  RelDataType longType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, typeFactory);
  RelDataType intType = TypeConverter.convert(TypeInfoFactory.intTypeInfo, typeFactory);
  // Create the dummy aggregation.
  SqlAggFunction countFn = SqlFunctionConverter.getCalciteAggFn("count", false,
      ImmutableList.of(intType), longType);
  // TODO: Using 0 might be wrong; might need to walk down to find the
  // proper index of a dummy.
  List<Integer> argList = ImmutableList.of(0);
  // NOTE(review): this AggregateCall constructor takes no field collation /
  // name here (null name) — confirm against the Calcite version in use.
  AggregateCall dummyCall = new AggregateCall(countFn, false, argList, longType, null);
  // Copy the aggregate, keeping its grouping but substituting the dummy call.
  Aggregate newAggRel = oldAggRel.copy(oldAggRel.getTraitSet(), oldAggRel.getInput(),
      oldAggRel.indicator, oldAggRel.getGroupSet(), oldAggRel.getGroupSets(),
      ImmutableList.of(dummyCall));
  RelNode select = introduceDerivedTable(newAggRel);
  parent.replaceInput(0, select);
}
}
@Test public void testIsNotNull() { // @formatter:off final RelNode basePlan = builder .scan("t") .filter( builder.call(SqlStdOperatorTable.IS_NOT_NULL, builder.field("_str") ) ) .build(); // @formatter:on statObj.setNumNulls(0); planner.setRoot(basePlan); System.out.println(RelOptUtil.toString(basePlan)); RelNode optimizedRelNode = planner.findBestExp(); System.out.println(RelOptUtil.toString(optimizedRelNode)); assertEquals("missing literal", SqlKind.LITERAL, optimizedRelNode.getChildExps().get(0).getKind()); RexLiteral val = (RexLiteral) optimizedRelNode.getChildExps().get(0); assertEquals(true, val.getValue()); }
@Test public void testIsNull_zero() { // @formatter:off final RelNode basePlan = builder .scan("t") .filter( builder.call(SqlStdOperatorTable.IS_NULL, builder.field("_str") ) ) .build(); // @formatter:on statObj.setNumNulls(0); planner.setRoot(basePlan); System.out.println(RelOptUtil.toString(basePlan)); RelNode optimizedRelNode = planner.findBestExp(); System.out.println(RelOptUtil.toString(optimizedRelNode)); assertEquals("missing literal", SqlKind.LITERAL, optimizedRelNode.getChildExps().get(0).getKind()); RexLiteral val = (RexLiteral) optimizedRelNode.getChildExps().get(0); assertEquals(false, val.getValue()); }
final List<RexNode> newProjects = new ArrayList<>(); final List<RexNode> inputExprs = input.getChildExps(); if (inputExprs == null || inputExprs.isEmpty()) { return aggregate;
@Test public void testGreaterThan_Below() { // @formatter:off final RelNode basePlan = builder .scan("t") .filter( builder.call(SqlStdOperatorTable.GREATER_THAN, builder.field("_int"), builder.literal(0) ) ) .build(); // @formatter:on statObj.setRange(100, 200); planner.setRoot(basePlan); RelNode optimizedRelNode = planner.findBestExp(); assertEquals("missing literal", SqlKind.LITERAL, optimizedRelNode.getChildExps().get(0).getKind()); RexLiteral val = (RexLiteral) optimizedRelNode.getChildExps().get(0); assertEquals(true, val.getValue()); }
@Test public void testIsNull_all() { // @formatter:off final RelNode basePlan = builder .scan("t") .filter( builder.call(SqlStdOperatorTable.IS_NULL, builder.field("_str") ) ) .build(); // @formatter:on statObj.setNumNulls(3); planner.setRoot(basePlan); System.out.println(RelOptUtil.toString(basePlan)); RelNode optimizedRelNode = planner.findBestExp(); System.out.println(RelOptUtil.toString(optimizedRelNode)); assertEquals("missing literal", SqlKind.LITERAL, optimizedRelNode.getChildExps().get(0).getKind()); RexLiteral val = (RexLiteral) optimizedRelNode.getChildExps().get(0); assertEquals(true, val.getValue()); }
@Test public void testIsNull_one() { // @formatter:off final RelNode basePlan = builder .scan("t") .filter( builder.call(SqlStdOperatorTable.IS_NULL, builder.field("_str") ) ) .build(); // @formatter:on statObj.setNumNulls(1); planner.setRoot(basePlan); System.out.println(RelOptUtil.toString(basePlan)); RelNode optimizedRelNode = planner.findBestExp(); System.out.println(RelOptUtil.toString(optimizedRelNode)); assertNotEquals("should not be a literal", SqlKind.LITERAL, optimizedRelNode.getChildExps().get(0).getKind()); }
/**
 * Replaces an aggregate under a constants-only parent with a dummy
 * {@code count} aggregation wrapped in a derived table, then rewires the
 * parent's first input to point at the new subtree.
 *
 * Fails fast (RuntimeException) if the parent projects anything that is not
 * a constant, since this rewrite is only valid in that case.
 */
private static void replaceEmptyGroupAggr(final RelNode rel, RelNode parent) {
  // If this function is called, the parent should only include constant
  List<RexNode> exps = parent.getChildExps();
  for (RexNode rexNode : exps) {
    if (!rexNode.accept(new HiveCalciteUtil.ConstantFinder())) {
      throw new RuntimeException("We expect " + parent.toString()
          + " to contain only constants. However, " + rexNode.toString() + " is "
          + rexNode.getKind());
    }
  }
  HiveAggregate oldAggRel = (HiveAggregate) rel;
  RelDataTypeFactory typeFactory = oldAggRel.getCluster().getTypeFactory();
  RelDataType longType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, typeFactory);
  RelDataType intType = TypeConverter.convert(TypeInfoFactory.intTypeInfo, typeFactory);
  // Create the dummy aggregation.
  SqlAggFunction countFn = SqlFunctionConverter.getCalciteAggFn("count", false,
      ImmutableList.of(intType), longType);
  // TODO: Using 0 might be wrong; might need to walk down to find the
  // proper index of a dummy.
  List<Integer> argList = ImmutableList.of(0);
  // NOTE(review): this AggregateCall constructor takes no field collation /
  // name here (null name) — confirm against the Calcite version in use.
  AggregateCall dummyCall = new AggregateCall(countFn, false, argList, longType, null);
  // Copy the aggregate, keeping its grouping but substituting the dummy call.
  Aggregate newAggRel = oldAggRel.copy(oldAggRel.getTraitSet(), oldAggRel.getInput(),
      oldAggRel.indicator, oldAggRel.getGroupSet(), oldAggRel.getGroupSets(),
      ImmutableList.of(dummyCall));
  RelNode select = introduceDerivedTable(newAggRel);
  parent.replaceInput(0, select);
}
}
if (select.getChildExps().isEmpty()) { RexLiteral r = select.getCluster().getRexBuilder().makeExactLiteral(new BigDecimal(1)); ASTNode selectExpr = ASTBuilder.selectExpr(ASTBuilder.literal(r), "1"); int i = 0; for (RexNode r : select.getChildExps()) { if (RexUtil.isNull(r) && r.getType().getSqlTypeName() != SqlTypeName.NULL) {
/**
 * Finishes writing one node: verifies the buffered values line up with the
 * node's inputs and expressions, snapshots and clears the buffer, then emits
 * the explanation and flushes the writer.
 */
@Override
public RelWriter done(RelNode node) {
  int pos = 0;
  // An optional leading "subset" entry is not matched against inputs/exprs.
  if (!values.isEmpty() && values.get(0).left.equals("subset")) {
    pos++;
  }
  for (RelNode input : node.getInputs()) {
    assert values.get(pos).right == input;
    pos++;
  }
  for (RexNode expr : node.getChildExps()) {
    assert values.get(pos).right == expr;
    pos++;
  }
  final List<Pair<String, Object>> snapshot = ImmutableList.copyOf(values);
  values.clear();
  explain_(node, snapshot);
  pw.flush();
  return this;
}
/**
 * Returns true if the given Project contains a call to the "flatten"
 * function (case-insensitive) among its projected expressions.
 *
 * @param project a RelNode that must be a {@link Project}
 * @return true if any projected expression is a call to flatten
 */
public static boolean isProjectFlatten(RelNode project) {
  assert project instanceof Project : "Rel is NOT an instance of project!";
  for (RexNode rex : project.getChildExps()) {
    // FIX: removed dead local `RexNode newExpr = rex;` — it was assigned but
    // never read.
    if (rex instanceof RexCall) {
      String functionName = ((RexCall) rex).getOperator().getName();
      if (functionName.equalsIgnoreCase("flatten")) {
        return true;
      }
    }
  }
  return false;
}
/**
 * Finishes writing one node: verifies the buffered values line up with the
 * node's inputs and expressions, snapshots and clears the buffer, then emits
 * the explanation and flushes the writer.
 */
@SuppressWarnings("deprecation")
public RelWriter done(RelNode node) {
  int i = 0;
  // An optional leading "subset" entry is not matched against inputs/exprs.
  if (values.size() > 0 && values.get(0).left.equals("subset")) {
    ++i;
  }
  // Buffered values must correspond 1:1 (by identity) to the node's inputs...
  for (RelNode input : node.getInputs()) {
    assert values.get(i).right == input;
    ++i;
  }
  // ...followed by its expressions, in order.
  for (RexNode expr : node.getChildExps()) {
    assert values.get(i).right == expr;
    ++i;
  }
  // Snapshot before clearing so explain_ sees a stable list.
  final List<Pair<String, Object>> valuesCopy = ImmutableList.copyOf(values);
  values.clear();
  explain_(node, valuesCopy);
  pw.flush();
  return this;
}
/**
 * Writes this node's terms: for each output field, prints either a
 * CONVERT(field) marker (when the field is one of the configured conversion
 * columns) or the underlying child expression, followed by the full
 * conversions list.
 */
@Override
public RelWriter explainTerms(RelWriter pw) {
  super.explainTerms(pw);
  List<RexNode> childExprs = input.getChildExps();
  // Names of the input fields that undergo conversion.
  List<String> convertFields = Lists.transform(conversions,
      new Function<ConversionColumn, String>() {
        @Override
        public String apply(ConversionColumn input) {
          return input.getInputField();
        }
      });
  for (Ord<RelDataTypeField> field : Ord.zip(rowType.getFieldList())) {
    String fieldName = field.e.getName();
    if (fieldName == null) {
      // Synthesize a stable name for unnamed fields.
      fieldName = "field#" + field.i;
    }
    if (convertFields.contains(fieldName)) {
      pw.item(fieldName, "CONVERT(" + fieldName + ")");
    } else {
      // Relies on output field i mapping to child expression i.
      pw.item(fieldName, childExprs.get(field.i));
    }
  }
  pw.item("conversions", conversions);
  return pw;
}
}
List<RexNode> childExprs = curRel.getChildExps(); if (childExprs != null && childExprs.size() > 0) { if (childExprs.get(curIndex) instanceof RexInputRef) {
private static void replaceEmptyGroupAggr(final RelNode rel, RelNode parent) { // If this function is called, the parent should only include constant List<RexNode> exps = parent.getChildExps(); for (RexNode rexNode : exps) { if (!rexNode.accept(new HiveCalciteUtil.ConstantFinder())) { throw new RuntimeException("We expect " + parent.toString() + " to contain only constants. However, " + rexNode.toString() + " is " + rexNode.getKind()); } } HiveAggregate oldAggRel = (HiveAggregate) rel; RelDataTypeFactory typeFactory = oldAggRel.getCluster().getTypeFactory(); RelDataType longType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, typeFactory); RelDataType intType = TypeConverter.convert(TypeInfoFactory.intTypeInfo, typeFactory); // Create the dummy aggregation. SqlAggFunction countFn = SqlFunctionConverter.getCalciteAggFn("count", ImmutableList.of(intType), longType); // TODO: Using 0 might be wrong; might need to walk down to find the // proper index of a dummy. List<Integer> argList = ImmutableList.of(0); AggregateCall dummyCall = new AggregateCall(countFn, false, argList, longType, null); Aggregate newAggRel = oldAggRel.copy(oldAggRel.getTraitSet(), oldAggRel.getInput(), oldAggRel.indicator, oldAggRel.getGroupSet(), oldAggRel.getGroupSets(), ImmutableList.of(dummyCall)); RelNode select = introduceDerivedTable(newAggRel); parent.replaceInput(0, select); } }