/** Returns whether {@code rexNode} is a CASE call that carries exactly three operands. */
private static boolean isThreeArgCase(final RexNode rexNode) {
  if (rexNode.getKind() != SqlKind.CASE) {
    return false;
  }
  return ((RexCall) rexNode).getOperands().size() == 3;
}
}
/**
 * Translates a two-operand {@link RexCall} into a {@link BinaryTupleExpression}.
 *
 * <p>Both operands are converted through this visitor; the original call's string
 * form is recorded as the digest of the resulting tuple expression.
 */
private BinaryTupleExpression getBinaryTupleExpression(RexCall call,
    TupleExpression.ExpressionOperatorEnum op) {
  assert call.operands.size() == 2;
  final TupleExpression lhs = call.operands.get(0).accept(this);
  final TupleExpression rhs = call.operands.get(1).accept(this);
  final BinaryTupleExpression result =
      new BinaryTupleExpression(op, Lists.newArrayList(lhs, rhs));
  result.setDigest(call.toString());
  return result;
}
/**
 * Creates a call to an aggregate function.
 *
 * <p>A non-null {@code filter} must be of BOOLEAN type; a nullable filter is
 * wrapped in {@code IS TRUE} so that a three-valued UNKNOWN behaves as FALSE.
 */
public AggCall aggregateCall(SqlAggFunction aggFunction, boolean distinct, RexNode filter,
    String alias, Iterable<? extends RexNode> operands) {
  RexNode normalizedFilter = filter;
  if (normalizedFilter != null) {
    if (normalizedFilter.getType().getSqlTypeName() != SqlTypeName.BOOLEAN) {
      throw Static.RESOURCE.filterMustBeBoolean().ex();
    }
    if (normalizedFilter.getType().isNullable()) {
      normalizedFilter = call(SqlStdOperatorTable.IS_TRUE, normalizedFilter);
    }
  }
  return new AggCallImpl(aggFunction, distinct, normalizedFilter, alias,
      ImmutableList.copyOf(operands));
}
/**
 * Rewrites an ORDER BY expression against the child project's output schema.
 *
 * <p>Each operand whose string form matches one of the child's projected
 * expressions is replaced by an input reference to that projected column;
 * nested calls are rewritten recursively, and anything else passes through
 * unchanged. Matching is by digest (string) equality — NOTE(review): this
 * presumably relies on digests being canonical; confirm against callers.
 */
private static RexCall adjustOBSchema(RexCall obyExpr, Project obChild,
    List<FieldSchema> resultSchema) {
  final List<RexNode> newOperands = new ArrayList<>();
  for (RexNode operand : obyExpr.operands) {
    final String operandDigest = operand.toString();
    // Look for a projected expression with the same digest.
    int matchedIdx = -1;
    for (int j = 0; j < resultSchema.size(); j++) {
      if (obChild.getChildExps().get(j).toString().equals(operandDigest)) {
        matchedIdx = j;
        break;
      }
    }
    if (matchedIdx != -1) {
      newOperands.add(new RexInputRef(matchedIdx, operand.getType()));
    } else if (operand instanceof RexCall) {
      // No direct match: descend into nested calls.
      newOperands.add(adjustOBSchema((RexCall) operand, obChild, resultSchema));
    } else {
      newOperands.add(operand);
    }
  }
  return (RexCall) obChild.getCluster().getRexBuilder().makeCall(
      obyExpr.getType(), obyExpr.getOperator(), newOperands);
}
/**
 * Translates "condition" to a Druid filter, assuming it does not contain any
 * boolean expressions. Returns null if we cannot translate the condition.
 *
 * @param plannerContext planner context
 * @param rowSignature row signature of the dataSource to be filtered
 * @param rexNode Calcite row expression
 */
@Nullable
private static DimFilter toLeafFilter(
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final RexNode rexNode
)
{
  // Constant conditions short-circuit to match-all / match-none filters.
  if (rexNode.isAlwaysTrue()) {
    return Filtration.matchEverything();
  }
  if (rexNode.isAlwaysFalse()) {
    return Filtration.matchNothing();
  }
  // Prefer the simple (native) translation; fall back to an expression filter.
  final DimFilter nativeFilter = toSimpleLeafFilter(plannerContext, rowSignature, rexNode);
  if (nativeFilter != null) {
    return nativeFilter;
  }
  return toExpressionLeafFilter(plannerContext, rowSignature, rexNode);
}
@Nullable @Override public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery query ) { final RexCall call = (RexCall) rexNode; final String arg0 = DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query); if (arg0 == null) { return null; } if (SqlTypeUtil.isDatetime((call.getOperands().get(0).getType()))) { // Timestamp is represented as long internally no need to any thing here return DruidExpressions.functionCall("div", ImmutableList.of(arg0, DruidExpressions.numberLiteral(1000))); } // dealing with String type final String format = call.getOperands().size() == 2 ? DruidExpressions .toDruidExpression(call.getOperands().get(1), rowType, query) : DEFAULT_TS_FORMAT; return DruidExpressions .functionCall("unix_timestamp", ImmutableList.of(arg0, DruidExpressions.stringLiteral(format))); } }
final RexBuilder rexBuilder = aggregate.getCluster().getRexBuilder(); final List<AggregateCall> newCalls = new ArrayList<>(aggregate.getAggCallList().size()); final List<RexNode> newProjects = new ArrayList<>(project.getChildExps()); final List<RexNode> newCasts = new ArrayList<>(aggregate.getGroupCount() + aggregate.getAggCallList().size()); final RelDataTypeFactory typeFactory = aggregate.getCluster().getTypeFactory(); newCasts.add(rexBuilder.makeInputRef(project.getChildExps().get(fieldNumber).getType(), fieldNumber)); final boolean flip = RexLiteral.isNullLiteral(caseCall.getOperands().get(1)) && !RexLiteral.isNullLiteral(caseCall.getOperands().get(2)); final RexNode arg1 = caseCall.getOperands().get(flip ? 2 : 1); final RexNode filterFromCase = rexBuilder.makeCall( booleanType, flip ? SqlStdOperatorTable.IS_FALSE : SqlStdOperatorTable.IS_TRUE, ImmutableList.of(caseCall.getOperands().get(0)) ); && arg1.isA(SqlKind.LITERAL) && !RexLiteral.isNullLiteral(arg1) && RexLiteral.isNullLiteral(arg2)) { final RelDataType oldType = aggregate.getRowType().getFieldList().get(i).getType();
private static void replaceEmptyGroupAggr(final RelNode rel, RelNode parent) { // If this function is called, the parent should only include constant List<RexNode> exps = parent.getChildExps(); for (RexNode rexNode : exps) { if (!rexNode.accept(new HiveCalciteUtil.ConstantFinder())) { throw new RuntimeException("We expect " + parent.toString() + " to contain only constants. However, " + rexNode.toString() + " is " + rexNode.getKind()); } } HiveAggregate oldAggRel = (HiveAggregate) rel; RelDataTypeFactory typeFactory = oldAggRel.getCluster().getTypeFactory(); RelDataType longType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, typeFactory); RelDataType intType = TypeConverter.convert(TypeInfoFactory.intTypeInfo, typeFactory); // Create the dummy aggregation. SqlAggFunction countFn = SqlFunctionConverter.getCalciteAggFn("count", false, ImmutableList.of(intType), longType); // TODO: Using 0 might be wrong; might need to walk down to find the // proper index of a dummy. List<Integer> argList = ImmutableList.of(0); AggregateCall dummyCall = new AggregateCall(countFn, false, argList, longType, null); Aggregate newAggRel = oldAggRel.copy(oldAggRel.getTraitSet(), oldAggRel.getInput(), oldAggRel.indicator, oldAggRel.getGroupSet(), oldAggRel.getGroupSets(), ImmutableList.of(dummyCall)); RelNode select = introduceDerivedTable(newAggRel); parent.replaceInput(0, select); } }
this.joinRel = joinRel; this.isSemiJoin = isSemiJoin; nFieldsLeft = joinRel.getLeft().getRowType().getFieldList().size(); nFieldsRight = joinRel.getRight().getRowType().getFieldList().size(); nSysFields = joinRel.getSystemFieldList().size(); leftFieldsBitSet = ImmutableBitSet.range(nSysFields, Mappings.TargetMapping leftMapping = Mappings.createShiftMapping( nSysFields + nFieldsLeft, nSysFields, 0, nFieldsLeft); leftChildPredicates = lPreds.accept( new RexPermuteInputsShuttle(leftMapping, joinRel.getInput(0))); exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r)); allExprsDigests.add(r.toString()); nSysFields + nFieldsLeft + nFieldsRight, nSysFields + nFieldsLeft, 0, nFieldsRight); rightChildPredicates = rPreds.accept( new RexPermuteInputsShuttle(rightMapping, joinRel.getInput(1))); exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r)); allExprsDigests.add(r.toString()); RexBuilder rexBuilder = joinRel.getCluster().getRexBuilder(); List<RexNode> exprs = RelOptUtil.conjunctions( compose(rexBuilder, ImmutableList.of(joinRel.getCondition())));
private List<RexNode> extractFilterPreds(Filter filterOp) { List<RexNode> conjs = new ArrayList<>(); for (RexNode r : HiveRelOptUtil.conjunctions(filterOp.getCondition())) { if (r.getKind() == SqlKind.IS_NOT_NULL) { RexCall isNotNullNode = (RexCall) r; if (RexUtil.isReferenceOrAccess(isNotNullNode.getOperands().get(0), true)) { ImmutableBitSet ref = RelOptUtil.InputFinder.bits(isNotNullNode); RelColumnOrigin co = mq.getColumnOrigin(filterOp, ref.nextSetBit(0)); if (co == null) { // We add it back conjs.add(r); continue; } RelOptHiveTable table = (RelOptHiveTable) co.getOriginTable(); List<ColStatistics> colStats = table.getColStat(ImmutableList.of(co.getOriginColumnOrdinal()), true); if (colStats == null || colStats.isEmpty() || colStats.get(0).getNumNulls() != 0) { // We add it back conjs.add(r); } } } else { conjs.add(r); } } return conjs; }
project.getProjects(), project.getInput().getRowType()).inverse(); Set<Integer> needed = new HashSet<>(); for (RelFieldCollation fc : sort.getCollation().getFieldCollations()) { needed.add(fc.getFieldIndex()); final RexNode node = project.getProjects().get(map.getTarget(fc.getFieldIndex())); if (node.isA(SqlKind.CAST)) { RexCallBinding.create(cluster.getTypeFactory(), cast, ImmutableList.of(RexUtil.apply(map, sort.getCollation()))); if (cast.getOperator().getMonotonicity(binding) == SqlMonotonicity.NOT_MONOTONIC) { return; RelTraitSet traitSet = sort.getCluster().traitSetOf(HiveRelNode.CONVENTION); RelCollation newCollation = traitSet.canonize(RelCollationImpl.of(fieldCollations)); final RelNode newProject = project.copy(sort.getInput().getTraitSet(), ImmutableList.<RelNode>of(sort.getInput())); final HiveSortLimit newSort = sort.copy(newProject.getTraitSet(), newProject, newCollation, sort.offset, sort.fetch);
); if (rexNode.getType().isNullable()) { final DimFilter nonNullFilter = Expressions.toFilter( plannerContext, rowSignature, rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, ImmutableList.of(rexNode)) );
aggregatorFactory = new HyperUniquesAggregatorFactory(aggregatorName, arg.getDirectColumn(), false, true); } else { final SqlTypeName sqlTypeName = rexNode.getType().getSqlTypeName(); final ValueType inputType = Calcites.getValueTypeForSqlTypeName(sqlTypeName); if (inputType == null) { aggregatorName, null, ImmutableList.of(dimensionSpec), false, true
public static HiveTableFunctionScan createUDTFForSetOp(RelOptCluster cluster, RelNode input) throws SemanticException { RelTraitSet traitSet = TraitsUtil.getDefaultTraitSet(cluster); List<RexNode> originalInputRefs = Lists.transform(input.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() { @Override public RexNode apply(RelDataTypeField input) { return new RexInputRef(input.getIndex(), input.getType()); } }); ImmutableList.Builder<RelDataType> argTypeBldr = ImmutableList.<RelDataType> builder(); for (int i = 0; i < originalInputRefs.size(); i++) { argTypeBldr.add(originalInputRefs.get(i).getType()); } RelDataType retType = input.getRowType(); String funcName = "replicate_rows"; FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName); SqlOperator calciteOp = SqlFunctionConverter.getCalciteOperator(funcName, fi.getGenericUDTF(), argTypeBldr.build(), retType); // Hive UDTF only has a single input List<RelNode> list = new ArrayList<>(); list.add(input); RexNode rexNode = cluster.getRexBuilder().makeCall(calciteOp, originalInputRefs); return HiveTableFunctionScan.create(cluster, traitSet, list, rexNode, null, retType, null); }
RexBuilder rexBuilder = filterRel.getCluster().getRexBuilder(); final RelBuilder relBuilder = call.builder(); List<RelDataTypeField> origFields = setOp.getRowType().getFieldList(); int[] adjustments = new int[origFields.size()]; final List<RelNode> newSetOpInputs = new ArrayList<>(); for (int index = 0; index < setOp.getInputs().size(); index++) { RelNode input = setOp.getInput(index); RexNode newCondition = condition.accept(new RelOptUtil.RexInputConverter(rexBuilder, origFields, input.getRowType().getFieldList(), adjustments)); if (setOp instanceof Union && setOp.all) { final RelMetadataQuery mq = call.getMetadataQuery(); final RelOptPredicateList predicates = mq.getPulledUpPredicates(input); if (predicates != null) { ImmutableList.Builder<RexNode> listBuilder = ImmutableList.builder(); listBuilder.addAll(predicates.pulledUpPredicates); listBuilder.add(newCondition); RexExecutor executor = Util.first(filterRel.getCluster().getPlanner().getExecutor(), RexUtil.EXECUTOR); final RexSimplify simplify = new RexSimplify(rexBuilder, true, executor); final RexNode x = simplify.simplifyAnds(listBuilder.build()); if (x.isAlwaysFalse()) {
private RexNode transformIntoInClauseCondition(RexBuilder rexBuilder, RexNode condition, int minNumORClauses) throws SemanticException { assert condition.getKind() == SqlKind.OR; ImmutableList<RexNode> operands = RexUtil.flattenOr(((RexCall) condition).getOperands()); if (operands.size() < minNumORClauses) { for (int i = 0; i < operands.size(); i++) { ConstraintGroup m = new ConstraintGroup(operands.get(i)); allNodes.add(m); return ops.get(0); } else { return rexBuilder.makeCall(SqlStdOperatorTable.OR, ops);
private static List<RexNode> extractCommonOperands(RexBuilder rexBuilder, RelNode input, RexNode condition, int maxCNFNodeCount) { assert condition.getKind() == SqlKind.OR; Multimap<String, RexNode> reductionCondition = LinkedHashMultimap.create(); ImmutableList<RexNode> operands = RexUtil.flattenOr(((RexCall) condition).getOperands()); for (int i = 0; i < operands.size(); i++) { final RexNode operand = operands.get(i); RexNode ref = rexBuilder.makeInputRef(input, refs.iterator().next()); String stringRef = ref.toString(); reductionCondition.put(stringRef, conjCall); refsInCurrentOperand.add(stringRef);
RexNode reduced = reduceCall(literal, SqlKind.EQUALS, max, min); if (reduced != null) { if (reduced.isAlwaysTrue()) { return rexBuilder.makeLiteral(true); return rexBuilder.makeLiteral(false); return rexBuilder.makeCall(HiveIn.INSTANCE, newOperands); } else if (call.getOperands().get(0).getKind() == SqlKind.ROW) { RexCall struct = (RexCall) call.getOperands().get(0); maxMinStats.get(j).left, maxMinStats.get(j).right); if (reduced != null) { if (reduced.isAlwaysFalse()) { allTrue = false; addOperand = false;