public static RelTraitSet getDefaultTraitSet(RelOptCluster cluster) {
  return cluster.traitSetOf(HiveRelNode.CONVENTION, RelCollations.EMPTY);
}
protected FlowFileTableScan(final RelOptCluster cluster, final RelOptTable table,
    final FlowFileTable<?, ?> flowFileTable, final int[] fields) {
  super(cluster, cluster.traitSetOf(EnumerableConvention.INSTANCE), table);
  this.flowFileTable = flowFileTable;
  this.fields = fields;
}
@Override
public EnumerableRel implementEnumerable(List<EnumerableRel> inputs) {
  if (this.hasSubQuery) {
    try {
      return constr.newInstance(getCluster(),
          getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
          inputs.get(0), inputs.get(1), condition, leftKeys, rightKeys, variablesSet, joinType);
    } catch (Exception e) {
      throw new IllegalStateException("Can't create EnumerableJoin!", e);
    }
  } else {
    return this;
  }
}
private RelNode genFilterRelNode(QB qb, ASTNode searchCond, RelNode srcRel,
    Map<String, RelNode> aliasToRel, ImmutableMap<String, Integer> outerNameToPosMap,
    RowResolver outerRR, boolean forHavingClause) throws SemanticException {
  Map<ASTNode, RelNode> subQueryToRelNode = new HashMap<>();
  boolean isSubQuery = genSubQueryRelNode(qb, searchCond, srcRel, forHavingClause,
      subQueryToRelNode);
  if (isSubQuery) {
    ExprNodeDesc subQueryExpr = genExprNodeDesc(searchCond, relToHiveRR.get(srcRel),
        outerRR, subQueryToRelNode, forHavingClause);
    ImmutableMap<String, Integer> hiveColNameCalcitePosMap =
        this.relToHiveColNameCalcitePosMap.get(srcRel);
    RexNode convertedFilterLHS = new RexNodeConverter(cluster, srcRel.getRowType(),
        outerNameToPosMap, hiveColNameCalcitePosMap, relToHiveRR.get(srcRel), outerRR,
        0, true, subqueryId).convert(subQueryExpr);
    RelNode filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
        srcRel, convertedFilterLHS);
    this.relToHiveColNameCalcitePosMap.put(filterRel,
        this.relToHiveColNameCalcitePosMap.get(srcRel));
    relToHiveRR.put(filterRel, relToHiveRR.get(srcRel));
    return filterRel;
  } else {
    return genFilterRelNode(searchCond, srcRel, outerNameToPosMap, outerRR, forHavingClause);
  }
}
public OLAPTableScan(RelOptCluster cluster, RelOptTable table, OLAPTable olapTable, int[] fields) {
  super(cluster, cluster.traitSetOf(OLAPRel.CONVENTION), table);
  this.olapTable = olapTable;
  this.fields = fields;
  this.tableName = olapTable.getTableName();
  this.rowType = getRowType();
  this.kylinConfig = KylinConfig.getInstanceFromEnv();
}
@Override
public EnumerableRel implementEnumerable(List<EnumerableRel> inputs) {
  try {
    return new EnumerableAggregate(getCluster(),
        getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
        sole(inputs), indicator, this.groupSet, this.groupSets, rewriteAggCalls);
  } catch (InvalidRelException e) {
    throw new IllegalStateException("Can't create EnumerableAggregate!", e);
  }
}
    .createSqlType(SqlTypeName.BIGINT), HiveGroupingID.INSTANCE.getName());
aggregateCalls.add(aggCall);
return new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
    aggregate.getInput(), groupSet, origGroupSets, aggregateCalls);
private RelNode genFilterRelNode(QB qb, ASTNode searchCond, RelNode srcRel,
    Map<String, RelNode> aliasToRel, ImmutableMap<String, Integer> outerNameToPosMap,
    RowResolver outerRR, boolean forHavingClause) throws SemanticException {
  Map<ASTNode, RelNode> subQueryToRelNode = new HashMap<>();
  boolean isSubQuery = genSubQueryRelNode(qb, searchCond, srcRel, forHavingClause,
      subQueryToRelNode);
  if (isSubQuery) {
    ExprNodeDesc subQueryExpr = genExprNodeDesc(searchCond, relToHiveRR.get(srcRel),
        outerRR, subQueryToRelNode, forHavingClause);
    ImmutableMap<String, Integer> hiveColNameCalcitePosMap =
        this.relToHiveColNameCalcitePosMap.get(srcRel);
    RexNode convertedFilterLHS = new RexNodeConverter(cluster, srcRel.getRowType(),
        outerNameToPosMap, hiveColNameCalcitePosMap, relToHiveRR.get(srcRel), outerRR,
        0, true, subqueryId).convert(subQueryExpr);
    RelNode filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
        srcRel, convertedFilterLHS);
    this.relToHiveColNameCalcitePosMap.put(filterRel,
        this.relToHiveColNameCalcitePosMap.get(srcRel));
    relToHiveRR.put(filterRel, relToHiveRR.get(srcRel));
    this.subqueryId++;
    return filterRel;
  } else {
    return genFilterRelNode(searchCond, srcRel, outerNameToPosMap, outerRR, forHavingClause);
  }
}
/**
 * Create a DruidQueryRel representing a full scan.
 */
public static DruidQueryRel fullScan(
    final LogicalTableScan scanRel,
    final RelOptTable table,
    final DruidTable druidTable,
    final QueryMaker queryMaker
) {
  return new DruidQueryRel(
      scanRel.getCluster(),
      scanRel.getCluster().traitSetOf(Convention.NONE),
      table,
      druidTable,
      queryMaker,
      PartialDruidQuery.create(scanRel)
  );
}
RexNode factoredFilterExpr = RexUtil
    .pullFactors(cluster.getRexBuilder(), convertedFilterExpr);
RelNode filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
    srcRel, factoredFilterExpr);
this.relToHiveColNameCalcitePosMap.put(filterRel, hiveColNameCalcitePosMap);
private RelNode createFirstGB(RelNode input, boolean left, RelOptCluster cluster,
    RexBuilder rexBuilder) throws CalciteSemanticException {
  final List<RexNode> gbChildProjLst = Lists.newArrayList();
  final List<Integer> groupSetPositions = Lists.newArrayList();
  for (int cInd = 0; cInd < input.getRowType().getFieldList().size(); cInd++) {
    gbChildProjLst.add(rexBuilder.makeInputRef(input, cInd));
    groupSetPositions.add(cInd);
  }
  if (left) {
    gbChildProjLst.add(rexBuilder.makeBigintLiteral(new BigDecimal(2)));
  } else {
    gbChildProjLst.add(rexBuilder.makeBigintLiteral(new BigDecimal(1)));
  }

  // also add the last VCol
  groupSetPositions.add(input.getRowType().getFieldList().size());

  // create the project before GB
  RelNode gbInputRel = HiveProject.create(input, gbChildProjLst, null);

  // groupSetPosition includes all the positions
  final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions);

  List<AggregateCall> aggregateCalls = Lists.newArrayList();
  RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo,
      cluster.getTypeFactory());
  AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("count", cluster,
      TypeInfoFactory.longTypeInfo, input.getRowType().getFieldList().size(), aggFnRetType);
  aggregateCalls.add(aggregateCall);
  return new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), gbInputRel,
      groupSet, null, aggregateCalls);
}
private RelNode copyNodeScan(RelNode scan) {
  final RelNode newScan;
  if (scan instanceof DruidQuery) {
    final DruidQuery dq = (DruidQuery) scan;
    // Ideally we should use HiveRelNode convention. However, since Volcano planner
    // throws in that case because DruidQuery does not implement the interface,
    // we set it as Bindable. Currently, we do not use convention in Hive, hence that
    // should be fine.
    // TODO: If we want to make use of convention (e.g., while directly generating operator
    // tree instead of AST), this should be changed.
    newScan = DruidQuery.create(optCluster, optCluster.traitSetOf(BindableConvention.INSTANCE),
        scan.getTable(), dq.getDruidTable(), ImmutableList.<RelNode>of(dq.getTableScan()));
  } else {
    newScan = new HiveTableScan(optCluster, optCluster.traitSetOf(HiveRelNode.CONVENTION),
        (RelOptHiveTable) scan.getTable(), ((RelOptHiveTable) scan.getTable()).getName(),
        null, false, false);
  }
  return newScan;
}
private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
  HiveRelNode sortRel = null;
  QBParseInfo qbp = getQBParseInfo(qb);
  SimpleEntry<Integer, Integer> entry =
      qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next());
  Integer offset = (entry == null) ? 0 : entry.getKey();
  Integer fetch = (entry == null) ? null : entry.getValue();

  if (fetch != null) {
    RexNode offsetRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(offset));
    RexNode fetchRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(fetch));
    RelTraitSet traitSet = cluster.traitSetOf(HiveRelNode.CONVENTION);
    RelCollation canonizedCollation = traitSet.canonize(RelCollations.EMPTY);
    sortRel = new HiveSortLimit(cluster, traitSet, srcRel, canonizedCollation, offsetRN, fetchRN);

    RowResolver inputRR = relToHiveRR.get(srcRel);
    RowResolver outputRR = inputRR.duplicate();
    ImmutableMap<String, Integer> hiveColNameCalcitePosMap =
        buildHiveToCalciteColumnMap(outputRR, sortRel);
    relToHiveRR.put(sortRel, outputRR);
    relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap);
  }

  return sortRel;
}
/** Creates an OLAPValuesRel. */
public static OLAPValuesRel create(RelOptCluster cluster, final RelDataType rowType,
    final ImmutableList<ImmutableList<RexLiteral>> tuples) {
  final RelMetadataQuery mq = cluster.getMetadataQuery();
  final RelTraitSet traitSet = cluster.traitSetOf(OLAPRel.CONVENTION)
      .replaceIfs(RelCollationTraitDef.INSTANCE, new Supplier<List<RelCollation>>() {
        public List<RelCollation> get() {
          return RelMdCollation.values(mq, rowType, tuples);
        }
      }).replaceIf(RelDistributionTraitDef.INSTANCE, new Supplier<RelDistribution>() {
        public RelDistribution get() {
          return RelMdDistribution.values(rowType, tuples);
        }
      });
  return new OLAPValuesRel(cluster, rowType, tuples, traitSet);
}
@Override
public EnumerableRel implementEnumerable(List<EnumerableRel> inputs) {
  return new EnumerableSort(getCluster(),
      getCluster().traitSetOf(EnumerableConvention.INSTANCE).replace(collation), //
      sole(inputs), collation, offset, fetch);
}
@Override
public RelOptMaterialization apply(RelOptMaterialization materialization) {
  final RelNode viewScan = materialization.tableRel;
  final RelNode newViewScan;
  if (viewScan instanceof DruidQuery) {
    final DruidQuery dq = (DruidQuery) viewScan;
    newViewScan = DruidQuery.create(optCluster, optCluster.traitSetOf(HiveRelNode.CONVENTION),
        viewScan.getTable(), dq.getDruidTable(), ImmutableList.<RelNode>of(dq.getTableScan()));
  } else {
    newViewScan = new HiveTableScan(optCluster, optCluster.traitSetOf(HiveRelNode.CONVENTION),
        (RelOptHiveTable) viewScan.getTable(), viewScan.getTable().getQualifiedName().get(0),
        null, false, false);
  }
  return new RelOptMaterialization(newViewScan, materialization.queryRel, null);
}
private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
  HiveRelNode sortRel = null;
  QBParseInfo qbp = getQBParseInfo(qb);
  SimpleEntry<Integer, Integer> entry =
      qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next());
  Integer offset = (entry == null) ? 0 : entry.getKey();
  Integer fetch = (entry == null) ? null : entry.getValue();

  if (fetch != null) {
    RexNode offsetRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(offset));
    RexNode fetchRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(fetch));
    RelTraitSet traitSet = cluster.traitSetOf(HiveRelNode.CONVENTION);
    RelCollation canonizedCollation = traitSet.canonize(RelCollations.EMPTY);
    sortRel = new HiveSortLimit(cluster, traitSet, srcRel, canonizedCollation, offsetRN, fetchRN);

    RowResolver outputRR = new RowResolver();
    if (!RowResolver.add(outputRR, relToHiveRR.get(srcRel))) {
      throw new CalciteSemanticException(
          "Duplicates detected when adding columns to RR: see previous message",
          UnsupportedFeature.Duplicates_in_RR);
    }
    ImmutableMap<String, Integer> hiveColNameCalcitePosMap =
        buildHiveToCalciteColumnMap(outputRR, sortRel);
    relToHiveRR.put(sortRel, outputRR);
    relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap);
  }

  return sortRel;
}
@Override
public EnumerableRel implementEnumerable(List<EnumerableRel> inputs) {
  if (getInput() instanceof OLAPFilterRel) {
    // merge project & filter
    OLAPFilterRel filter = (OLAPFilterRel) getInput();
    RelNode inputOfFilter = inputs.get(0).getInput(0);
    RexProgram program = RexProgram.create(inputOfFilter.getRowType(), this.rewriteProjects,
        filter.getCondition(), this.rowType, getCluster().getRexBuilder());
    return new EnumerableCalc(getCluster(),
        getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
        inputOfFilter, program);
  } else {
    // keep project for table scan
    EnumerableRel input = sole(inputs);
    RexProgram program = RexProgram.create(input.getRowType(), this.rewriteProjects, null,
        this.rowType, getCluster().getRexBuilder());
    return new EnumerableCalc(getCluster(),
        getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
        input, program);
  }
}
@Override
public EnumerableRel implementEnumerable(List<EnumerableRel> inputs) {
  // keep it for having clause
  RexBuilder rexBuilder = getCluster().getRexBuilder();
  RelDataType inputRowType = getInput().getRowType();
  RexProgramBuilder programBuilder = new RexProgramBuilder(inputRowType, rexBuilder);
  programBuilder.addIdentity();
  programBuilder.addCondition(this.condition);
  RexProgram program = programBuilder.getProgram();
  return new EnumerableCalc(getCluster(),
      getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
      sole(inputs), program);
}