@Override public RelNode createJoin(RelNode left, RelNode right, RexNode condition, Set<CorrelationId> variablesSet, JoinRelType joinType, boolean semiJoinDone) { // According to calcite, it is going to be removed before Calcite-2.0 // TODO: to handle CorrelationId return HiveJoin.getJoin(left.getCluster(), left, right, condition, joinType); } }
/**
 * Builds a {@link DruidUnionRel} over the given inputs.
 *
 * <p>The cluster and trait set are taken from the first input; the input
 * list is defensively copied.
 *
 * @param queryMaker query maker used to execute the union
 * @param rowType    row type of the resulting union
 * @param rels       union inputs; must be non-empty
 * @param limit      row limit (NOTE(review): the "no limit" sentinel is not
 *                   visible from here — confirm against callers)
 * @throws IllegalStateException if {@code rels} is empty
 */
public static DruidUnionRel create(
    final QueryMaker queryMaker,
    final RelDataType rowType,
    final List<RelNode> rels,
    final int limit
) {
  // NOTE(review): checkArgument would be conventional for a parameter check,
  // but checkState is kept so the thrown exception type is unchanged.
  Preconditions.checkState(!rels.isEmpty(), "rels must be nonempty");
  return new DruidUnionRel(
      rels.get(0).getCluster(),
      rels.get(0).getTraitSet(),
      queryMaker,
      rowType,
      new ArrayList<>(rels),
      limit
  );
}
/**
 * Builds a {@link HiveAggregate} over the given input.
 *
 * @throws IllegalStateException if indicator columns are requested, since
 *     Hive does not support them
 */
@Override
public RelNode createAggregate(RelNode child, boolean indicator, ImmutableBitSet groupSet,
    ImmutableList<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
  if (indicator) {
    throw new IllegalStateException("Hive does not support indicator columns but Calcite "
        + "created an Aggregate operator containing them");
  }
  return new HiveAggregate(child.getCluster(), child.getTraitSet(), child,
      groupSet, groupSets, aggCalls);
}
}
/**
 * Returns whether the estimated per-bucket memory of {@code input} fits
 * within {@code maxSize}.
 *
 * <p>Returns {@code false} when no cumulative-memory estimate is available
 * from the metadata query.
 * NOTE(review): a null {@code maxSize} would NPE on unboxing — confirm
 * callers always supply a value.
 */
public static boolean isFittingIntoMemory(Double maxSize, RelNode input, int buckets) {
  final RelMetadataQuery mq = input.getCluster().getMetadataQuery();
  final Double currentMemory = mq.cumulativeMemoryWithinPhase(input);
  if (currentMemory == null) {
    return false;
  }
  // Negation of the original comparison, kept in this form so NaN handling
  // (e.g. 0.0 / 0 buckets) is byte-for-byte unchanged.
  return !(currentMemory / buckets > maxSize);
}
/** Wraps {@code child} in a {@link HiveFilter} carrying the given predicate. */
@Override
public RelNode createFilter(RelNode child, RexNode condition) {
  final RelOptCluster cluster = child.getCluster();
  return new HiveFilter(cluster, TraitsUtil.getDefaultTraitSet(cluster), child, condition);
}
}
/** Builds a {@link HiveSemiJoin}, deriving the equi-join keys from the condition. */
@Override
public RelNode createSemiJoin(RelNode left, RelNode right, RexNode condition) {
  final JoinInfo joinInfo = JoinInfo.of(left, right, condition);
  return HiveSemiJoin.getSemiJoin(left.getCluster(), left.getTraitSet(), left, right,
      condition, joinInfo.leftKeys, joinInfo.rightKeys);
}
}
/**
 * Creates a {@link PartialDruidQuery} rooted at the given scan, with every
 * subsequent query stage left unset.
 */
public static PartialDruidQuery create(final RelNode scanRel) {
  // Builder creation stays inside the lambda so cluster/schema are resolved
  // lazily, exactly as before.
  final Supplier<RelBuilder> builderSupplier = () ->
      RelFactories.LOGICAL_BUILDER.create(
          scanRel.getCluster(), scanRel.getTable().getRelOptSchema());
  return new PartialDruidQuery(builderSupplier, scanRel,
      null, null, null, null, null, null, null, null);
}
/**
 * Decorrelates the given plan and then trims unused fields from it.
 *
 * <p>The planner, required output traits, materializations, and lattices are
 * not consulted by this implementation.
 */
@Override
public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
    List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
  final RelNode decorrelated = RelDecorrelator.decorrelateQuery(rel);
  final RelBuilder builder =
      RelFactories.LOGICAL_BUILDER.create(decorrelated.getCluster(), null);
  return new RelFieldTrimmer(null, builder).trim(decorrelated);
}
}
/**
 * Handles an EXPLAIN statement by dumping the plan of {@code rel} as a
 * single-row, single-VARCHAR-column ("PLAN") result.
 */
private PlannerResult planExplanation(
    final RelNode rel,
    final SqlExplain explain,
    final Set<String> datasourceNames
) {
  final String explanation =
      RelOptUtil.dumpPlan("", rel, explain.getFormat(), explain.getDetailLevel());
  final RelDataTypeFactory typeFactory = rel.getCluster().getTypeFactory();
  final Supplier<Sequence<Object[]>> resultsSupplier =
      Suppliers.ofInstance(Sequences.simple(ImmutableList.of(new Object[]{explanation})));
  return new PlannerResult(
      resultsSupplier,
      typeFactory.createStructType(
          ImmutableList.of(Calcites.createSqlType(typeFactory, SqlTypeName.VARCHAR)),
          ImmutableList.of("PLAN")),
      datasourceNames);
}
}
/**
 * Creates a {@link HiveProject} over {@code child} with the given expressions
 * and output field names, using the default trait set and no collation.
 */
@Override
public RelNode createProject(RelNode child, List<? extends RexNode> childExprs,
    List<String> fieldNames) {
  final RelOptCluster cluster = child.getCluster();
  final RelDataType rowType =
      RexUtil.createStructType(cluster.getTypeFactory(), childExprs, fieldNames);
  final RelTraitSet traits = TraitsUtil.getDefaultTraitSet(cluster, child.getTraitSet());
  return HiveProject.create(cluster, child, childExprs, rowType, traits,
      Collections.<RelCollation>emptyList());
}
}
/**
 * Returns whether the distribution keys of this operator cover every
 * grouping column, i.e. the input already arrives bucketed on the group set.
 */
public boolean isBucketedInput() {
  final RelMetadataQuery mq = getInput().getCluster().getMetadataQuery();
  return mq.distribution(this).getKeys().containsAll(groupSet.asList());
}
/**
 * Returns the distinct row count of column {@code indx} of {@code r},
 * evaluated under a constant TRUE predicate (i.e. no filtering).
 */
public static Double getDistinctRowCount(RelNode r, RelMetadataQuery mq, int indx) {
  final ImmutableBitSet projectedColumn = ImmutableBitSet.of(indx);
  return mq.getDistinctRowCount(r, projectedColumn,
      r.getCluster().getRexBuilder().makeLiteral(true));
}
/**
 * Wraps {@code rel} in an identity {@link HiveProject}, effectively turning
 * it into a derived table with the same row type and collation.
 */
private static RelNode introduceDerivedTable(final RelNode rel) {
  final List<RexNode> identityProjects = HiveCalciteUtil.getProjsFromBelowAsInputRef(rel);
  return HiveProject.create(rel.getCluster(), rel, identityProjects,
      rel.getRowType(), rel.getCollationList());
}
/**
 * Utility function to extract the time zone to use for a Druid query argument.
 *
 * @param query Druid Rel whose planner context supplies the session time zone
 * @param arg   expression whose SQL type decides whether the session zone applies
 * @return the connection's time zone when {@code arg} is of type
 *         {@code TIMESTAMP WITH LOCAL TIME ZONE}, otherwise UTC
 */
private static TimeZone timezoneId(final DruidQuery query, final RexNode arg) {
  if (arg.getType().getSqlTypeName() != SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
    return TimeZone.getTimeZone("UTC");
  }
  final String zoneId = query.getTopNode().getCluster().getPlanner().getContext()
      .unwrap(CalciteConnectionConfig.class).timeZone();
  return TimeZone.getTimeZone(zoneId);
}
// Maps an input-row field to an input reference of the same type and index
// via rel's RexBuilder.
// NOTE(review): the enclosing anonymous class begins outside this view, so
// the trailing "} });" closes it and its surrounding call.
@Override public RexNode apply(RelDataTypeField field) { return rel.getCluster().getRexBuilder().makeInputRef(field.getType(), field.getIndex()); } });
/**
 * Translates a Calcite {@link RexNode} into a Hive {@code ExprNodeDesc}
 * against the row type of {@code inputRel}.
 */
private static ExprNodeDesc convertToExprNode(RexNode rn, RelNode inputRel, String tabAlias,
    Set<Integer> vcolsInCalcite) {
  final ExprNodeConverter converter = new ExprNodeConverter(tabAlias, inputRel.getRowType(),
      vcolsInCalcite, inputRel.getCluster().getTypeFactory(), true);
  return rn.accept(converter);
}
/** Re-tags {@code rel} with this converter rule's output convention. */
@Override
public RelNode convert(RelNode rel) {
  final RelTraitSet converted = rel.getTraitSet().replace(getOutConvention());
  return new OLAPToEnumerableConverter(rel.getCluster(), converted, rel);
}
/** Converts a {@link Union} to an {@link OLAPUnionRel}, converting its inputs too. */
@Override
public RelNode convert(RelNode rel) {
  final Union union = (Union) rel;
  final RelTraitSet traits = union.getTraitSet().replace(OLAPRel.CONVENTION);
  return new OLAPUnionRel(rel.getCluster(), traits,
      convertList(union.getInputs(), OLAPRel.CONVENTION), union.all);
}
}
/**
 * Converts a {@link Sort} to an {@link OLAPSortRel}.
 *
 * @return the converted rel, or {@code null} when the sort carries an OFFSET
 *     or FETCH, which this conversion does not support
 */
@Override
public RelNode convert(RelNode rel) {
  final Sort sort = (Sort) rel;
  if (sort.offset != null || sort.fetch != null) {
    return null;
  }
  final RelNode input = sort.getInput();
  final RelNode convertedInput =
      convert(input, input.getTraitSet().replace(OLAPRel.CONVENTION));
  return new OLAPSortRel(rel.getCluster(),
      sort.getTraitSet().replace(OLAPRel.CONVENTION), convertedInput,
      sort.getCollation(), sort.offset, sort.fetch);
}
/** Converts a {@link Window} to an {@link OLAPWindowRel}, converting its input too. */
@Override
public RelNode convert(RelNode rel) {
  final Window window = (Window) rel;
  final RelNode input = window.getInput();
  final RelNode convertedInput =
      convert(input, input.getTraitSet().replace(OLAPRel.CONVENTION));
  return new OLAPWindowRel(rel.getCluster(),
      window.getTraitSet().replace(OLAPRel.CONVENTION), convertedInput,
      window.constants, window.getRowType(), window.groups);
}
}