@Override
public PlanNode replaceChildren(List<PlanNode> newChildren)
{
    // A DistinctLimitNode has exactly one child (its source);
    // getOnlyElement fails fast if the caller passes anything else.
    PlanNode newSource = Iterables.getOnlyElement(newChildren);
    return new DistinctLimitNode(getId(), newSource, limit, partial, distinctSymbols, hashSymbol);
}
}
@Override
public PlanNode replaceChildren(List<PlanNode> newChildren)
{
    // Rebuild this node around the single replacement source; every other
    // field is carried over unchanged.
    return new DistinctLimitNode(
            getId(),
            Iterables.getOnlyElement(newChildren),
            limit,
            partial,
            distinctSymbols,
            hashSymbol);
}
}
@Override
public PlanNode visitDistinctLimit(DistinctLimitNode node, RewriteContext<Set<Symbol>> context)
{
    // The source only needs to produce the distinct symbols, plus the
    // precomputed hash symbol when the node carries one.
    ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.builder();
    expectedInputs.addAll(node.getDistinctSymbols());
    node.getHashSymbol().ifPresent(expectedInputs::add);

    PlanNode rewrittenSource = context.rewrite(node.getSource(), expectedInputs.build());
    return new DistinctLimitNode(node.getId(), rewrittenSource, node.getLimit(), node.isPartial(), node.getDistinctSymbols(), node.getHashSymbol());
}
@Override
public PlanNode visitDistinctLimit(DistinctLimitNode node, RewriteContext<Set<Symbol>> context)
{
    // Prune the source down to the symbols this node actually consumes:
    // the distinct symbols and, if present, the hash symbol.
    Set<Symbol> expectedInputs;
    if (node.getHashSymbol().isPresent()) {
        expectedInputs = ImmutableSet.<Symbol>builder()
                .addAll(node.getDistinctSymbols())
                .add(node.getHashSymbol().get())
                .build();
    }
    else {
        expectedInputs = ImmutableSet.copyOf(node.getDistinctSymbols());
    }
    PlanNode source = context.rewrite(node.getSource(), expectedInputs);
    return new DistinctLimitNode(node.getId(), source, node.getLimit(), node.isPartial(), node.getDistinctSymbols(), node.getHashSymbol());
}
@Override @Deprecated public PlanNode visitAggregation(AggregationNode node, RewriteContext<LimitContext> context) { LimitContext limit = context.get(); if (limit != null && node.getAggregations().isEmpty() && node.getOutputSymbols().size() == node.getGroupingKeys().size() && node.getOutputSymbols().containsAll(node.getGroupingKeys())) { PlanNode rewrittenSource = context.rewrite(node.getSource()); return new DistinctLimitNode(idAllocator.getNextId(), rewrittenSource, limit.getCount(), false, rewrittenSource.getOutputSymbols(), Optional.empty()); } PlanNode rewrittenNode = context.defaultRewrite(node); if (limit != null) { // Drop in a LimitNode b/c limits cannot be pushed through aggregations rewrittenNode = new LimitNode(idAllocator.getNextId(), rewrittenNode, limit.getCount(), limit.isPartial()); } return rewrittenNode; }
@Override @Deprecated public PlanNode visitAggregation(AggregationNode node, RewriteContext<LimitContext> context) { LimitContext limit = context.get(); if (limit != null && node.getAggregations().isEmpty() && node.getOutputSymbols().size() == node.getGroupingKeys().size() && node.getOutputSymbols().containsAll(node.getGroupingKeys())) { PlanNode rewrittenSource = context.rewrite(node.getSource()); return new DistinctLimitNode(idAllocator.getNextId(), rewrittenSource, limit.getCount(), false, rewrittenSource.getOutputSymbols(), Optional.empty()); } PlanNode rewrittenNode = context.defaultRewrite(node); if (limit != null) { // Drop in a LimitNode b/c limits cannot be pushed through aggregations rewrittenNode = new LimitNode(idAllocator.getNextId(), rewrittenNode, limit.getCount(), limit.isPartial()); } return rewrittenNode; }
@Override public PlanWithProperties visitDistinctLimit(DistinctLimitNode node, HashComputationSet parentPreference) { // skip hash symbol generation for single bigint if (canSkipHashGeneration(node.getDistinctSymbols())) { return planSimpleNodeWithProperties(node, parentPreference); } Optional<HashComputation> hashComputation = computeHash(node.getDistinctSymbols()); PlanWithProperties child = planAndEnforce( node.getSource(), new HashComputationSet(hashComputation), false, parentPreference.withHashComputation(node, hashComputation)); Symbol hashSymbol = child.getRequiredHashSymbol(hashComputation.get()); // TODO: we need to reason about how pre-computed hashes from child relate to distinct symbols. We should be able to include any precomputed hash // that's functionally dependent on the distinct field in the set of distinct fields of the new node to be able to propagate it downstream. // Currently, such precomputed hashes will be dropped by this operation. return new PlanWithProperties( new DistinctLimitNode(node.getId(), child.getNode(), node.getLimit(), node.isPartial(), node.getDistinctSymbols(), Optional.of(hashSymbol)), ImmutableMap.of(hashComputation.get(), hashSymbol)); }
@Override public PlanWithProperties visitDistinctLimit(DistinctLimitNode node, HashComputationSet parentPreference) { // skip hash symbol generation for single bigint if (canSkipHashGeneration(node.getDistinctSymbols())) { return planSimpleNodeWithProperties(node, parentPreference); } Optional<HashComputation> hashComputation = computeHash(node.getDistinctSymbols()); PlanWithProperties child = planAndEnforce( node.getSource(), new HashComputationSet(hashComputation), false, parentPreference.withHashComputation(node, hashComputation)); Symbol hashSymbol = child.getRequiredHashSymbol(hashComputation.get()); // TODO: we need to reason about how pre-computed hashes from child relate to distinct symbols. We should be able to include any precomputed hash // that's functionally dependent on the distinct field in the set of distinct fields of the new node to be able to propagate it downstream. // Currently, such precomputed hashes will be dropped by this operation. return new PlanWithProperties( new DistinctLimitNode(node.getId(), child.getNode(), node.getLimit(), node.isPartial(), node.getDistinctSymbols(), Optional.of(hashSymbol)), ImmutableMap.of(hashComputation.get(), hashSymbol)); }
@Override
public Result apply(LimitNode parent, Captures captures, Context context)
{
    // Fuse Limit over a distinct-style Aggregation into one DistinctLimitNode,
    // reusing the aggregation's grouping keys and hash symbol.
    AggregationNode child = captures.get(CHILD);
    DistinctLimitNode merged = new DistinctLimitNode(
            parent.getId(),
            child.getSource(),
            parent.getCount(),
            false,
            child.getGroupingKeys(),
            child.getHashSymbol());
    return Result.ofPlanNode(merged);
}
}
@Override
public Result apply(LimitNode parent, Captures captures, Context context)
{
    // Replace Limit(Aggregation) with a single DistinctLimitNode directly
    // over the aggregation's source.
    AggregationNode aggregation = captures.get(CHILD);
    return Result.ofPlanNode(new DistinctLimitNode(
            parent.getId(),
            aggregation.getSource(),
            parent.getCount(),
            false,
            aggregation.getGroupingKeys(),
            aggregation.getHashSymbol()));
}
}
@Override
public PlanNode visitDistinctLimit(DistinctLimitNode node, RewriteContext<Void> context)
{
    // Rewrite the source first, then map every referenced symbol to its
    // canonical representative (distinct symbols are also de-duplicated).
    PlanNode rewrittenSource = context.rewrite(node.getSource());
    return new DistinctLimitNode(
            node.getId(),
            rewrittenSource,
            node.getLimit(),
            node.isPartial(),
            canonicalizeAndDistinct(node.getDistinctSymbols()),
            canonicalize(node.getHashSymbol()));
}
@Override
public PlanNode visitDistinctLimit(DistinctLimitNode node, RewriteContext<Void> context)
{
    // Canonicalize the symbols this node references over the rewritten source.
    return new DistinctLimitNode(
            node.getId(),
            context.rewrite(node.getSource()),
            node.getLimit(),
            node.isPartial(),
            canonicalizeAndDistinct(node.getDistinctSymbols()),
            canonicalize(node.getHashSymbol()));
}
@Override
public PlanWithProperties visitDistinctLimit(DistinctLimitNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.any());
    if (!child.getProperties().isSingleNode()) {
        // Distributed source: run a partial DistinctLimit on each node, then
        // gather the partial results onto a single node; the rebased original
        // node performs the final pass.
        DistinctLimitNode partial = new DistinctLimitNode(
                idAllocator.getNextId(),
                child.getNode(),
                node.getLimit(),
                true,
                node.getDistinctSymbols(),
                node.getHashSymbol());
        child = withDerivedProperties(
                gatheringExchange(idAllocator.getNextId(), REMOTE, partial),
                child.getProperties());
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitDistinctLimit(DistinctLimitNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.any());
    if (child.getProperties().isSingleNode()) {
        // Already on a single node: the DistinctLimit can run as-is.
        return rebaseAndDeriveProperties(node, child);
    }
    // Otherwise insert a per-node partial DistinctLimit below a gathering
    // exchange and let the rebased original node do the final pass.
    child = withDerivedProperties(
            gatheringExchange(
                    idAllocator.getNextId(),
                    REMOTE,
                    new DistinctLimitNode(idAllocator.getNextId(), child.getNode(), node.getLimit(), true, node.getDistinctSymbols(), node.getHashSymbol())),
            child.getProperties());
    return rebaseAndDeriveProperties(node, child);
}