/**
 * Resolves the {@link QuerySegmentSpec} to use for segment lookup. When the query's data source is a
 * {@link QueryDataSource}, the spec of the innermost subquery is returned; otherwise the query's own spec.
 *
 * @throws IllegalStateException if a subquery is not a {@link BaseQuery}
 */
@VisibleForTesting
public static QuerySegmentSpec getQuerySegmentSpecForLookUp(BaseQuery query)
{
  // Walk down the subquery chain iteratively until we reach a non-query data source.
  BaseQuery current = query;
  while (current.getDataSource() instanceof QueryDataSource) {
    final Query subquery = ((QueryDataSource) current.getDataSource()).getQuery();
    if (!(subquery instanceof BaseQuery)) {
      // All native queries are expected to extend BaseQuery; anything else is a programming error.
      throw new IllegalStateException("Invalid subquery type " + subquery.getClass());
    }
    current = (BaseQuery) subquery;
  }
  return current.getQuerySegmentSpec();
}
/**
 * Merges {@code overrides} on top of this query's context and returns the merged map.
 *
 * @deprecated use {@link #computeOverriddenContext(Map, Map) computeOverriddenContext(getContext(), overrides))}
 * instead. This method may be removed in the next minor or major version of Druid.
 */
@Deprecated
protected Map<String, Object> computeOverridenContext(final Map<String, Object> overrides)
{
  // Delegates to the correctly-spelled static replacement; kept only for backwards compatibility.
  return computeOverriddenContext(getContext(), overrides);
}
@Override
public void advance()
{
  // Advance the underlying offset first; the interruption check MUST come after (see below).
  cursorOffset.increment();
  // Must call BaseQuery.checkInterrupted() after cursorOffset.increment(), not before, because
  // FilteredOffset.increment() is a potentially long, not an "instant" operation (unlike to all other subclasses
  // of Offset) and it returns early on interruption, leaving itself in an illegal state. We should not let
  // aggregators, etc. access this illegal state and throw a QueryInterruptedException by calling
  // BaseQuery.checkInterrupted().
  BaseQuery.checkInterrupted();
}
if (baseQuery.getQuerySegmentSpec() instanceof MultipleIntervalSegmentSpec) { return factory.getToolchest() .mergeResults( Execs.directExecutor(), Iterables.transform( baseQuery.getIntervals(), new Function<Interval, QueryRunner<T>>() ((SpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptor();
/**
 * Looks up the runner for this query by delegating to its segment spec.
 */
@Override
public QueryRunner<T> getRunner(QuerySegmentWalker walker)
{
  final QuerySegmentSpec segmentSpec = ((BaseQuery) query).getQuerySegmentSpec();
  return segmentSpec.lookup(this, walker);
}
@Override
public boolean equals(Object o)
{
  // Standard equals contract: identity, exact class, superclass state, then own fields.
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  if (!super.equals(o)) {
    return false;
  }
  final SegmentMetadataQuery other = (SegmentMetadataQuery) o;
  if (merge != other.merge) {
    return false;
  }
  if (usingDefaultInterval != other.usingDefaultInterval) {
    return false;
  }
  if (lenientAggregatorMerge != other.lenientAggregatorMerge) {
    return false;
  }
  return Objects.equals(toInclude, other.toInclude)
         && Objects.equals(analysisTypes, other.analysisTypes);
}
/**
 * Looks up the runner for this query, drilling into nested subqueries to find the effective segment spec.
 */
@Override
public QueryRunner<T> getRunner(QuerySegmentWalker walker)
{
  final QuerySegmentSpec spec = getQuerySegmentSpecForLookUp(this);
  return spec.lookup(this, walker);
}
@Override
public Query withOverriddenContext(Map contextOverride)
{
  // Merge the override entries on top of the current context and rebuild the query with it.
  final Map<String, Object> mergedContext = BaseQuery.computeOverriddenContext(getContext(), contextOverride);
  return new TestQuery(getDataSource(), getQuerySegmentSpec(), isDescending(), mergedContext);
}
}
/**
 * Returns the context value for {@code key}, or {@code defaultValue} when the key is absent (i.e. null).
 */
@Override
public <ContextType> ContextType getContextValue(String key, ContextType defaultValue)
{
  final ContextType value = getContextValue(key);
  if (value == null) {
    return defaultValue;
  }
  return value;
}
} else if ((int) context.get("count") == 1) { Assert.assertTrue("Should retry with 2 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 2); } else { Assert.assertTrue("Should retry with 1 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 1);
@Override
public boolean equals(Object o)
{
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  if (!super.equals(o)) {
    return false;
  }
  final TimeBoundaryQuery other = (TimeBoundaryQuery) o;
  // bound is never null; dimFilter may be, hence the null-safe comparison.
  return bound.equals(other.bound)
         && (dimFilter != null ? dimFilter.equals(other.dimFilter) : other.dimFilter == null);
}
@Test
public void testSegmentLookUpForNestedQueries()
{
  // Inner subquery covers a narrower interval than the outer query; segment lookup
  // should resolve to the innermost query's segment spec.
  QuerySegmentSpec innerQuerySegmentSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of(
      "2011-11-07/2011-11-08")));
  QuerySegmentSpec outerQuerySegmentSpec = new MultipleIntervalSegmentSpec(Collections.singletonList((Intervals.of(
      "2011-11-04/2011-11-08"))));
  List<AggregatorFactory> aggs = Collections.singletonList(QueryRunnerTestHelper.rowsCount);
  final GroupByQuery innerQuery = GroupByQuery.builder()
                                              .setDataSource("blah")
                                              .setInterval(innerQuerySegmentSpec)
                                              .setGranularity(Granularities.DAY)
                                              .setAggregatorSpecs(aggs)
                                              .build();
  final GroupByQuery query = GroupByQuery.builder()
                                         .setDataSource(innerQuery)
                                         .setInterval(outerQuerySegmentSpec)
                                         .setAggregatorSpecs(aggs)
                                         .setGranularity(Granularities.DAY)
                                         .build();
  // Expect the inner query's spec, not the outer one's.
  Assert.assertEquals(innerQuerySegmentSpec, BaseQuery.getQuerySegmentSpecForLookUp(query));
}
}
/**
 * Returns the query id stored in the context under {@code QUERY_ID}, or null if not set.
 */
@Override
public String getId()
{
  final Object queryId = getContextValue(QUERY_ID);
  return (String) queryId;
}
/**
 * Resolves the {@link QuerySegmentSpec} used for segment lookup, recursing through nested
 * {@link QueryDataSource}s until a concrete data source is reached.
 *
 * @throws IllegalStateException if a subquery is not a {@link BaseQuery}
 */
@VisibleForTesting
public static QuerySegmentSpec getQuerySegmentSpecForLookUp(BaseQuery query)
{
  // Base case: a non-query data source means this query's own spec is the one to use.
  if (!(query.getDataSource() instanceof QueryDataSource)) {
    return query.getQuerySegmentSpec();
  }
  final Query subquery = ((QueryDataSource) query.getDataSource()).getQuery();
  if (subquery instanceof BaseQuery) {
    return getQuerySegmentSpecForLookUp((BaseQuery) subquery);
  }
  // All native queries are expected to extend BaseQuery; anything else is a programming error.
  throw new IllegalStateException("Invalid subquery type " + subquery.getClass());
}
/**
 * Merges {@code overrides} on top of this query's context and returns the merged map.
 *
 * @deprecated use {@link #computeOverriddenContext(Map, Map) computeOverriddenContext(getContext(), overrides))}
 * instead. This method may be removed in the next minor or major version of Druid.
 */
@Deprecated
protected Map<String, Object> computeOverridenContext(final Map<String, Object> overrides)
{
  // Delegates to the correctly-spelled static replacement; kept only for backwards compatibility.
  return computeOverriddenContext(getContext(), overrides);
}
@Override
public boolean equals(final Object o)
{
  // Standard equals contract: identity, exact class, superclass state, then own fields.
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  if (!super.equals(o)) {
    return false;
  }
  final TopNQuery other = (TopNQuery) o;
  if (threshold != other.threshold) {
    return false;
  }
  if (!Objects.equals(virtualColumns, other.virtualColumns)) {
    return false;
  }
  if (!Objects.equals(dimensionSpec, other.dimensionSpec)) {
    return false;
  }
  if (!Objects.equals(topNMetricSpec, other.topNMetricSpec)) {
    return false;
  }
  if (!Objects.equals(dimFilter, other.dimFilter)) {
    return false;
  }
  return Objects.equals(aggregatorSpecs, other.aggregatorSpecs)
         && Objects.equals(postAggregatorSpecs, other.postAggregatorSpecs);
}
/**
 * Tries each specialized scan-and-aggregate implementation in turn; the first one that reports a
 * non-negative row count handled the scan. Otherwise falls back to the default implementation.
 * An interruption check runs after every (potentially long) scan.
 */
@Override
protected long scanAndAggregate(
    final PooledTopNParams params,
    final int[] positions,
    final BufferAggregator[] theAggregators
)
{
  for (ScanAndAggregate candidate : specializedScanAndAggregateImplementations) {
    final long handledRows = candidate.scanAndAggregate(params, positions, theAggregators);
    if (handledRows >= 0) {
      // A negative count means "not applicable"; this candidate actually did the work.
      BaseQuery.checkInterrupted();
      return handledRows;
    }
  }
  final long defaultRows = scanAndAggregateDefault(params, positions, theAggregators);
  BaseQuery.checkInterrupted();
  return defaultRows;
}
/**
 * Looks up the runner for this query, resolving the segment spec through any nested subqueries first.
 */
@Override
public QueryRunner<T> getRunner(QuerySegmentWalker walker)
{
  final QuerySegmentSpec lookupSpec = getQuerySegmentSpecForLookUp(this);
  return lookupSpec.lookup(this, walker);
}
/**
 * Returns the SQL query id stored in the context under {@code SQL_QUERY_ID}, or null if not set.
 */
@Nullable
@Override
public String getSqlQueryId()
{
  final Object sqlQueryId = getContextValue(SQL_QUERY_ID);
  return (String) sqlQueryId;
}
@Override
public boolean equals(final Object o)
{
  // Standard equals contract: identity, exact class, superclass state, then own fields.
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  if (!super.equals(o)) {
    return false;
  }
  final ScanQuery other = (ScanQuery) o;
  if (batchSize != other.batchSize || limit != other.limit || legacy != other.legacy) {
    return false;
  }
  if (!Objects.equals(virtualColumns, other.virtualColumns)) {
    return false;
  }
  if (!Objects.equals(resultFormat, other.resultFormat)) {
    return false;
  }
  return Objects.equals(dimFilter, other.dimFilter)
         && Objects.equals(columns, other.columns);
}