/**
 * Materializes the builder's accumulated state into an immutable {@link SelectQuery}.
 */
public SelectQuery build()
{
  final SelectQuery builtQuery = new SelectQuery(
      dataSource,
      querySegmentSpec,
      descending,
      dimFilter,
      granularity,
      dimensions,
      metrics,
      virtualColumns,
      pagingSpec,
      context
  );
  return builtQuery;
}
/**
 * Returns a new builder pre-populated with every field of the given query, so callers
 * can tweak individual fields and rebuild.
 */
public static SelectQueryBuilder copy(SelectQuery query)
{
  // Reassign after each call so behavior is preserved even if the fluent
  // methods ever return a fresh builder instead of {@code this}.
  SelectQueryBuilder builder = new SelectQueryBuilder();
  builder = builder.dataSource(query.getDataSource());
  builder = builder.intervals(query.getQuerySegmentSpec());
  builder = builder.descending(query.isDescending());
  builder = builder.filters(query.getFilter());
  builder = builder.granularity(query.getGranularity());
  builder = builder.dimensionSpecs(query.getDimensions());
  builder = builder.metrics(query.getMetrics());
  builder = builder.virtualColumns(query.getVirtualColumns());
  builder = builder.pagingSpec(query.getPagingSpec());
  builder = builder.context(query.getContext());
  return builder;
}
/**
 * Don't run this benchmark with a query that doesn't use {@link Granularities#ALL},
 * this pagination function probably doesn't work correctly in that case.
 *
 * Returns a copy of {@code query} whose per-segment paging offsets are each advanced
 * by one past the offsets reported in {@code prevResult}, so the next run resumes
 * where the previous page left off.
 */
private SelectQuery incrementQueryPagination(SelectQuery query, SelectResultValue prevResult)
{
  final Map<String, Integer> pagingIdentifiers = prevResult.getPagingIdentifiers();
  // Iterate entries directly (avoids a second map lookup per key vs. keySet()+get())
  // and presize the destination map to avoid rehashing. Also fixes the misspelled
  // local name ("Identifers").
  final Map<String, Integer> newPagingIdentifiers = new HashMap<>(pagingIdentifiers.size());
  for (Map.Entry<String, Integer> entry : pagingIdentifiers.entrySet()) {
    newPagingIdentifiers.put(entry.getKey(), entry.getValue() + 1);
  }
  return query.withPagingSpec(new PagingSpec(newPagingIdentifiers, pagingThreshold));
}
/**
 * Renders the query for logging/debugging. Field order and formatting match the
 * original concatenation exactly ({@code StringBuilder.append(Object)} uses the same
 * {@code String.valueOf} conversion as {@code +}).
 */
@Override
public String toString()
{
  final StringBuilder sb = new StringBuilder("SelectQuery{");
  sb.append("dataSource='").append(getDataSource()).append('\'');
  sb.append(", querySegmentSpec=").append(getQuerySegmentSpec());
  sb.append(", descending=").append(isDescending());
  sb.append(", dimFilter=").append(dimFilter);
  sb.append(", granularity=").append(getGranularity());
  sb.append(", dimensions=").append(dimensions);
  sb.append(", metrics=").append(metrics);
  sb.append(", virtualColumns=").append(virtualColumns);
  sb.append(", pagingSpec=").append(pagingSpec);
  sb.append('}');
  return sb.toString();
}
// Builds the pairwise merge function used to combine two partial Select results into
// one: granularity aligns results into time buckets, pagingSpec bounds the merged
// page, and descending fixes the row ordering applied while merging.
@Override
protected BinaryFn<Result<SelectResultValue>, Result<SelectResultValue>, Result<SelectResultValue>> createMergeFn(
    Query<Result<SelectResultValue>> input
)
{
  // Safe cast: this toolchest only ever receives SelectQuery instances.
  SelectQuery query = (SelectQuery) input;
  return new SelectBinaryFn(
      query.getGranularity(),
      query.getPagingSpec(),
      query.isDescending()
  );
}
// Closes the enclosing anonymous class whose declaration begins above this chunk.
};
query.getDataSource().getNames().size() == 1, "At the point where this code is called, only one data source should exist. Data sources: %s", query.getDataSource().getNames() ); if (query.getDimensions() == null || query.getDimensions().isEmpty()) { dims = DefaultDimensionSpec.toSpec(adapter.getAvailableDimensions()); } else { dims = query.getDimensions(); if (query.getMetrics() == null || query.getMetrics().isEmpty()) { metrics = adapter.getAvailableMetrics(); } else { metrics = query.getMetrics(); List<Interval> intervals = query.getQuerySegmentSpec().getIntervals(); Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got[%s]", intervals); final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter())); query.getQuerySegmentSpec().getIntervals(), filter, query.getVirtualColumns(), query.isDescending(), query.getGranularity(), new Function<Cursor, Result<SelectResultValue>>()
@Override public byte[] computeCacheKey(SelectQuery query) final DimFilter dimFilter = query.getDimensionsFilter(); final byte[] filterBytes = dimFilter == null ? new byte[]{} : dimFilter.getCacheKey(); final byte[] granularityBytes = query.getGranularity().getCacheKey(); query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][]; int dimensionsBytesSize = 0; if (query.getMetrics() != null) { metrics.addAll(query.getMetrics()); final byte[] virtualColumnsCacheKey = query.getVirtualColumns().getCacheKey(); final byte isDescendingByte = query.isDescending() ? (byte) 1 : 0; + query.getPagingSpec().getCacheKey().length + dimensionsBytesSize + metricBytesSize .put(granularityBytes) .put(filterBytes) .put(query.getPagingSpec().getCacheKey()) .put(isDescendingByte);
query.getPagingSpec(), query.isDescending() ); final PagingOffset offset = query.getPagingOffset(segmentId);
final String dataSource = Iterables.getOnlyElement(query.getDataSource().getNames()); PagingSpec pagingSpec = query.getPagingSpec(); Map<String, Integer> paging = pagingSpec.getPagingIdentifiers(); if (paging == null || paging.isEmpty()) { final Granularity granularity = query.getGranularity(); .filter(identifier -> SegmentId.tryParse(dataSource, identifier) != null) .map(SegmentId.makeIntervalExtractor(dataSource)) .sorted(query.isDescending() ? Comparators.intervalsByEndThenStart() : Comparators.intervalsByStartThenEnd()) .forEach(interval -> { if (query.isDescending()) { long granularEnd = granularity.bucketStart(interval.getEnd()).getMillis(); Long currentEnd = granularThresholds.get(granularEnd); if (query.isDescending()) { while (it.hasNext()) { Interval interval = it.next().getInterval();
+ "\"context\":null}"; SelectQuery queryWithNull = new SelectQuery( new TableDataSource(QueryRunnerTestHelper.dataSource), new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), ); SelectQuery queryWithFalse = queryWithNull.withPagingSpec( new PagingSpec(null, 3, false) ); SelectQuery queryWithTrue = queryWithNull.withPagingSpec( new PagingSpec(null, 3, true) );
/**
 * Repeatedly runs {@code query}, and after each pass verifies the merged per-segment
 * paging identifiers against one row of {@code expectedOffsets}, then advances the
 * query's paging spec so the next pass continues from the merged cursor.
 *
 * Expected offsets are signed: descending scans report negative offsets, ascending
 * scans non-negative ones — TODO confirm against PagingSpec/PagingOffset semantics.
 */
private void runDayGranularityTest(SelectQuery query, int[][] expectedOffsets)
{
  for (int[] expected : expectedOffsets) {
    List<Result<SelectResultValue>> results = runner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
    // Two results expected: one per day bucket in the queried interval.
    Assert.assertEquals(2, results.size());
    SelectResultValue value0 = results.get(0).getValue();
    SelectResultValue value1 = results.get(1).getValue();
    Map<String, Integer> pagingIdentifiers0 = value0.getPagingIdentifiers();
    Map<String, Integer> pagingIdentifiers1 = value1.getPagingIdentifiers();
    Map<String, Integer> merged = PagingSpec.merge(Arrays.asList(pagingIdentifiers0, pagingIdentifiers1));
    for (int i = 0; i < 4; i++) {
      // XOR is true when exactly one operand holds, i.e. when the expected offset's
      // sign matches the scan direction (descending => negative, ascending => >= 0).
      // Entries whose sign doesn't match the direction are skipped, not asserted.
      if (query.isDescending() ^ expected[i] >= 0) {
        Assert.assertEquals(expected[i], merged.get(segmentIdentifiers.get(i)).intValue());
      }
    }
    // Advance pagination (threshold 3) so the next iteration reads the next page.
    query = query.withPagingSpec(toNextCursor(merged, query, 3));
  }
}
/**
 * Runs a select over dimension "foo" and metric "foo2" — names that do not exist in
 * the test data — and verifies the engine returns events with null values for both
 * (see the ":NULL" projections below) rather than failing.
 */
@Test
public void testFullSelectNoDimensionAndMetric()
{
  SelectQuery query = newTestQuery()
      .intervals(I_0112_0114_SPEC)
      .dimensionSpecs(DefaultDimensionSpec.toSpec("foo"))
      .metrics(Collections.singletonList("foo2"))
      .build();

  Iterable<Result<SelectResultValue>> results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList();

  // Build the expected event stream: timestamps from the raw rows, with both the
  // unknown dimension and unknown metric projected as null.
  final List<List<Map<String, Object>>> events = toEvents(
      new String[]{
          EventHolder.timestampKey + ":TIME",
          "foo:NULL",
          "foo2:NULL"
      },
      V_0112_0114
  );

  PagingOffset offset = query.getPagingOffset(segmentIdString);
  List<Result<SelectResultValue>> expectedResults = toExpected(
      segmentIdString,
      events,
      Collections.singletonList("foo"),
      Collections.singletonList("foo2"),
      offset.startOffset(),
      offset.threshold()
  );
  verify(expectedResults, results);
}
/**
 * Resolves the paging offset for the given segment identifier, honoring this
 * query's scan direction.
 */
public PagingOffset getPagingOffset(String identifier)
{
  final boolean descending = isDescending();
  return pagingSpec.getOffset(identifier, descending);
}
/**
 * Returns a copy of this query whose context is the existing context merged with
 * {@code contextOverrides} (overrides win on key collisions, per
 * {@code computeOverriddenContext}).
 */
@Override
public SelectQuery withOverriddenContext(Map<String, Object> contextOverrides)
{
  final Map<String, Object> mergedContext = computeOverriddenContext(getContext(), contextOverrides);
  final Druids.SelectQueryBuilder builder = Druids.SelectQueryBuilder.copy(this);
  return builder.context(mergedContext).build();
}
/**
 * Jackson-deserializable constructor. A null granularity defaults to ALL and a null
 * virtualColumns defaults to empty; pagingSpec is mandatory and must be valid for
 * the requested scan direction.
 *
 * @throws NullPointerException     if pagingSpec is null
 * @throws IllegalArgumentException if pagingSpec is invalid for {@code descending}
 */
@JsonCreator
public SelectQuery(
    @JsonProperty("dataSource") DataSource dataSource,
    @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
    @JsonProperty("descending") boolean descending,
    @JsonProperty("filter") DimFilter dimFilter,
    @JsonProperty("granularity") Granularity granularity,
    @JsonProperty("dimensions") List<DimensionSpec> dimensions,
    @JsonProperty("metrics") List<String> metrics,
    @JsonProperty("virtualColumns") VirtualColumns virtualColumns,
    @JsonProperty("pagingSpec") PagingSpec pagingSpec,
    @JsonProperty("context") Map<String, Object> context
)
{
  super(dataSource, querySegmentSpec, descending, context, Granularities.nullToAll(granularity));
  // Validate pagingSpec BEFORE assigning any fields (the original checked after
  // assignment) so a query that fails validation never holds the bad reference.
  Preconditions.checkNotNull(pagingSpec, "must specify a pagingSpec");
  Preconditions.checkArgument(checkPagingSpec(pagingSpec, descending), "invalid pagingSpec");
  this.dimFilter = dimFilter;
  this.dimensions = dimensions;
  this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
  this.metrics = metrics;
  this.pagingSpec = pagingSpec;
}
query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); private final List<String> dimOutputNames = dimensionSpecs.size() > 0 ? Lists.transform(dimensionSpecs, DimensionSpec::getOutputName) : Collections.emptyList();
String dataSource = Iterables.getOnlyElement(query.getDataSource().getNames()); if (query.getDimensions() == null || query.getDimensions().isEmpty()) { dims = DefaultDimensionSpec.toSpec(adapter.getAvailableDimensions()); } else { dims = query.getDimensions(); if (query.getMetrics() == null || query.getMetrics().isEmpty()) { metrics = adapter.getAvailableMetrics(); } else { metrics = query.getMetrics(); List<Interval> intervals = query.getQuerySegmentSpec().getIntervals(); Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got[%s]", intervals); final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter())); query.getQuerySegmentSpec().getIntervals(), filter, query.getVirtualColumns(), query.isDescending(), query.getGranularity(), new Function<Cursor, Result<SelectResultValue>>()
@Override public byte[] computeCacheKey(SelectQuery query) final DimFilter dimFilter = query.getDimensionsFilter(); final byte[] filterBytes = dimFilter == null ? new byte[]{} : dimFilter.getCacheKey(); final byte[] granularityBytes = query.getGranularity().getCacheKey(); query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][]; int dimensionsBytesSize = 0; if (query.getMetrics() != null) { metrics.addAll(query.getMetrics()); final byte[] virtualColumnsCacheKey = query.getVirtualColumns().getCacheKey(); final byte isDescendingByte = query.isDescending() ? (byte) 1 : 0; + query.getPagingSpec().getCacheKey().length + dimensionsBytesSize + metricBytesSize .put(granularityBytes) .put(filterBytes) .put(query.getPagingSpec().getCacheKey()) .put(isDescendingByte);
/**
 * Debug representation listing every query field. Output is byte-identical to the
 * original {@code +}-concatenation since {@code append(Object)} applies the same
 * {@code String.valueOf} conversion.
 */
@Override
public String toString()
{
  final StringBuilder text = new StringBuilder("SelectQuery{");
  text.append("dataSource='").append(getDataSource()).append('\'')
      .append(", querySegmentSpec=").append(getQuerySegmentSpec())
      .append(", descending=").append(isDescending())
      .append(", dimFilter=").append(dimFilter)
      .append(", granularity=").append(getGranularity())
      .append(", dimensions=").append(dimensions)
      .append(", metrics=").append(metrics)
      .append(", virtualColumns=").append(virtualColumns)
      .append(", pagingSpec=").append(pagingSpec)
      .append('}');
  return text.toString();
}
query.getPagingSpec(), query.isDescending() ); final PagingOffset offset = query.getPagingOffset(segmentId);