@SuppressWarnings("unchecked")
public int countRows(String dataSource, String interval)
{
  // Counts rows in the given datasource over the given interval by summing the
  // "count" metric with a single ALL-granularity timeseries query via the broker.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(dataSource)
      .aggregators(
          ImmutableList.of(
              new LongSumAggregatorFactory("rows", "count")
          )
      )
      .granularity(Granularities.ALL)
      .intervals(interval)
      .build();

  List<Map<String, Object>> results = queryClient.query(getQueryURL(broker), query);
  if (results.isEmpty()) {
    return 0;
  }

  Map<String, Object> map = (Map<String, Object>) results.get(0).get("result");
  if (map == null) {
    // Treat a missing result payload as zero rows rather than NPE-ing.
    return 0;
  }
  // LongSumAggregatorFactory totals are longs; depending on JSON deserialization
  // the value may arrive as Integer or Long. Go through Number instead of
  // casting to Integer directly, which would throw ClassCastException on a Long.
  Number rows = (Number) map.get("rows");
  return rows == null ? 0 : rows.intValue();
}
private void setupQueries() { // queries for the basic schema Map<String, Druids.SelectQueryBuilder> basicQueries = new LinkedHashMap<>(); BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic"); { // basic.A QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval())); Druids.SelectQueryBuilder queryBuilderA = Druids.newSelectQueryBuilder() .dataSource(new TableDataSource("blah")) .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList())) .metrics(Collections.emptyList()) .intervals(intervalSpec) .granularity(Granularities.ALL) .descending(false); basicQueries.put("A", queryBuilderA); } SCHEMA_QUERY_MAP.put("basic", basicQueries); }
private static SearchQueryBuilder basicB(final BenchmarkSchemaInfo basicSchema)
{
  // Search query filtered by IN filters over roughly 10% of the values of two
  // dimensions. Note the two sampling loops deliberately start at 1 and 0.
  final QuerySegmentSpec querySegmentSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(basicSchema.getDataInterval())
  );

  final List<String> uniformValues = new ArrayList<>();
  int targetCount = (int) (100000 * 0.1);
  int stride = 100000 / targetCount;
  for (int value = 1; value < 100001 && uniformValues.size() < targetCount; value += stride) {
    uniformValues.add(String.valueOf(value));
  }

  final List<String> hyperUniqueValues = new ArrayList<>();
  targetCount = (int) (100000 * 0.1);
  stride = 100000 / targetCount;
  for (int value = 0; value < 100001 && hyperUniqueValues.size() < targetCount; value += stride) {
    hyperUniqueValues.add(String.valueOf(value));
  }

  final List<DimFilter> filters = new ArrayList<>();
  filters.add(new InDimFilter("dimUniform", uniformValues, null));
  filters.add(new InDimFilter("dimHyperUnique", hyperUniqueValues, null));

  return Druids.newSearchQueryBuilder()
      .dataSource("blah")
      .granularity(Granularities.ALL)
      .intervals(querySegmentSpec)
      .query("")
      .dimensions(Lists.newArrayList("dimUniform", "dimHyperUnique"))
      .filters(new AndDimFilter(filters));
}
@Test
public void testQuerySerialization() throws IOException
{
  // Round-trip a time-boundary query through JSON and verify it deserializes
  // back to an equal query object.
  Query original = Druids.newTimeBoundaryQueryBuilder()
      .dataSource("testing")
      .build();

  String serialized = jsonMapper.writeValueAsString(original);
  Query roundTripped = jsonMapper.readValue(serialized, Query.class);

  Assert.assertEquals(original, roundTripped);
}
@Test
public void testQuerySerialization() throws IOException
{
  // Round-trip a datasource-metadata query through JSON and verify it
  // deserializes back to an equal query object.
  Query original = Druids.newDataSourceMetadataQueryBuilder()
      .dataSource("testing")
      .build();

  String serialized = jsonMapper.writeValueAsString(original);
  Query roundTripped = jsonMapper.readValue(serialized, Query.class);

  Assert.assertEquals(original, roundTripped);
}
private void testTimeBoundary(
    QueryRunner runner,
    List<Result<TimeBoundaryResultValue>> expectedResults,
    String failMsg
)
{
  // Run an unbounded time-boundary query on the given runner and compare the
  // results against the expectation, tagging failures with the caller's message.
  TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder()
      .dataSource("testing")
      .build();
  failMsg += " timeBoundary ";

  HashMap<String, Object> responseContext = new HashMap<>();
  Iterable<Result<TimeBoundaryResultValue>> actualResults =
      runner.run(QueryPlus.wrap(timeBoundaryQuery), responseContext).toList();

  TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
}
// Build a datasource-metadata query against the "testing" datasource.
// NOTE(review): the enclosing method is outside this chunk, so the statement is
// shown in isolation.
DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder() .dataSource("testing") .build();
public IntervalChunkingQueryRunnerTest()
{
  // Pre-build a timeseries query (single count aggregator) shared by the tests.
  queryBuilder = Druids.newTimeseriesQueryBuilder()
      .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
      .dataSource("test");
}
private static SearchQueryBuilder basicD(final BenchmarkSchemaInfo basicSchema)
{
  // Search query ANDing three filters on "dimUniform": an IN filter over
  // roughly 10% of its values, an exact selector, and a bound filter.
  final QuerySegmentSpec querySegmentSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(basicSchema.getDataInterval())
  );

  final List<String> uniformValues = new ArrayList<>();
  final int targetCount = (int) (100000 * 0.1);
  final int stride = 100000 / targetCount;
  for (int value = 1; value < 100001 && uniformValues.size() < targetCount; value += stride) {
    uniformValues.add(String.valueOf(value));
  }

  final String dimension = "dimUniform";
  final List<DimFilter> filters = new ArrayList<>();
  filters.add(new InDimFilter(dimension, uniformValues, null));
  filters.add(new SelectorDimFilter(dimension, "3", null));
  filters.add(new BoundDimFilter(dimension, "100", "10000", true, true, true, null, null));

  return Druids.newSearchQueryBuilder()
      .dataSource("blah")
      .granularity(Granularities.ALL)
      .intervals(querySegmentSpec)
      .query("")
      .dimensions(Collections.singletonList("dimUniform"))
      .filters(new AndDimFilter(filters));
}
private Druids.SelectQueryBuilder newTestQuery()
{
  // Base select query over the full test interval, no dimensions or metrics,
  // paging three rows at a time; direction follows the test's descending flag.
  return Druids.newSelectQueryBuilder()
      .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource))
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList()))
      .metrics(Collections.emptyList())
      .pagingSpec(PagingSpec.newSpec(3))
      .descending(descending);
}
@Test
@SuppressWarnings("unchecked")
public void testTimeBoundaryMax()
{
  // With the MAX_TIME bound only the max timestamp should be populated.
  TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
      .dataSource("testing")
      .bound(TimeBoundaryQuery.MAX_TIME)
      .build();

  Map<String, Object> responseContext = new ConcurrentHashMap<>();
  responseContext.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>());

  Iterable<Result<TimeBoundaryResultValue>> results =
      runner.run(QueryPlus.wrap(query), responseContext).toList();
  TimeBoundaryResultValue value = results.iterator().next().getValue();

  Assert.assertNull(value.getMinTime());
  Assert.assertEquals(DateTimes.of("2011-04-15T00:00:00.000Z"), value.getMaxTime());
}
// NOTE(review): this snippet appears truncated — the method body's opening
// brace and the rest of the test continue beyond this chunk; verify against
// the full file before editing.
@Test public void testContextSerde() throws Exception final DataSourceMetadataQuery query = Druids.newDataSourceMetadataQueryBuilder() .dataSource("foo") .intervals("2013/2014")
public AsyncQueryRunnerTest()
{
  // Single-threaded executor plus a minimal count timeseries query shared by
  // the test methods.
  this.executor = Executors.newSingleThreadExecutor();
  query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test")
      .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
      .intervals("2014/2015")
      .build();
}
private static SearchQueryBuilder basicA(final BenchmarkSchemaInfo basicSchema)
{
  // Simple unfiltered search for the literal "123" over the schema's data interval.
  final QuerySegmentSpec querySegmentSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(basicSchema.getDataInterval())
  );
  return Druids.newSearchQueryBuilder()
      .dataSource("blah")
      .intervals(querySegmentSpec)
      .granularity(Granularities.ALL)
      .query("123");
}
// NOTE(review): this snippet appears truncated — the select-query builder chain
// continues beyond this chunk; the enclosing method is not visible here.
ServiceEmitter serviceEmitter = new ServiceEmitter("", "", cachingEmitter); SelectQuery query = Druids .newSelectQueryBuilder() .dataSource(QueryRunnerTestHelper.dataSource) .granularity(QueryRunnerTestHelper.dayGran)
@Test
@SuppressWarnings("unchecked")
public void testTimeBoundaryMin()
{
  // With the MIN_TIME bound only the min timestamp should be populated.
  TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
      .dataSource("testing")
      .bound(TimeBoundaryQuery.MIN_TIME)
      .build();

  Map<String, Object> responseContext = new ConcurrentHashMap<>();
  responseContext.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>());

  Iterable<Result<TimeBoundaryResultValue>> results =
      runner.run(QueryPlus.wrap(query), responseContext).toList();
  TimeBoundaryResultValue value = results.iterator().next().getValue();

  Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), value.getMinTime());
  Assert.assertNull(value.getMaxTime());
}
private TimeseriesQuery makeTimeseriesQuery()
{
  // Full-interval, ALL-granularity timeseries combining the common aggregators
  // with max/min over the "index" column, plus the shared post-aggregator.
  return Druids.newTimeseriesQueryBuilder()
      .dataSource(dataSource)
      .intervals(fullOnInterval)
      .granularity(allGran)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(addRowsIndexConstant)
      .build();
}
private Druids.SearchQueryBuilder testBuilder()
{
  // Shared base search query: full test interval, ALL granularity.
  return Druids.newSearchQueryBuilder()
      .intervals(fullOnIntervalSpec)
      .granularity(allGran)
      .dataSource(dataSource);
}
// Verifies that "SELECT * ... ORDER BY __time DESC LIMIT 2" plans to a single
// descending select query using the first-page paging spec, and that the two
// rows returned are the newest ones, newest first.
@Test public void testSelectStarWithLimitTimeDescending() throws Exception { testQuery( "SELECT * FROM druid.foo ORDER BY __time DESC LIMIT 2", ImmutableList.of( Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) .granularity(Granularities.ALL) .dimensions(ImmutableList.of("dummy")) .metrics(ImmutableList.of("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1")) .descending(true) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) .build() ), ImmutableList.of( new Object[]{T("2001-01-03"), 1L, "abc", NULL_VALUE, NULL_VALUE, 6f, 6d, HLLC_STRING}, new Object[]{T("2001-01-02"), 1L, "def", "abc", NULL_VALUE, 5f, 5d, HLLC_STRING} ) ); }
@Test @SuppressWarnings("unchecked") public void testFilteredTimeBoundaryQueryNoMatches() throws IOException { QueryRunner customRunner = getCustomRunner(); TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder() .dataSource("testing") .filters("quality", "foobar") // foobar dimension does not exist .build(); Assert.assertTrue(timeBoundaryQuery.hasFilters()); HashMap<String, Object> context = new HashMap<String, Object>(); List<Result<TimeBoundaryResultValue>> results = customRunner.run(QueryPlus.wrap(timeBoundaryQuery), context).toList(); Assert.assertTrue(Iterables.size(results) == 0); }