@Test
public void testFallbackToCursorBasedPlan()
{
  // Filtering on a numeric (long) column forces the search engine off the
  // index-based plan and onto the cursor-based fallback path.
  final SearchQuery query = testBuilder().filters("qualityLong", "1000").build();

  // Expected hits per dimension; case-insensitive keys match the dimension names.
  final Map<String, Set<String>> expected = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
  expected.put("qualityLong", Sets.newHashSet("1000"));
  expected.put("qualityDouble", Sets.newHashSet("10000.0"));
  expected.put("qualityFloat", Sets.newHashSet("10000.0"));
  expected.put("qualityNumericString", Sets.newHashSet("100000"));
  expected.put("quality", Sets.newHashSet("AutoMotive", "automotive"));
  expected.put("placement", Sets.newHashSet("PREFERRED", "preferred"));
  expected.put("placementish", Sets.newHashSet("a", "preferred"));
  expected.put("market", Sets.newHashSet("spot"));

  checkSearchQuery(query, expected);
}
@Test
public void testSearchSameValueInMultiDims()
{
  // One builder, two dimensions that share value spellings.
  final Druids.SearchQueryBuilder builder =
      testBuilder().dimensions(Arrays.asList(placementDimension, placementishDimension));
  final Map<String, Set<String>> expected = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);

  // Default (case-insensitive) match picks up every spelling in both dimensions.
  SearchQuery query = builder.query("PREFERRED").build();
  expected.put(placementDimension, Sets.newHashSet("PREFERRED", "preferred", "PREFERRed"));
  expected.put(placementishDimension, Sets.newHashSet("preferred", "Preferred"));
  checkSearchQuery(query, expected);

  // Case-sensitive match narrows each dimension to the exact lowercase value.
  query = builder.query("preferred", true).build();
  expected.put(placementDimension, Sets.newHashSet("preferred"));
  expected.put(placementishDimension, Sets.newHashSet("preferred"));
  checkSearchQuery(query, expected);
}
@Test
public void testFragmentSearch()
{
  final Druids.SearchQueryBuilder builder = testBuilder();
  final Map<String, Set<String>> expected = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);

  // Case-insensitive fragment match: both fragments must appear in the value.
  SearchQuery query = builder.fragments(Arrays.asList("auto", "ve")).build();
  expected.put(qualityDimension, Sets.newHashSet("automotive", "AutoMotive"));
  checkSearchQuery(query, expected);

  // Case-sensitive fragment match keeps only the all-lowercase value.
  query = builder.fragments(Arrays.asList("auto", "ve"), true).build();
  expected.put(qualityDimension, Sets.newHashSet("automotive"));
  checkSearchQuery(query, expected);
}
@Test
public void testDefaultSearchQueryMetricsMetricNamesAndUnits()
{
  final SearchQuery query = Druids
      .newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.dayGran)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .build();

  // Capture emitted events in memory so the shared helper can inspect them.
  final CachingEmitter cachingEmitter = new CachingEmitter();
  final ServiceEmitter serviceEmitter = new ServiceEmitter("", "", cachingEmitter);
  final SearchQueryMetrics metrics = DefaultSearchQueryMetricsFactory.instance().makeMetrics(query);

  // Delegate the actual name/unit assertions to the default-metrics test helper.
  DefaultQueryMetricsTest.testQueryMetricsDefaultMetricNamesAndUnits(cachingEmitter, serviceEmitter, metrics);
}
}
@Test
public void testSearchNonExistingDimension()
{
  // Searching a dimension absent from the datasource must return no hits.
  final SearchQuery query = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .dimensions("does_not_exist")
      .query("a")
      .build();

  final List<SearchHit> noHits = new ArrayList<>();
  checkSearchQuery(query, noHits);
}
@Test
public void testSearchWithNotExistedDimension()
{
  // A DimensionSpec over an unknown column should yield an empty result set.
  final SearchQuery query = Druids.newSearchQueryBuilder()
      .dimensions(new DefaultDimensionSpec("asdf", "asdf"))
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .build();

  final List<SearchHit> noHits = new ArrayList<>();
  checkSearchQuery(query, noHits);
}
@Test
public void testSearchWithEmptyResults()
{
  // No value in the datasource contains this fragment, so nothing should match.
  final SearchQuery query = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .query("abcd123")
      .build();

  final List<SearchHit> noHits = new ArrayList<>();
  checkSearchQuery(query, noHits);
}
/**
 * Runs a filtered search (market = "spot", query "a") on the given runner and
 * asserts that the results match {@code expectedResults}.
 *
 * @param runner          runner to execute the query against
 * @param expectedResults expected search results
 * @param failMsg         prefix for the assertion failure message
 */
private void testFilteredSearch(
    QueryRunner<Result<SearchResultValue>> runner,  // parameterized: avoid raw-type unchecked calls
    List<Result<SearchResultValue>> expectedResults,
    String failMsg
)
{
  SearchQuery query = Druids.newSearchQueryBuilder()
                            .dataSource(dataSource)
                            .granularity(allGran)
                            .filters(marketDimension, "spot")
                            .intervals(fullOnInterval)
                            .query("a")
                            .build();
  // Build the message in a local instead of mutating the parameter.
  final String message = failMsg + " filtered search ";
  // Program to the interface; the runner only needs a Map.
  Map<String, Object> context = new HashMap<>();
  Iterable<Result<SearchResultValue>> actualResults =
      runner.run(QueryPlus.wrap(query), context).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults, message);
}
/**
 * Runs an unfiltered full-interval search (query "a") on the given runner and
 * asserts that the results match {@code expectedResults}.
 *
 * @param runner          runner to execute the query against
 * @param expectedResults expected search results
 * @param failMsg         prefix for the assertion failure message
 */
private void testFullOnSearch(
    QueryRunner<Result<SearchResultValue>> runner,  // parameterized: avoid raw-type unchecked calls
    List<Result<SearchResultValue>> expectedResults,
    String failMsg
)
{
  SearchQuery query = Druids.newSearchQueryBuilder()
                            .dataSource(dataSource)
                            .granularity(allGran)
                            .intervals(fullOnInterval)
                            .query("a")
                            .build();
  // Build the message in a local instead of mutating the parameter.
  final String message = failMsg + " search ";
  // Program to the interface; the runner only needs a Map.
  Map<String, Object> context = new HashMap<>();
  Iterable<Result<SearchResultValue>> actualResults =
      runner.run(QueryPlus.wrap(query), context).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults, message);
}
/** Returns a copy of this query with {@code newLimit} as its result limit. */
public SearchQuery withLimit(int newLimit)
{
  final Druids.SearchQueryBuilder copy = Druids.SearchQueryBuilder.copy(this);
  return copy.limit(newLimit).build();
}
@Test
public void testFragmentSearch()
{
  // Both fragments ("auto" and "ve") must appear in a value for it to match.
  final SearchQuery query = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .query(new FragmentSearchQuerySpec(Arrays.asList("auto", "ve")))
      .build();

  final List<SearchHit> expectedHits = new ArrayList<>();
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
  checkSearchQuery(query, expectedHits);
}
/** Returns a copy of this query with {@code dimFilter} as its filter. */
public SearchQuery withDimFilter(DimFilter dimFilter)
{
  final Druids.SearchQueryBuilder copy = Druids.SearchQueryBuilder.copy(this);
  return copy.filters(dimFilter).build();
}
/**
 * Round-trips a search query through JSON and verifies it deserializes to an
 * equal object.
 *
 * @throws IOException if Jackson serialization or deserialization fails
 */
@Test
public void testQuerySerialization() throws IOException
{
  // Use the wildcard-parameterized type instead of the raw Query type.
  final Query<?> query = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .query("a")
      .build();

  final String json = jsonMapper.writeValueAsString(query);
  final Query<?> serdeQuery = jsonMapper.readValue(json, Query.class);

  // Polymorphic deserialization must reproduce an equal SearchQuery.
  Assert.assertEquals(query, serdeQuery);
}
/** Returns a copy of this query reading from {@code dataSource}. */
@Override
public Query<Result<SearchResultValue>> withDataSource(DataSource dataSource)
{
  final Druids.SearchQueryBuilder copy = Druids.SearchQueryBuilder.copy(this);
  return copy.dataSource(dataSource).build();
}
/**
 * Builds the search query used by the filtered-search tests: full interval,
 * query "a", excluding rows where market = "spot".
 */
private SearchQuery makeFilteredSearchQuery()
{
  // NOT(market = "spot") keeps every row except the spot market.
  final DimFilter notSpot = new NotDimFilter(new SelectorDimFilter(marketDimension, "spot", null));
  return Druids.newSearchQueryBuilder()
               .dataSource(dataSource)
               .filters(notSpot)
               .granularity(allGran)
               .intervals(fullOnInterval)
               .query("a")
               .build();
}
}
@Test
public void testSearchNoOverrappingIntervals()
{
  // The queried interval does not overlap any ingested data, so even a
  // matching fragment ("business") must yield an empty value set.
  final Druids.SearchQueryBuilder builder = testBuilder()
      .dimensions(Collections.singletonList(qualityDimension))
      .intervals("2011-01-10T00:00:00.000Z/2011-01-11T00:00:00.000Z");
  final SearchQuery query = builder.query("business").build();

  final Map<String, Set<String>> expected = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
  expected.put(qualityDimension, new HashSet<>());
  checkSearchQuery(query, expected);
}
/** Returns a copy of this query whose context has {@code contextOverrides} merged in. */
@Override
public SearchQuery withOverriddenContext(Map<String, Object> contextOverrides)
{
  // Overrides win over the existing context entries.
  final Map<String, Object> mergedContext = computeOverriddenContext(getContext(), contextOverrides);
  return Druids.SearchQueryBuilder.copy(this).context(mergedContext).build();
}
/**
 * Builds the baseline search query used by these tests: full interval,
 * all-granularity, query "a", no filter.
 */
private SearchQuery makeSearchQuery()
{
  return Druids.newSearchQueryBuilder()
               .dataSource(dataSource)
               .granularity(allGran)
               .intervals(fullOnInterval)
               .query("a")
               .build();
}
@Test
public void testSearchIntervals()
{
  // Restrict the search to a single day that contains data; the fragment
  // "otive" should hit the value "AutoMotive" in the quality dimension.
  final Druids.SearchQueryBuilder builder = testBuilder()
      .dimensions(Collections.singletonList(qualityDimension))
      .intervals("2011-01-12T00:00:00.000Z/2011-01-13T00:00:00.000Z");
  final SearchQuery query = builder.query("otive").build();

  final Map<String, Set<String>> expected = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
  expected.put(qualityDimension, Sets.newHashSet("AutoMotive"));
  checkSearchQuery(query, expected);
}
/** Returns a copy of this query scoped to the given segment spec's intervals. */
@Override
public SearchQuery withQuerySegmentSpec(QuerySegmentSpec spec)
{
  final Druids.SearchQueryBuilder copy = Druids.SearchQueryBuilder.copy(this);
  return copy.intervals(spec).build();
}