/**
 * Filters the query to rows where {@code dimensionName} takes any of the supplied values.
 * At least one value is required; extra values ride in via varargs.
 *
 * @return this builder, for chaining
 */
public TimeseriesQueryBuilder filters(String dimensionName, String value, String... values)
{
  final List<String> allValues = Lists.asList(value, values);
  dimFilter = new InDimFilter(dimensionName, allValues, null);
  return this;
}
@Override
public DimFilter optimize()
{
  // Rewrite this filter as a single-value IN filter, then delegate to that filter's own optimizer.
  final InDimFilter asInFilter = new InDimFilter(dimension, Collections.singletonList(value), extractionFn);
  return asInFilter.optimize();
}
/**
 * Filters the query to rows where {@code dimensionName} takes any of the supplied values.
 * At least one value is required; extra values ride in via varargs.
 *
 * @return this builder, for chaining
 */
public TopNQueryBuilder filters(String dimensionName, String value, String... values)
{
  final List<String> allValues = Lists.asList(value, values);
  dimFilter = new InDimFilter(dimensionName, allValues, null);
  return this;
}
/**
 * Convenience factory for an IN filter on {@code dimension} with the given values and
 * optional extraction function.
 */
public static InDimFilter IN(String dimension, List<String> values, ExtractionFn extractionFn)
{
  final InDimFilter filter = new InDimFilter(dimension, values, extractionFn);
  return filter;
}
// Builds an IN filter with an empty value list (matches no values) and no extraction fn.
private DimFilter toInFilter(String dim)
{
  return new InDimFilter(dim, new ArrayList<>(), null);
}
// Builds an IN filter over the combined values, applying the given extraction fn.
private DimFilter toInFilterWithFn(String dim, ExtractionFn fn, String value, String... values)
{
  final List<String> combined = Lists.asList(value, values);
  return new InDimFilter(dim, combined, fn);
}
}
/**
 * Builds a search query over two dimensions, each filtered by an IN filter whose value
 * list samples 10% of a 100k-value space.
 */
private static SearchQueryBuilder basicB(final BenchmarkSchemaInfo basicSchema)
{
  final QuerySegmentSpec intervalSpec =
      new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

  // Sample 10% of the 100k cardinality space, evenly strided.
  final int cardinality = 100000;
  final int sampleCount = (int) (cardinality * 0.1);
  final int stride = cardinality / sampleCount;

  // dimUniform samples start at 1; dimHyperUnique samples start at 0.
  final List<String> uniformValues = new ArrayList<>();
  for (int v = 1; v < cardinality + 1 && uniformValues.size() < sampleCount; v += stride) {
    uniformValues.add(String.valueOf(v));
  }

  final List<String> hyperUniqueValues = new ArrayList<>();
  for (int v = 0; v < cardinality + 1 && hyperUniqueValues.size() < sampleCount; v += stride) {
    hyperUniqueValues.add(String.valueOf(v));
  }

  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter("dimUniform", uniformValues, null));
  dimFilters.add(new InDimFilter("dimHyperUnique", hyperUniqueValues, null));

  return Druids.newSearchQueryBuilder()
               .dataSource("blah")
               .granularity(Granularities.ALL)
               .intervals(intervalSpec)
               .query("")
               .dimensions(Lists.newArrayList("dimUniform", "dimHyperUnique"))
               .filters(new AndDimFilter(dimFilters));
}
// Builds an IN filter over the combined values with no extraction fn.
private DimFilter toInFilter(String dim, String value, String... values)
{
  final List<String> combined = Lists.asList(value, values);
  return new InDimFilter(dim, combined, null);
}
@Test
public void testDeserialization() throws IOException
{
  // Read through the DimFilter base type so polymorphic type resolution is exercised.
  final InDimFilter deserialized = mapper.readerFor(DimFilter.class).readValue(actualInFilter);
  final InDimFilter expected = new InDimFilter("dimTest", Arrays.asList("good", "bad"), null);
  Assert.assertEquals(expected, deserialized);
}
@Test
public void testSerialization() throws IOException
{
  final InDimFilter filter = new InDimFilter("dimTest", Arrays.asList("good", "bad"), null);
  // Serialized form must match the expected JSON fixture.
  final String serialized = mapper.writeValueAsString(filter);
  Assert.assertEquals(serialized, actualInFilter);
}
/**
 * Builds a search query over a single dimension combined (AND) with an IN filter sampling
 * 10% of a 100k-value space, a selector filter, and a bound filter.
 */
private static SearchQueryBuilder basicD(final BenchmarkSchemaInfo basicSchema)
{
  final QuerySegmentSpec intervalSpec =
      new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

  // Sample 10% of the 100k cardinality space, evenly strided, starting at 1.
  final int cardinality = 100000;
  final int sampleCount = (int) (cardinality * 0.1);
  final int stride = cardinality / sampleCount;

  final List<String> uniformValues = new ArrayList<>();
  for (int v = 1; v < cardinality + 1 && uniformValues.size() < sampleCount; v += stride) {
    uniformValues.add(String.valueOf(v));
  }

  final String dimName = "dimUniform";
  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter(dimName, uniformValues, null));
  dimFilters.add(new SelectorDimFilter(dimName, "3", null));
  dimFilters.add(new BoundDimFilter(dimName, "100", "10000", true, true, true, null, null));

  return Druids.newSearchQueryBuilder()
               .dataSource("blah")
               .granularity(Granularities.ALL)
               .intervals(intervalSpec)
               .query("")
               .dimensions(Collections.singletonList("dimUniform"))
               .filters(new AndDimFilter(dimFilters));
}
@Test
public void testGetCacheKey()
{
  // BUG FIX: getCacheKey() returns byte[], and Assert.assertNotEquals(Object, Object)
  // compares arrays by reference (Object.equals), so the old assertions passed vacuously
  // for ANY pair of distinct array instances. Compare contents with Arrays.equals instead,
  // so the test actually verifies that ["good","bad"] and ["good,bad"] produce different keys.
  final InDimFilter inDimFilter1 = new InDimFilter("dimTest", Arrays.asList("good", "bad"), null);
  final InDimFilter inDimFilter2 = new InDimFilter("dimTest", Collections.singletonList("good,bad"), null);
  Assert.assertFalse(Arrays.equals(inDimFilter1.getCacheKey(), inDimFilter2.getCacheKey()));

  // Same check with an extraction fn applied: keys must still differ by value list.
  RegexDimExtractionFn regexFn = new RegexDimExtractionFn(".*", false, null);
  final InDimFilter inDimFilter3 = new InDimFilter("dimTest", Arrays.asList("good", "bad"), regexFn);
  final InDimFilter inDimFilter4 = new InDimFilter("dimTest", Collections.singletonList("good,bad"), regexFn);
  Assert.assertFalse(Arrays.equals(inDimFilter3.getCacheKey(), inDimFilter4.getCacheKey()));
}
return this; } else { return new InDimFilter(dimension, keys, null);
@Test public void testMultithreaded() { assertFilterMatchesMultithreaded( new SelectorDimFilter(LONG_COLUMN, "3", null), ImmutableList.of("3") ); assertFilterMatchesMultithreaded( new InDimFilter(LONG_COLUMN, Arrays.asList("2", "4", "8"), null), ImmutableList.of("2", "4") ); // cross the hashing threshold to test hashset implementation, filter on even values List<String> infilterValues = new ArrayList<>(InDimFilter.NUMERIC_HASHING_THRESHOLD * 2); for (int i = 0; i < InDimFilter.NUMERIC_HASHING_THRESHOLD * 2; i++) { infilterValues.add(String.valueOf(i * 2)); } assertFilterMatchesMultithreaded( new InDimFilter(LONG_COLUMN, infilterValues, null), ImmutableList.of("2", "4", "6") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2", "5", false, false, null, null, StringComparators.NUMERIC), ImmutableList.of("2", "3", "4", "5") ); }
@Test public void testFilterTheUnfilterable() { // single value matching assertFilterMatches( new SelectorDimFilter("hyperion", "a string", null), ImmutableList.of() ); assertFilterMatches( new SelectorDimFilter("hyperion", null, null), ImmutableList.of("1", "2", "3", "4", "5", "6") ); // predicate based matching assertFilterMatches( new InDimFilter("hyperion", Arrays.asList("hello", "world"), null), ImmutableList.of() ); assertFilterMatches( new InDimFilter("hyperion", Arrays.asList("hello", "world", null), null), ImmutableList.of("1", "2", "3", "4", "5", "6") ); } }
new InDimFilter(columnName, Arrays.asList("2", "4", "8"), null), ImmutableList.of("2", "4") ); new InDimFilter(columnName, Arrays.asList("2.0", "4.0", "8.0"), null), ImmutableList.of("2", "4") ); new InDimFilter(columnName, infilterValues, null), ImmutableList.of("2", "4", "6") );
new RegexDimFilter("dimSequential", "X", null), new SearchQueryDimFilter("dimSequential", new ContainsSearchQuerySpec("X", false), null), new InDimFilter("dimSequential", Collections.singletonList("X"), null)
.setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter(new InDimFilter("dim1", elements, null)) .setAggregatorSpecs( AGGS(
superFilterList.add(new InDimFilter("null_column", Arrays.asList("NOT-EMPTY", "FOOBAR", "EMPTY"), extractionFn)); superFilterList.add(new BoundDimFilter("null_column", "EMPTY", "EMPTY", false, false, true, extractionFn, StringComparators.ALPHANUMERIC
@Test
public void testInFilter() throws Exception
{
  final String sql =
      "SELECT dim1, COUNT(*) FROM druid.foo WHERE dim1 IN ('abc', 'def', 'ghi') GROUP BY dim1";

  testQuery(
      sql,
      ImmutableList.of(
          GroupByQuery.builder()
                      .setDataSource(CalciteTests.DATASOURCE1)
                      .setInterval(QSS(Filtration.eternity()))
                      .setGranularity(Granularities.ALL)
                      .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
                      .setDimFilter(new InDimFilter("dim1", ImmutableList.of("abc", "def", "ghi"), null))
                      .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0")))
                      .setContext(QUERY_CONTEXT_DEFAULT)
                      .build()
      ),
      // Only 'abc' and 'def' exist in the test datasource, so 'ghi' yields no output row.
      ImmutableList.of(
          new Object[]{"abc", 1L},
          new Object[]{"def", 1L}
      )
  );
}