@Override
public AggregatorFactory getCombiningFactory()
{
  // At the combining (merge) stage this aggregator reads the column it already
  // produced, so the input field equals the output name and no expression applies.
  final AggregatorFactory combining = new LongMaxAggregatorFactory(name, name, null, macroTable);
  return combining;
}
@Override
public List<AggregatorFactory> getRequiredColumns()
{
  // Exactly one input is required: the field (or expression) this aggregator
  // reads, keyed by fieldName for both output name and input field.
  final AggregatorFactory required = new LongMaxAggregatorFactory(fieldName, fieldName, expression, macroTable);
  return Collections.singletonList(required);
}
/**
 * Creates the max-aggregator factory matching the given numeric value type.
 * Only LONG, FLOAT and DOUBLE are supported; anything else is an error.
 */
private static AggregatorFactory createMaxAggregatorFactory(
    final ValueType aggregationType,
    final String name,
    final String fieldName,
    final String expression,
    final ExprMacroTable macroTable
)
{
  // Guard-style dispatch on the value type instead of a switch; behavior is
  // identical, including the ISE for unsupported types.
  if (aggregationType == ValueType.LONG) {
    return new LongMaxAggregatorFactory(name, fieldName, expression, macroTable);
  }
  if (aggregationType == ValueType.FLOAT) {
    return new FloatMaxAggregatorFactory(name, fieldName, expression, macroTable);
  }
  if (aggregationType == ValueType.DOUBLE) {
    return new DoubleMaxAggregatorFactory(name, fieldName, expression, macroTable);
  }
  throw new ISE("Cannot create aggregator factory for type[%s]", aggregationType);
}
}
@Test
public void testEqualsAndHashCode()
{
  // Two factories built from identical arguments must be equal and share a
  // hash code; differing name/fieldName must break equality.
  final LongMaxAggregatorFactory one = new LongMaxAggregatorFactory("name1", "fieldName1");
  final LongMaxAggregatorFactory oneMore = new LongMaxAggregatorFactory("name1", "fieldName1");
  final LongMaxAggregatorFactory two = new LongMaxAggregatorFactory("name2", "fieldName2");

  // assertEquals/assertNotEquals produce far more informative failure messages
  // than assertTrue(a.equals(b)) / assertFalse(...), and assertEquals also
  // exercises equals() symmetry via its internal comparison.
  Assert.assertEquals(one, oneMore);
  Assert.assertEquals(one.hashCode(), oneMore.hashCode());
  Assert.assertNotEquals(one, two);
}
// Register numeric aggregations over generated columns; each column name doubles
// as the aggregator's output name. NOTE(review): the column distributions
// (uniform/normal/zipf) are implied by the names only — confirm against the
// benchmark schema defined elsewhere in this file.
queryAggs.add(new LongMaxAggregatorFactory("maxLongUniform", "maxLongUniform")); queryAggs.add(new DoubleSumAggregatorFactory("sumFloatNormal", "sumFloatNormal")); queryAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "minFloatZipf"));
// NOTE(review): this fragment appears to belong to a mergeAggregators test —
// two factory arrays share the output name "name" but read different input
// fields, and the expected merged result reads its own output column
// ("name", "name"). The snippet looks truncated/reordered in this view
// (unbalanced parentheses, repeated array literals); verify against the full
// test source before editing.
new LongMaxAggregatorFactory("name", "fieldName1") }; AggregatorFactory[] af2 = new AggregatorFactory[]{ new LongMaxAggregatorFactory("name", "fieldName2") }; Assert.assertArrayEquals( new AggregatorFactory[]{ new LongMaxAggregatorFactory("name", "name") }, AggregatorFactory.mergeAggregators(ImmutableList.of(af1, af2)) new LongMaxAggregatorFactory("name", "fieldName1") }; af2 = new AggregatorFactory[]{
// Register numeric aggregations over generated columns; each column name doubles
// as the aggregator's output name. NOTE(review): the column distributions
// (uniform/normal/zipf) are implied by the names only — confirm against the
// benchmark schema defined elsewhere in this file.
queryAggs.add(new LongMaxAggregatorFactory("maxLongUniform", "maxLongUniform")); queryAggs.add(new DoubleSumAggregatorFactory("sumFloatNormal", "sumFloatNormal")); queryAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "minFloatZipf"));
// Register numeric aggregations over generated columns; each column name doubles
// as the aggregator's output name. NOTE(review): the column distributions
// (uniform/normal/zipf) are implied by the names only — confirm against the
// benchmark schema defined elsewhere in this file.
queryAggs.add(new LongMaxAggregatorFactory("maxLongUniform", "maxLongUniform")); queryAggs.add(new DoubleSumAggregatorFactory("sumFloatNormal", "sumFloatNormal")); queryAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "minFloatZipf"));
// Static test fixture: long max over "long_dim_2", float "first" over
// "float_dim_3", and double "last" over "double_dim_4". Output names are the
// generic "agg_N" labels used by the assertions elsewhere in this test class.
AGGREGATORS.add(new LongMaxAggregatorFactory("agg_2", "long_dim_2")); AGGREGATORS.add(new FloatFirstAggregatorFactory("agg_3", "float_dim_3")); AGGREGATORS.add(new DoubleLastAggregatorFactory("agg_4", "double_dim_4"));
// Incremental index schema: minute-granularity rollup with a single long-max
// metric whose input column shares the output name "max".
new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.MINUTE) .withMetrics(new LongMaxAggregatorFactory("max", "max")) .build()
// NOTE(review): fragment of a Metadata test — one aggregator array reads input
// field "f", while the Metadata built here carries a factory that reads back
// its own output column ("n", "n"), consistent with merged/combined metadata.
// The snippet is truncated in this view; confirm against the full test.
new LongMaxAggregatorFactory("n", "f") }; final Metadata m1 = new Metadata( Collections.singletonMap("k", "v"), new AggregatorFactory[]{ new LongMaxAggregatorFactory("n", "n") }, new TimestampSpec("ds", "auto", null),
// Incremental index schema: minute-granularity rollup with a single long-max
// metric whose input column shares the output name "max".
new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.MINUTE) .withMetrics(new LongMaxAggregatorFactory("max", "max")) .build()
// Lenient merge keeps a null placeholder for the conflicting aggregator
// ("bar") while preserving the agreeing ones; strict merge rejects the whole
// set, yielding a null aggregators map.
expectedLenient.put("foo", new LongSumAggregatorFactory("foo", "foo")); expectedLenient.put("bar", null); expectedLenient.put("baz", new LongMaxAggregatorFactory("baz", "baz")); Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators()); Assert.assertEquals(expectedLenient, mergeLenient(analysis1, analysis2).getAggregators());
@Test public void testGroupByWithFilterMatchingNothing() throws Exception { // This query should actually return [0, null] rather than an empty result set, but it doesn't. // This test just "documents" the current behavior. testQuery( "SELECT COUNT(*), MAX(cnt) FROM druid.foo WHERE dim1 = 'foobar'", ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) .filters(SELECTOR("dim1", "foobar", null)) .granularity(Granularities.ALL) .aggregators(AGGS( new CountAggregatorFactory("a0"), new LongMaxAggregatorFactory("a1", "cnt") )) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of() ); }
@Test
public void testGroupByWithFilterMatchingNothingWithGroupByLiteral() throws Exception
{
  // Grouping by a literal collapses to a plain timeseries query; the filter
  // matches nothing, so no result rows are produced.
  final String sql = "SELECT COUNT(*), MAX(cnt) FROM druid.foo WHERE dim1 = 'foobar' GROUP BY 'dummy'";

  testQuery(
      sql,
      ImmutableList.of(
          Druids.newTimeseriesQueryBuilder()
                .dataSource(CalciteTests.DATASOURCE1)
                .intervals(QSS(Filtration.eternity()))
                .filters(SELECTOR("dim1", "foobar", null))
                .granularity(Granularities.ALL)
                .aggregators(
                    AGGS(
                        new CountAggregatorFactory("a0"),
                        new LongMaxAggregatorFactory("a1", "cnt")
                    )
                )
                .context(TIMESERIES_CONTEXT_DEFAULT)
                .build()
      ),
      // No rows expected: the selector filter matches nothing.
      ImmutableList.of()
  );
}
// Group on "newDimB" (output name "renamedDimB") and take the long max of the
// pre-summed metric "metBSum".
.setQuerySegmentSpec(intervalSpec) .setDimensions(new DefaultDimensionSpec("newDimB", "renamedDimB")) .setAggregatorSpecs(new LongMaxAggregatorFactory("maxBSum", "metBSum")) .setContext( ImmutableMap.of(
// Outer-query aggregators over the inner result: max/min of the inner count
// "a0", sum+count pair ("_a2:sum"/"_a2:count", presumably combined into an
// average post-aggregation — confirm against the surrounding query), max of
// grouping column "d0", and a plain row count "_a4".
.setGranularity(Granularities.ALL) .setAggregatorSpecs(AGGS( new LongMaxAggregatorFactory("_a0", "a0"), new LongMinAggregatorFactory("_a1", "a0"), new LongSumAggregatorFactory("_a2:sum", "a0"), new CountAggregatorFactory("_a2:count"), new LongMaxAggregatorFactory("_a3", "d0"), new CountAggregatorFactory("_a4") ))
// Sum ("a3"), min ("a4") and max ("a5") over the "cnt" column, followed by a
// filtered count ("a6") whose filter is defined past this fragment.
new LongSumAggregatorFactory("a3", "cnt"), new LongMinAggregatorFactory("a4", "cnt"), new LongMaxAggregatorFactory("a5", "cnt"), new FilteredAggregatorFactory( new CountAggregatorFactory("a6"),
// Filter rows where m1 equals '5.0' and aggregate the maximum __time value.
)) .setDimFilter(new SelectorDimFilter("m1", "5.0", null)) .setAggregatorSpecs(AGGS(new LongMaxAggregatorFactory("a0", "__time"))) .setContext(QUERY_CONTEXT_DEFAULT) .build()
// Max aggregations over the aliased time and index columns.
new LongMaxAggregatorFactory("time_alias_max", "time_alias"), new DoubleMaxAggregatorFactory("index_alias_max", "index_alias")