/**
 * Returns the columns this aggregator requires as a one-element list holding
 * a count aggregator carrying this factory's name.
 */
@Override
public List<AggregatorFactory> getRequiredColumns()
{
  final AggregatorFactory countFactory = new CountAggregatorFactory(name);
  return Collections.singletonList(countFactory);
}
new AggregatorFactory[]{new CountAggregatorFactory("cnt").getCombiningFactory()}, KEY_SERDE_FACTORY, MoreExecutors.listeningDecorator(SERVICE),
/**
 * Verifies the comparator produced by {@link CountAggregatorFactory}:
 * the pre-aggregation value sorts strictly before the post-aggregation value,
 * and each value compares equal to itself.
 */
@Test
public void testComparator()
{
  final CountAggregator aggregator = new CountAggregator();
  final Object initial = aggregator.get();
  aggregator.aggregate();

  final Comparator comparator = new CountAggregatorFactory("null").getComparator();

  Assert.assertEquals(-1, comparator.compare(initial, aggregator.get()));
  Assert.assertEquals(0, comparator.compare(initial, initial));
  Assert.assertEquals(0, comparator.compare(aggregator.get(), aggregator.get()));
  Assert.assertEquals(1, comparator.compare(aggregator.get(), initial));
}
}
/**
 * Creates and initializes a {@link BufferArrayGrouper} over a fresh heap
 * buffer of {@code bufferSize} bytes, aggregating a long sum ("valueSum"
 * over "value") and a row count ("count"). The trailing constructor argument
 * is 1000 — presumably a cardinality cap; confirm against BufferArrayGrouper.
 */
private BufferArrayGrouper newGrouper(
    TestColumnSelectorFactory columnSelectorFactory,
    int bufferSize
)
{
  final AggregatorFactory[] aggregatorFactories = new AggregatorFactory[]{
      new LongSumAggregatorFactory("valueSum", "value"),
      new CountAggregatorFactory("count")
  };
  final BufferArrayGrouper grouper = new BufferArrayGrouper(
      Suppliers.ofInstance(ByteBuffer.allocate(bufferSize)),
      columnSelectorFactory,
      aggregatorFactories,
      1000
  );
  grouper.init();
  return grouper;
}
/**
 * Prepares the shared timeseries query builder: datasource "test" with a
 * single "count" aggregator. Individual tests finish configuring it.
 */
public IntervalChunkingQueryRunnerTest()
{
  queryBuilder = Druids
      .newTimeseriesQueryBuilder()
      .dataSource("test")
      .aggregators(Collections.singletonList(new CountAggregatorFactory("count")));
}
/**
 * Creates and initializes an int-keyed {@link StreamingMergeSortedGrouper}
 * over a fresh heap buffer, aggregating a long sum ("valueSum" over "value")
 * and a row count ("count").
 */
private StreamingMergeSortedGrouper<Integer> newGrouper(
    TestColumnSelectorFactory columnSelectorFactory,
    int bufferSize
)
{
  final AggregatorFactory[] aggregatorFactories = new AggregatorFactory[]{
      new LongSumAggregatorFactory("valueSum", "value"),
      new CountAggregatorFactory("count")
  };
  // One second from now — presumably a timeout deadline; confirm against the
  // StreamingMergeSortedGrouper constructor contract.
  final long deadlineMillis = System.currentTimeMillis() + 1000L;

  final StreamingMergeSortedGrouper<Integer> grouper = new StreamingMergeSortedGrouper<>(
      Suppliers.ofInstance(ByteBuffer.allocate(bufferSize)),
      GrouperTestUtil.intKeySerde(),
      columnSelectorFactory,
      aggregatorFactories,
      deadlineMillis
  );
  grouper.init();
  return grouper;
}
}
/**
 * Round-trips a decorated FinalizingFieldAccessPostAggregator through JSON
 * and asserts it deserializes back equal to the undecorated original.
 */
@Test
public void testSerde() throws IOException
{
  final FinalizingFieldAccessPostAggregator original =
      new FinalizingFieldAccessPostAggregator("foo", "bar");
  final FinalizingFieldAccessPostAggregator decorated =
      original.decorate(ImmutableMap.of("bar", new CountAggregatorFactory("bar")));

  final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
  final String json = objectMapper.writeValueAsString(decorated);

  Assert.assertEquals(original, objectMapper.readValue(json, PostAggregator.class));
}
/**
 * Creates and initializes an int-keyed {@link BufferHashGrouper} over a fresh
 * heap buffer, aggregating a theta-sketch merge ("sketch", size 16) and a row
 * count ("count"), with no bucket cap (Integer.MAX_VALUE), 0.75 load factor,
 * the given initial bucket count, and the trailing flag set to true.
 */
private static BufferHashGrouper<Integer> makeGrouper(
    TestColumnSelectorFactory columnSelectorFactory,
    int bufferSize,
    int initialBuckets
)
{
  final AggregatorFactory[] aggregatorFactories = new AggregatorFactory[]{
      new SketchMergeAggregatorFactory("sketch", "sketch", 16, false, true, 2),
      new CountAggregatorFactory("count")
  };
  final BufferHashGrouper<Integer> grouper = new BufferHashGrouper<>(
      Suppliers.ofInstance(ByteBuffer.allocate(bufferSize)),
      GrouperTestUtil.intKeySerde(),
      columnSelectorFactory,
      aggregatorFactories,
      Integer.MAX_VALUE,
      0.75f,
      initialBuckets,
      true
  );
  grouper.init();
  return grouper;
}
/**
 * Sets up the single-threaded executor and the shared timeseries query
 * (datasource "test", interval 2014/2015, one "count" aggregator).
 */
public AsyncQueryRunnerTest()
{
  this.executor = Executors.newSingleThreadExecutor();
  query = Druids
      .newTimeseriesQueryBuilder()
      .dataSource("test")
      .intervals("2014/2015")
      .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
      .build();
}
/**
 * Creates and initializes an int-keyed {@link BufferHashGrouper} over a fresh
 * off-heap (direct) buffer, aggregating a long sum ("valueSum" over "value")
 * and a row count ("count"), with no bucket cap (Integer.MAX_VALUE), the given
 * load factor and initial bucket count, and the trailing flag set to true.
 */
private BufferHashGrouper<Integer> makeGrouper(
    TestColumnSelectorFactory columnSelectorFactory,
    int bufferSize,
    int initialBuckets,
    float maxLoadFactor
)
{
  final AggregatorFactory[] aggregatorFactories = new AggregatorFactory[]{
      new LongSumAggregatorFactory("valueSum", "value"),
      new CountAggregatorFactory("count")
  };
  // Note: direct (off-heap) allocation, not a heap buffer.
  final ByteBuffer directBuffer = ByteBuffer.allocateDirect(bufferSize);

  final BufferHashGrouper<Integer> grouper = new BufferHashGrouper<>(
      Suppliers.ofInstance(directBuffer),
      GrouperTestUtil.intKeySerde(),
      columnSelectorFactory,
      aggregatorFactories,
      Integer.MAX_VALUE,
      maxLoadFactor,
      initialBuckets,
      true
  );
  grouper.init();
  return grouper;
}
}
/**
 * Builds a filtered count aggregator against the cursor's column selector
 * factory, then aggregates once per row until the cursor is exhausted and
 * returns the aggregator.
 */
@Override
public Aggregator apply(Cursor input)
{
  final Aggregator aggregator = new FilteredAggregatorFactory(
      new CountAggregatorFactory("count"),
      maybeOptimize(filter)
  ).factorize(input.getColumnSelectorFactory());

  while (!input.isDone()) {
    aggregator.aggregate();
    input.advance();
  }
  return aggregator;
}
}
/**
 * Creates and initializes an int-keyed {@link LimitedBufferHashGrouper} over
 * a fresh heap buffer, aggregating a long sum ("valueSum" over "value") and a
 * row count ("count"), with no bucket cap (Integer.MAX_VALUE), 0.5 load
 * factor, the given initial bucket count and limit, and the trailing flag
 * set to false.
 */
private static LimitedBufferHashGrouper<Integer> makeGrouper(
    TestColumnSelectorFactory columnSelectorFactory,
    int bufferSize,
    int initialBuckets,
    int limit
)
{
  final AggregatorFactory[] aggregatorFactories = new AggregatorFactory[]{
      new LongSumAggregatorFactory("valueSum", "value"),
      new CountAggregatorFactory("count")
  };
  final LimitedBufferHashGrouper<Integer> grouper = new LimitedBufferHashGrouper<>(
      Suppliers.ofInstance(ByteBuffer.allocate(bufferSize)),
      GrouperTestUtil.intKeySerde(),
      columnSelectorFactory,
      aggregatorFactories,
      Integer.MAX_VALUE,
      0.5f,
      initialBuckets,
      limit,
      false
  );
  grouper.init();
  return grouper;
}
}
/**
 * Checks FilteredAggregatorFactory name resolution: a non-empty explicit name
 * wins, while an empty or null explicit name falls back to the delegate
 * aggregator's name.
 */
@Test
public void testSimpleNaming()
{
  Assert.assertEquals(
      "overrideName",
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("foo"),
          new TrueDimFilter(),
          "overrideName"
      ).getName()
  );
  Assert.assertEquals(
      "delegateName",
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("delegateName"),
          new TrueDimFilter(),
          ""
      ).getName()
  );
  Assert.assertEquals(
      "delegateName",
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("delegateName"),
          new TrueDimFilter(),
          null
      ).getName()
  );
}
}
/**
 * Builds an on-heap incremental index with a single "cnt" count metric and a
 * 1000-row cap.
 */
@Override
public IncrementalIndex createIndex()
{
  final IncrementalIndex.Builder builder = new IncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt"))
      .setMaxRowCount(1000);
  return builder.buildOnheap();
}
}
/**
 * Builds an on-heap incremental index (single "count" metric, 1000-row cap)
 * and populates the given dimension with the given values via
 * {@code addDimValuesToIndex}.
 */
private IncrementalIndex getSingleDimIndex(String dimName, List<String> values) throws Exception
{
  final IncrementalIndex index = new IncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setMaxRowCount(1000)
      .buildOnheap();

  addDimValuesToIndex(index, dimName, values);
  return index;
}
/**
 * Builds an on-heap incremental index (single "count" metric, 1000-row cap)
 * containing three rows at timestamp 1 over dimensions declared in the order
 * d3, d1, d2.
 */
private IncrementalIndex getIndexD3() throws Exception
{
  final IncrementalIndex index = new IncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setMaxRowCount(1000)
      .buildOnheap();

  addD3Row(index, "100", "4000", "30000");
  addD3Row(index, "300", "2000", "40000");
  addD3Row(index, "200", "3000", "50000");
  return index;
}

// Adds a single timestamp-1 row with dimension declaration order d3, d1, d2.
private void addD3Row(IncrementalIndex index, String d1, String d2, String d3) throws Exception
{
  index.add(
      new MapBasedInputRow(
          1,
          Arrays.asList("d3", "d1", "d2"),
          ImmutableMap.of("d1", d1, "d2", d2, "d3", d3)
      )
  );
}
/**
 * Selects a broker for a query whose interval matches nothing and asserts
 * "hotBroker" is chosen.
 * NOTE(review): that "hotBroker" is the fallback/default is inferred from the
 * assertion — confirm against this test's broker-selector configuration.
 */
@Test
public void testSelectMatchesNothing()
{
  final String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity("all")
            .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
            .intervals(Collections.singletonList(Intervals.of("2010-08-31/2010-09-01")))
            .build()
  ).lhs;

  Assert.assertEquals("hotBroker", brokerName);
}
/**
 * A post-aggregator referencing only names produced by the aggregators
 * ("idx" and "count") must pass Queries.prepareAggregations without an
 * IllegalArgumentException.
 */
@Test
public void testVerifyAggregations()
{
  final List<AggregatorFactory> aggFactories = Arrays.asList(
      new CountAggregatorFactory("count"),
      new DoubleSumAggregatorFactory("idx", "index"),
      new DoubleSumAggregatorFactory("rev", "revenue")
  );
  final List<PostAggregator> postAggs = Collections.singletonList(
      new ArithmeticPostAggregator(
          "addStuff",
          "+",
          Arrays.asList(
              new FieldAccessPostAggregator("idx", "idx"),
              new FieldAccessPostAggregator("count", "count")
          )
      )
  );

  boolean sawIllegalArgument = false;
  try {
    Queries.prepareAggregations(ImmutableList.of(), aggFactories, postAggs);
  } catch (IllegalArgumentException e) {
    sawIllegalArgument = true;
  }
  Assert.assertFalse(sawIllegalArgument);
}
/**
 * A post-aggregator referencing a name no aggregator produces ("idx2") must
 * make Queries.prepareAggregations throw an IllegalArgumentException.
 */
@Test
public void testVerifyAggregationsMissingVal()
{
  final List<AggregatorFactory> aggFactories = Arrays.asList(
      new CountAggregatorFactory("count"),
      new DoubleSumAggregatorFactory("idx", "index"),
      new DoubleSumAggregatorFactory("rev", "revenue")
  );
  final List<PostAggregator> postAggs = Collections.singletonList(
      new ArithmeticPostAggregator(
          "addStuff",
          "+",
          Arrays.asList(
              new FieldAccessPostAggregator("idx", "idx2"),
              new FieldAccessPostAggregator("count", "count")
          )
      )
  );

  boolean sawIllegalArgument = false;
  try {
    Queries.prepareAggregations(ImmutableList.of(), aggFactories, postAggs);
  } catch (IllegalArgumentException e) {
    sawIllegalArgument = true;
  }
  Assert.assertTrue(sawIllegalArgument);
}
/**
 * Returns the given builder reconfigured with a schema declaring three
 * metrics: a row count, a HyperUniques sketch ("hyperion" over dim1), and a
 * double max ("dmax" over dim0).
 */
private static IndexBuilder overrideIndexBuilderSchema(IndexBuilder indexBuilder)
{
  final IncrementalIndexSchema metricSchema = new IncrementalIndexSchema.Builder()
      .withMetrics(
          new CountAggregatorFactory("count"),
          new HyperUniquesAggregatorFactory("hyperion", "dim1"),
          new DoubleMaxAggregatorFactory("dmax", "dim0")
      )
      .build();
  return indexBuilder.schema(metricSchema);
}