private static AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregators)
{
  AggregatorFactory[] combiningAggregators = new AggregatorFactory[aggregators.length];
  for (int i = 0; i < aggregators.length; i++) {
    combiningAggregators[i] = aggregators[i].getCombiningFactory();
  }
  return combiningAggregators;
}
@Override
public AggregatorFactory getCombiningFactory()
{
  return delegate.getCombiningFactory();
}
@Override
public AggregatorFactory apply(AggregatorFactory input)
{
  return input.getCombiningFactory();
}
}
private AggregatorFactory[] getCombiningFactories(AggregatorFactory[] aggregatorFactories)
{
  final AggregatorFactory[] combiningFactories = new AggregatorFactory[aggregatorFactories.length];
  Arrays.setAll(combiningFactories, i -> aggregatorFactories[i].getCombiningFactory());
  return combiningFactories;
}
}
private AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregators) { AggregatorFactory[] combiningAggregators = new AggregatorFactory[aggregators.length]; for (int i = 0; i < aggregators.length; i++) { combiningAggregators[i] = aggregators[i].getCombiningFactory(); } return combiningAggregators; }
/**
 * Returns an AggregatorFactory that can be used to merge the output of aggregators from this factory and
 * the other factory.
 * This method is relevant only for AggregatorFactory implementations that can be used at ingestion time.
 *
 * @return a new factory that can be used for merging the output of aggregators from this factory and the other.
 */
public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException
{
  final AggregatorFactory combiningFactory = this.getCombiningFactory();
  if (other.getName().equals(this.getName()) && combiningFactory.equals(other.getCombiningFactory())) {
    return combiningFactory;
  } else {
    throw new AggregatorFactoryNotMergeableException(this, other);
  }
}
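// Illustrative sketch (not part of the snippets above; the metric names, the
// LongSumAggregatorFactory usage, and the org.apache.druid import paths are
// assumptions): shows the getMergingFactory contract. Two factories are mergeable
// only when they share an output name and their combining factories are equal;
// the merged factory is the combining factory, which aggregates its own output
// column rather than the raw input column.
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.AggregatorFactoryNotMergeableException;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;

public class GetMergingFactorySketch
{
  public static void main(String[] args) throws AggregatorFactoryNotMergeableException
  {
    // Same output name "rows", different raw input columns at ingestion time.
    AggregatorFactory a = new LongSumAggregatorFactory("rows", "rowCountA");
    AggregatorFactory b = new LongSumAggregatorFactory("rows", "rowCountB");

    // Both combine by summing their own output column, so they are mergeable.
    AggregatorFactory merged = a.getMergingFactory(b);
    System.out.println(merged.equals(a.getCombiningFactory())); // true

    // A factory with a different output name is not mergeable:
    // a.getMergingFactory(new LongSumAggregatorFactory("count", "rowCountA"))
    // would throw AggregatorFactoryNotMergeableException.
  }
}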
if (aggregatorFactories != null) {
  final AggregatorFactory[] combiningFactories = new AggregatorFactory[aggregatorFactories.length];
  Arrays.setAll(combiningFactories, i -> aggregatorFactories[i].getCombiningFactory());
  return combiningFactories;
} else {
/**
 * Rewrite the aggregator and dimension specs, since the pushed-down nested query will return
 * results with the dimension and aggregation specs of the original nested query.
 */
@VisibleForTesting
GroupByQuery rewriteNestedQueryForPushDown(GroupByQuery query)
{
  return query.withAggregatorSpecs(Lists.transform(query.getAggregatorSpecs(), (agg) -> agg.getCombiningFactory()))
              .withDimensionSpecs(Lists.transform(
                  query.getDimensions(),
                  (dim) -> new DefaultDimensionSpec(
                      dim.getOutputName(),
                      dim.getOutputName(),
                      dim.getOutputType()
                  )
              ));
}
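// Illustrative sketch (the DoubleSumAggregatorFactory and column names are
// assumptions, not taken from the query code above): after the nested query is
// pushed down, the outer query re-aggregates rows that already contain the
// aggregated column, so each aggregator spec is swapped for its combining
// factory, which reads its own output name instead of the original input column.
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;

public class CombiningFactorySketch
{
  public static void main(String[] args)
  {
    AggregatorFactory ingestTime = new DoubleSumAggregatorFactory("revenue", "price");
    AggregatorFactory combining = ingestTime.getCombiningFactory();

    System.out.println(ingestTime.requiredFields()); // [price]
    System.out.println(combining.requiredFields());  // [revenue]
    System.out.println(combining.getName());         // revenue
  }
}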
@Before
public void setup()
{
  stringLastAggFactory = new StringLastAggregatorFactory("billy", "nilly", MAX_STRING_SIZE);
  combiningAggFactory = stringLastAggFactory.getCombiningFactory();
  timeSelector = new TestLongColumnSelector(times);
  valueSelector = new TestObjectColumnSelector<>(strings);
  objectSelector = new TestObjectColumnSelector<>(pairs);
  colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class);
  EasyMock.expect(colSelectorFactory.makeColumnValueSelector(ColumnHolder.TIME_COLUMN_NAME)).andReturn(timeSelector);
  EasyMock.expect(colSelectorFactory.makeColumnValueSelector("nilly")).andReturn(valueSelector);
  EasyMock.expect(colSelectorFactory.makeColumnValueSelector("billy")).andReturn(objectSelector);
  EasyMock.replay(colSelectorFactory);
}
@Before
public void setup()
{
  stringFirstAggFactory = new StringFirstAggregatorFactory("billy", "nilly", MAX_STRING_SIZE);
  combiningAggFactory = stringFirstAggFactory.getCombiningFactory();
  timeSelector = new TestLongColumnSelector(times);
  valueSelector = new TestObjectColumnSelector<>(strings);
  objectSelector = new TestObjectColumnSelector<>(pairs);
  colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class);
  EasyMock.expect(colSelectorFactory.makeColumnValueSelector(ColumnHolder.TIME_COLUMN_NAME)).andReturn(timeSelector);
  EasyMock.expect(colSelectorFactory.makeColumnValueSelector("nilly")).andReturn(valueSelector);
  EasyMock.expect(colSelectorFactory.makeColumnValueSelector("billy")).andReturn(objectSelector);
  EasyMock.replay(colSelectorFactory);
}
() -> GroupByRowProcessor.createGrouper(
    queryWithoutSubtotalsSpec.withAggregatorSpecs(
        Lists.transform(queryWithoutSubtotalsSpec.getAggregatorSpecs(), (agg) -> agg.getCombiningFactory())
    ).withDimensionSpecs(
        Lists.transform(
AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length];
for (int i = 0; i < metricAggs.length; i++) {
  combiningMetricAggs[i] = metricAggs[i].getCombiningFactory();
@Override
protected void setup(Context context)
{
  config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration());
  aggregators = config.getSchema().getDataSchema().getAggregators();
  combiningAggs = new AggregatorFactory[aggregators.length];
  for (int i = 0; i < aggregators.length; ++i) {
    metricNames.add(aggregators[i].getName());
    combiningAggs[i] = aggregators[i].getCombiningFactory();
  }
  typeHelperMap = InputRowSerde.getTypeHelperMap(config.getSchema()
                                                       .getDataSchema()
                                                       .getParser()
                                                       .getParseSpec()
                                                       .getDimensionsSpec());
}
@Override
protected void setup(Context context)
{
  config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration());
  aggregators = config.getSchema().getDataSchema().getAggregators();
  combiningAggs = new AggregatorFactory[aggregators.length];
  for (int i = 0; i < aggregators.length; ++i) {
    combiningAggs[i] = aggregators[i].getCombiningFactory();
  }
  typeHelperMap = InputRowSerde.getTypeHelperMap(config.getSchema()
                                                       .getDataSchema()
                                                       .getParser()
                                                       .getParseSpec()
                                                       .getDimensionsSpec());
}
combiningAggregatorFactories[i] = query.getAggregatorSpecs().get(i).getCombiningFactory();
@Override
protected void setup(Context context) throws IOException, InterruptedException
{
  super.setup(context);
  aggregators = config.getSchema().getDataSchema().getAggregators();
  if (DatasourcePathSpec.checkIfReindexingAndIsUseAggEnabled(config.getSchema().getIOConfig().getPathSpec())) {
    aggsForSerializingSegmentInputRow = aggregators;
  } else {
    // Note: this is required for "delta-ingestion" use case where we are reading rows stored in Druid as well
    // as late arriving data on HDFS etc.
    aggsForSerializingSegmentInputRow = new AggregatorFactory[aggregators.length];
    for (int i = 0; i < aggregators.length; ++i) {
      aggsForSerializingSegmentInputRow[i] = aggregators[i].getCombiningFactory();
    }
  }
  typeHelperMap = InputRowSerde.getTypeHelperMap(config.getSchema()
                                                       .getDataSchema()
                                                       .getParser()
                                                       .getParseSpec()
                                                       .getDimensionsSpec());
}
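// Illustrative sketch (the "count" metric and org.apache.druid import paths are
// assumptions, not taken from the indexer config above): shows why rows read back
// from existing Druid segments are serialized with the combining factory during
// delta-ingestion. Re-counting already-rolled-up rows would undercount; the
// combining factory of a count aggregator is a long-sum over the stored "count"
// column, which re-aggregates the persisted values correctly.
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;

public class DeltaIngestionCombiningSketch
{
  public static void main(String[] args)
  {
    AggregatorFactory ingestTime = new CountAggregatorFactory("count");

    // Applied to rows re-read from segments during delta-ingestion / re-indexing.
    AggregatorFactory forSegmentRows = ingestTime.getCombiningFactory();

    System.out.println(forSegmentRows.getClass().getSimpleName()); // LongSumAggregatorFactory
    System.out.println(forSegmentRows.requiredFields());           // [count]
  }
}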
expectedAggregators.put(agg.getName(), agg.getCombiningFactory());
@Test
public void makeAggregateCombiner()
{
  AggregatorFactory aggregatorFactory = new ArrayOfDoublesSketchAggregatorFactory("", "", null, null, null);
  AggregatorFactory combiningFactory = aggregatorFactory.getCombiningFactory();
  AggregateCombiner<ArrayOfDoublesSketch> combiner = combiningFactory.makeAggregateCombiner();
  ArrayOfDoublesUpdatableSketch sketch1 = new ArrayOfDoublesUpdatableSketchBuilder().build();
  sketch1.update("a", new double[] {1});
  ArrayOfDoublesUpdatableSketch sketch2 = new ArrayOfDoublesUpdatableSketchBuilder().build();
  sketch2.update("b", new double[] {1});
  sketch2.update("c", new double[] {1});
  TestObjectColumnSelector<ArrayOfDoublesSketch> selector =
      new TestObjectColumnSelector<ArrayOfDoublesSketch>(new ArrayOfDoublesSketch[] {sketch1, sketch2});
  combiner.reset(selector);
  Assert.assertEquals(1, combiner.getObject().getEstimate(), 0);
  selector.increment();
  combiner.fold(selector);
  Assert.assertEquals(3, combiner.getObject().getEstimate(), 0);
}