/**
 * Create a new {@code DataSourceConstraint} instance whose metric names are the intersection
 * of this constraint's metric names and a caller-supplied set.
 *
 * @param metricNames  The set of metric names to intersect with the metric names held by
 * this {@code DataSourceConstraint}
 *
 * @return a new {@code DataSourceConstraint} carrying only the shared metric names
 */
public DataSourceConstraint withMetricIntersection(Set<String> metricNames) {
    // Iterate our own names and keep those the caller also requested; set intersection is symmetric
    Set<String> intersectedMetricNames = this.metricNames.stream()
            .filter(metricNames::contains)
            .collect(Collectors.toSet());
    return new DataSourceConstraint(
            requestDimensions,
            filterDimensions,
            metricDimensions,
            intersectedMetricNames,
            allDimensions,
            allDimensionNames,
            allColumnNames,
            apiFilters
    );
}
/**
 * Equality: same identity, or another {@code QueryPlanningConstraint} whose superclass state
 * and all query-planning fields match.
 */
@Override
public boolean equals(final Object obj) {
    if (this == obj) {
        return true;
    }
    // Guard clause: anything that isn't a QueryPlanningConstraint can never be equal
    if (!(obj instanceof QueryPlanningConstraint)) {
        return false;
    }
    QueryPlanningConstraint that = (QueryPlanningConstraint) obj;
    return super.equals(that)
            && Objects.equals(logicalTable, that.logicalTable)
            && Objects.equals(intervals, that.intervals)
            && Objects.equals(logicalMetrics, that.logicalMetrics)
            && Objects.equals(minimumGranularity, that.minimumGranularity)
            && Objects.equals(requestGranularity, that.requestGranularity)
            && Objects.equals(logicalMetricNames, that.logicalMetricNames);
}
/**
 * Constructor.
 * <p>
 * All dimension and metric-name sets are wrapped as unmodifiable views of the caller's sets
 * (views, not copies — later mutation of the caller's sets would show through). The derived
 * {@code allDimensions}/{@code allDimensionNames}/{@code allColumnNames} fields are computed
 * here after the base sets are assigned, so assignment order matters.
 *
 * @param requestDimensions Dimensions contained in request
 * @param filterDimensions Filtered dimensions
 * @param metricDimensions Metric related dimensions
 * @param metricNames Names of metrics
 * @param apiFilters Map of dimension to its set of API filters
 */
protected DataSourceConstraint( @NotNull Set<Dimension> requestDimensions, @NotNull Set<Dimension> filterDimensions, @NotNull Set<Dimension> metricDimensions, @NotNull Set<String> metricNames, @NotNull ApiFilters apiFilters ) { this.requestDimensions = Collections.unmodifiableSet(requestDimensions); this.filterDimensions = Collections.unmodifiableSet(filterDimensions); this.metricDimensions = Collections.unmodifiableSet(metricDimensions); this.metricNames = Collections.unmodifiableSet(metricNames); this.allDimensions = generateAllDimensions(); this.allDimensionNames = generateAllDimensionNames(); this.allColumnNames = generateAllColumnNames(); /* NOTE(review): apiFilters is stored as-is, not wrapped unmodifiable like the sets above — confirm whether ApiFilters is immutable or this is intentional */ this.apiFilters = apiFilters; }
/**
 * Copy Constructor.
 * <p>
 * Copies field references straight from the source's getters rather than recomputing the
 * derived {@code all*} collections. Presumably the getters return the already-unmodifiable
 * views built by the primary constructor, so no re-wrapping is needed here — TODO confirm.
 *
 * @param dataSourceConstraint The data source constraint to copy from
 */
protected DataSourceConstraint(DataSourceConstraint dataSourceConstraint) { this.requestDimensions = dataSourceConstraint.getRequestDimensions(); this.filterDimensions = dataSourceConstraint.getFilterDimensions(); this.metricDimensions = dataSourceConstraint.getMetricDimensions(); this.metricNames = dataSourceConstraint.getMetricNames(); this.apiFilters = dataSourceConstraint.getApiFilters(); this.allDimensions = dataSourceConstraint.getAllDimensions(); this.allDimensionNames = dataSourceConstraint.getAllDimensionNames(); this.allColumnNames = dataSourceConstraint.getAllColumnNames(); }
/**
 * DruidDimensionValueLoader fetches dimension values from Druid and adds them to the dimension
 * cache. The dimensions to be loaded are passed in as a parameter; the data sources are derived
 * from every physical table in the dictionary, each under an unconstrained view.
 * <p>
 * NOTE(review): {@code findByApiName} results are collected without a null filter — if a name in
 * {@code dimensionsToLoad} is not in the dictionary, a null lands in {@code dimensions}. Confirm
 * the dictionary either throws or that callers guarantee valid names.
 *
 * @param physicalTableDictionary The physical tables
 * @param dimensionDictionary The dimension dictionary to load dimensions from.
 * @param dimensionsToLoad The dimensions to load values for.
 * @param druidWebService The druid webservice to query.
 */
public DruidDimensionValueLoader( PhysicalTableDictionary physicalTableDictionary, DimensionDictionary dimensionDictionary, List<String> dimensionsToLoad, DruidWebService druidWebService ) { this.dimensions = dimensionsToLoad.stream() .map(dimensionDictionary::findByApiName) .collect(Collectors.toCollection(LinkedHashSet::new)); this.dataSources = physicalTableDictionary.values().stream() .map(table -> table.withConstraint(DataSourceConstraint.unconstrained(table))) .map(TableDataSource::new) .collect(Collectors.toCollection(LinkedHashSet::new)); this.druidWebService = druidWebService; }
/**
 * Constructor.
 * <p>
 * Copies all state from the given logical constraint via the superclass copy constructor, then
 * translates every logical column name into its physical name using the table schema, storing
 * the result as an unmodifiable set.
 * <p>
 * NOTE(review): no check that every logical column actually exists in the schema — behavior for
 * an unmapped name depends on {@code getPhysicalColumnName}, which is not visible here; confirm
 * it fails loudly rather than returning a placeholder.
 *
 * @param dataSourceConstraint Data source constraint containing all the column names as logical names
 * @param physicalTableSchema A map from logical column name to physical column names
 */
public PhysicalDataSourceConstraint( @NotNull DataSourceConstraint dataSourceConstraint, @NotNull PhysicalTableSchema physicalTableSchema ) { super(dataSourceConstraint); this.allColumnPhysicalNames = dataSourceConstraint.getAllColumnNames().stream() .map(physicalTableSchema::getPhysicalColumnName) .collect(Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet)); }
/**
 * Test whether every dimension with a key-select filter either has no constraint
 * or matches some rows under the constraint's API filters.
 *
 * @param constraint  The data source constraint whose API filters are checked
 *
 * @return true only if all filtered dimensions pass; false on the first failure
 */
@Override
public Boolean apply(DataSourceConstraint constraint) {
    Map<Dimension, Set<ApiFilter>> apiFiltersByDimension = constraint.getApiFilters();
    // Explicit loop with early exit — equivalent to allMatch over the key set
    for (Dimension dimension : dimensionKeySelectFilters.keySet()) {
        if (!emptyConstraintOrAnyRows(dimension, apiFiltersByDimension)) {
            return false;
        }
    }
    return true;
}
/**
 * SqlDimensionValueLoader fetches data from Sql and adds it to the dimension cache.
 * The dimensions to be loaded are passed in as a parameter; the data sources are derived from
 * every physical table in the dictionary, each under an unconstrained view.
 * <p>
 * NOTE(review): {@code findByApiName} results are collected without a null filter — an unknown
 * name in {@code dimensionsToLoad} would put a null into {@code dimensions}. Confirm the
 * dictionary either throws or that callers guarantee valid names.
 *
 * @param physicalTableDictionary The physical tables
 * @param dimensionDictionary The dimension dictionary to load dimensions from.
 * @param dimensionsToLoad The dimensions to be loaded.
 * @param sqlBackedClient The sql backed client.
 */
public SqlDimensionValueLoader( PhysicalTableDictionary physicalTableDictionary, DimensionDictionary dimensionDictionary, List<String> dimensionsToLoad, SqlBackedClient sqlBackedClient ) { this.dimensions = dimensionsToLoad.stream() .map(dimensionDictionary::findByApiName) .collect(Collectors.toCollection(LinkedHashSet::new)); this.dataSources = physicalTableDictionary.values().stream() .map(table -> table.withConstraint(DataSourceConstraint.unconstrained(table))) .map(TableDataSource::new) .collect(Collectors.toCollection(LinkedHashSet::new)); this.sqlBackedClient = sqlBackedClient; }
/** * Ensure that the schema of the constraint is consistent with what the table supports. * * @param constraint The constraint being tested * * @throws IllegalArgumentException If there are columns referenced by the constraint unavailable in the table */ private void validateConstraintSchema(DataSourceConstraint constraint) throws IllegalArgumentException { Set<String> tableColumnNames = getSchema().getColumnNames(); // Validate that the requested columns are answerable by the current table if (!constraint.getAllColumnNames().stream().allMatch(tableColumnNames::contains)) { String message = String.format( "Received invalid request requesting for columns: %s that is not available in this table: %s", constraint.getAllColumnNames().stream() .filter(name -> !tableColumnNames.contains(name)) .collect(Collectors.joining(",")), getName() ); LOG.error(message); throw new IllegalArgumentException(message); } }
/**
 * Constructor.
 * <p>
 * Builds the constraint directly from an API request and its template Druid query: dimensions
 * come from the request, filter dimensions from the request's API-filter keys, and metric
 * dimensions/names from the query. The request's API filters are defensively copied into a new
 * {@code ApiFilters}. The derived {@code all*} fields are computed last, after every base set
 * is assigned, so assignment order matters.
 *
 * @param dataApiRequest Api request containing the constraints information.
 * @param templateDruidQuery Query containing metric constraint information.
 */
public DataSourceConstraint(DataApiRequest dataApiRequest, DruidAggregationQuery<?> templateDruidQuery) { this.requestDimensions = Collections.unmodifiableSet(dataApiRequest.getDimensions()); this.filterDimensions = Collections.unmodifiableSet(dataApiRequest.getApiFilters().keySet()); this.metricDimensions = Collections.unmodifiableSet(templateDruidQuery.getMetricDimensions()); this.metricNames = Collections.unmodifiableSet(templateDruidQuery.getDependentFieldNames()); this.apiFilters = new ApiFilters(dataApiRequest.getApiFilters()); this.allDimensions = generateAllDimensions(); this.allDimensionNames = generateAllDimensionNames(); this.allColumnNames = generateAllColumnNames(); }
/**
 * Return a view of the available intervals for the original source table given a constraint.
 *
 * @param constraint  The constraint which limits available intervals
 *
 * @return The intervals that the table can report on
 */
@Override
public SimplifiedIntervalList getAvailableIntervals(DataSourceConstraint constraint) {
    // Matching constraint: answer locally; otherwise delegate to the source table
    return getConstraint().equals(constraint)
            ? getAvailableIntervals()
            : getSourceTable().getAvailableIntervals(constraint);
}
/**
 * Return the {@link TableName} of the dataSources which back the original source table
 * given a constraint.
 *
 * @param constraint  A constraint which may narrow the data sources participating.
 *
 * @return A set of tablenames for backing dataSources
 */
@Override
public Set<DataSourceName> getDataSourceNames(DataSourceConstraint constraint) {
    // Matching constraint: answer locally; otherwise delegate to the source table
    return getConstraint().equals(constraint)
            ? getDataSourceNames()
            : getSourceTable().getDataSourceNames(constraint);
}