/**
 * Create a new result set schema with an additional final column.
 * The receiver is not modified; a copy of its columns is extended instead.
 *
 * @param column  the column being appended
 *
 * @return a new schema containing all existing columns plus the given one
 */
public ResultSetSchema withAddColumn(Column column) {
    // Copy to preserve insertion order while leaving this schema untouched
    LinkedHashSet<Column> expandedColumns = new LinkedHashSet<>(getColumns());
    expandedColumns.add(column);
    return new ResultSetSchema(getGranularity(), expandedColumns);
}
}
/**
 * Round the configured metric value up to the nearest whole number (ceiling).
 * Results without a value for the metric pass through unchanged.
 *
 * @param result  the result row being transformed
 * @param schema  the schema for that result
 *
 * @return the result with the metric rounded up, or the original result when the value is null
 *
 * @throws IllegalStateException if no column name is configured or the column is missing
 */
@Override
protected Result map(Result result, ResultSetSchema schema) {
    if (columnName == null) {
        throw new IllegalStateException("Cannot map results without a column name");
    }
    MetricColumn metricColumn = schema.getColumn(columnName, MetricColumn.class)
            .orElseThrow(() -> new IllegalStateException("Unexpected missing column: " + columnName));
    BigDecimal currentValue = result.getMetricValueAsNumber(metricColumn);
    return currentValue == null
            ? result
            : result.withMetricValue(metricColumn, currentValue.setScale(0, RoundingMode.CEILING));
}
/**
 * Extend the schema with the row-number metric column.
 *
 * @param schema  the schema to extend
 *
 * @return a new schema containing the additional row-number column
 */
@Override
protected ResultSetSchema map(ResultSetSchema schema) {
    MetricColumn rowNumColumn = new MetricColumn(ROW_NUM_COLUMN_NAME);
    return schema.withAddColumn(rowNumColumn);
}
/**
 * Extract the serializable components of a ResultSet schema: timezone, granularity,
 * dimension column names, and metric column names.
 *
 * @param schema  Schema object from the ResultSet
 *
 * @return a map keyed by the SCHEMA_* constants containing the schema's components
 */
private Map<String, Object> getSchemaComponents(ResultSetSchema schema) {
    Map<String, Object> components = new HashMap<>();
    components.put(SCHEMA_TIMEZONE, DateTimeUtils.getTimeZone(schema.getGranularity()).getID());
    components.put(SCHEMA_GRANULARITY, schema.getGranularity().getName());
    // LinkedHashSet keeps the schema's dimension ordering stable in the serialized form
    LinkedHashSet<String> dimensionNames = schema.getColumns(DimensionColumn.class).stream()
            .map(Column::getName)
            .collect(Collectors.toCollection(LinkedHashSet::new));
    components.put(SCHEMA_DIM_COLUMNS, dimensionNames);
    components.put(SCHEMA_METRIC_COLUMNS, getMetricColumnNames(schema));
    return components;
}
/**
 * Build a result set from a druid response using the api request time grain.
 *
 * @param json  The json representing the druid response
 * @param druidQuery  The druid query being processed
 * @param dateTimeZone  The date time zone for parsing result rows
 *
 * @return The initial result set parsed from the json node
 */
public ResultSet buildResultSet(JsonNode json, DruidAggregationQuery<?> druidQuery, DateTimeZone dateTimeZone) {
    LinkedHashSet<Column> schemaColumns = druidResponseParser.buildSchemaColumns(druidQuery)
            .collect(Collectors.toCollection(LinkedHashSet::new));
    return druidResponseParser.parse(
            json,
            new ResultSetSchema(granularity, schemaColumns),
            druidQuery.getQueryType(),
            dateTimeZone
    );
}
}
/**
 * Creates a new Result object from a serialized JsonNode.
 *
 * @param serializedResult  JsonNode which contains all the serialized details of the Result
 * @param resultSetSchema  Schema of the result used to resolve dimension and metric columns
 *
 * @return Result object deserialized from the given JsonNode
 */
private Result getResult(JsonNode serializedResult, ResultSetSchema resultSetSchema) {
    JsonNode dimensionsNode = serializedResult.get(DIMENSION_VALUES_KEY);
    JsonNode metricsNode = serializedResult.get(METRIC_VALUES_KEY);
    DateTime timeStamp = DateTime.parse(serializedResult.get(TIMESTAMP_KEY).asText());
    return new Result(
            extractDimensionValues(dimensionsNode, resultSetSchema.getColumns(DimensionColumn.class)),
            extractMetricValues(metricsNode, resultSetSchema.getColumns(MetricColumnWithValueType.class)),
            timeStamp
    );
}
/** * Remove result records which are missing and not marked as volatile. * Any bucket which is partially volatile is not removed. In the case of the All granularity, all data is * considered to be in a single bucket. * * @param result The result row being transformed * @param schema The schema for that result * @return Null if the bucket this result falls in is missing but not volatile */ @Override public Result map(Result result, ResultSetSchema schema) { Granularity grain = schema.getGranularity(); if (grain.equals(AllGranularity.INSTANCE)) { return ! volatileIntervalSupply.get().isEmpty() || missingIntervals.isEmpty() ? result : null; } // Currently any Granularity which isn't 'ALL' must currently be a TimeGrain Interval resultInterval = new Interval(result.getTimeStamp(), ((TimeGrain) grain).getPeriod()); return getMissingNotVolatile().stream().anyMatch((it) -> it.overlaps(resultInterval)) ? null : result; }
/** * Generates ZonedSchema object from given JsonNode. * * @param schemaNode JsonNode which contains all the columns, timezone and granularity * * @return ResultSetSchema object generated from the JsonNode */ private ResultSetSchema getResultSetSchema(JsonNode schemaNode) { DateTimeZone timezone = generateTimezone( schemaNode.get(SCHEMA_TIMEZONE).asText(), DateTimeZone.forID( SYSTEM_CONFIG.getStringProperty(SYSTEM_CONFIG.getPackageVariableName("timezone"), "UTC") ) ); //Recreate ResultSetSchema LinkedHashSet<Column> columns = Stream.concat( Streams.stream(schemaNode.get(SCHEMA_DIM_COLUMNS)) .map(JsonNode::asText) .map(this::resolveDimensionName) .map(DimensionColumn::new), Streams.stream(() -> schemaNode.get(SCHEMA_METRIC_COLUMNS_TYPE).fields()) .map(entry -> new MetricColumnWithValueType(entry.getKey(), entry.getValue().asText())) ).collect(Collectors.toCollection(LinkedHashSet::new)); return new ResultSetSchema(generateGranularity(schemaNode.get(SCHEMA_GRANULARITY).asText(), timezone), columns); }
/** * Builds a set of only those metric columns which correspond to the metrics requested in the API. * * @param apiMetricColumnNames Set of Metric names extracted from the requested api metrics * * @return set of metric columns */ protected LinkedHashSet<MetricColumn> generateApiMetricColumns(Set<String> apiMetricColumnNames) { // Get the metric columns from the schema Map<String, MetricColumn> metricColumnMap = resultSet.getSchema().getColumns(MetricColumn.class).stream() .collect(StreamUtils.toLinkedDictionary(MetricColumn::getName)); // Select only api metrics from resultSet return apiMetricColumnNames.stream() .map(metricColumnMap::get) .collect(Collectors.toCollection(LinkedHashSet::new)); }
/**
 * Map a result set by appending the row-number column to its schema and annotating
 * every result row with its index.
 *
 * @param resultSet  the result set being transformed
 *
 * @return a new result set whose schema includes the row-number column and whose
 *         rows carry their row number
 *
 * @throws IllegalStateException if the row-number column is unexpectedly absent
 *         from the mapped schema
 */
@Override
public ResultSet map(ResultSet resultSet) {
    ResultSetSchema schema = map(resultSet.getSchema());
    // map(schema) just added the column, but fail loudly with context rather than
    // relying on an unchecked Optional.get() and its bare NoSuchElementException
    MetricColumn column = schema.getColumn(ROW_NUM_COLUMN_NAME, MetricColumn.class).orElseThrow(
            () -> new IllegalStateException("Unexpected missing column: " + ROW_NUM_COLUMN_NAME)
    );
    int resultSetSize = resultSet.size();
    List<Result> newResults = new ArrayList<>(resultSetSize);
    for (int i = 0; i < resultSetSize; i++) {
        newResults.add(rowNumMap(resultSet.get(i), column, i));
    }
    ResultSet newResultSet = new ResultSet(schema, newResults);
    LOG.trace("Mapped resultSet: {} to new resultSet {}", resultSet, newResultSet);
    return newResultSet;
}
new ResultSetSchema(AllGranularity.INSTANCE, Collections.emptySet()), Collections.emptyList() ), responseContext
responseData.getResultSet().getSchema().getColumns(DimensionColumn.class)) { if (request instanceof DataApiRequest) { DataApiRequest dataApiRequest = (DataApiRequest) request;
/**
 * Augment the JSON-node metric in place with the length of its "clarification" field.
 *
 * @param result  the result row being transformed
 * @param schema  the schema for that result
 *
 * @return the same result, whose JSON metric node has gained a "length" field
 *
 * @throws IllegalStateException if the expected metric column is missing
 */
@Override
protected Result map(Result result, ResultSetSchema schema) {
    String metricName = A_JSON_NODE_METRIC.asName();
    MetricColumn column = schema.getColumn(metricName, MetricColumn.class)
            .orElseThrow(() -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + metricName));
    ObjectNode node = (ObjectNode) result.getMetricValueAsJsonNode(column);
    int clarificationLength = node.get("clarification").textValue().length();
    node.put("length", clarificationLength);
    return result;
}
metricValues.put(pageViewColumn, new BigDecimal(111)); ResultSetSchema schema = new ResultSetSchema(granularity, Collections.singleton(pageViewColumn)); Result result = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-12T00:00:00.000Z")); List<Result> results = new ArrayList<>();
Set<DimensionColumn> dimensionColumns = schema.getColumns(DimensionColumn.class); Set<MetricColumn> metricColumns = schema.getColumns(MetricColumn.class);
/**
 * Mutate the JSON-node metric by storing the length of its "clarification" text
 * under the "length" key.
 *
 * @param result  the result row being transformed
 * @param schema  the schema for that result
 *
 * @return the same result instance after its JSON metric payload has been mutated
 *
 * @throws IllegalStateException if the expected metric column is missing
 */
@Override
protected Result map(Result result, ResultSetSchema schema) {
    MetricColumn jsonColumn = schema
            .getColumn(A_JSON_NODE_METRIC.asName(), MetricColumn.class)
            .orElseThrow(() -> new IllegalStateException(
                    UNEXPECTED_MISSING_COLUMN + A_JSON_NODE_METRIC.asName()
            ));
    ObjectNode payload = (ObjectNode) result.getMetricValueAsJsonNode(jsonColumn);
    payload.put("length", payload.get("clarification").textValue().length());
    return result;
}
metricValues.put(pageViewColumn, new BigDecimal(111)); ResultSetSchema schema = new ResultSetSchema(granularity, Collections.singleton(pageViewColumn)); Result result = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-12T00:00:00.000Z")); List<Result> results = new ArrayList<>();
/**
 * Verify that the null-metric column actually carries a null value, passing the
 * result through unchanged when it does.
 *
 * @param result  the result row being checked
 * @param schema  the schema for that result
 *
 * @return the unchanged result when the metric is null
 *
 * @throws IllegalStateException if the column is missing or the value is non-null
 */
@Override
protected Result map(Result result, ResultSetSchema schema) {
    MetricColumn column = schema.getColumn(A_NULL_METRIC.asName(), MetricColumn.class)
            .orElseThrow(() -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + A_NULL_METRIC.asName()));
    Object metricValue = result.getMetricValue(column);
    if (metricValue == null) {
        return result;
    }
    throw new IllegalStateException(
            String.format("Metric 'nullMetric' should be null but is: %s", metricValue)
    );
}
/**
 * Assert that the null-metric column holds a null value; any other value is an error.
 *
 * @param result  the result row being checked
 * @param schema  the schema for that result
 *
 * @return the unchanged result when the metric is null
 *
 * @throws IllegalStateException if the column is missing or the value is non-null
 */
@Override
protected Result map(Result result, ResultSetSchema schema) {
    String nullMetricName = A_NULL_METRIC.asName();
    MetricColumn nullColumn = schema.getColumn(nullMetricName, MetricColumn.class).orElseThrow(
            () -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + nullMetricName)
    );
    Object observed = result.getMetricValue(nullColumn);
    if (observed != null) {
        String message = String.format("Metric 'nullMetric' should be null but is: %s", observed);
        throw new IllegalStateException(message);
    }
    return result;
}
/**
 * Keep only those results whose boolean metric is true; false rows are dropped.
 *
 * @param result  the result row being filtered
 * @param schema  the schema for that result
 *
 * @return the result when the boolean metric is true, otherwise null (row removed)
 *
 * @throws IllegalStateException if the expected boolean metric column is missing
 */
@Override
protected Result map(Result result, ResultSetSchema schema) {
    MetricColumn booleanColumn = schema.getColumn(A_BOOLEAN_METRIC.asName(), MetricColumn.class).orElseThrow(
            () -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + A_BOOLEAN_METRIC.asName())
    );
    if (result.getMetricValueAsBoolean(booleanColumn)) {
        return result;
    }
    return null;
}