/**
 * Builds the log message for an invalid pagination page request.
 *
 * @return the formatted log message describing the requested page, the page size, and the last page
 */
public String getLogMessage() {
    return PAGINATION_PAGE_INVALID.logFormat(page, rowsPerPage, lastPage);
}
/**
 * Log an error message and throw an exception for an unsupported query type.
 *
 * @param queryType  The query type that is not supported
 *
 * @throws UnsupportedOperationException always, carrying the formatted error message
 */
private void unsupportedQueryType(QueryType queryType) {
    String message = RESULT_SET_ERROR.logFormat(queryType);
    LOG.error(message);
    throw new UnsupportedOperationException(message);
}
/** * Confirm count size is non negative. * * @param countRequest The value of the count from the request (if any) * @param count The bound value for the count */ protected void validateCount(String countRequest, int count) { // This is the validation part for count that is inlined here because currently it is very brief. if (count < 0) { LOG.debug(INTEGER_INVALID.logFormat(countRequest, "count")); throw new BadApiRequestException(INTEGER_INVALID.logFormat(countRequest, "count")); } } /**
/**
 * Parses the requested input String by converting it to an integer, while treating null as zero.
 *
 * @param value  The requested integer value as String.
 * @param parameterName  The parameter name that corresponds to the requested integer value.
 *
 * @return The integer corresponding to {@code value} or zero if {@code value} is null.
 *
 * @throws BadApiRequestException if the input String can not be parsed as an integer.
 */
protected int generateInteger(String value, String parameterName) throws BadApiRequestException {
    try {
        // Absent parameter is treated as zero rather than an error.
        return value == null ? 0 : Integer.parseInt(value);
    } catch (NumberFormatException nfe) {
        LOG.debug(INTEGER_INVALID.logFormat(value, parameterName), nfe);
        // Use format (not logFormat) for the user-facing exception message, consistent with the
        // other validators in this codebase; the parse failure is preserved as the cause.
        throw new BadApiRequestException(INTEGER_INVALID.format(value, parameterName), nfe);
    }
}
/** * Confirm the top N bucket size (if any) is valid. * * @param topNRequest The value of the count from the request (if any) * @param sorts collection of sorted columns * @param topN The bound value for the count */ protected void validateTopN(String topNRequest, int topN, LinkedHashSet<OrderByColumn> sorts) { // This is the validation part for topN that is inlined here because currently it is very brief. if (topN < 0) { LOG.debug(INTEGER_INVALID.logFormat(topNRequest, "topN")); throw new BadApiRequestException(INTEGER_INVALID.logFormat(topNRequest, "topN")); } else if (topN > 0 && this.sorts.isEmpty()) { LOG.debug(TOP_N_UNSORTED.logFormat(topNRequest)); throw new BadApiRequestException(TOP_N_UNSORTED.format(topNRequest)); } }
/**
 * Serializes the dimension as its resolved physical name.
 *
 * @param value  The dimension being serialized
 * @param gen  The JSON generator to write to
 * @param provider  The serializer provider (unused)
 *
 * @throws IOException if writing to the generator fails
 * @throws IllegalStateException if no physical name can be resolved for the dimension
 */
@Override
public void serialize(Dimension value, JsonGenerator gen, SerializerProvider provider) throws IOException {
    gen.writeString(
            SerializerUtil.findPhysicalName(value, gen).orElseThrow(() -> {
                LOG.error(ErrorMessageFormat.PHYSICAL_NAME_NOT_FOUND.logFormat(value.getApiName()));
                // Pass the api name to format(); the previous no-argument call produced an
                // exception message that omitted which dimension failed to resolve.
                return new IllegalStateException(
                        ErrorMessageFormat.PHYSICAL_NAME_NOT_FOUND.format(value.getApiName())
                );
            })
    );
}
}
/**
 * Builds the exception reported when no table matches the requested intervals.
 *
 * @return a NoMatchFoundException describing the table and intervals that failed to match
 */
@Override
public NoMatchFoundException noneFoundException() {
    String exceptionMessage = MESSAGE_FORMAT.format(logicalTableName, requestIntervals);
    LOG.error(MESSAGE_FORMAT.logFormat(logicalTableName, requestIntervals));
    return new NoMatchFoundException(exceptionMessage);
}
}
/**
 * Checks the metrics JSON array for metric names that appear more than once.
 *
 * @param metricsJsonArray  The array of metric objects, each carrying a "name" field
 *
 * @throws BadApiRequestException if any metric name is repeated
 */
@Override
public void validateDuplicateMetrics(ArrayNode metricsJsonArray) {
    Set<String> seenNames = new HashSet<>();
    List<String> duplicates = new ArrayList<>();
    for (int index = 0; index < metricsJsonArray.size(); index++) {
        String name = metricsJsonArray.get(index).get("name").asText();
        // Set.add returns false when the name was already present.
        if (!seenNames.add(name)) {
            duplicates.add(name);
        }
    }
    if (!duplicates.isEmpty()) {
        LOG.debug(DUPLICATE_METRICS_IN_API_REQUEST.logFormat(duplicates.toString()));
        throw new BadApiRequestException(DUPLICATE_METRICS_IN_API_REQUEST.format(duplicates.toString()));
    }
}
/**
 * SketchEstimate converts the sketch into a number. Hence this method always should have one
 * aggregator.
 *
 * @param fields  List of post aggregation fields
 *
 * @return New ThetaSketchEstimatePostAggregation with provided field and only one aggregator.
 *
 * @throws IllegalArgumentException if {@code fields} does not hold exactly one element
 */
@JsonIgnore
@Override
public ThetaSketchEstimatePostAggregation withFields(List<PostAggregation> fields) {
    if (fields.size() == 1) {
        return withField(fields.get(0));
    }
    LOG.error(INVALID_NUMBER_OF_FIELDS.logFormat(fields));
    throw new IllegalArgumentException(INVALID_NUMBER_OF_FIELDS.format(fields));
}
}
/**
 * Builds the exception reported when no table satisfies the aggregatability constraint.
 *
 * @return a NoMatchFoundException listing the non-aggregatable and aggregatable request dimensions
 */
@Override
public NoMatchFoundException noneFoundException() {
    Set<String> aggregatable = requestConstraint.getRequestDimensions().stream()
            .filter(Dimension::isAggregatable)
            .map(Dimension::getApiName)
            .collect(Collectors.toSet());
    Set<String> nonAggregatable = requestConstraint.getRequestDimensions().stream()
            .filter(dimension -> !dimension.isAggregatable())
            .map(Dimension::getApiName)
            .collect(Collectors.toSet());
    LOG.error(MESSAGE_FORMAT.logFormat(nonAggregatable, aggregatable));
    return new NoMatchFoundException(MESSAGE_FORMAT.format(nonAggregatable, aggregatable));
}
}
/**
 * Verifies that the passed parameter is greater than 0.
 *
 * @param parameter  The parameter to be validated.
 * @param parameterName  The name of the parameter to appear in the error message
 *
 * @throws BadPaginationException if 'parameter' is not greater than 0.
 */
private static void validate(int parameter, String parameterName) throws BadPaginationException {
    // Guard clause: valid values need no further work.
    if (parameter >= MINIMAL_VALUE) {
        return;
    }
    ErrorMessageFormat invalidParameter = ErrorMessageFormat.PAGINATION_PARAMETER_INVALID;
    LOG.debug(invalidParameter.logFormat(parameterName, parameter));
    throw new BadPaginationException(invalidParameter.format(parameterName, parameter));
}
@Override public void invoke(Throwable error) { LOG.error(ErrorMessageFormat.FAILED_TO_SEND_QUERY_TO_DRUID.logFormat(druidQuery), error); responseEmitter.onError(new ResponseException( Status.INTERNAL_SERVER_ERROR, druidQuery, error, objectMappers.getMapper().writer() )); } };
/** * Constructor. * * @param physicalTable The physical table of the data source. It must have only 1 backing data source. */ public TableDataSource(ConstrainedTable physicalTable) { super(DefaultDataSourceType.TABLE, physicalTable); if (physicalTable.getDataSourceNames().size() > 1) { LOG.error(TOO_MANY_BACKING_DATA_SOURCES.logFormat(getPhysicalTable())); throw new IllegalArgumentException(TOO_MANY_BACKING_DATA_SOURCES.format(getPhysicalTable())); } }
/**
 * Returns the name of the single backing data source of the physical table.
 *
 * @return the backing data source name
 *
 * @throws IllegalArgumentException if the physical table has no backing data sources
 */
public String getName() {
    return getPhysicalTable().getDataSourceNames()
            .stream()
            .findFirst()
            .orElseThrow(() -> {
                LOG.error(TOO_FEW_BACKING_DATA_SOURCES.logFormat(getPhysicalTable()));
                return new IllegalArgumentException(TOO_FEW_BACKING_DATA_SOURCES.format(getPhysicalTable()));
            })
            .asName();
}
/**
 * Extracts the operation to be performed by the ApiJobStore filter query.
 *
 * @param tokenizedQuery  The tokenized filter expression.
 *
 * @return The operation to be performed by the ApiJobStore filter query.
 *
 * @throws BadFilterException if the operation does not exist
 */
private FilterOperation extractOperation(Matcher tokenizedQuery) throws BadFilterException {
    String requestedOperation = tokenizedQuery.group(2);
    try {
        return DefaultFilterOperation.fromString(requestedOperation);
    } catch (IllegalArgumentException ignored) {
        // Unknown operation name: surface it to the caller as a bad filter.
        LOG.debug(FILTER_OPERATOR_INVALID.logFormat(requestedOperation));
        throw new BadFilterException(FILTER_OPERATOR_INVALID.format(requestedOperation));
    }
}
/**
 * Extracts the operation to be performed by the having query.
 *
 * @param query  The parsed having query
 *
 * @return The operation to be performed by the having query.
 *
 * @throws BadHavingException if the operation name in the query is malformed.
 */
private HavingOperation extractOperation(Matcher query) throws BadHavingException {
    String requestedOperation = query.group(2);
    try {
        return HavingOperation.fromString(requestedOperation);
    } catch (IllegalArgumentException ignored) {
        // Unknown operation name: surface it to the caller as a bad having clause.
        LOG.debug(HAVING_OPERATOR_INVALID.logFormat(requestedOperation));
        throw new BadHavingException(HAVING_OPERATOR_INVALID.format(requestedOperation));
    }
}
/**
 * Propagates a Druid error response to the response emitter as a ResponseException.
 *
 * @param statusCode  The HTTP status code returned by Druid
 * @param reason  The reason phrase accompanying the status code
 * @param responseBody  The body of the Druid error response
 */
@Override
public void invoke(int statusCode, String reason, String responseBody) {
    LOG.error(ErrorMessageFormat.ERROR_FROM_DRUID.logFormat(responseBody, statusCode, reason, druidQuery));
    ResponseException failure = new ResponseException(
            statusCode,
            reason,
            responseBody,
            druidQuery,
            null,
            getObjectMappers().getMapper().writer()
    );
    responseEmitter.onError(failure);
}
};
/**
 * Extracts the JobField to be examined from the tokenizedQuery.
 *
 * @param tokenizedQuery  The tokenized filter expression.
 *
 * @return The JobField to be examined
 *
 * @throws BadFilterException if the JobField does not exist
 */
private JobField extractJobField(Matcher tokenizedQuery) throws BadFilterException {
    String fieldName = tokenizedQuery.group(1);
    // Linear scan over the small, fixed set of job fields.
    for (DefaultJobField jobField : DefaultJobField.values()) {
        if (jobField.getName().equals(fieldName)) {
            return jobField;
        }
    }
    LOG.debug(FILTER_JOBFIELD_UNDEFINED.logFormat(fieldName, DefaultJobField.values()));
    throw new BadFilterException(FILTER_JOBFIELD_UNDEFINED.format(fieldName, DefaultJobField.values()));
}
@Override public void serialize(Dimension value, JsonGenerator gen, SerializerProvider provider) throws IOException { String apiName = value.getApiName(); String physicalName = SerializerUtil.findPhysicalName(value, gen).orElseThrow(() -> { LOG.error(ErrorMessageFormat.PHYSICAL_NAME_NOT_FOUND.logFormat(value.getApiName())); return new IllegalStateException(ErrorMessageFormat.PHYSICAL_NAME_NOT_FOUND.format()); } ); // serialize to only apiName if api and physical name is same or there are nested queries if (physicalName.equals(apiName) || SerializerUtil.hasInnerQuery(gen)) { gen.writeString(apiName); } else { gen.writeObject(new DefaultDimensionSpec(physicalName, apiName, value)); } } }
/**
 * Builds the exception reported when no table satisfies the full request constraint.
 *
 * @return a NoMatchFoundException describing the table, dimensions, metrics, and granularity
 * that failed to match
 */
@Override
public NoMatchFoundException noneFoundException() {
    String tableName = requestConstraint.getLogicalTable().getName();
    Set<String> metricNames = requestConstraint.getLogicalMetricNames();
    Set<String> dimensionNames = requestConstraint.getAllDimensionNames();
    String granularityName = requestConstraint.getMinimumGranularity().getName();
    LOG.error(MESSAGE_FORMAT.logFormat(tableName, dimensionNames, metricNames, granularityName));
    return new NoMatchFoundException(
            MESSAGE_FORMAT.format(tableName, dimensionNames, metricNames, granularityName)
    );
}
}