/**
 * Merge the contents of a list of Jackson array responses into a single array,
 * preserving the order of the input list.
 *
 * @param responses  Pairs of a JSON array node and the logging context captured
 *                   for the request that produced it
 *
 * @return a pair of the merged JSON array and a logging context holding the
 *         accumulated request log of every response
 */
private Pair<JsonNode, LoggingContext> mergeResponses(List<Pair<JsonNode, LoggingContext>> responses) {
    // Exact-decimal node factory, matching the original construction.
    JsonNodeFactory nodeFactory = new JsonNodeFactory(true);
    ArrayNode mergedArray = nodeFactory.arrayNode();

    // Re-establish this request's log context before folding in the per-split logs.
    RequestLog.restore(logCtx);

    for (Pair<JsonNode, LoggingContext> response : responses) {
        // Flatten each response's array elements into the merged array in order.
        for (JsonNode element : response.getKey()) {
            mergedArray.add(element);
        }
        // Fold the split's timing/log data into the current request log.
        RequestLog.accumulate(response.getValue().getRequestLog());
    }

    // Snapshot the aggregate log to hand back alongside the merged JSON.
    RequestLog aggregateLog = RequestLog.dump();
    return new Pair<>(mergedArray, new LoggingContext(aggregateLog));
}
}
// Success callback: forward the Druid JSON payload downstream, pairing it with
// a snapshot of the current request log so timing data crosses the async
// boundary intact (RequestLog is thread-local — presumably why a copy is taken
// here rather than later; TODO confirm against RequestLog's contract).
@Override public void invoke(JsonNode rootNode) { response.processResponse(rootNode, druidQuery, new LoggingContext(RequestLog.copy())); } };
/**
 * Record the response for one split interval; once every expected interval has
 * reported, merge all collected responses and forward the combined result to
 * the next processor.
 *
 * Thread-safety: multiple split responses may arrive concurrently. Slots are
 * claimed with {@code AtomicInteger.getAndSet(-1)} and completion is tracked
 * with {@code completed.decrementAndGet()}, so exactly one caller performs the
 * final merge.
 */
@Override public void processResponse(JsonNode json, DruidAggregationQuery<?> druidQuery, LoggingContext metadata) {
    // A sibling split already failed; discard this response silently.
    if (failed.get()) { return; }
    AtomicInteger sharedIndex;
    // NOTE(review): assumes each split query carries its identifying interval
    // at position 0 — an empty interval list would throw here; TODO confirm
    // the splitter guarantees a non-empty list.
    Interval interval = druidQuery.getIntervals().get(0);
    // An interval we never asked for (or null) means the response cannot be
    // slotted — fail the whole split set.
    if (interval == null || (sharedIndex = expectedIntervals.get(interval)) == null) { fail(UNEXPECTED_INTERVAL_FORMAT, druidQuery, interval); return; }
    int index;
    // getAndSet(-1) atomically claims this interval's slot: a duplicate
    // response observes -1 (or an already-filled slot) and is rejected.
    if ((index = sharedIndex.getAndSet(-1)) < 0 || completedIntervals.get(index) != null) { fail(EXTRA_RETURN_FORMAT, druidQuery, interval); return; }
    completedIntervals.set(index, new Pair<>(json, metadata));
    // Last split to arrive: merge everything (in slot order) and hand off the
    // merged JSON plus the aggregated request log, using the pre-split query.
    if (completed.decrementAndGet() == 0) { Pair<JsonNode, LoggingContext> mergedResponse = mergeResponses(completedIntervals); RequestLog.restore(mergedResponse.getValue().getRequestLog()); next.processResponse(mergedResponse.getKey(), queryBeforeSplit, mergedResponse.getValue()); } }
mapper.readTree(cacheEntry.getValue()), druidQuery, new LoggingContext(logCtx) );
@Override public void processResponse(JsonNode json, DruidAggregationQuery<?> druidQuery, LoggingContext metadata) { try { RequestLog.restore(metadata.getRequestLog()); ResultSet resultSet = buildResultSet(json, druidQuery, apiRequest.getTimeZone()); resultSet = mapResultSet(resultSet);
mapper.readTree(jsonResult), druidQuery, new LoggingContext(logCtx) ); return true;
LoggingContext copy = new LoggingContext(RequestLog.copy()); SuccessCallback success = rootNode -> { response.processResponse(