/**
 * Returns the id of the query this object was created for.
 */
@Override
public String getId()
{
  return query.getId();
}
/**
 * Extracts the result-level cache identifier (etag) from a cached payload,
 * or returns null when there is no cached payload.
 *
 * The payload begins with a 4-byte big-endian length prefix followed by that
 * many etag bytes.
 */
private String extractEtagFromResults(final byte[] cachedResult)
{
  if (cachedResult == null) {
    return null;
  }
  log.debug("Fetching result level cache identifier for query: %s", query.getId());
  final int etagStart = Integer.BYTES;
  final int etagEnd = etagStart + ByteBuffer.wrap(cachedResult, 0, Integer.BYTES).getInt();
  return StringUtils.fromUtf8(Arrays.copyOfRange(cachedResult, etagStart, etagEnd));
}
/**
 * Records a transport-level failure so that subsequent reads of the response
 * stream surface it to the caller.
 */
@Override
public void exceptionCaught(final ClientResponse<InputStream> clientResponse, final Throwable e)
{
  setupResponseReadFailure(
      StringUtils.format(
          "Query[%s] url[%s] failed with exception msg [%s]",
          query.getId(),
          url,
          e.getMessage()
      ),
      e
  );
}
/**
 * Records the query's id as a metric dimension, normalizing a null id to a
 * non-null placeholder first.
 */
@Override
public void queryId(QueryType query)
{
  final String idValue = StringUtils.nullToEmptyNonDruidDataString(query.getId());
  setDimension(DruidMetrics.ID, idValue);
}
/**
 * Initializes this object to execute a specific query. Does not actually execute the query.
 *
 * @param baseQuery the query
 */
@SuppressWarnings("unchecked")
public void initialize(final Query baseQuery)
{
  transition(State.NEW, State.INITIALIZED);
  // Every query must carry an id; generate one when the caller did not supply it.
  final String suppliedId = baseQuery.getId();
  final String queryId = Strings.isNullOrEmpty(suppliedId) ? UUID.randomUUID().toString() : suppliedId;
  this.baseQuery = baseQuery.withId(queryId);
  this.toolChest = warehouse.getToolChest(baseQuery);
}
/**
 * Adds {@code bytes} to the running scatter-gather total and fails the response
 * read if the configured limit has been exceeded.
 */
private void checkTotalBytesLimit(long bytes)
{
  // Long.MAX_VALUE means "unlimited"; skip the accounting entirely in that case.
  if (maxScatterGatherBytes == Long.MAX_VALUE) {
    return;
  }
  final long gatheredSoFar = totalBytesGathered.addAndGet(bytes);
  if (gatheredSoFar > maxScatterGatherBytes) {
    final String msg = StringUtils.format(
        "Query[%s] url[%s] max scatter-gather bytes limit reached.",
        query.getId(),
        url
    );
    setupResponseReadFailure(msg, null);
    throw new RE(msg);
  }
}
};
/**
 * Returns the number of milliseconds left before the query's deadline, failing
 * the response read and throwing if the deadline has already passed.
 */
private long checkQueryTimeout()
{
  final long remainingMillis = timeoutAt - System.currentTimeMillis();
  if (remainingMillis > 0) {
    return remainingMillis;
  }
  final String msg = StringUtils.format("Query[%s] url[%s] timed out.", query.getId(), url);
  setupResponseReadFailure(msg, null);
  throw new RE(msg);
}
/**
 * Takes the next chunk off the queue, waiting no longer than the query's
 * remaining time budget, and releases backpressure once the queued byte count
 * drops below the limit.
 *
 * @throws InterruptedException if interrupted while waiting
 */
private InputStream dequeue() throws InterruptedException
{
  final InputStreamHolder chunk = queue.poll(checkQueryTimeout(), TimeUnit.MILLISECONDS);
  if (chunk == null) {
    throw new RE("Query[%s] url[%s] timed out.", query.getId(), url);
  }
  final long remainingQueuedBytes = queuedByteCount.addAndGet(-chunk.getLength());
  if (usingBackpressure && remainingQueuedBytes < maxQueuedBytes) {
    // Queue drained below the limit: let the channel flow again and record how long it was suspended.
    final long suspendedTime = Preconditions
        .checkNotNull(trafficCopRef.get(), "No TrafficCop, how can this be?")
        .resume(chunk.getChunkNum());
    channelSuspendedTime.addAndGet(suspendedTime);
  }
  return chunk.getStream();
}
private ResultLevelCachePopulator createResultLevelCachePopulator( String cacheKeyStr, String resultSetId ) { if (resultSetId != null && populateResultCache) { ResultLevelCachePopulator resultLevelCachePopulator = new ResultLevelCachePopulator( cache, objectMapper, ResultLevelCacheUtil.computeResultLevelCacheKey(cacheKeyStr), cacheConfig, true ); try { // Save the resultSetId and its length resultLevelCachePopulator.cacheObjectStream.write(ByteBuffer.allocate(Integer.BYTES) .putInt(resultSetId.length()) .array()); resultLevelCachePopulator.cacheObjectStream.write(StringUtils.toUtf8(resultSetId)); } catch (IOException ioe) { log.error(ioe, "Failed to write cached values for query %s", query.getId()); return null; } return resultLevelCachePopulator; } else { return null; } }
// Summarize the completed request: total bytes returned from this url and elapsed time.
// (The remaining format arguments — millis and bytes/sec — are presumably supplied on
// lines past this view; not visible here.)
log.debug(
    "Completed queryId[%s] request to url[%s] with %,d bytes returned in %,d millis [%,f b/s].",
    query.getId(),
    url,
    totalByteCount.get(),
try {
  final Query query = requestLogLine.getQuery();
  // Stash per-query identifiers into the logging MDC so subsequent log lines for this
  // request can be correlated; null sql ids are normalized to a non-null placeholder.
  MDC.put("queryId", query.getId());
  MDC.put("sqlQueryId", StringUtils.nullToEmptyNonDruidDataString(query.getSqlQueryId()));
  MDC.put("dataSource", findInnerDatasource(query).toString());
/**
 * Tracks a running query future under its id and datasources, and deregisters
 * it automatically once the future completes.
 */
@Override
public void registerQuery(Query query, final ListenableFuture future)
{
  final String id = query.getId();
  final List<String> datasources = query.getDataSource().getNames();
  queries.put(id, future);
  queryDatasources.putAll(id, datasources);
  // Clean up the bookkeeping as soon as the query finishes, regardless of outcome.
  future.addListener(
      () -> {
        queries.remove(id, future);
        for (String datasource : datasources) {
          queryDatasources.remove(id, datasource);
        }
      },
      Execs.directExecutor()
  );
}
private Sequence<T> deserializeResults(final byte[] cachedResult, CacheStrategy strategy, String resultSetId) { if (cachedResult == null) { log.error("Cached result set is null"); } final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache(true); final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz(); //Skip the resultsetID and its length bytes Sequence<T> cachedSequence = Sequences.simple(() -> { try { int resultOffset = Integer.BYTES + resultSetId.length(); return objectMapper.readValues( objectMapper.getFactory().createParser( cachedResult, resultOffset, cachedResult.length - resultOffset ), cacheObjectClazz ); } catch (IOException e) { throw new RE(e, "Failed to retrieve results from cache for query ID [%s]", query.getId()); } }); return Sequences.map(cachedSequence, pullFromCacheFunction); }
if (inputQuery != null) {
  targetServer = hostFinder.pickServer(inputQuery);
  // Assign a fresh random id when the client did not supply one, so the query can be
  // identified downstream.
  if (inputQuery.getId() == null) {
    inputQuery = inputQuery.withId(UUID.randomUUID().toString());
// Parse the request body into a Query and hand it to the lifecycle; the id read back here
// is presumably guaranteed non-null by initialization — confirm against QueryLifecycle.
queryLifecycle.initialize(readQuery(req, in, context));
query = queryLifecycle.getQuery();
final String queryId = query.getId();
// First response chunk: enforce the scatter-gather byte limit before accepting it, then
// record time-to-first-byte against the per-node response metrics.
checkTotalBytesLimit(response.getContent().readableBytes());
log.debug("Initial response from url[%s] for queryId[%s]", url, query.getId());
responseStartTimeNs = System.nanoTime();
acquireResponseMetrics().reportNodeTimeToFirstByte(responseStartTimeNs - requestStartTimeNs).emit(emitter);
// Identifiers match, so the cached result set is still valid: deserialize and return it.
log.debug("Return cached result set as there is no change in identifiers for query %s ", query.getId());
return deserializeResults(cachedResultSet, strategy, existingResultSetId);
} else {