/**
 * Constructor. Analyzes the given filter and stores the per-type counts.
 *
 * @param filter  The filter whose structure is to be analyzed
 */
public DruidFilterInfo(Filter filter) {
    this.numEachFilterType = buildFilterCount(filter);
}
/**
 * Bumps the running total of queries answered from the fact cache.
 */
public static void incrementCountCacheHits() {
    getBardQueryInfo().factCacheHitCount.incrementAndGet();
}
/**
 * Exports the current thread's request log object as a formatted string, leaving the
 * log itself untouched (no reset).
 *
 * @return the formatted representation of the current request log
 */
public static String export() {
    RequestLog requestLog = RLOG.get();
    record(new Durations(requestLog.aggregateDurations()));
    record(new Threads(requestLog.threadIds));
    return LogFormatterProvider.getInstance().format(requestLog.info);
}
BardQueryInfo.getBardQueryInfo().incrementCountCacheHits(); RequestLog logCtx = RequestLog.dump(); nextResponse.processResponse(
REGISTRY.meter("request.logical.table." + table.getName() + "." + table.getGranularity()).mark(); RequestLog.record(new BardQueryInfo(druidQuery.getQueryType().toJson())); RequestLog.record( new DataRequest( table, request.getIntervals(),
BardQueryInfo.getBardQueryInfo().incrementCountWeightCheck(); final WeightCheckResponseProcessor weightCheckResponse = new WeightCheckResponseProcessor(response); final DruidAggregationQuery<?> weightEvaluationQuery = queryWeightUtil.makeWeightEvaluationQuery(druidQuery);
for (Set<ApiFilter> filterSet : filterSuperSet) { for (ApiFilter apiFilter : filterSet) { Filter filter = new Filter(apiFilter); this.filters.add(filter); this.combinedDimensions.add(filter.dimension);
/**
 * Posts the druid query to a selected druid web service, wiring the response
 * processor's success, error, and failure callbacks into the call.
 *
 * @param context  Context for the Request pipeline
 * @param request  The data API request being served
 * @param druidQuery  The druid aggregation query to post
 * @param response  The response processor that will consume the druid result
 *
 * @return true, since this handler always submits the query
 */
@Override
public boolean handleRequest(
        final RequestContext context,
        final DataApiRequest request,
        final DruidAggregationQuery<?> druidQuery,
        final ResponseProcessor response
) {
    // Count this as a fact query before dispatching it
    BardQueryInfo.incrementCountFactHits();

    // On success, hand the JSON result to the processor along with a copy of the request log context
    SuccessCallback onSuccess = new SuccessCallback() {
        @Override
        public void invoke(JsonNode rootNode) {
            response.processResponse(rootNode, druidQuery, new LoggingContext(RequestLog.copy()));
        }
    };
    HttpErrorCallback onError = response.getErrorCallback(druidQuery);
    FailureCallback onFailure = response.getFailureCallback(druidQuery);

    DruidWebService webService = druidWebServiceSelector.select(context, request, druidQuery);
    webService.postDruidQuery(context, onSuccess, onError, onFailure, druidQuery);
    return true;
}
}
/** * Intercept the Container request to add length of request and a start timestamp. * * @param request Request to intercept * * @throws IOException if there's a problem processing the request */ @Override public void filter(ContainerRequestContext request) throws IOException { appendRequestId(request.getHeaders().getFirst(X_REQUEST_ID_HEADER)); RequestLog.startTiming(TOTAL_TIMER); try (TimedPhase timer = RequestLog.startTiming(this)) { RequestLog.record(new Preface(request)); // sets PROPERTY_REQ_LEN if content-length not defined lengthOfRequestEntity(request); // store start time to later calculate elapsed time request.setProperty(PROPERTY_NANOS, System.nanoTime()); } }
RequestLog.startTiming(RESPONSE_WORKFLOW_TIMER); RequestLog.record(new DruidResponse(druidQueryId));
try { RequestLog.startTiming(this); RequestLog.record(new FeatureFlagRequest(flagName));
try { RequestLog.startTiming(this); RequestLog.record(new TableRequest("all", "all"));
try { RequestLog.startTiming(this); RequestLog.record(new JobRequest(ticket)); apiRequest = new JobsApiRequestImpl( DefaultResponseFormatType.JSON.toString(),
try { RequestLog.startTiming(this); RequestLog.record(new SliceRequest(sliceName));
try { RequestLog.startTiming(this); RequestLog.record(new DimensionRequest("all", "no"));
RequestLog.record(new Epilogue(msg, status, responseLengthObserver));
BardQueryInfo.getBardQueryInfo().incrementCountCacheHits(); } else { // Current query is not in data cache
/**
 * Bumps the running total of weight-check queries issued.
 */
public static void incrementCountWeightCheck() {
    getBardQueryInfo().weightCheckCount.incrementAndGet();
}
}
BardQueryInfo.getBardQueryInfo().incrementCountCacheHits(); RequestLog logCtx = RequestLog.dump(); nextResponse.processResponse(
/**
 * Bumps the running total of fact queries issued.
 */
public static void incrementCountFactHits() {
    getBardQueryInfo().factQueryCount.incrementAndGet();
}