/**
 * Execute the query. Can only be called if the query has been authorized. Note that query logs and metrics will
 * not be emitted automatically when the Sequence is fully iterated. It is the caller's responsibility to call
 * {@link #emitLogsAndMetrics(Throwable, String, long)} to emit logs and metrics.
 *
 * @return result sequence and response context
 */
public QueryResponse execute()
{
  transition(State.AUTHORIZED, State.EXECUTING);

  // Fresh response context for this execution; the runner populates it as results stream.
  final Map<String, Object> context = DirectDruidClient.makeResponseContextForQuery();

  final Sequence results = QueryPlus.wrap(baseQuery)
                                    .withIdentity(authenticationResult.getIdentity())
                                    .run(texasRanger, context);

  // A null sequence from the runner is treated as an empty result set.
  if (results == null) {
    return new QueryResponse(Sequences.empty(), context);
  }
  return new QueryResponse(results, context);
}
@Override public Sequence<Result<TimeseriesResultValue>> doRun( QueryRunner<Result<TimeseriesResultValue>> baseRunner, QueryPlus<Result<TimeseriesResultValue>> queryPlus, Map<String, Object> context ) { int limit = ((TimeseriesQuery) queryPlus.getQuery()).getLimit(); Sequence<Result<TimeseriesResultValue>> result = super.doRun( baseRunner, // Don't do post aggs until makePostComputeManipulatorFn() is called queryPlus.withQuery(((TimeseriesQuery) queryPlus.getQuery()).withPostAggregatorSpecs(ImmutableList.of())), context ); if (limit < Integer.MAX_VALUE) { return result.limit(limit); } return result; }
@SuppressWarnings("unchecked")
private Sequence<T> getSimpleServerResults(
    final QueryRunner serverRunner,
    final MultipleSpecificSegmentSpec segmentsOfServerSpec,
    long maxQueuedBytesPerServer
)
{
  // Narrow the query to the segments hosted by this server and apply the per-server queued-byte cap,
  // then hand the scoped query to the server's runner.
  final QueryPlus<T> scopedQuery = queryPlus
      .withQuerySegmentSpec(segmentsOfServerSpec)
      .withMaxQueuedBytes(maxQueuedBytesPerServer);
  return serverRunner.run(scopedQuery, responseContext);
}
@Override
public Sequence<Result<TopNResultValue>> run(
    QueryPlus<Result<TopNResultValue>> input,
    Map<String, Object> responseContext
)
{
  if (!(input.getQuery() instanceof TopNQuery)) {
    // Report the wrapped query's class, not the QueryPlus wrapper's class, so the error
    // actually names the offending query type (matches the sibling scan/select runners).
    throw new ISE("Got a [%s] which isn't a %s", input.getQuery().getClass(), TopNQuery.class);
  }
  TopNQuery query = (TopNQuery) input.getQuery();
  return queryEngine.query(query, segment.asStorageAdapter(), (TopNQueryMetrics) input.getQueryMetrics());
}
};
// Runs the given query through the full pre-merge / merge / finalize runner chain and
// materializes the resulting sequence as a list.
private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
{
  QueryToolChest toolChest = factory.getToolchest();

  // Decorate the base runner the way the query stack does: pre-merge decoration,
  // merged results, then finalization of aggregator values.
  QueryRunner<T> decorated = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)),
      toolChest
  );

  return decorated.run(QueryPlus.wrap(query), new HashMap<>()).toList();
}
@Override public Sequence<ScanResultValue> run(QueryPlus<ScanResultValue> queryPlus, Map<String, Object> responseContext) { Query<ScanResultValue> query = queryPlus.getQuery(); if (!(query instanceof ScanQuery)) { throw new ISE("Got a [%s] which isn't a %s", query.getClass(), ScanQuery.class); } // it happens in unit tests final Number timeoutAt = (Number) responseContext.get(CTX_TIMEOUT_AT); if (timeoutAt == null || timeoutAt.longValue() == 0L) { responseContext.put(CTX_TIMEOUT_AT, JodaUtils.MAX_INSTANT); } return engine.process((ScanQuery) query, segment, responseContext); } }
final QueryPlus<T> retryQueryPlus = queryPlus.withQuerySegmentSpec( new MultipleSpecificSegmentSpec( missingSegments return new MergeSequence<>(queryPlus.getQuery().getResultOrdering(), Sequences.simple(listOfSequences)) .toYielder(initValue, accumulator); } else {
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void queryNative(Blackhole blackhole)
{
  // Run the native groupBy query and sink every row into the blackhole
  // so the JIT cannot dead-code-eliminate the work being measured.
  final Sequence<Row> results = QueryPlus.wrap(groupByQuery).run(walker, new HashMap<>());
  for (Row row : results.toList()) {
    blackhole.consume(row);
  }
}
// Run the rewritten (bySegment) downstream query against this server, restricted to the
// segments it hosts and bounded by the per-server queued-byte limit.
// NOTE(review): enclosing method is outside this view — presumably the caller deserializes
// these bySegment results afterwards; verify against the surrounding code.
final Sequence<Result<BySegmentResultValueClass<T>>> resultsBySegments = serverRunner.run(
    queryPlus
        .withQuery((Query<Result<BySegmentResultValueClass<T>>>) downstreamQuery)
        .withQuerySegmentSpec(segmentsOfServerSpec)
        .withMaxQueuedBytes(maxQueuedBytesPerServer),
    responseContext
);
@Override
public Sequence<T> run(final QueryPlus<T> plus, final Map<String, Object> context)
{
  // Forward directly to the appenderator, which serves queries over its own data.
  return plus.run(appenderator, context);
}
};
@Override
public Sequence<T> apply(Query input)
{
  // Substitute the rewritten query into the carried QueryPlus and execute via the wrapped runner.
  return runner.run(queryPlus.withQuery(input), responseContext);
}
}
@Override
public Sequence<Result<SearchResultValue>> run(
    QueryPlus<Result<SearchResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Split the query into two fixed interval windows and concatenate their results in order.
  final QueryPlus<Result<SearchResultValue>> firstWindow = queryPlus.withQuerySegmentSpec(
      new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-01-12/2011-02-28")))
  );
  final QueryPlus<Result<SearchResultValue>> secondWindow = queryPlus.withQuerySegmentSpec(
      new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-03-01/2011-04-15")))
  );
  return Sequences.concat(runner.run(firstWindow, responseContext), runner.run(secondWindow, responseContext));
}
}
// Executes the query through the standard pre-merge / merge / finalize runner stack
// and collects the sequence into a list.
private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
{
  QueryToolChest toolChest = factory.getToolchest();

  // Chain the decorations the same way the broker does before handing results back.
  QueryRunner<T> chained = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)),
      toolChest
  );

  Sequence<T> resultSequence = chained.run(QueryPlus.wrap(query), new HashMap<>());
  return resultSequence.toList();
}
@Override
public Sequence<Result<SelectResultValue>> run(
    QueryPlus<Result<SelectResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  final Query<Result<SelectResultValue>> query = queryPlus.getQuery();
  // Guard against a non-select query being routed to this runner.
  if (!(query instanceof SelectQuery)) {
    throw new ISE("Got a [%s] which isn't a %s", query.getClass(), SelectQuery.class);
  }
  return engine.process((SelectQuery) query, segment);
}
}
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public <T> QueryRunner<T> getQueryRunner(Query<T> query) { if (appenderator == null) { // Not yet initialized, no data yet, just return a noop runner. return new NoopQueryRunner<>(); } return (queryPlus, responseContext) -> queryPlus.run(appenderator, responseContext); }
@SuppressWarnings("unchecked") private Sequence<T> getBySegmentServerResults( final QueryRunner serverRunner, final MultipleSpecificSegmentSpec segmentsOfServerSpec, long maxQueuedBytesPerServer ) { Sequence<Result<BySegmentResultValueClass<T>>> resultsBySegments = serverRunner .run( queryPlus.withQuerySegmentSpec(segmentsOfServerSpec).withMaxQueuedBytes(maxQueuedBytesPerServer), responseContext ); // bySegment results need to be de-serialized, see DirectDruidClient.run() return (Sequence<T>) resultsBySegments .map(result -> result.map( resultsOfSegment -> resultsOfSegment.mapResults( toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing())::apply ) )); }
@Override
public Sequence<T> apply(DataSource dataSource)
{
  // Narrow the query to this single datasource, then hand it to the base runner.
  return baseRunner.run(queryPlus.withQuery(query.withDataSource(dataSource)), responseContext);
}
}
@Override public Sequence<T> apply(Interval singleInterval) { return new AsyncQueryRunner<T>( //Note: it is assumed that toolChest.mergeResults(..) gives a query runner that is //not lazy i.e. it does most of its work on call to run() method toolChest.mergeResults( new MetricsEmittingQueryRunner<T>( emitter, toolChest, baseRunner, QueryMetrics::reportIntervalChunkTime, queryMetrics -> queryMetrics.chunkInterval(singleInterval) ).withWaitMeasuredFromNow() ), executor, queryWatcher ).run( queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(singleInterval))), responseContext ); } }