// NOTE(review): fragment — the opening of this expression and its closing
// parentheses fall outside this chunk; tokens preserved exactly as found.
    new QueryableIndexSegment(qIndexes.get(i), segmentId)
);
// Apply per-segment (pre-merge) decoration before the runner joins the merge set.
singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
// Merge all per-segment runners on the executor, finalize results, then apply
// the toolchest's post-merge decoration.
QueryRunner theRunner = toolChest.postMergeQueryDecoration(
    new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)),
        toolChest
/**
 * Post-merge counterpart of {@code makePreComputeManipulatorFn}: produces the function applied to
 * metrics in results after they have been completely merged together. Per the original contract,
 * that post-aggregation work should really happen inside the mergeResults() call, so this method
 * should never be overridden and should be removed as quickly as possible.
 *
 * @param query the Query currently being processed
 * @param fn    the function to apply to all metrics in the results
 *
 * @return a function applying {@code fn} to all metrics of the input ResultType object
 */
public Function<ResultType, ResultType> makePostComputeManipulatorFn(QueryType query, MetricManipulationFn fn)
{
  // Deliberately identical to the pre-compute path; this delegation is the whole contract.
  Function<ResultType, ResultType> manipulator = makePreComputeManipulatorFn(query, fn);
  return manipulator;
}
/**
 * Lazily creates the per-query response metrics on first use, tagging them with this server's
 * host; subsequent calls return the cached instance.
 */
private QueryMetrics<? super Query<T>> acquireResponseMetrics()
{
  if (queryMetrics != null) {
    return queryMetrics;
  }
  queryMetrics = toolChest.makeMetrics(query);
  queryMetrics.server(host);
  return queryMetrics;
}
/**
 * Runs the query through a pre-merge-decorated, merged, finalizing runner and materializes the
 * resulting sequence into a list.
 */
private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
{
  final QueryToolChest toolChest = factory.getToolchest();
  // Decorate before merging, then finalize the merged results.
  final QueryRunner<T> decorated = toolChest.preMergeQueryDecoration(runner);
  final QueryRunner<T> finalizingRunner = new FinalizeResultsQueryRunner<>(toolChest.mergeResults(decorated), toolChest);
  return finalizingRunner.run(QueryPlus.wrap(query), new HashMap<>()).toList();
}
/**
 * Executes the query against the given index via the conglomerate-selected factory, merging the
 * single segment runner's results.
 */
private static <T> Sequence<T> executeQuery(final Injector injector, final QueryableIndex index, final Query<T> query)
{
  final QueryRunnerFactoryConglomerate conglomerate = injector.getInstance(QueryRunnerFactoryConglomerate.class);
  final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
  // Wrap the index as a single dummy segment and build its runner.
  final QueryRunner<T> baseRunner = factory.createRunner(new QueryableIndexSegment(index, SegmentId.dummy("segment")));
  final QueryRunner<T> mergedRunner =
      factory.getToolchest().mergeResults(factory.mergeRunners(Execs.directExecutor(), ImmutableList.of(baseRunner)));
  return mergedRunner.run(QueryPlus.wrap(query), new HashMap<>());
}
// NOTE(review): this snippet is garbled/truncated — FinalizeResultsQueryRunner appears to
// receive several mergeResults arguments, multiple closing parentheses are missing, and the
// trailing return belongs to a different scope in the original file. Tokens preserved as-is;
// do not expect this to compile.
GroupByQuery pushDownQuery = nestedQuery;
QueryRunner<Row> segment1Runner = new FinalizeResultsQueryRunner<Row>(
    toolChest.mergeResults(
        groupByFactory.mergeRunners(executorService, getQueryRunnerForSegment1())
    ),
    toolChest.mergeResults(
        groupByFactory2.mergeRunners(executorService, getQueryRunnerForSegment2())
    ),
    toolChest.mergeResults(
        // Deserialize metrics from each segment's results before merging.
        (queryPlus, responseContext) -> Sequences
            .simple(
                Sequences.map(
                    segment1Runner.run(queryPlus, responseContext),
                    toolChest.makePreComputeManipulatorFn(
                        (GroupByQuery) queryPlus.getQuery(),
                        MetricManipulatorFns.deserializing()
                Sequences.map(
                    segment2Runner.run(queryPlus, responseContext),
                    toolChest.makePreComputeManipulatorFn(
                        (GroupByQuery) queryPlus.getQuery(),
                        MetricManipulatorFns.deserializing()
return toolChest.mergeResults((queryPlus, responseContext) -> pushDownQueryResults)
    .run(QueryPlus.wrap(nestedQuery), context);
/**
 * Builds one pre-merge-decorated runner per segment, each backed by a dummy SegmentId named
 * {@code "qIndex" + i}.
 */
private List<QueryRunner<Row>> makeMultiRunners()
{
  final List<QueryRunner<Row>> perSegmentRunners = new ArrayList<>();
  for (int segmentIndex = 0; segmentIndex < numSegments; segmentIndex++) {
    final String segmentName = "qIndex" + segmentIndex;
    final QueryRunner<Row> baseRunner = QueryBenchmarkUtil.makeQueryRunner(
        factory,
        SegmentId.dummy(segmentName),
        new QueryableIndexSegment(queryableIndexes.get(segmentIndex), SegmentId.dummy(segmentName))
    );
    perSegmentRunners.add(factory.getToolchest().preMergeQueryDecoration(baseRunner));
  }
  return perSegmentRunners;
}
}
// NOTE(review): truncated fragment — the constructParametricType call and surrounding closing
// parentheses/braces are cut off in this chunk. Tokens preserved exactly as found.
if (types == null) {
  final TypeFactory typeFactory = objectMapper.getTypeFactory();
  // Base result type published by the toolchest for this query type.
  JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference());
  JavaType bySegmentType = typeFactory.constructParametricType(
      Result.class,
// Deserialize metric values in the result stream via the toolchest's pre-compute manipulator.
retVal = Sequences.map(
    retVal,
    toolChest.makePreComputeManipulatorFn(
        query,
        MetricManipulatorFns.deserializing()
/**
 * Wires up a runner that can populate and/or use the broker-side result-level cache for the
 * given query, depending on the query's cache strategy and the cache configuration.
 *
 * @param baseRunner     delegate runner producing the uncached results
 * @param queryToolChest toolchest used only to look up the query's cache strategy
 * @param query          the query this runner will serve
 * @param objectMapper   mapper for (de)serializing cached results
 * @param cache          the result-level cache backend
 * @param cacheConfig    cache feature flags/limits
 */
public ResultLevelCachingQueryRunner(
    QueryRunner baseRunner,
    QueryToolChest queryToolChest,
    Query<T> query,
    ObjectMapper objectMapper,
    Cache cache,
    CacheConfig cacheConfig
)
{
  this.baseRunner = baseRunner;
  this.objectMapper = objectMapper;
  this.cache = cache;
  this.cacheConfig = cacheConfig;
  this.query = query;
  // strategy must be resolved before the populate/use flags, which derive from it.
  this.strategy = queryToolChest.getCacheStrategy(query);
  this.populateResultCache = ResultLevelCacheUtil.populateResultLevelCacheOnBrokers(query, strategy, cacheConfig);
  this.useResultCache = ResultLevelCacheUtil.useResultLevelCacheOnBrokers(query, strategy, cacheConfig);
}
private Set<ServerToSegment> computeSegmentsToQuery(TimelineLookup<String, ServerSelector> timeline) { final List<TimelineObjectHolder<String, ServerSelector>> serversLookup = toolChest.filterSegments( query, intervals.stream().flatMap(i -> timeline.lookup(i).stream()).collect(Collectors.toList()) ); final Set<ServerToSegment> segments = new LinkedHashSet<>(); final Map<String, Optional<RangeSet<String>>> dimensionRangeCache = new HashMap<>(); // Filter unneeded chunks based on partition dimension for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) { final Set<PartitionChunk<ServerSelector>> filteredChunks = DimFilterUtils.filterShards( query.getFilter(), holder.getObject(), partitionChunk -> partitionChunk.getObject().getSegment().getShardSpec(), dimensionRangeCache ); for (PartitionChunk<ServerSelector> chunk : filteredChunks) { ServerSelector server = chunk.getObject(); final SegmentDescriptor segment = new SegmentDescriptor( holder.getInterval(), holder.getVersion(), chunk.getChunkNumber() ); segments.add(new ServerToSegment(server, segment)); } } return segments; }
/**
 * Executes the query: decorates the runner pre-merge, merges, finalizes, and collects the
 * resulting sequence into a list.
 */
private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
{
  final QueryToolChest toolChest = factory.getToolchest();
  final QueryRunner<T> mergingRunner =
      new FinalizeResultsQueryRunner<>(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)), toolChest);
  final Sequence<T> results = mergingRunner.run(QueryPlus.wrap(query), new HashMap<>());
  return results.toList();
}
@Override public <T> QueryRunner<T> getQueryRunnerForIntervals(final Query<T> query, Iterable<Interval> intervals) { final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query); final Map<Integer, FireChief> partitionChiefs = chiefs.get(Iterables.getOnlyElement(query.getDataSource() .getNames())); return partitionChiefs == null ? new NoopQueryRunner<T>() : factory.getToolchest().mergeResults( factory.mergeRunners( Execs.directExecutor(), // Chaining query runners which wait on submitted chain query runners can make executor pools deadlock Iterables.transform( partitionChiefs.values(), new Function<FireChief, QueryRunner<T>>() { @Override public QueryRunner<T> apply(FireChief fireChief) { return fireChief.getQueryRunner(query); } } ) ) ); }
/**
 * Builds a single-element runner list for the group-by index at {@code qIndexNumber}, decorated
 * by groupByFactory2's toolchest pre-merge step.
 */
private List<QueryRunner<Row>> getRunner2(int qIndexNumber)
{
  final QueryableIndex index = groupByIndices.get(qIndexNumber);
  final QueryRunner<Row> baseRunner = makeQueryRunner(
      groupByFactory2,
      SegmentId.dummy(index.toString()),
      new QueryableIndexSegment(index, SegmentId.dummy(index.toString()))
  );
  final List<QueryRunner<Row>> runners = new ArrayList<>();
  runners.add(groupByFactory2.getToolchest().preMergeQueryDecoration(baseRunner));
  return runners;
}
// NOTE(review): truncated fragment — closing parentheses/braces for the type construction and
// the surrounding block are cut off in this chunk. Tokens preserved exactly as found.
if (types == null) {
  final TypeFactory typeFactory = objectMapper.getTypeFactory();
  // Base result type published by the toolchest for this query type.
  JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference());
  // Result<BySegmentResultValueClass<baseType>> — the by-segment response envelope.
  JavaType bySegmentType = typeFactory.constructParametricType(
      Result.class,
      typeFactory.constructParametricType(BySegmentResultValueClass.class, baseType)
// Deserialize metric values in the result stream via the toolchest's pre-compute manipulator.
retVal = Sequences.map(
    retVal,
    toolChest.makePreComputeManipulatorFn(
        query,
        MetricManipulatorFns.deserializing()
/**
 * Captures everything needed to run one specific query on the broker: cache strategy and
 * cache flags, by-segment mode, uncovered-interval reporting limit, the context-adjusted
 * downstream query, and the innermost query's intervals.
 *
 * @param queryPlus       the query plus its lane/context wrappers
 * @param responseContext mutable response context shared with downstream runners
 */
SpecificQueryRunnable(final QueryPlus<T> queryPlus, final Map<String, Object> responseContext)
{
  this.queryPlus = queryPlus;
  this.responseContext = responseContext;
  this.query = queryPlus.getQuery();
  this.toolChest = warehouse.getToolChest(query);
  // Strategy must be resolved before the cache flags, which derive from it.
  this.strategy = toolChest.getCacheStrategy(query);

  this.useCache = CacheUtil.useCacheOnBrokers(query, strategy, cacheConfig);
  this.populateCache = CacheUtil.populateCacheOnBrokers(query, strategy, cacheConfig);
  this.isBySegment = QueryContexts.isBySegment(query);
  // Note that enabling this leads to putting uncovered intervals information in the response headers
  // and might blow up in some cases https://github.com/apache/incubator-druid/issues/2108
  this.uncoveredIntervalsLimit = QueryContexts.getUncoveredIntervalsLimit(query);
  this.downstreamQuery = query.withOverriddenContext(makeDownstreamQueryContext());
  // For nested queries, we need to look at the intervals of the inner most query.
  this.intervals = query.getIntervalsOfInnerMostQuery();
}
/**
 * Runs the query against every timeline segment overlapping the query intervals (after toolchest
 * filtering) and merges the per-segment sequences by the query's result ordering.
 */
@Override
public Sequence<T> run(QueryPlus<T> queryPlus, Map<String, Object> responseContext)
{
  final Query<T> query = queryPlus.getQuery();

  // Gather every timeline entry overlapping the query's intervals.
  final List<TimelineObjectHolder> candidateHolders = new ArrayList<>();
  for (Interval interval : query.getIntervals()) {
    candidateHolders.addAll(timeline.lookup(interval));
  }

  final List<Sequence<T>> perSegmentSequences = new ArrayList<>();
  for (TimelineObjectHolder<String, Segment> holder : toolChest.filterSegments(query, candidateHolders)) {
    // Only chunk 0 is consulted; each holder is treated as a single-chunk segment here.
    final Segment segment = holder.getObject().getChunk(0).getObject();
    final QueryPlus segmentQueryPlus = queryPlus.withQuerySegmentSpec(
        new SpecificSegmentSpec(new SegmentDescriptor(holder.getInterval(), holder.getVersion(), 0))
    );
    perSegmentSequences.add(factory.createRunner(segment).run(segmentQueryPlus, responseContext));
  }
  return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(perSegmentSequences));
}
}
// NOTE(review): fragment — the opening of this expression and its closing
// parentheses fall outside this chunk; tokens preserved exactly as found.
    new QueryableIndexSegment(qIndexes.get(i), segmentId)
);
// Apply per-segment (pre-merge) decoration before the runner joins the merge set.
singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
// Merge all per-segment runners on the executor, finalize results, then apply
// the toolchest's post-merge decoration.
QueryRunner theRunner = toolChest.postMergeQueryDecoration(
    new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)),
        toolChest
/**
 * Convenience harness: wraps the runner in pre-merge decoration, result merging, and
 * finalization, then drains the query's sequence into a list.
 */
private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
{
  final QueryToolChest chest = factory.getToolchest();
  final QueryRunner<T> decoratedRunner = chest.preMergeQueryDecoration(runner);
  final QueryRunner<T> finalizedRunner = new FinalizeResultsQueryRunner<>(chest.mergeResults(decoratedRunner), chest);
  return finalizedRunner.run(QueryPlus.wrap(query), new HashMap<>()).toList();
}
/**
 * Unwraps the delegating query, then merges and runs the wrapped runner via the toolchest
 * registered for the real (unwrapped) query type.
 */
@Override
public Sequence run(QueryPlus queryPlus, Map responseContext)
{
  Query realQuery = getRealQuery(queryPlus.getQuery());
  return warehouse.getToolChest(realQuery).mergeResults(runner).run(queryPlus.withQuery(realQuery), responseContext);
}
};
/**
 * Builds a single-element runner list for group-by index 1, decorated by the
 * too-small factory's toolchest pre-merge step.
 */
private List<QueryRunner<Row>> getRunner2()
{
  final QueryableIndex secondIndex = groupByIndices.get(1);
  final QueryRunner<Row> baseRunner = makeQueryRunner(
      tooSmallGroupByFactory,
      SegmentId.dummy(secondIndex.toString()),
      new QueryableIndexSegment(secondIndex, SegmentId.dummy(secondIndex.toString()))
  );
  final List<QueryRunner<Row>> runners = new ArrayList<>();
  runners.add(tooSmallGroupByFactory.getToolchest().preMergeQueryDecoration(baseRunner));
  return runners;
}