/**
 * Returns the intervals of the wrapped query.
 *
 * @return the underlying query's interval list
 */
@Override
public List<Interval> getIntervals()
{
  return query.getIntervals();
}
default List<Interval> getIntervalsOfInnerMostQuery() { if (getDataSource() instanceof QueryDataSource) { //noinspection unchecked return ((QueryDataSource) getDataSource()).getQuery().getIntervalsOfInnerMostQuery(); } else { return getIntervals(); } } }
@Override public <T> QueryRunner<T> getQueryRunner(final Query<T> query) { // Calling getQueryRunnerForIntervals here works because there's only one segment per interval for RealtimePlumber. return texasRanger.getQueryRunnerForIntervals(query, query.getIntervals()); }
/**
 * Records the query's intervals as a metric dimension, one ISO-8601 string
 * per interval.
 */
@Override
public void interval(QueryType query)
{
  checkModifiedFromOwnerThread();
  final String[] intervalStrings = query.getIntervals()
                                        .stream()
                                        .map(Interval::toString)
                                        .toArray(String[]::new);
  builder.setDimension(DruidMetrics.INTERVAL, intervalStrings);
}
/**
 * Returns the candidate server locations for the segments that the posted
 * query would touch, limited to {@code numCandidates} entries (-1 = no limit).
 * Errors while reading or resolving the query are reported via the response
 * context rather than propagated.
 */
@POST
@Path("/candidates")
@Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
@Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE})
@ResourceFilters(StateResourceFilter.class)
public Response getQueryTargets(
    InputStream in,
    @QueryParam("pretty") String pretty,
    @QueryParam("numCandidates") @DefaultValue("-1") int numCandidates,
    @Context final HttpServletRequest req
) throws IOException
{
  final ResponseContext context = createContext(req.getContentType(), pretty != null);
  try {
    // Deserialize the native query from the request body.
    final Query<?> query = context.getObjectMapper().readValue(in, Query.class);
    return context.ok(
        ServerViewUtil.getTargetLocations(
            brokerServerView,
            query.getDataSource(),
            query.getIntervals(),
            numCandidates
        )
    );
  }
  catch (Exception e) {
    // Boundary handler: report the failure through the response context.
    return context.gotError(e);
  }
}
}
.create(queryPlus.getQuery().getIntervals()) .transformCat( new Function<Interval, Iterable<Interval>>()
/**
 * Executes the native query planned from the given {@link DruidQuery},
 * dispatching on the concrete query type.
 *
 * @throws CannotBuildQueryException if requireTimeCondition is enabled and
 *                                   the innermost query has no __time filter
 * @throws ISE                       if the query type is not recognized
 */
public Sequence<Object[]> runQuery(final DruidQuery druidQuery)
{
  final Query nativeQuery = druidQuery.getQuery();
  final Query innerMost = findInnerMostQuery(nativeQuery);

  // A query whose innermost intervals are all of eternity carries no
  // __time condition, which requireTimeCondition forbids.
  if (plannerContext.getPlannerConfig().isRequireTimeCondition()
      && innerMost.getIntervals().equals(Intervals.ONLY_ETERNITY)) {
    throw new CannotBuildQueryException(
        "requireTimeCondition is enabled, all queries must include a filter condition on the __time column"
    );
  }

  if (nativeQuery instanceof TimeseriesQuery) {
    return executeTimeseries(druidQuery, (TimeseriesQuery) nativeQuery);
  }
  if (nativeQuery instanceof TopNQuery) {
    return executeTopN(druidQuery, (TopNQuery) nativeQuery);
  }
  if (nativeQuery instanceof GroupByQuery) {
    return executeGroupBy(druidQuery, (GroupByQuery) nativeQuery);
  }
  if (nativeQuery instanceof ScanQuery) {
    return executeScan(druidQuery, (ScanQuery) nativeQuery);
  }
  if (nativeQuery instanceof SelectQuery) {
    return executeSelect(druidQuery, (SelectQuery) nativeQuery);
  }
  throw new ISE("Cannot run query of class[%s]", nativeQuery.getClass().getName());
}
/**
 * Resolves a runner for the query's intervals on the given client and runs
 * the query against it.
 */
private static <T> Sequence<T> runQuery(
    CachingClusteredClient client,
    final Query<T> query,
    final Map<String, Object> responseContext
)
{
  return client
      .getQueryRunnerForIntervals(query, query.getIntervals())
      .run(QueryPlus.wrap(query), responseContext);
}
}
LoadRule baseRule = null; for (Interval interval : query.getIntervals()) { int currRulePosition = 0; for (Rule rule : rules) { "WTF?! No brokerServiceName found for datasource[%s], intervals[%s]. Using default[%s].", query.getDataSource(), query.getIntervals(), tierConfig.getDefaultBrokerServiceName() );
/**
 * Runs the base runner; when the query is flagged bySegment, the materialized
 * results are wrapped into a single by-segment result keyed by this runner's
 * segment id and the query's first interval.
 */
@Override
@SuppressWarnings("unchecked")
public Sequence<T> run(final QueryPlus<T> queryPlus, Map<String, Object> responseContext)
{
  if (!QueryContexts.isBySegment(queryPlus.getQuery())) {
    return base.run(queryPlus, responseContext);
  }
  final List<T> results = base.run(queryPlus, responseContext).toList();
  return Sequences.simple(
      Collections.singletonList(
          (T) new Result<>(
              timestamp,
              new BySegmentResultValueClass<>(
                  results,
                  segmentId.toString(),
                  queryPlus.getQuery().getIntervals().get(0)
              )
          )
      )
  );
}
}
final long offset = computeOffset(now, tz); final Interval interval = queryPlus.getQuery().getIntervals().get(0); final Interval modifiedInterval = new Interval( Math.min(interval.getStartMillis() + offset, now + offset),
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Emit one synthetic row at each endpoint of the query's first interval.
  final Interval interval = queryPlus.getQuery().getIntervals().get(0);
  return Sequences.simple(
      ImmutableList.of(
          new Result<>(interval.getStart(), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
          new Result<>(interval.getEnd(), new TimeseriesResultValue(ImmutableMap.of("metric", 3)))
      )
  );
}
},
@Override
public Sequence run(final QueryPlus queryPlus, final Map responseContext)
{
  // Delegate to the runner that the client resolves for the query's intervals.
  final Query query = queryPlus.getQuery();
  return client.getQueryRunnerForIntervals(query, query.getIntervals())
               .run(queryPlus, responseContext);
}
};
@Test public void testSerde() throws Exception { String queryStr = "{\n" + " \"queryType\":\"segmentMetadata\",\n" + " \"dataSource\":\"test_ds\",\n" + " \"intervals\":[\"2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z\"],\n" + " \"analysisTypes\":[\"cardinality\",\"size\"]\n" + "}"; EnumSet<SegmentMetadataQuery.AnalysisType> expectedAnalysisTypes = EnumSet.of( SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE ); Query query = MAPPER.readValue(queryStr, Query.class); Assert.assertTrue(query instanceof SegmentMetadataQuery); Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getNames())); Assert.assertEquals( Intervals.of("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"), query.getIntervals().get(0) ); Assert.assertEquals(expectedAnalysisTypes, ((SegmentMetadataQuery) query).getAnalysisTypes()); // test serialize and deserialize Assert.assertEquals(query, MAPPER.readValue(MAPPER.writeValueAsString(query), Query.class)); }
@Test public void testSerdeWithDefaultInterval() throws Exception { String queryStr = "{\n" + " \"queryType\":\"segmentMetadata\",\n" + " \"dataSource\":\"test_ds\"\n" + "}"; Query query = MAPPER.readValue(queryStr, Query.class); Assert.assertTrue(query instanceof SegmentMetadataQuery); Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getNames())); Assert.assertEquals(Intervals.ETERNITY, query.getIntervals().get(0)); Assert.assertTrue(((SegmentMetadataQuery) query).isUsingDefaultInterval()); // test serialize and deserialize Assert.assertEquals(query, MAPPER.readValue(MAPPER.writeValueAsString(query), Query.class)); // test copy Assert.assertEquals(query, Druids.SegmentMetadataQueryBuilder.copy((SegmentMetadataQuery) query).build()); }
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Two fixed rows plus one row pinned to the end of the queried interval.
  final ImmutableList<Result<TimeseriesResultValue>> rows = ImmutableList.of(
      new Result<>(DateTimes.of("2014-01-09T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
      new Result<>(DateTimes.of("2014-01-11T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
      new Result<>(
          queryPlus.getQuery().getIntervals().get(0).getEnd(),
          new TimeseriesResultValue(ImmutableMap.of("metric", 5))
      )
  );
  return Sequences.simple(rows);
}
},
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Two fixed rows plus one row pinned to the end of the queried interval.
  final ImmutableList<Result<TimeseriesResultValue>> rows = ImmutableList.of(
      new Result<>(DateTimes.of("2014-01-09"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
      new Result<>(DateTimes.of("2014-01-11"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
      new Result<>(
          queryPlus.getQuery().getIntervals().get(0).getEnd(),
          new TimeseriesResultValue(ImmutableMap.of("metric", 5))
      )
  );
  return Sequences.simple(rows);
}
},
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Two fixed rows plus one row pinned to the end of the queried interval.
  final ImmutableList<Result<TimeseriesResultValue>> rows = ImmutableList.of(
      new Result<>(DateTimes.of("2014-01-09T-08"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
      new Result<>(DateTimes.of("2014-01-11T-08"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
      new Result<>(
          queryPlus.getQuery().getIntervals().get(0).getEnd(),
          new TimeseriesResultValue(ImmutableMap.of("metric", 5))
      )
  );
  return Sequences.simple(rows);
}
},
// The query is expected to carry exactly one interval at this point.
Assert.assertEquals(1, query.getIntervals().size());
/**
 * Looks up the timeline holders covering the query's intervals, lets the
 * tool chest filter them, runs each matching segment with a per-segment
 * spec, and merges the per-segment sequences by the query's result ordering.
 */
@Override
public Sequence<T> run(QueryPlus<T> queryPlus, Map<String, Object> responseContext)
{
  Query<T> query = queryPlus.getQuery();

  // Collect every timeline holder overlapping any queried interval.
  List<TimelineObjectHolder> segments = new ArrayList<>();
  for (Interval interval : query.getIntervals()) {
    segments.addAll(timeline.lookup(interval));
  }

  List<Sequence<T>> sequences = new ArrayList<>();
  for (TimelineObjectHolder<String, Segment> holder : toolChest.filterSegments(query, segments)) {
    // Only the first chunk of each holder is run here — NOTE(review):
    // presumably each holder carries a single chunk in this setup; confirm.
    Segment segment = holder.getObject().getChunk(0).getObject();
    // Narrow the query to this specific segment before running it.
    QueryPlus queryPlusRunning = queryPlus.withQuerySegmentSpec(
        new SpecificSegmentSpec(
            new SegmentDescriptor(
                holder.getInterval(),
                holder.getVersion(),
                0
            )
        )
    );
    sequences.add(factory.createRunner(segment).run(queryPlusRunning, responseContext));
  }
  return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(sequences));
}
}