/**
 * Builds a per-segment optimization context for the given interval, using the
 * placeholder version "0" and partition number 0.
 */
private PerSegmentQueryOptimizationContext getOptimizationContext(Interval segmentInterval)
{
  final SegmentDescriptor descriptor = new SegmentDescriptor(segmentInterval, "0", 0);
  return new PerSegmentQueryOptimizationContext(descriptor);
}
}
/**
 * Converts this segment's identifying fields — interval, version, and the shard
 * spec's partition number — into a {@link SegmentDescriptor}.
 */
public SegmentDescriptor toDescriptor()
{
  final int partitionNum = shardSpec.getPartitionNum();
  return new SegmentDescriptor(getInterval(), getVersion(), partitionNum);
}
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  // Simulate a missing segment: report it in the response context and yield nothing.
  final SegmentDescriptor missing = new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1);
  ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(missing);
  return Sequences.empty();
}
},
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    // Parameterized to match the sibling runner above and eliminate raw
    // QueryPlus/Map types (same erasure, still a valid override).
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  // Simulate a missing segment: report it in the response context and yield nothing.
  ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(
      new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1)
  );
  return Sequences.empty();
}
},
// NOTE(review): fragment — builds a SegmentMetadataQuery over the index's data
// interval, scoped to a single segment (version "0", partition 0) with an explicit
// column includerator; the constructor call continues beyond this excerpt.
final SegmentMetadataQuery query = new SegmentMetadataQuery( new TableDataSource("dataSource"), new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)), new ListColumnIncluderator(getColumnsToInclude(index)), false,
private Set<ServerToSegment> computeSegmentsToQuery(TimelineLookup<String, ServerSelector> timeline) { final List<TimelineObjectHolder<String, ServerSelector>> serversLookup = toolChest.filterSegments( query, intervals.stream().flatMap(i -> timeline.lookup(i).stream()).collect(Collectors.toList()) ); final Set<ServerToSegment> segments = new LinkedHashSet<>(); final Map<String, Optional<RangeSet<String>>> dimensionRangeCache = new HashMap<>(); // Filter unneeded chunks based on partition dimension for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) { final Set<PartitionChunk<ServerSelector>> filteredChunks = DimFilterUtils.filterShards( query.getFilter(), holder.getObject(), partitionChunk -> partitionChunk.getObject().getSegment().getShardSpec(), dimensionRangeCache ); for (PartitionChunk<ServerSelector> chunk : filteredChunks) { ServerSelector server = chunk.getObject(); final SegmentDescriptor segment = new SegmentDescriptor( holder.getInterval(), holder.getVersion(), chunk.getChunkNumber() ); segments.add(new ServerToSegment(server, segment)); } } return segments; }
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  final int attempt = (int) context.get("count");
  if (attempt < 3) {
    // First three attempts: report a missing segment and yield nothing.
    ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(
        new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1)
    );
    context.put("count", attempt + 1);
    return Sequences.empty();
  }
  // Later attempts succeed with a single empty-valued result.
  return Sequences.simple(
      Collections.singletonList(
          new Result<>(DateTimes.nowUtc(), new TimeseriesResultValue(new HashMap<>()))
      )
  );
}
},
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  if ((int) context.get("count") != 0) {
    // Retry path: succeed with a single empty-valued result.
    return Sequences.simple(
        Collections.singletonList(
            new Result<>(DateTimes.nowUtc(), new TimeseriesResultValue(new HashMap<>()))
        )
    );
  }
  // First call: report a missing segment, mark the retry, and yield nothing.
  ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(
      new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1)
  );
  context.put("count", 1);
  return Sequences.empty();
}
},
/**
 * For each queried interval, finds every served segment chunk and up to
 * {@code numCandidates} candidate servers for it. Returns an empty list when the
 * datasource has no timeline.
 */
public static List<LocatedSegmentDescriptor> getTargetLocations(
    TimelineServerView serverView,
    DataSource datasource,
    List<Interval> intervals,
    int numCandidates
)
{
  final TimelineLookup<String, ServerSelector> timeline = serverView.getTimeline(datasource);
  if (timeline == null) {
    return Collections.emptyList();
  }

  final List<LocatedSegmentDescriptor> result = new ArrayList<>();
  for (Interval queryInterval : intervals) {
    for (TimelineObjectHolder<String, ServerSelector> holder : timeline.lookup(queryInterval)) {
      for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
        final ServerSelector selector = chunk.getObject();
        result.add(
            new LocatedSegmentDescriptor(
                new SegmentDescriptor(holder.getInterval(), holder.getVersion(), chunk.getChunkNumber()),
                selector.getSegment().getSize(),
                selector.getCandidates(numCandidates)
            )
        );
      }
    }
  }
  return result;
}
}
// NOTE(review): fragment — asserts the spec expanded to exactly these three
// descriptors (partitions 0 and 1 of the 07-01/10-10 version-"1" segment, plus
// partition 10 of the 11-01/11-10 version-"2" segment); the assertEquals call
// continues beyond this excerpt.
Assert.assertEquals( ImmutableList.of( new SegmentDescriptor(Intervals.of("2011-07-01/2011-10-10"), "1", 0), new SegmentDescriptor(Intervals.of("2011-07-01/2011-10-10"), "1", 1), new SegmentDescriptor(Intervals.of("2011-11-01/2011-11-10"), "2", 10) ), ((MultipleSpecificSegmentSpec) spec).getDescriptors()
// Round-trips a LocatedSegmentDescriptor (descriptor + size + three server
// metadata entries) through Jackson and asserts equality is preserved.
// NOTE(review): the method name says "DimensionsSpec" but the body tests
// LocatedSegmentDescriptor serde — looks like a copy-paste name; confirm no CI
// filter depends on it before renaming.
@Test public void testDimensionsSpecSerde() throws Exception { LocatedSegmentDescriptor expected = new LocatedSegmentDescriptor( new SegmentDescriptor(Intervals.utc(100, 200), "version", 100), 65535, Arrays.asList( new DruidServerMetadata("server1", "host1", null, 30000L, ServerType.HISTORICAL, "tier1", 0), new DruidServerMetadata("server2", "host2", null, 40000L, ServerType.HISTORICAL, "tier1", 1), new DruidServerMetadata("server3", "host3", null, 50000L, ServerType.REALTIME, "tier2", 2) ) ); LocatedSegmentDescriptor actual = mapper.readValue( mapper.writeValueAsString(expected), LocatedSegmentDescriptor.class ); Assert.assertEquals(expected, actual); } }
@Test public void testHandoffCallbackNotCalled() { Interval interval = Intervals.of("2011-04-01/2011-04-02"); SegmentDescriptor descriptor = new SegmentDescriptor(interval, "v1", 2); CoordinatorClient coordinatorClient = EasyMock.createMock(CoordinatorClient.class); EasyMock.expect(coordinatorClient.isHandOffComplete("test_ds", descriptor)) .andReturn(false) .anyTimes(); EasyMock.replay(coordinatorClient); CoordinatorBasedSegmentHandoffNotifier notifier = new CoordinatorBasedSegmentHandoffNotifier( "test_ds", coordinatorClient, notifierConfig ); final AtomicBoolean callbackCalled = new AtomicBoolean(false); notifier.registerSegmentHandoffCallback( descriptor, Execs.directExecutor(), () -> callbackCalled.set(true) ); notifier.checkForSegmentHandoffs(); // callback should have registered Assert.assertEquals(1, notifier.getHandOffCallbacks().size()); Assert.assertTrue(notifier.getHandOffCallbacks().containsKey(descriptor)); Assert.assertFalse(callbackCalled.get()); EasyMock.verify(coordinatorClient); }
// Descriptor for partition 2 of the segment covering `interval` at version "v1".
SegmentDescriptor descriptor = new SegmentDescriptor(interval, "v1", 2);
@Test
public void testSegmentLoadChecksForAssignableServer()
{
  final Interval interval = Intervals.of("2011-04-01/2011-04-02");
  final SegmentDescriptor descriptor = new SegmentDescriptor(interval, "v1", 2);

  // A segment served by a historical counts as loaded...
  Assert.assertTrue(
      DataSourcesResource.isSegmentLoaded(
          Collections.singletonList(
              new ImmutableSegmentLoadInfo(
                  createSegment(interval, "v1", 2),
                  Sets.newHashSet(createHistoricalServerMetadata("a"))
              )
          ),
          descriptor
      )
  );

  // ...while one served only by a realtime server does not.
  Assert.assertFalse(
      DataSourcesResource.isSegmentLoaded(
          Collections.singletonList(
              new ImmutableSegmentLoadInfo(
                  createSegment(interval, "v1", 2),
                  Sets.newHashSet(createRealtimeServerMetadata("a"))
              )
          ),
          descriptor
      )
  );
}
@Test
public void testSegmentLoadChecksForPartitionNumber()
{
  final Interval interval = Intervals.of("2011-04-01/2011-04-02");

  // A single loaded segment: partition 1, version "v1", on a historical.
  final List<ImmutableSegmentLoadInfo> loadInfos = Collections.singletonList(
      new ImmutableSegmentLoadInfo(
          createSegment(interval, "v1", 1),
          Sets.newHashSet(createHistoricalServerMetadata("a"))
      )
  );

  // Matching partition number (1) → loaded.
  Assert.assertTrue(
      DataSourcesResource.isSegmentLoaded(loadInfos, new SegmentDescriptor(interval, "v1", 1))
  );

  // Different partition number (2) → not loaded.
  Assert.assertFalse(
      DataSourcesResource.isSegmentLoaded(loadInfos, new SegmentDescriptor(interval, "v1", 2))
  );
}
// NOTE(review): fragment — three descriptor constructions with no separators
// between them; in the original file these are presumably separate arguments or
// lines of a larger expression (versions "v2"/"v1", partition 2). Not valid Java
// standing alone — do not edit without the surrounding context.
new SegmentDescriptor(interval, "v2", 2) new SegmentDescriptor(interval, "v1", 2) new SegmentDescriptor(interval, "v1", 2)
@Test
public void testSegmentLoadChecksForInterval()
{
  // Requested interval extends past the loaded segment → not loaded.
  Assert.assertFalse(
      DataSourcesResource.isSegmentLoaded(
          Collections.singletonList(
              new ImmutableSegmentLoadInfo(
                  createSegment(Intervals.of("2011-04-01/2011-04-02"), "v1", 1),
                  Sets.newHashSet(createHistoricalServerMetadata("a"))
              )
          ),
          new SegmentDescriptor(Intervals.of("2011-04-01/2011-04-03"), "v1", 1)
      )
  );

  // Requested interval fully contained in the loaded segment → loaded.
  Assert.assertTrue(
      DataSourcesResource.isSegmentLoaded(
          Collections.singletonList(
              new ImmutableSegmentLoadInfo(
                  createSegment(Intervals.of("2011-04-01/2011-04-04"), "v1", 1),
                  Sets.newHashSet(createHistoricalServerMetadata("a"))
              )
          ),
          new SegmentDescriptor(Intervals.of("2011-04-02/2011-04-03"), "v1", 1)
      )
  );
}
/**
 * Runs the query against every timeline holder overlapping the query intervals:
 * prunes holders via the toolchest, queries the first chunk of each surviving
 * holder, and merges the per-segment sequences by the query's result ordering.
 */
@Override
public Sequence<T> run(QueryPlus<T> queryPlus, Map<String, Object> responseContext)
{
  final Query<T> query = queryPlus.getQuery();

  // Collect all timeline holders overlapping the query intervals.
  // Parameterized (was a raw List<TimelineObjectHolder>) to remove unchecked usage.
  final List<TimelineObjectHolder<String, Segment>> segments = new ArrayList<>();
  for (Interval interval : query.getIntervals()) {
    segments.addAll(timeline.lookup(interval));
  }

  final List<Sequence<T>> sequences = new ArrayList<>();
  for (TimelineObjectHolder<String, Segment> holder : toolChest.filterSegments(query, segments)) {
    // Only the first chunk of each holder is queried; partition number is
    // hard-coded to 0 to match.
    final Segment segment = holder.getObject().getChunk(0).getObject();
    // Was a raw QueryPlus local; typed as QueryPlus<T> for consistency.
    final QueryPlus<T> queryPlusRunning = queryPlus.withQuerySegmentSpec(
        new SpecificSegmentSpec(
            new SegmentDescriptor(holder.getInterval(), holder.getVersion(), 0)
        )
    );
    sequences.add(factory.createRunner(segment).run(queryPlusRunning, responseContext));
  }
  return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(sequences));
}
}
// NOTE(review): fragment — descriptor built from a published segment's interval
// and version; the constructor call (partition-number argument and close paren)
// continues beyond this excerpt.
new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(),
// Descriptor for partition 0, version "version", covering calendar year 2011.
SegmentDescriptor segmentDescriptor = new SegmentDescriptor(Intervals.of("2011/2012"), "version", 0);