/**
 * Builds a new timeline populated with the given segments, with segment
 * versions compared via their natural (String) ordering.
 *
 * @param segments segments to index into the timeline
 * @return a freshly constructed timeline containing every supplied segment
 */
public static VersionedIntervalTimeline<String, DataSegment> forSegments(Iterator<DataSegment> segments)
{
  final VersionedIntervalTimeline<String, DataSegment> result =
      new VersionedIntervalTimeline<>(Ordering.natural());
  addSegments(result, segments);
  return result;
}
/**
 * Creates an empty {@code String}-versioned timeline of {@code Integer} payloads,
 * with versions compared via their natural ordering.
 *
 * @return a new, empty timeline
 */
private VersionedIntervalTimeline<String, Integer> makeStringIntegerTimeline()
{
  // Diamond operator: the type arguments are inferred from the return type,
  // avoiding the redundant explicit <String, Integer> of the old form.
  return new VersionedIntervalTimeline<>(Ordering.natural());
}
/**
 * Registers every segment of the given data sources into this builder's
 * per-datasource timelines, creating a timeline (with case-insensitive version
 * ordering) for any datasource not seen before.
 *
 * @param dataSourcesCollection data sources whose segments should be indexed
 * @return this builder, for chaining
 */
public Builder withDataSources(Collection<ImmutableDruidDataSource> dataSourcesCollection)
{
  for (ImmutableDruidDataSource dataSource : dataSourcesCollection) {
    final VersionedIntervalTimeline<String, DataSegment> timeline = dataSources.computeIfAbsent(
        dataSource.getName(),
        k -> new VersionedIntervalTimeline<>(String.CASE_INSENSITIVE_ORDER)
    );
    for (DataSegment segment : dataSource.getSegments()) {
      timeline.add(
          segment.getInterval(),
          segment.getVersion(),
          segment.getShardSpec().createChunk(segment)
      );
    }
  }
  return this;
}
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment) { SegmentId segmentId = segment.getId(); synchronized (lock) { log.debug("Adding segment[%s] for server[%s]", segment, server); SegmentLoadInfo segmentLoadInfo = segmentLoadInfos.get(segmentId); if (segmentLoadInfo == null) { // servers escape the scope of this object so use ConcurrentSet segmentLoadInfo = new SegmentLoadInfo(segment); VersionedIntervalTimeline<String, SegmentLoadInfo> timeline = timelines.get(segment.getDataSource()); if (timeline == null) { timeline = new VersionedIntervalTimeline<>(Ordering.natural()); timelines.put(segment.getDataSource(), timeline); } timeline.add( segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segmentLoadInfo) ); segmentLoadInfos.put(segmentId, segmentLoadInfo); } segmentLoadInfo.addServer(server); } }
// Derive a per-iteration datasource name and give it a fresh timeline whose
// version strings are compared case-insensitively.
final String dataSource = DATA_SOURCE_PREFIX + i;
VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<>(
    String.CASE_INSENSITIVE_ORDER
);
/**
 * Records that {@code server} is serving {@code segment}. On first sight of a segment id,
 * creates its {@link ServerSelector}, inserts the segment into the datasource's timeline
 * (creating the timeline if necessary), and registers the selector. In all cases the
 * (possibly newly added) queryable server is attached to the selector and timeline
 * callbacks are notified. All mutation happens under {@code lock}.
 *
 * @param server  the server announcing the segment
 * @param segment the announced segment
 */
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment)
{
  SegmentId segmentId = segment.getId();
  synchronized (lock) {
    log.debug("Adding segment[%s] for server[%s]", segment, server);
    ServerSelector selector = selectors.get(segmentId);
    if (selector == null) {
      selector = new ServerSelector(segment, tierSelectorStrategy);
      // computeIfAbsent replaces the previous get/null-check/put sequence.
      VersionedIntervalTimeline<String, ServerSelector> timeline = timelines.computeIfAbsent(
          segment.getDataSource(),
          dataSource -> new VersionedIntervalTimeline<>(Ordering.natural())
      );
      timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(selector));
      selectors.put(segmentId, selector);
    }
    QueryableDruidServer queryableDruidServer = clients.get(server.getName());
    if (queryableDruidServer == null) {
      // Lazily materialize the client wrapper from the base inventory view.
      queryableDruidServer = addServer(baseView.getInventoryValue(server.getName()));
    }
    selector.addServerAndUpdateSegment(queryableDruidServer, segment);
    runTimelineCallbacks(callback -> callback.segmentAdded(server, segment));
  }
}
/**
 * Runs the query via the enclosing {@code CachingClusteredClient}, but first narrows
 * the cluster timeline down to only the segment descriptors ({@code specs}) this
 * runner was created for.
 */
@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final Map<String, Object> responseContext)
{
  return CachingClusteredClient.this.run(
      queryPlus,
      responseContext,
      timeline -> {
        // Build a sub-timeline containing only the requested
        // (interval, version, partition) chunks; specs with no matching
        // entry or chunk are silently skipped.
        final VersionedIntervalTimeline<String, ServerSelector> timeline2 =
            new VersionedIntervalTimeline<>(Ordering.natural());
        for (SegmentDescriptor spec : specs) {
          final PartitionHolder<ServerSelector> entry = timeline.findEntry(spec.getInterval(), spec.getVersion());
          if (entry != null) {
            final PartitionChunk<ServerSelector> chunk = entry.getChunk(spec.getPartitionNumber());
            if (chunk != null) {
              timeline2.add(spec.getInterval(), spec.getVersion(), chunk);
            }
          }
        }
        return timeline2;
      }
  );
}
};
/**
 * Registers a queryable index under the given segment descriptor: wraps it as a
 * {@link QueryableIndexSegment}, inserts it into the datasource's timeline (creating
 * the timeline if necessary), and tracks the descriptor and index for later cleanup.
 *
 * @param descriptor segment metadata (datasource, interval, version, shard spec)
 * @param index      the underlying queryable index; retained for closing later
 * @return this walker, for chaining
 */
public SpecificSegmentsQuerySegmentWalker add(
    final DataSegment descriptor,
    final QueryableIndex index
)
{
  final Segment segment = new QueryableIndexSegment(index, descriptor.getId());
  // computeIfAbsent replaces the previous containsKey/put/get triple with a single lookup.
  final VersionedIntervalTimeline<String, Segment> timeline = timelines.computeIfAbsent(
      descriptor.getDataSource(),
      dataSource -> new VersionedIntervalTimeline<>(Ordering.natural())
  );
  timeline.add(descriptor.getInterval(), descriptor.getVersion(), descriptor.getShardSpec().createChunk(segment));
  segments.add(descriptor);
  closeables.add(index);
  return this;
}
// Lazily create the timeline for this datasource on first encounter
// (versions compared via natural String ordering).
VersionedIntervalTimeline<String, DataSegment> timeline = timelines.get(dataSource.getName());
if (timeline == null) {
  timeline = new VersionedIntervalTimeline<>(Comparator.naturalOrder());
  timelines.put(dataSource.getName(), timeline);
/**
 * Adds every supplied chunk to a fresh timeline under a single interval/version,
 * then verifies that a lookup over that interval yields exactly the expected
 * payload objects.
 *
 * @param chunks          partition chunks to insert
 * @param expectedObjects payloads expected from the timeline lookup
 */
private void testVersionedIntervalTimelineBehaviorForNumberedShardSpec(
    List<PartitionChunk<String>> chunks,
    Set<String> expectedObjects
)
{
  final VersionedIntervalTimeline<String, String> timeline = new VersionedIntervalTimeline<>(Ordering.natural());
  final Interval interval = Intervals.of("2000/3000");
  final String version = "v1";
  chunks.forEach(chunk -> timeline.add(interval, version, chunk));

  // Collect every payload visible through a lookup of the whole interval.
  final Set<String> actualObjects = new HashSet<>();
  for (TimelineObjectHolder<String, String> holder : timeline.lookup(interval)) {
    for (PartitionChunk<String> chunk : holder.getObject()) {
      actualObjects.add(chunk.getObject());
    }
  }
  Assert.assertEquals(expectedObjects, actualObjects);
}
}
// Diamond operator: <Integer, File> is inferred from the declaration, so the
// explicit type arguments on the constructor are redundant. Null versions sort first.
VersionedIntervalTimeline<Integer, File> timeline = new VersionedIntervalTimeline<>(
    Comparators.naturalNullsFirst()
);
/**
 * Per-test fixture: fresh timeline, a nice-mocked server view, a bounded map cache,
 * and a client wired to a foreground cache populator (unlimited value size: -1).
 */
@Before
public void setUp()
{
  timeline = new VersionedIntervalTimeline<>(Ordering.natural());
  serverView = EasyMock.createNiceMock(TimelineServerView.class);
  cache = MapCache.create(100000);
  client = makeClient(
      new ForegroundCachePopulator(OBJECT_MAPPER, new CachePopulatorStats(), -1)
  );
}
// Ensure an (initially empty) expected timeline exists for this segment's
// datasource; entries are then appended via expectedTimeline.add(...).
// NOTE(review): statement below is truncated in this view — body continues elsewhere.
expectedDataSources.computeIfAbsent(
    segment.getDataSource(),
    k -> new VersionedIntervalTimeline<>(Ordering.natural())
);
expectedTimeline.add(
// Reset to a fresh, empty timeline; versions compared via natural String ordering.
timeline = new VersionedIntervalTimeline<>(Ordering.natural());
// Fresh timeline whose version strings are compared case-insensitively.
VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<>(
    String.CASE_INSENSITIVE_ORDER
);
/**
 * Returns the subset of pushed segments that are both visible in a timeline built
 * from all pushed segments and whose id matches one of the requested identifiers.
 *
 * @param identifiers candidate segment identifiers to look up
 * @return matching pushed segments
 */
@Override
public Set<DataSegment> findUsedSegments(Set<SegmentIdWithShardSpec> identifiers)
{
  // Index every pushed segment into a scratch timeline.
  final VersionedIntervalTimeline<String, DataSegment> timeline =
      new VersionedIntervalTimeline<>(Ordering.natural());
  for (DataSegment pushed : appenderatorTester.getPushedSegments()) {
    timeline.add(pushed.getInterval(), pushed.getVersion(), pushed.getShardSpec().createChunk(pushed));
  }

  // Scan the interval of each identifier and keep segments whose ids are requested.
  final Set<DataSegment> used = new HashSet<>();
  for (SegmentIdWithShardSpec identifier : identifiers) {
    for (TimelineObjectHolder<String, DataSegment> holder : timeline.lookup(identifier.getInterval())) {
      for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
        final DataSegment candidate = chunk.getObject();
        if (identifiers.contains(SegmentIdWithShardSpec.fromDataSegment(candidate))) {
          used.add(candidate);
        }
      }
    }
  }
  return used;
}
}
/**
 * Per-test fixture: fresh timeline, nice-mocked server view, bounded map cache,
 * a client with a foreground cache populator (unlimited value size: -1), and
 * five historical servers in tier "bye".
 */
@Before
public void setUp()
{
  timeline = new VersionedIntervalTimeline<>(Ordering.natural());
  serverView = EasyMock.createNiceMock(TimelineServerView.class);
  cache = MapCache.create(100000);
  client = makeClient(new ForegroundCachePopulator(JSON_MAPPER, new CachePopulatorStats(), -1));
  servers = new DruidServer[]{
      new DruidServer("test1", "test1", null, 10, ServerType.HISTORICAL, "bye", 0),
      new DruidServer("test2", "test2", null, 10, ServerType.HISTORICAL, "bye", 0),
      new DruidServer("test3", "test3", null, 10, ServerType.HISTORICAL, "bye", 0),
      new DruidServer("test4", "test4", null, 10, ServerType.HISTORICAL, "bye", 0),
      new DruidServer("test5", "test5", null, 10, ServerType.HISTORICAL, "bye", 0)
  };
}
/**
 * Builds a query runner over two incremental-index segments (2011-01-12 and
 * 2011-01-14), both registered under version "v1" in a lexicographically
 * version-ordered timeline.
 *
 * @return a filtering query runner backed by the two-segment timeline
 * @throws IOException if loading either incremental index fails
 */
private QueryRunner getCustomRunner() throws IOException
{
  final CharSource source0112 = CharSource.wrap(StringUtils.join(V_0112, "\n"));
  final CharSource source0113 = CharSource.wrap(StringUtils.join(V_0113, "\n"));
  final IncrementalIndex index0 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T00:00:00.000Z"), source0112);
  final IncrementalIndex index1 = TestIndex.loadIncrementalIndex(newIndex("2011-01-14T00:00:00.000Z"), source0113);

  segment0 = new IncrementalIndexSegment(index0, makeIdentifier(index0, "v1"));
  segment1 = new IncrementalIndexSegment(index1, makeIdentifier(index1, "v1"));

  final VersionedIntervalTimeline<String, Segment> timeline =
      new VersionedIntervalTimeline<>(StringComparators.LEXICOGRAPHIC);
  timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk<>(segment0));
  timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk<>(segment1));

  return QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
}
/**
 * Class-level fixture: builds three incremental-index segments — two at "v1"
 * (2011-01-12 and 2011-01-13) and one "v2" override starting 2011-01-12T04 —
 * registers them in a lexicographically version-ordered timeline, records the
 * identifiers visible over 2011-01-12/2011-01-14, and wires the shared runner.
 */
@BeforeClass
public static void setup() throws IOException
{
  CharSource v_0112 = CharSource.wrap(StringUtils.join(V_0112, "\n"));
  CharSource v_0113 = CharSource.wrap(StringUtils.join(V_0113, "\n"));
  CharSource v_override = CharSource.wrap(StringUtils.join(V_OVERRIDE, "\n"));
  IncrementalIndex index0 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T00:00:00.000Z"), v_0112);
  IncrementalIndex index1 = TestIndex.loadIncrementalIndex(newIndex("2011-01-13T00:00:00.000Z"), v_0113);
  IncrementalIndex index2 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T04:00:00.000Z"), v_override);
  segment0 = new IncrementalIndexSegment(index0, makeIdentifier(index0, "v1"));
  segment1 = new IncrementalIndexSegment(index1, makeIdentifier(index1, "v1"));
  segment_override = new IncrementalIndexSegment(index2, makeIdentifier(index2, "v2"));

  // The "v2" entry shadows overlapping parts of the "v1" entries because the
  // timeline resolves overlaps by version (lexicographic: "v2" > "v1").
  VersionedIntervalTimeline<String, Segment> timeline =
      new VersionedIntervalTimeline<>(StringComparators.LEXICOGRAPHIC);
  timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk<>(segment0));
  timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk<>(segment1));
  timeline.add(index2.getInterval(), "v2", new SingleElementPartitionChunk<>(segment_override));

  // Capture the identifiers the timeline actually exposes over the query window.
  segmentIdentifiers = new ArrayList<>();
  for (TimelineObjectHolder<String, ?> holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-14"))) {
    segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion()).toString());
  }

  runner = QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
}
/**
 * Per-test fixture: builds three datasources, each with segments covering two
 * disjoint index ranges — [0, 4) and [7, 9) — leaving a deliberate gap at [4, 7).
 * For each index, both the {@code true} and {@code false} variants of the segment
 * are created, each with two partitions (k in {0, 1}).
 */
@Before
public void setup()
{
  dataSources = new HashMap<>();
  for (int i = 0; i < 3; i++) {
    final String dataSource = DATA_SOURCE_PREFIX + i;
    VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<>(
        String.CASE_INSENSITIVE_ORDER
    );
    // The two identical nested loops of the old code are factored into one helper.
    addSegmentsForRange(timeline, dataSource, 0, 4);
    addSegmentsForRange(timeline, dataSource, 7, 9);
    dataSources.put(dataSource, timeline);
  }
}

/**
 * Adds paired (true/false) segments, each with partitions k in {0, 1}, for every
 * index j in [startJ, endJ).
 */
private void addSegmentsForRange(
    VersionedIntervalTimeline<String, DataSegment> timeline,
    String dataSource,
    int startJ,
    int endJ
)
{
  for (int j = startJ; j < endJ; j++) {
    for (int k = 0; k < 2; k++) {
      DataSegment segment = createSegment(dataSource, j, true, k);
      timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
      segment = createSegment(dataSource, j, false, k);
      timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
    }
  }
}