/**
 * Adds the given chunks to a fresh timeline under a single interval ("2000/3000")
 * and version ("v1"), then verifies that looking the interval back up yields
 * exactly the expected chunk payloads.
 */
private void testVersionedIntervalTimelineBehaviorForNumberedShardSpec(
    List<PartitionChunk<String>> chunks,
    Set<String> expectedObjects
)
{
  final VersionedIntervalTimeline<String, String> timeline =
      new VersionedIntervalTimeline<>(Ordering.natural());
  final Interval interval = Intervals.of("2000/3000");
  final String version = "v1";
  chunks.forEach(chunk -> timeline.add(interval, version, chunk));

  // Collect every payload visible through a lookup over the same interval.
  final Set<String> found = new HashSet<>();
  for (TimelineObjectHolder<String, String> holder : timeline.lookup(interval)) {
    for (PartitionChunk<String> chunk : holder.getObject()) {
      found.add(chunk.getObject());
    }
  }

  Assert.assertEquals(expectedObjects, found);
}
}
/**
 * Validates and fetches the segments for the given provider, then builds the
 * timeline holders covering the provider's interval.
 *
 * @return pair of (segment -> local file) map and the timeline holders for the interval
 * @throws IOException             if fetching segment files fails
 * @throws SegmentLoadingException if the provider's segment check fails
 */
private static Pair<Map<DataSegment, File>, List<TimelineObjectHolder<String, DataSegment>>> prepareSegments(
    TaskToolbox toolbox,
    SegmentProvider segmentProvider
) throws IOException, SegmentLoadingException
{
  final List<DataSegment> usedSegments = segmentProvider.checkAndGetSegments(toolbox);
  final Map<DataSegment, File> segmentFileMap = toolbox.fetchSegments(usedSegments);
  final VersionedIntervalTimeline<String, DataSegment> timeline =
      VersionedIntervalTimeline.forSegments(usedSegments);
  return Pair.of(segmentFileMap, timeline.lookup(segmentProvider.interval));
}
@Test
public void testRemovePartitionMakesIncomplete()
{
  // Start from the incomplete-partition fixture set up by the sibling test.
  testIncompletePartitionDoesNotOvershadow();

  final Interval searchInterval = Intervals.of("2011-10-05/2011-10-07");
  final IntegerPartitionChunk<Integer> chunk = IntegerPartitionChunk.make(null, 10, 0, 60);

  // Removing the chunk should return it, leaving only the complete "5" entry visible.
  Assert.assertEquals(chunk, timeline.remove(searchInterval, "6", chunk));
  assertValues(
      ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)),
      timeline.lookup(searchInterval)
  );
  Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty());
}
/**
 * Builds a timeline (versions compared by natural order) containing every
 * segment produced by the given iterator.
 */
public static VersionedIntervalTimeline<String, DataSegment> forSegments(Iterator<DataSegment> segments)
{
  final VersionedIntervalTimeline<String, DataSegment> result =
      new VersionedIntervalTimeline<>(Ordering.natural());
  addSegments(result, segments);
  return result;
}
/**
 * Adds every segment of the given data sources into per-datasource timelines,
 * creating a timeline for a datasource on first sight.
 *
 * @param dataSourcesCollection data sources whose segments should be indexed
 * @return this builder, for chaining
 */
public Builder withDataSources(Collection<ImmutableDruidDataSource> dataSourcesCollection)
{
  dataSourcesCollection.forEach(
      dataSource -> {
        // Compare versions with natural String order, consistent with every other
        // timeline in the codebase (e.g. VersionedIntervalTimeline.forSegments).
        // The previous String.CASE_INSENSITIVE_ORDER could rank versions differently
        // from the rest of the system and change overshadowing decisions.
        VersionedIntervalTimeline<String, DataSegment> timeline = dataSources.computeIfAbsent(
            dataSource.getName(),
            k -> new VersionedIntervalTimeline<>(String::compareTo)
        );
        dataSource.getSegments().forEach(
            segment -> timeline.add(
                segment.getInterval(),
                segment.getVersion(),
                segment.getShardSpec().createChunk(segment)
            )
        );
      }
  );
  return this;
}
@Test
public void testMay2()
{
  // Removing the "4" singleton over May 1-10 should expose the underlying entries.
  Assert.assertNotNull(timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(1)));
  assertValues(
      ImmutableList.of(
          createExpected("2011-05-01/2011-05-03", "2", 7),
          createExpected("2011-05-03/2011-05-04", "3", 8),
          createExpected("2011-05-04/2011-05-05", "2", 7)
      ),
      timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
  );
}
@Override
public List<TimelineObjectHolder<VersionType, ObjectType>> lookupWithIncompletePartitions(Interval interval)
{
  // Acquire the lock BEFORE entering the try block: if lock() itself threw inside
  // the try, the finally clause would call unlock() on a lock this thread does not
  // hold, raising IllegalMonitorStateException and masking the original failure.
  lock.readLock().lock();
  try {
    // 'true' = include incomplete partition holders in the lookup result.
    return lookup(interval, true);
  }
  finally {
    lock.readLock().unlock();
  }
}
// Test convenience wrapper: registers `value` in the shared `timeline` field
// under the given interval and version.
private void add(Interval interval, String version, PartitionChunk<Integer> value) { timeline.add(interval, version, value); }
// NOTE(review): this span appears to be a garbled extraction of two separate code
// sites — an expectedTimeline.add(...) call whose third argument (the partition
// chunk) is missing, fused with assertions comparing expected vs. actual
// data-source size and timeline state. Kept verbatim; reconstruct from the
// original file before relying on it.
expectedDataSources.computeIfAbsent( segment.getDataSource(), k -> new VersionedIntervalTimeline<>(Ordering.natural()) ); expectedTimeline.add( segment.getInterval(), segment.getVersion(), Assert.assertEquals(expectedDataSourceSizes.get(sourceName).longValue(), dataSourceState.getTotalSegmentSize()); Assert.assertEquals( expectedDataSources.get(sourceName).getAllTimelineEntries(), dataSourceState.getTimeline().getAllTimelineEntries() );
// NOTE(review): incomplete fragment — the enclosing method and several closing
// braces are outside this view, so the nesting shown here cannot be trusted.
// As written, timeline.add(...) appears to run only when the timeline was just
// created (segments for an existing timeline would be dropped) — presumably the
// if-block actually closes after timelines.put(...) in the full file; verify there.
// Intent (hedged): lazily create a per-datasource timeline, add the segment's
// chunk, then collect every overshadowed segment.
VersionedIntervalTimeline<String, DataSegment> timeline = timelines.get(segment.getDataSource()); if (timeline == null) { timeline = new VersionedIntervalTimeline<>(Ordering.natural()); timelines.put(segment.getDataSource(), timeline); timeline.add( segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment) ); for (TimelineObjectHolder<String, DataSegment> holder : timeline.findOvershadowed()) { for (DataSegment dataSegment : holder.getObject().payloads()) { overshadowed.add(dataSegment);
// NOTE(review): incomplete fragment — closing braces and the enclosing method(s)
// are outside this view; the two halves (building a per-datasource timeline, and
// removing a segment the timeline overshadows) may come from different loops in
// the full file. Kept verbatim. Also note this site uses
// Comparator.naturalOrder() where sibling code uses Ordering.natural() — same
// ordering semantics, just a different spelling.
VersionedIntervalTimeline<String, DataSegment> timeline = timelines.get(dataSource.getName()); if (timeline == null) { timeline = new VersionedIntervalTimeline<>(Comparator.naturalOrder()); timelines.put(dataSource.getName(), timeline); VersionedIntervalTimeline.addSegments(timeline, dataSource.getSegments().iterator()); if (timeline != null && timeline.isOvershadowed(dataSegment.getInterval(), dataSegment.getVersion())) { coordinator.removeSegment(dataSegment); stats.addToGlobalStat("overShadowedCount", 1);
/** Creates an empty test timeline with String versions (natural order) and Integer payloads. */
private VersionedIntervalTimeline<String, Integer> makeStringIntegerTimeline()
{
  return new VersionedIntervalTimeline<>(Ordering.natural());
}
// NOTE(review): incomplete fragment — the surrounding conditional and the closing
// of the findEntry(...) call are outside this view. Intent (hedged): look up the
// segment's (interval, version) entry in the loaded-segments timeline; if absent,
// presumably fail (resultSupplier.set(false)), otherwise add the segment to the
// timeline — confirm against the full file.
final VersionedIntervalTimeline<String, ReferenceCountingSegment> loadedIntervals = dataSourceState.getTimeline(); final PartitionHolder<ReferenceCountingSegment> entry = loadedIntervals.findEntry( segment.getInterval(), segment.getVersion() resultSupplier.set(false); } else { loadedIntervals.add( segment.getInterval(), segment.getVersion(),
/**
 * Removes `entry` from `timeline`. When `interval` maps directly to an entry,
 * only that mapping is removed; otherwise every interval currently mapped to the
 * same entry instance (identity comparison) is removed.
 */
private void remove(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval interval,
    TimelineEntry entry,
    boolean incompleteOk
)
{
  final List<Interval> doomed = new ArrayList<>();
  if (timeline.get(interval) != null) {
    doomed.add(interval);
  } else {
    // No exact key match: scan for every interval whose value is this entry instance.
    for (Map.Entry<Interval, TimelineEntry> mapping : timeline.entrySet()) {
      if (mapping.getValue() == entry) {
        doomed.add(mapping.getKey());
      }
    }
  }
  // Collected first to avoid mutating the map while iterating it.
  for (Interval i : doomed) {
    remove(timeline, i, incompleteOk);
  }
}
/**
 * Builds a timeline from the given segments; convenience overload that delegates
 * to the {@code Iterator}-based {@code forSegments}.
 */
public static VersionedIntervalTimeline<String, DataSegment> forSegments(Iterable<DataSegment> segments) { return forSegments(segments.iterator()); }
@Test
public void testRemove()
{
  // Strip every currently-overshadowed chunk, then verify nothing remains overshadowed.
  for (TimelineObjectHolder<String, Integer> overshadowedHolder : timeline.findOvershadowed()) {
    final Interval interval = overshadowedHolder.getInterval();
    final String version = overshadowedHolder.getVersion();
    for (PartitionChunk<Integer> chunk : overshadowedHolder.getObject()) {
      timeline.remove(interval, version, chunk);
    }
  }
  Assert.assertTrue(timeline.findOvershadowed().isEmpty());
}
// NOTE(review): incomplete fragment — the enclosing method and the loop bodies are
// outside this view. Intent (hedged): build a timeline over all segments, look up
// every holder across eternity (including incomplete partitions), then iterate
// each holder's chunks; what happens per chunk is not visible here.
VersionedIntervalTimeline.forSegments(segments).lookupWithIncompletePartitions(Intervals.ETERNITY); for (TimelineObjectHolder<String, DataSegment> holder : segmentHolders) { for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
@Override
@SuppressWarnings("unchecked")
public Iterable<QueryRunner<T>> apply(SegmentDescriptor input)
{
  // Resolve the requested segment in the timeline; report it missing when either
  // the (interval, version) entry or the specific partition chunk is absent.
  final PartitionHolder<ReferenceCountingSegment> holder = timeline.findEntry(
      input.getInterval(),
      input.getVersion()
  );
  if (holder == null) {
    return Collections.singletonList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
  }

  final PartitionChunk<ReferenceCountingSegment> chunk = holder.getChunk(input.getPartitionNumber());
  if (chunk == null) {
    return Collections.singletonList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
  }

  return Collections.singletonList(
      buildAndDecorateQueryRunner(factory, toolChest, chunk.getObject(), input, cpuTimeAccumulator)
  );
}
}
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment) { SegmentId segmentId = segment.getId(); synchronized (lock) { log.debug("Adding segment[%s] for server[%s]", segment, server); SegmentLoadInfo segmentLoadInfo = segmentLoadInfos.get(segmentId); if (segmentLoadInfo == null) { // servers escape the scope of this object so use ConcurrentSet segmentLoadInfo = new SegmentLoadInfo(segment); VersionedIntervalTimeline<String, SegmentLoadInfo> timeline = timelines.get(segment.getDataSource()); if (timeline == null) { timeline = new VersionedIntervalTimeline<>(Ordering.natural()); timelines.put(segment.getDataSource(), timeline); } timeline.add( segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segmentLoadInfo) ); segmentLoadInfos.put(segmentId, segmentLoadInfo); } segmentLoadInfo.addServer(server); } }