/**
 * Convenience overload that registers a chunk in the backing timeline under the
 * given interval and version.
 */
private void add(Interval interval, String version, PartitionChunk<Integer> value)
{
  timeline.add(interval, version, value);
}
/**
 * Removes the entry for {@code interval} from the lookup timeline, then walks every
 * versioned entry that overlaps the removed interval and re-adds the best candidate
 * so the lookup structure stays consistent.
 *
 * @param timeline     the lookup map being maintained
 * @param interval     the interval whose entry is being removed
 * @param incompleteOk when true the newest version is restored regardless of
 *                     completeness; when false only the newest version whose
 *                     partition holder is complete is restored
 */
private void remove(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval interval,
    boolean incompleteOk
)
{
  timeline.remove(interval);
  for (Map.Entry<Interval, TreeMap<VersionType, TimelineEntry>> versionEntry : allTimelineEntries.entrySet()) {
    if (versionEntry.getKey().overlap(interval) == null) {
      // No overlap with the removed interval; nothing to restore for this key.
      continue;
    }
    final TreeMap<VersionType, TimelineEntry> versions = versionEntry.getValue();
    if (incompleteOk) {
      // Highest version wins, complete or not.
      add(timeline, versionEntry.getKey(), versions.lastEntry().getValue());
    } else {
      // Scan versions newest-first and restore the first complete one.
      for (Map.Entry<VersionType, TimelineEntry> candidate : versions.descendingMap().entrySet()) {
        if (candidate.getValue().getPartitionHolder().isComplete()) {
          add(timeline, versionEntry.getKey(), candidate.getValue());
          break;
        }
      }
    }
  }
}
/**
 * Populates the builder's per-datasource timelines from the given datasources,
 * creating a case-insensitive-versioned timeline for each datasource on demand.
 *
 * @return this builder, for chaining
 */
public Builder withDataSources(Collection<ImmutableDruidDataSource> dataSourcesCollection)
{
  for (ImmutableDruidDataSource dataSource : dataSourcesCollection) {
    final VersionedIntervalTimeline<String, DataSegment> timeline = dataSources.computeIfAbsent(
        dataSource.getName(),
        k -> new VersionedIntervalTimeline<>(String.CASE_INSENSITIVE_ORDER)
    );
    for (DataSegment segment : dataSource.getSegments()) {
      timeline.add(
          segment.getInterval(),
          segment.getVersion(),
          segment.getShardSpec().createChunk(segment)
      );
    }
  }
  return this;
}
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment) { SegmentId segmentId = segment.getId(); synchronized (lock) { log.debug("Adding segment[%s] for server[%s]", segment, server); SegmentLoadInfo segmentLoadInfo = segmentLoadInfos.get(segmentId); if (segmentLoadInfo == null) { // servers escape the scope of this object so use ConcurrentSet segmentLoadInfo = new SegmentLoadInfo(segment); VersionedIntervalTimeline<String, SegmentLoadInfo> timeline = timelines.get(segment.getDataSource()); if (timeline == null) { timeline = new VersionedIntervalTimeline<>(Ordering.natural()); timelines.put(segment.getDataSource(), timeline); } timeline.add( segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segmentLoadInfo) ); segmentLoadInfos.put(segmentId, segmentLoadInfo); } segmentLoadInfo.addServer(server); } }
/**
 * Records that {@code server} now serves {@code segment}. On first sight of a segment,
 * creates its ServerSelector and registers it in the per-datasource timeline; then adds
 * the server to the selector and fires segmentAdded callbacks.
 */
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment)
{
  SegmentId segmentId = segment.getId();
  synchronized (lock) {
    log.debug("Adding segment[%s] for server[%s]", segment, server);
    ServerSelector selector = selectors.get(segmentId);
    if (selector == null) {
      selector = new ServerSelector(segment, tierSelectorStrategy);
      // computeIfAbsent replaces the previous get/null-check/put sequence with a
      // single lookup; we already hold `lock`, so this is not for atomicity.
      VersionedIntervalTimeline<String, ServerSelector> timeline = timelines.computeIfAbsent(
          segment.getDataSource(),
          dataSource -> new VersionedIntervalTimeline<>(Ordering.natural())
      );
      timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(selector));
      selectors.put(segmentId, selector);
    }
    QueryableDruidServer queryableDruidServer = clients.get(server.getName());
    if (queryableDruidServer == null) {
      // Server not seen before; pull it from the base inventory view.
      queryableDruidServer = addServer(baseView.getInventoryValue(server.getName()));
    }
    selector.addServerAndUpdateSegment(queryableDruidServer, segment);
    runTimelineCallbacks(callback -> callback.segmentAdded(server, segment));
  }
}
/**
 * Runs the query against a filtered view of the timeline that contains only the
 * chunks named by the requested segment descriptors; descriptors whose entry or
 * chunk is no longer present are silently skipped.
 */
@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final Map<String, Object> responseContext)
{
  return CachingClusteredClient.this.run(
      queryPlus,
      responseContext,
      timeline -> {
        final VersionedIntervalTimeline<String, ServerSelector> filtered =
            new VersionedIntervalTimeline<>(Ordering.natural());
        for (SegmentDescriptor spec : specs) {
          final PartitionHolder<ServerSelector> entry =
              timeline.findEntry(spec.getInterval(), spec.getVersion());
          if (entry == null) {
            continue;
          }
          final PartitionChunk<ServerSelector> chunk = entry.getChunk(spec.getPartitionNumber());
          if (chunk != null) {
            filtered.add(spec.getInterval(), spec.getVersion(), chunk);
          }
        }
        return filtered;
      }
  );
}
};
/**
 * Registers a queryable index under the given segment descriptor: wraps it as a
 * QueryableIndexSegment, adds it to the datasource's timeline (created on demand),
 * and tracks the descriptor and index for later lookup/closing.
 *
 * @return this walker, for chaining
 */
public SpecificSegmentsQuerySegmentWalker add(
    final DataSegment descriptor,
    final QueryableIndex index
)
{
  final Segment segment = new QueryableIndexSegment(index, descriptor.getId());
  // computeIfAbsent replaces the containsKey/put/get triple with a single map lookup.
  final VersionedIntervalTimeline<String, Segment> timeline = timelines.computeIfAbsent(
      descriptor.getDataSource(),
      dataSource -> new VersionedIntervalTimeline<>(Ordering.natural())
  );
  timeline.add(descriptor.getInterval(), descriptor.getVersion(), descriptor.getShardSpec().createChunk(segment));
  segments.add(descriptor);
  closeables.add(index);
  return this;
}
/**
 * Adds the given chunks to a fresh timeline under a single interval/version, then
 * asserts that a lookup over that interval yields exactly {@code expectedObjects}.
 */
private void testVersionedIntervalTimelineBehaviorForNumberedShardSpec(
    List<PartitionChunk<String>> chunks,
    Set<String> expectedObjects
)
{
  final VersionedIntervalTimeline<String, String> timeline =
      new VersionedIntervalTimeline<>(Ordering.natural());
  final Interval interval = Intervals.of("2000/3000");
  final String version = "v1";
  chunks.forEach(chunk -> timeline.add(interval, version, chunk));

  final Set<String> actualObjects = new HashSet<>();
  for (TimelineObjectHolder<String, String> holder : timeline.lookup(interval)) {
    for (PartitionChunk<String> chunk : holder.getObject()) {
      actualObjects.add(chunk.getObject());
    }
  }
  Assert.assertEquals(expectedObjects, actualObjects);
}
}
/**
 * Registers a new sink: indexes it by interval start, updates the sink-count metric,
 * adds it to the sink timeline, announces its segment (alerting on failure rather
 * than propagating), and clears the dedup cache.
 */
private void addSink(final Sink sink)
{
  sinks.put(sink.getInterval().getStartMillis(), sink);
  metrics.setSinkCount(sinks.size());
  sinkTimeline.add(sink.getInterval(), sink.getVersion(), new SingleElementPartitionChunk<>(sink));
  try {
    segmentAnnouncer.announceSegment(sink.getSegment());
  }
  catch (IOException e) {
    // Announcement failure is alerted, not rethrown; the sink stays registered.
    log.makeAlert(e, "Failed to announce new segment[%s]", schema.getDataSource())
       .addData("interval", sink.getInterval())
       .emit();
  }
  clearDedupCache();
}
/**
 * Adds two partitions of segments for the given day to the datasource's timeline,
 * one segment with the flag set and one without per partition (same order as before:
 * flag=true then flag=false for each partition index).
 */
private void addMoreData(String dataSource, int day)
{
  for (int partition = 0; partition < 2; partition++) {
    for (boolean flag : new boolean[]{true, false}) {
      final DataSegment segment = createSegment(dataSource, day, flag, partition);
      dataSources.get(dataSource).add(
          segment.getInterval(),
          segment.getVersion(),
          segment.getShardSpec().createChunk(segment)
      );
    }
  }
}
/**
 * Returns the sink for the given identifier, creating, announcing, and registering a
 * new one (map, metric, timeline) on first use. A failed segment announcement is
 * alerted but does not prevent registration.
 */
private Sink getOrCreateSink(final SegmentIdWithShardSpec identifier)
{
  Sink sink = sinks.get(identifier);
  if (sink != null) {
    return sink;
  }

  sink = new Sink(
      identifier.getInterval(),
      schema,
      identifier.getShardSpec(),
      identifier.getVersion(),
      tuningConfig.getMaxRowsInMemory(),
      maxBytesTuningConfig,
      tuningConfig.isReportParseExceptions(),
      null
  );
  try {
    segmentAnnouncer.announceSegment(sink.getSegment());
  }
  catch (IOException e) {
    // Best-effort announcement: alert and continue registering the sink.
    log.makeAlert(e, "Failed to announce new segment[%s]", schema.getDataSource())
       .addData("interval", sink.getInterval())
       .emit();
  }
  sinks.put(identifier, sink);
  metrics.setSinkCount(sinks.size());
  sinkTimeline.add(sink.getInterval(), sink.getVersion(), identifier.getShardSpec().createChunk(sink));
  return sink;
}
k -> new VersionedIntervalTimeline<>(Ordering.natural()) ); expectedTimeline.add( segment.getInterval(), segment.getVersion(),
/**
 * Builds a query runner over two incremental-index segments loaded from the V_0112
 * and V_0113 fixtures, registered in a lexicographically-versioned timeline under
 * version "v1".
 *
 * @throws IOException if loading either fixture index fails
 */
private QueryRunner getCustomRunner() throws IOException
{
  final CharSource source0 = CharSource.wrap(StringUtils.join(V_0112, "\n"));
  final CharSource source1 = CharSource.wrap(StringUtils.join(V_0113, "\n"));
  final IncrementalIndex index0 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T00:00:00.000Z"), source0);
  final IncrementalIndex index1 = TestIndex.loadIncrementalIndex(newIndex("2011-01-14T00:00:00.000Z"), source1);

  segment0 = new IncrementalIndexSegment(index0, makeIdentifier(index0, "v1"));
  segment1 = new IncrementalIndexSegment(index1, makeIdentifier(index1, "v1"));

  final VersionedIntervalTimeline<String, Segment> timeline =
      new VersionedIntervalTimeline<>(StringComparators.LEXICOGRAPHIC);
  timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk<>(segment0));
  timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk<>(segment1));

  return QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
}
/**
 * Returns the subset of pushed segments whose intervals overlap the given identifiers
 * and whose own identifier is among the requested set. Builds a throwaway timeline of
 * all pushed segments, then scans it per requested identifier.
 */
@Override
public Set<DataSegment> findUsedSegments(Set<SegmentIdWithShardSpec> identifiers)
{
  final VersionedIntervalTimeline<String, DataSegment> timeline =
      new VersionedIntervalTimeline<>(Ordering.natural());
  for (DataSegment pushed : appenderatorTester.getPushedSegments()) {
    timeline.add(pushed.getInterval(), pushed.getVersion(), pushed.getShardSpec().createChunk(pushed));
  }

  final Set<DataSegment> used = new HashSet<>();
  for (SegmentIdWithShardSpec identifier : identifiers) {
    for (TimelineObjectHolder<String, DataSegment> holder : timeline.lookup(identifier.getInterval())) {
      for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
        if (identifiers.contains(SegmentIdWithShardSpec.fromDataSegment(chunk.getObject()))) {
          used.add(chunk.getObject());
        }
      }
    }
  }
  return used;
}
}
/**
 * Builds three test datasources, each with segments for days 0-3 and 7-6+2 (7-8),
 * two partitions per day, one segment per flag value. The duplicated day-range loops
 * from the original are factored into a single helper.
 */
@Before
public void setup()
{
  dataSources = new HashMap<>();
  for (int i = 0; i < 3; i++) {
    final String dataSource = DATA_SOURCE_PREFIX + i;
    final VersionedIntervalTimeline<String, DataSegment> timeline =
        new VersionedIntervalTimeline<>(String.CASE_INSENSITIVE_ORDER);
    addSegmentsForDayRange(timeline, dataSource, 0, 4);
    addSegmentsForDayRange(timeline, dataSource, 7, 9);
    dataSources.put(dataSource, timeline);
  }
}

/**
 * Adds segments for each day in [startDay, endDay): two partitions per day, and for
 * each partition one segment created with the flag set and one without.
 * NOTE(review): the boolean passed to createSegment is opaque here — presumably a
 * used/unused or first/second-half marker; confirm against createSegment.
 */
private void addSegmentsForDayRange(
    VersionedIntervalTimeline<String, DataSegment> timeline,
    String dataSource,
    int startDay,
    int endDay
)
{
  for (int day = startDay; day < endDay; day++) {
    for (int partition = 0; partition < 2; partition++) {
      for (boolean flag : new boolean[]{true, false}) {
        final DataSegment segment = createSegment(dataSource, day, flag, partition);
        timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
      }
    }
  }
}
/**
 * Loads three incremental-index segments (two at "v1", one overriding slice at "v2"),
 * registers them in a lexicographically-versioned timeline, records the identifiers
 * visible over 2011-01-12/2011-01-14, and builds the shared query runner.
 *
 * @throws IOException if loading any fixture index fails
 */
@BeforeClass
public static void setup() throws IOException
{
  final CharSource source0 = CharSource.wrap(StringUtils.join(V_0112, "\n"));
  final CharSource source1 = CharSource.wrap(StringUtils.join(V_0113, "\n"));
  final CharSource sourceOverride = CharSource.wrap(StringUtils.join(V_OVERRIDE, "\n"));

  final IncrementalIndex index0 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T00:00:00.000Z"), source0);
  final IncrementalIndex index1 = TestIndex.loadIncrementalIndex(newIndex("2011-01-13T00:00:00.000Z"), source1);
  final IncrementalIndex index2 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T04:00:00.000Z"), sourceOverride);

  segment0 = new IncrementalIndexSegment(index0, makeIdentifier(index0, "v1"));
  segment1 = new IncrementalIndexSegment(index1, makeIdentifier(index1, "v1"));
  segment_override = new IncrementalIndexSegment(index2, makeIdentifier(index2, "v2"));

  final VersionedIntervalTimeline<String, Segment> timeline =
      new VersionedIntervalTimeline<>(StringComparators.LEXICOGRAPHIC);
  timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk<>(segment0));
  timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk<>(segment1));
  timeline.add(index2.getInterval(), "v2", new SingleElementPartitionChunk<>(segment_override));

  segmentIdentifiers = new ArrayList<>();
  for (TimelineObjectHolder<String, ?> holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-14"))) {
    segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion()).toString());
  }

  runner = QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
}
); selector.addServerAndUpdateSegment(new QueryableDruidServer(servers[0], null), dataSegment); timeline.add(interval, "ver", new SingleElementPartitionChunk<>(selector));
resultSupplier.set(false); } else { loadedIntervals.add( segment.getInterval(), segment.getVersion(),
/**
 * Builds a size-1 segment with the given interval/dims/metrics/version, registers it
 * on the server, and adds a fresh highest-priority/random ServerSelector for it to
 * the timeline.
 */
private void addSegment(
    VersionedIntervalTimeline<String, ServerSelector> timeline,
    DruidServer server,
    String interval,
    List<String> dims,
    List<String> metrics,
    String version
)
{
  final DataSegment segment = DataSegment.builder()
                                         .dataSource(dataSource)
                                         .interval(Intervals.of(interval))
                                         .version(version)
                                         .dimensions(dims)
                                         .metrics(metrics)
                                         .size(1)
                                         .build();
  server.addDataSegment(segment);

  final ServerSelector selector = new ServerSelector(
      segment,
      new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
  );
  timeline.add(Intervals.of(interval), version, new SingleElementPartitionChunk<>(selector));
}
/**
 * Builds a size-1 segment with the given shard spec, registers it on the server, and
 * adds a fresh highest-priority/random ServerSelector for it to the timeline using a
 * chunk created by that shard spec.
 */
private void addSegmentWithShardSpec(
    VersionedIntervalTimeline<String, ServerSelector> timeline,
    DruidServer server,
    String interval,
    List<String> dims,
    List<String> metrics,
    String version,
    ShardSpec shardSpec
)
{
  final DataSegment segment = DataSegment.builder()
                                         .dataSource(dataSource)
                                         .interval(Intervals.of(interval))
                                         .version(version)
                                         .dimensions(dims)
                                         .metrics(metrics)
                                         .shardSpec(shardSpec)
                                         .size(1)
                                         .build();
  server.addDataSegment(segment);

  final ServerSelector selector = new ServerSelector(
      segment,
      new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
  );
  timeline.add(Intervals.of(interval), version, shardSpec.createChunk(selector));
}