// ... (excerpt)
timeline = new VersionedIntervalTimeline<>(Ordering.natural());
} else {
  List<TimelineObjectHolder<String, DataSegment>> existingChunks = timeline.lookup(segment.getInterval());
  if (existingChunks.size() > 1) {
    // ...
  }
}
// ... (excerpt)
    dataSegmentPusher);
finalSegmentsToPublish.add(publishedSegment);
timeline.add(
    publishedSegment.getInterval(),
    publishedSegment.getVersion(),
    publishedSegment.getShardSpec().createChunk(publishedSegment)
);
private static VersionedIntervalTimeline<String, DataSegment> getTimelineForIntervalWithHandle(
    final Handle handle,
    final String dataSource,
    final Interval interval,
    final MetadataStorageTablesConfig dbTables
) throws IOException
{
  Query<Map<String, Object>> sql = handle
      .createQuery(
          String.format(
              "SELECT payload FROM %s WHERE used = true AND dataSource = ? AND start <= ? AND \"end\" >= ?",
              dbTables.getSegmentsTable()
          )
      )
      .bind(0, dataSource)
      .bind(1, interval.getEnd().toString())
      .bind(2, interval.getStart().toString());

  final VersionedIntervalTimeline<String, DataSegment> timeline =
      new VersionedIntervalTimeline<>(Ordering.natural());

  try (ResultIterator<byte[]> dbSegments = sql.map(ByteArrayMapper.FIRST).iterator()) {
    while (dbSegments.hasNext()) {
      final byte[] payload = dbSegments.next();
      DataSegment segment = JSON_MAPPER.readValue(payload, DataSegment.class);
      timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
    }
  }
  return timeline;
}
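/*
 * A minimal usage sketch for the helper above, called from within the same
 * class (the dataSource name and interval here are hypothetical):
 */
final Interval month = Intervals.of("2014-01-01/2014-02-01");
final VersionedIntervalTimeline<String, DataSegment> timeline =
    getTimelineForIntervalWithHandle(handle, "wikipedia", month, dbTables);
for (TimelineObjectHolder<String, DataSegment> holder : timeline.lookup(month)) {
  for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
    DataSegment visible = chunk.getObject(); // a segment visible for this interval/version
  }
}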
VersionedIntervalTimeline<String, DataSegment> timeline = timelines.get(segment.getDataSource());
if (timeline == null) {
  timeline = new VersionedIntervalTimeline<>(Ordering.natural());
  timelines.put(segment.getDataSource(), timeline);
}
timeline.add(
    segment.getInterval(),
    segment.getVersion(),
    segment.getShardSpec().createChunk(segment)
);
// ...
for (TimelineObjectHolder<String, DataSegment> holder : timeline.findOvershadowed()) {
  for (DataSegment dataSegment : holder.getObject().payloads()) {
    overshadowed.add(dataSegment);
  }
}
private void remove(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval interval,
    boolean incompleteOk
)
{
  timeline.remove(interval);

  for (Map.Entry<Interval, TreeMap<VersionType, TimelineEntry>> versionEntry : allTimelineEntries.entrySet()) {
    if (versionEntry.getKey().overlap(interval) != null) {
      TimelineEntry timelineEntry = versionEntry.getValue().lastEntry().getValue();
      if (timelineEntry.getPartitionHolder().isComplete() || incompleteOk) {
        add(timeline, versionEntry.getKey(), timelineEntry);
      }
    }
  }
}
/**
 * Does a lookup for the objects representing the given time interval. Will *only* return
 * PartitionHolders that are complete.
 *
 * @param interval interval to find objects for
 *
 * @return Holders representing the interval that the objects exist for, PartitionHolders
 *         are guaranteed to be complete
 */
public List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval)
{
  try {
    lock.readLock().lock();
    return lookup(interval, false);
  }
  finally {
    lock.readLock().unlock();
  }
}
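/*
 * Usage sketch for lookup(), concretized to <String, DataSegment> (the
 * timeline and interval here are hypothetical): each returned holder pins one
 * visible (interval, version) pair, and its PartitionHolder is guaranteed
 * complete.
 */
for (TimelineObjectHolder<String, DataSegment> holder : timeline.lookup(Intervals.of("2014/2015"))) {
  // holder.getInterval() can be a sub-interval of the query when a newer
  // version overshadows only part of an older segment set.
  log.info("%s is served by version %s", holder.getInterval(), holder.getVersion());
}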
loadedIntervals = new VersionedIntervalTimeline<String, ReferenceCountingSegment>(Ordering.natural());
dataSources.put(dataSource, loadedIntervals);
// ...
PartitionHolder<ReferenceCountingSegment> entry = loadedIntervals.findEntry(
    segment.getInterval(),
    segment.getVersion()
);
// ...
loadedIntervals.add(
    segment.getInterval(),
    segment.getVersion(),
    // ...
VersionedIntervalTimeline<String, DataSegment> timeline = timelines.get(dataSource.getName());
if (timeline == null) {
  timeline = new VersionedIntervalTimeline<>(Comparators.comparable());
  timelines.put(dataSource.getName(), timeline);
}
timeline.add(
    segment.getInterval(),
    segment.getVersion(),
    segment.getShardSpec().createChunk(segment)
);
// ...
if (timeline != null && timeline.isOvershadowed(dataSegment.getInterval(), dataSegment.getVersion())) {
  coordinator.removeSegment(dataSegment);
  stats.addToGlobalStat("overShadowedCount", 1);
}
private static Pair<Map<DataSegment, File>, List<TimelineObjectHolder<String, DataSegment>>> prepareSegments(
    TaskToolbox toolbox,
    SegmentProvider segmentProvider
) throws IOException, SegmentLoadingException
{
  final List<DataSegment> usedSegments = segmentProvider.checkAndGetSegments(toolbox);
  final Map<DataSegment, File> segmentFileMap = toolbox.fetchSegments(usedSegments);
  final List<TimelineObjectHolder<String, DataSegment>> timelineSegments = VersionedIntervalTimeline
      .forSegments(usedSegments)
      .lookup(segmentProvider.interval);
  return Pair.of(segmentFileMap, timelineSegments);
}
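/*
 * Sketch of consuming the returned pair (hypothetical caller in the same
 * class): map each still-visible chunk back to its fetched file.
 */
Pair<Map<DataSegment, File>, List<TimelineObjectHolder<String, DataSegment>>> pair =
    prepareSegments(toolbox, segmentProvider);
for (TimelineObjectHolder<String, DataSegment> holder : pair.rhs) {
  for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
    File segmentDir = pair.lhs.get(chunk.getObject());
    // segmentDir holds the fetched data for this visible chunk
  }
}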
final VersionedIntervalTimeline<String, ReferenceCountingSegment> loadedIntervals =
    dataSourceState.getTimeline();
final PartitionHolder<ReferenceCountingSegment> entry = loadedIntervals.findEntry(
    segment.getInterval(),
    segment.getVersion()
);
// ...
  resultSupplier.set(false);
} else {
  loadedIntervals.add(
      segment.getInterval(),
      segment.getVersion(),
      // ...
private void remove(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval interval,
    TimelineEntry entry,
    boolean incompleteOk
)
{
  List<Interval> intervalsToRemove = Lists.newArrayList();
  TimelineEntry removed = timeline.get(interval);

  if (removed == null) {
    Iterator<Map.Entry<Interval, TimelineEntry>> iter = timeline.entrySet().iterator();
    while (iter.hasNext()) {
      Map.Entry<Interval, TimelineEntry> timelineEntry = iter.next();
      if (timelineEntry.getValue() == entry) {
        intervalsToRemove.add(timelineEntry.getKey());
      }
    }
  } else {
    intervalsToRemove.add(interval);
  }

  for (Interval i : intervalsToRemove) {
    remove(timeline, i, incompleteOk);
  }
}
new VersionedIntervalTimeline<String, DataSegment>(Ordering.natural()),
new Folder3<VersionedIntervalTimeline<String, DataSegment>, Map<String, Object>>()
final List<TimelineObjectHolder<String, DataSegment>> segmentHolders =
    VersionedIntervalTimeline.forSegments(segments).lookupWithIncompletePartitions(Intervals.ETERNITY);
for (TimelineObjectHolder<String, DataSegment> holder : segmentHolders) {
  for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
    // ...
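/*
 * Sketch contrasting the two lookups (the segments list is hypothetical):
 * lookupWithIncompletePartitions() also returns holders whose PartitionHolder
 * is still missing chunks, which plain lookup() would hide.
 */
VersionedIntervalTimeline<String, DataSegment> t = VersionedIntervalTimeline.forSegments(segments);
List<TimelineObjectHolder<String, DataSegment>> completeOnly = t.lookup(Intervals.ETERNITY);
List<TimelineObjectHolder<String, DataSegment>> withIncomplete =
    t.lookupWithIncompletePartitions(Intervals.ETERNITY);
// withIncomplete may include holders whose shard sets are not yet fully loaded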
@Override
@SuppressWarnings("unchecked")
public Iterable<QueryRunner<T>> apply(SegmentDescriptor input)
{
  final PartitionHolder<ReferenceCountingSegment> entry = timeline.findEntry(
      input.getInterval(),
      input.getVersion()
  );
  if (entry == null) {
    return Collections.singletonList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
  }

  final PartitionChunk<ReferenceCountingSegment> chunk = entry.getChunk(input.getPartitionNumber());
  if (chunk == null) {
    return Collections.singletonList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
  }

  final ReferenceCountingSegment adapter = chunk.getObject();
  return Collections.singletonList(
      buildAndDecorateQueryRunner(factory, toolChest, adapter, input, cpuTimeAccumulator)
  );
}
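/*
 * Sketch of the descriptor resolution above in isolation (descriptor values
 * are hypothetical): a stale descriptor resolves to null at either step and
 * is reported as a missing segment instead of throwing.
 */
SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2014/2015"), "v1", 0);
PartitionHolder<ReferenceCountingSegment> holder =
    timeline.findEntry(descriptor.getInterval(), descriptor.getVersion());
PartitionChunk<ReferenceCountingSegment> chunk =
    holder == null ? null : holder.getChunk(descriptor.getPartitionNumber());
// chunk == null => report a timeline-missing segment for this descriptor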
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment)
{
  String segmentId = segment.getIdentifier();
  synchronized (lock) {
    log.debug("Adding segment[%s] for server[%s]", segment, server);

    SegmentLoadInfo segmentLoadInfo = segmentLoadInfos.get(segmentId);
    if (segmentLoadInfo == null) {
      // servers escape the scope of this object so use ConcurrentSet
      segmentLoadInfo = new SegmentLoadInfo(segment);

      VersionedIntervalTimeline<String, SegmentLoadInfo> timeline = timelines.get(segment.getDataSource());
      if (timeline == null) {
        timeline = new VersionedIntervalTimeline<>(Ordering.natural());
        timelines.put(segment.getDataSource(), timeline);
      }

      timeline.add(
          segment.getInterval(),
          segment.getVersion(),
          segment.getShardSpec().createChunk(segmentLoadInfo)
      );
      segmentLoadInfos.put(segmentId, segmentLoadInfo);
    }
    segmentLoadInfo.addServer(server);
  }
}
VersionedIntervalTimeline<String, DataSegment> timeline = timelines.get(dataSource.getName());
if (timeline == null) {
  timeline = new VersionedIntervalTimeline<String, DataSegment>(Comparators.comparable());
  timelines.put(dataSource.getName(), timeline);
}
timeline.add(
    segment.getInterval(),
    segment.getVersion(),
    segment.getShardSpec().createChunk(segment)
);
// ...
for (TimelineObjectHolder<String, DataSegment> holder : timeline.findOvershadowed()) {
  for (DataSegment dataSegment : holder.getObject().payloads()) {
    coordinator.removeSegment(dataSegment);
  }
}
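/*
 * A minimal overshadowing sketch (segments and versions are hypothetical):
 * once a complete "v2" entry covers the interval, the "v1" entry shows up in
 * findOvershadowed() and its payloads are safe to drop.
 */
timeline.add(day, "v1", oldSegment.getShardSpec().createChunk(oldSegment));
timeline.add(day, "v2", newSegment.getShardSpec().createChunk(newSegment));
for (TimelineObjectHolder<String, DataSegment> overshadowedHolder : timeline.findOvershadowed()) {
  for (DataSegment toDrop : overshadowedHolder.getObject().payloads()) {
    // toDrop is the v1 segment; hand it to coordinator.removeSegment(...)
  }
}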
public void add(final Interval interval, VersionType version, PartitionChunk<ObjectType> object)
{
  try {
    lock.writeLock().lock();

    Map<VersionType, TimelineEntry> exists = allTimelineEntries.get(interval);
    TimelineEntry entry = null;

    if (exists == null) {
      entry = new TimelineEntry(interval, version, new PartitionHolder<ObjectType>(object));
      TreeMap<VersionType, TimelineEntry> versionEntry = new TreeMap<VersionType, TimelineEntry>(versionComparator);
      versionEntry.put(version, entry);
      allTimelineEntries.put(interval, versionEntry);
    } else {
      entry = exists.get(version);

      if (entry == null) {
        entry = new TimelineEntry(interval, version, new PartitionHolder<ObjectType>(object));
        exists.put(version, entry);
      } else {
        PartitionHolder<ObjectType> partitionHolder = entry.getPartitionHolder();
        partitionHolder.add(object);
      }
    }

    if (entry.getPartitionHolder().isComplete()) {
      add(completePartitionsTimeline, interval, entry);
    }

    add(incompletePartitionsTimeline, interval, entry);
  }
  finally {
    lock.writeLock().unlock();
  }
}
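/*
 * Sketch of add() semantics for a hypothetical three-shard set: chunks that
 * share an interval and version accumulate in one PartitionHolder, and the
 * entry becomes visible to lookup() only once that holder is complete.
 * (NumberedShardSpec is used here purely for illustration.)
 */
timeline.add(interval, "v1", new NumberedShardSpec(0, 3).createChunk(segment0));
timeline.add(interval, "v1", new NumberedShardSpec(1, 3).createChunk(segment1));
// lookup(interval) still skips v1 here: only 2 of 3 chunks are present
timeline.add(interval, "v1", new NumberedShardSpec(2, 3).createChunk(segment2));
// now lookup(interval) returns the complete v1 holder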
timeline.lookup(interval),
new Function<TimelineObjectHolder<String, DataSegment>, DataSegment>()
final Map<DataSegment, File> segmentFileMap = taskToolbox.fetchSegments(usedSegments);
final List<TimelineObjectHolder<String, DataSegment>> timeLineSegments = VersionedIntervalTimeline
    .forSegments(usedSegments)
    .lookup(interval);
public PartitionChunk<ObjectType> remove(Interval interval, VersionType version, PartitionChunk<ObjectType> chunk)
{
  try {
    lock.writeLock().lock();

    Map<VersionType, TimelineEntry> versionEntries = allTimelineEntries.get(interval);
    if (versionEntries == null) {
      return null;
    }

    TimelineEntry entry = versionEntries.get(version);
    if (entry == null) {
      return null;
    }

    PartitionChunk<ObjectType> retVal = entry.getPartitionHolder().remove(chunk);
    if (entry.getPartitionHolder().isEmpty()) {
      versionEntries.remove(version);
      if (versionEntries.isEmpty()) {
        allTimelineEntries.remove(interval);
      }
      remove(incompletePartitionsTimeline, interval, entry, true);
    }

    remove(completePartitionsTimeline, interval, entry, false);

    return retVal;
  }
  finally {
    lock.writeLock().unlock();
  }
}
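/*
 * Sketch of remove() semantics (names hypothetical), matching the code above:
 * the removed chunk comes back, or null when nothing was tracked under that
 * (interval, version).
 */
PartitionChunk<DataSegment> removed = timeline.remove(
    segment.getInterval(),
    segment.getVersion(),
    segment.getShardSpec().createChunk(segment)
);
if (removed == null) {
  // the chunk was never added, or its entry was already emptied out
}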
.map(ByteArrayMapper.FIRST)
.fold(
    new VersionedIntervalTimeline<String, DataSegment>(Ordering.natural()),
    new Folder3<VersionedIntervalTimeline<String, DataSegment>, byte[]>()