// Fragment: scans every chunk of the existing holder to track the chunk with the highest shard
// partition number ("max"); the comparison tail and assignment lie outside this view.
for (PartitionChunk<DataSegment> existing : existingHolder.getObject()) { if (max == null || max.getShardSpec().getPartitionNum() < existing.getObject() .getShardSpec()
// Flattens one timeline holder (paired with an interval) into the segments its chunks carry.
@Override
public Iterable<DataSegment> apply(Pair<TimelineObjectHolder<String, DataSegment>, Interval> input)
{
  // Each PartitionChunk wraps exactly one DataSegment; unwrap them lazily.
  return Iterables.transform(input.lhs.getObject(), PartitionChunk::getObject);
}
}
// Maps a (holder, interval) pair to the DataSegment payloads held by the holder's chunks.
@Override
public Iterable<DataSegment> apply(Pair<TimelineObjectHolder<String, DataSegment>, Interval> input)
{
  return Iterables.transform(
      input.lhs.getObject(),
      (PartitionChunk<DataSegment> chunk) -> chunk.getObject()
  );
}
}
// Pairs each segment in the holder with its on-disk file, wrapped in a SegmentToMergeHolder.
@Override
public Iterable<SegmentToMergeHolder> apply(final TimelineObjectHolder<String, DataSegment> input)
{
  return Iterables.transform(
      input.getObject(),
      (PartitionChunk<DataSegment> chunkInput) -> {
        final DataSegment segment = chunkInput.getObject();
        // The segment's file must already have been fetched; fail loudly otherwise.
        final File segmentFile = Preconditions.checkNotNull(
            segments.get(segment),
            "File for segment %s",
            segment.getIdentifier()
        );
        return new SegmentToMergeHolder(input.getInterval(), segmentFile);
      }
  );
}
}
// Fragment: arguments to a transform call turning each partition chunk of the holder into a
// WindowedStorageAdapter; the enclosing call and the Function body are outside this view.
holder.getObject(), new Function<PartitionChunk<DataSegment>, WindowedStorageAdapter>()
// Extracts the segment payload of the holder's chunk number 0.
@Override
public DataSegment apply(TimelineObjectHolder<String, DataSegment> input)
{
  final PartitionChunk<DataSegment> firstChunk = input.getObject().getChunk(0);
  return firstChunk.getObject();
}
}
@VisibleForTesting static List<String> getUniqueMetrics(List<TimelineObjectHolder<String, DataSegment>> timelineSegments) { final BiMap<String, Integer> uniqueMetrics = HashBiMap.create(); // Here, we try to retain the order of metrics as they were specified. Metrics are extracted from the recent // segments to olders. // timelineSegments are sorted in order of interval int index = 0; for (TimelineObjectHolder<String, DataSegment> timelineHolder : Lists.reverse(timelineSegments)) { for (PartitionChunk<DataSegment> chunk : timelineHolder.getObject()) { for (String metric : chunk.getObject().getMetrics()) { if (!uniqueMetrics.containsKey(metric)) { uniqueMetrics.put(metric, index++); } } } } final BiMap<Integer, String> orderedMetrics = uniqueMetrics.inverse(); return IntStream.range(0, orderedMetrics.size()) .mapToObj(orderedMetrics::get) .collect(Collectors.toList()); } }
@VisibleForTesting static List<String> getUniqueDimensions( List<TimelineObjectHolder<String, DataSegment>> timelineSegments, @Nullable Set<String> excludeDimensions ) { final BiMap<String, Integer> uniqueDims = HashBiMap.create(); // Here, we try to retain the order of dimensions as they were specified since the order of dimensions may be // optimized for performance. // Dimensions are extracted from the recent segments to olders because recent segments are likely to be queried more // frequently, and thus the performance should be optimized for recent ones rather than old ones. // timelineSegments are sorted in order of interval int index = 0; for (TimelineObjectHolder<String, DataSegment> timelineHolder : Lists.reverse(timelineSegments)) { for (PartitionChunk<DataSegment> chunk : timelineHolder.getObject()) { for (String dimension : chunk.getObject().getDimensions()) { if (!uniqueDims.containsKey(dimension) && (excludeDimensions == null || !excludeDimensions.contains(dimension))) { uniqueDims.put(dimension, index++); } } } } final BiMap<Integer, String> orderedDims = uniqueDims.inverse(); return IntStream.range(0, orderedDims.size()) .mapToObj(orderedDims::get) .collect(Collectors.toList()); }
/**
 * Loads a QueryableIndex for every segment in the given timeline holders.
 *
 * @param timelineSegments holders whose chunks reference the segments to load
 * @param segmentFileMap   local file for each segment; every segment must be present
 * @param indexIO          used to open each segment file as a QueryableIndex
 * @return (index, segment) pairs in holder/chunk iteration order
 * @throws IOException if an index cannot be read
 */
private static List<Pair<QueryableIndex, DataSegment>> loadSegments(
    List<TimelineObjectHolder<String, DataSegment>> timelineSegments,
    Map<DataSegment, File> segmentFileMap,
    IndexIO indexIO
) throws IOException
{
  final List<Pair<QueryableIndex, DataSegment>> loaded = new ArrayList<>();
  for (TimelineObjectHolder<String, DataSegment> holder : timelineSegments) {
    for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
      final DataSegment segment = chunk.getObject();
      // A missing file means the caller failed to fetch this segment; fail fast with its id.
      final File segmentFile = Preconditions.checkNotNull(
          segmentFileMap.get(segment),
          "File for segment %s",
          segment.getIdentifier()
      );
      loaded.add(Pair.of(indexIO.loadIndex(segmentFile), segment));
    }
  }
  return loaded;
}
// Builds a SegmentToMergeHolder for the holder's first chunk, resolving its local file.
@Override
public SegmentToMergeHolder apply(TimelineObjectHolder<String, DataSegment> input)
{
  final DataSegment segment = input.getObject().getChunk(0).getObject();
  // The file must already be present in the map; fail loudly with the segment id otherwise.
  final File segmentFile = Preconditions.checkNotNull(
      segments.get(segment),
      "File for segment %s",
      segment.getIdentifier()
  );
  return new SegmentToMergeHolder(segment, input.getInterval(), segmentFile);
}
}
// Lambda fragment: flattens each timeline holder into immutable load-info views of its chunks;
// the enclosing transform call and closing parentheses are outside this view.
(TimelineObjectHolder<String, SegmentLoadInfo> input) -> Iterables.transform( input.getObject(), (PartitionChunk<SegmentLoadInfo> chunk) -> chunk.getObject().toImmutableSegmentLoadInfo()
/**
 * Remove timelineObjects from this holder until we have a complete set with total size <= maxSize.
 *
 * @return number of timeline object holders removed
 */
public int backtrack(long maxSize)
{
  Preconditions.checkArgument(maxSize >= 0, "maxSize >= 0");

  int numRemoved = 0;
  while (!isComplete() || byteCount > maxSize) {
    numRemoved++;
    // Drop the most recently added holder and release the segments it contributed.
    final int lastIndex = timelineObjects.size() - 1;
    final TimelineObjectHolder<String, DataSegment> droppedHolder = timelineObjects.remove(lastIndex).lhs;
    for (final PartitionChunk<DataSegment> chunk : droppedHolder.getObject()) {
      final DataSegment dataSegment = chunk.getObject();
      segments.remove(dataSegment);
      // Subtract the size only once no copy of this segment remains in the multiset.
      if (segments.count(dataSegment) == 0) {
        byteCount -= dataSegment.getSize();
      }
    }
  }
  return numRemoved;
}
}
/**
 * Remove timelineObjects from this holder until we have a complete set with total size <= maxSize.
 *
 * @return number of timeline object holders removed
 */
public int backtrack(long maxSize)
{
  Preconditions.checkArgument(maxSize >= 0, "maxSize >= 0");

  int removedCount = 0;
  // Keep unwinding from the tail while the set is incomplete or still over budget.
  while (!isComplete() || byteCount > maxSize) {
    final TimelineObjectHolder<String, DataSegment> tail =
        timelineObjects.remove(timelineObjects.size() - 1).lhs;
    removedCount++;
    for (final PartitionChunk<DataSegment> partitionChunk : tail.getObject()) {
      final DataSegment removedSegment = partitionChunk.getObject();
      segments.remove(removedSegment);
      // byteCount tracks distinct segments; decrement only when the last occurrence is gone.
      if (segments.count(removedSegment) == 0) {
        byteCount -= removedSegment.getSize();
      }
    }
  }
  return removedCount;
}
}
private Set<ServerToSegment> computeSegmentsToQuery(TimelineLookup<String, ServerSelector> timeline) { final List<TimelineObjectHolder<String, ServerSelector>> serversLookup = toolChest.filterSegments( query, query.getIntervals().stream().flatMap(i -> timeline.lookup(i).stream()).collect(Collectors.toList()) ); final Set<ServerToSegment> segments = Sets.newLinkedHashSet(); final Map<String, Optional<RangeSet<String>>> dimensionRangeCache = Maps.newHashMap(); // Filter unneeded chunks based on partition dimension for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) { final Set<PartitionChunk<ServerSelector>> filteredChunks = DimFilterUtils.filterShards( query.getFilter(), holder.getObject(), partitionChunk -> partitionChunk.getObject().getSegment().getShardSpec(), dimensionRangeCache ); for (PartitionChunk<ServerSelector> chunk : filteredChunks) { ServerSelector server = chunk.getObject(); final SegmentDescriptor segment = new SegmentDescriptor( holder.getInterval(), holder.getVersion(), chunk.getChunkNumber() ); segments.add(new ServerToSegment(server, segment)); } } return segments; }
// Fragment: requires the holder to have at least one chunk, then returns false for any segment
// whose shard spec is not a NoneShardSpec. Closing braces lie outside this view.
PartitionChunk<DataSegment> firstChunk = Iterables.getFirst(timelineObject.getObject(), null); if (firstChunk == null) { throw new ISE("Unable to find an underlying interval"); for (final PartitionChunk<DataSegment> segment : timelineObject.getObject()) { if (!(segment.getObject().getShardSpec() instanceof NoneShardSpec)) { return false;
// Fragment (duplicate of the snippet above): fails if the holder has no chunk, and rejects
// holders containing a non-NoneShardSpec segment. Closing braces lie outside this view.
PartitionChunk<DataSegment> firstChunk = Iterables.getFirst(timelineObject.getObject(), null); if (firstChunk == null) { throw new ISE("Unable to find an underlying interval"); for (final PartitionChunk<DataSegment> segment : timelineObject.getObject()) { if (!(segment.getObject().getShardSpec() instanceof NoneShardSpec)) { return false;
/**
 * Locates the servers currently holding the segments that overlap the given intervals.
 *
 * @param serverView    view providing the datasource timeline
 * @param datasource    datasource whose segments are being located
 * @param intervals     intervals to look up in the timeline
 * @param numCandidates maximum number of candidate servers to report per segment
 * @return one LocatedSegmentDescriptor per chunk found, or an empty list when the datasource
 *         has no timeline
 */
public static List<LocatedSegmentDescriptor> getTargetLocations(
    TimelineServerView serverView,
    DataSource datasource,
    List<Interval> intervals,
    int numCandidates
)
{
  final TimelineLookup<String, ServerSelector> timeline = serverView.getTimeline(datasource);
  if (timeline == null) {
    return Collections.emptyList();
  }

  final List<LocatedSegmentDescriptor> located = Lists.newArrayList();
  for (Interval interval : intervals) {
    for (TimelineObjectHolder<String, ServerSelector> holder : timeline.lookup(interval)) {
      for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
        final ServerSelector selector = chunk.getObject();
        final SegmentDescriptor descriptor = new SegmentDescriptor(
            holder.getInterval(),
            holder.getVersion(),
            chunk.getChunkNumber()
        );
        final long size = selector.getSegment().getSize();
        final List<DruidServerMetadata> candidates = selector.getCandidates(numCandidates);
        located.add(new LocatedSegmentDescriptor(descriptor, size, candidates));
      }
    }
  }
  return located;
}
}
// Fragment: removes every overshadowed segment payload in this holder via the coordinator and
// bumps the "overShadowedCount" stat once per removal; closing braces are outside this view.
for (DataSegment dataSegment : holder.getObject().payloads()) { coordinator.removeSegment(dataSegment); stats.addToGlobalStat("overShadowedCount", 1);
// Fragment: builds a per-sink query runner from the holder's chunk 0. NOTE(review): the opening
// brace after the apply(...) signature is missing in this view — presumably lost in extraction;
// TODO confirm against the full source.
@Override public QueryRunner<T> apply(TimelineObjectHolder<String, Sink> holder) final Sink theSink = holder.getObject().getChunk(0).getObject(); return new SpecificSegmentQueryRunner<T>( new MetricsEmittingQueryRunner<T>(