// Unwraps a partition chunk to the payload object it carries.
// (The trailing brace closes the enclosing anonymous Function class.)
@Override public T apply(PartitionChunk<T> input) { return input.getObject(); } }
// Maps a timeline holder to the DataSegment held by its first partition chunk.
// NOTE(review): only chunk 0 is consulted — assumes single-chunk (unsharded)
// holders; confirm against the caller. Trailing brace closes the enclosing
// anonymous Function class.
@Override public DataSegment apply(TimelineObjectHolder<String, DataSegment> input) { return input.getObject().getChunk(0).getObject(); } }
@VisibleForTesting static List<String> getUniqueMetrics(List<TimelineObjectHolder<String, DataSegment>> timelineSegments) { final BiMap<String, Integer> uniqueMetrics = HashBiMap.create(); // Here, we try to retain the order of metrics as they were specified. Metrics are extracted from the recent // segments to olders. // timelineSegments are sorted in order of interval int index = 0; for (TimelineObjectHolder<String, DataSegment> timelineHolder : Lists.reverse(timelineSegments)) { for (PartitionChunk<DataSegment> chunk : timelineHolder.getObject()) { for (String metric : chunk.getObject().getMetrics()) { if (!uniqueMetrics.containsKey(metric)) { uniqueMetrics.put(metric, index++); } } } } final BiMap<Integer, String> orderedMetrics = uniqueMetrics.inverse(); return IntStream.range(0, orderedMetrics.size()) .mapToObj(orderedMetrics::get) .collect(Collectors.toList()); } }
/**
 * Remove timelineObjects from this holder until we have a complete set with total size <= maxSize.
 *
 * @return number of timeline object holders removed
 */
public int backtrack(long maxSize)
{
  Preconditions.checkArgument(maxSize >= 0, "maxSize >= 0");

  int numRemoved = 0;
  // Drop holders from the tail until the retained set is complete and fits the budget.
  while (!isComplete() || byteCount > maxSize) {
    numRemoved++;
    final TimelineObjectHolder<String, DataSegment> droppedHolder =
        timelineObjects.remove(timelineObjects.size() - 1).lhs;
    for (final PartitionChunk<DataSegment> chunk : droppedHolder.getObject()) {
      final DataSegment dataSegment = chunk.getObject();
      segments.remove(dataSegment);
      // Only subtract the size once the last multiset occurrence is gone,
      // mirroring the add side, which counts the size on count == 1.
      if (segments.count(dataSegment) == 0) {
        byteCount -= dataSegment.getSize();
      }
    }
  }

  return numRemoved;
}
}
/**
 * Remove timelineObjects from this holder until we have a complete set with total size <= maxSize.
 *
 * @return number of timeline object holders removed
 */
public int backtrack(long maxSize)
{
  Preconditions.checkArgument(maxSize >= 0, "maxSize >= 0");
  int dropped = 0;
  for (;;) {
    // Stop as soon as the retained set is complete and within the size budget.
    if (isComplete() && byteCount <= maxSize) {
      return dropped;
    }
    dropped++;
    final int lastIndex = timelineObjects.size() - 1;
    final TimelineObjectHolder<String, DataSegment> tail = timelineObjects.remove(lastIndex).lhs;
    for (final PartitionChunk<DataSegment> partitionChunk : tail.getObject()) {
      segments.remove(partitionChunk.getObject());
      // byteCount tracks distinct segments: subtract only when the multiset
      // no longer holds any occurrence of this segment.
      if (segments.count(partitionChunk.getObject()) == 0) {
        byteCount -= partitionChunk.getObject().getSize();
      }
    }
  }
}
}
@VisibleForTesting static List<String> getUniqueDimensions( List<TimelineObjectHolder<String, DataSegment>> timelineSegments, @Nullable Set<String> excludeDimensions ) { final BiMap<String, Integer> uniqueDims = HashBiMap.create(); // Here, we try to retain the order of dimensions as they were specified since the order of dimensions may be // optimized for performance. // Dimensions are extracted from the recent segments to olders because recent segments are likely to be queried more // frequently, and thus the performance should be optimized for recent ones rather than old ones. // timelineSegments are sorted in order of interval int index = 0; for (TimelineObjectHolder<String, DataSegment> timelineHolder : Lists.reverse(timelineSegments)) { for (PartitionChunk<DataSegment> chunk : timelineHolder.getObject()) { for (String dimension : chunk.getObject().getDimensions()) { if (!uniqueDims.containsKey(dimension) && (excludeDimensions == null || !excludeDimensions.contains(dimension))) { uniqueDims.put(dimension, index++); } } } } final BiMap<Integer, String> orderedDims = uniqueDims.inverse(); return IntStream.range(0, orderedDims.size()) .mapToObj(orderedDims::get) .collect(Collectors.toList()); }
/**
 * Loads a QueryableIndex for every segment referenced by the given timeline
 * holders, pairing each index with its segment.
 *
 * @throws IOException if the index files cannot be read
 * @throws NullPointerException if a segment has no entry in segmentFileMap
 */
private static List<Pair<QueryableIndex, DataSegment>> loadSegments(
    List<TimelineObjectHolder<String, DataSegment>> timelineSegments,
    Map<DataSegment, File> segmentFileMap,
    IndexIO indexIO
) throws IOException
{
  final List<Pair<QueryableIndex, DataSegment>> loaded = new ArrayList<>();
  for (TimelineObjectHolder<String, DataSegment> holder : timelineSegments) {
    for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
      final DataSegment dataSegment = chunk.getObject();
      // Every segment must already have been fetched to a local file.
      final File segmentFile = Preconditions.checkNotNull(
          segmentFileMap.get(dataSegment),
          "File for segment %s",
          dataSegment.getIdentifier()
      );
      loaded.add(Pair.of(indexIO.loadIndex(segmentFile), dataSegment));
    }
  }
  return loaded;
}
// Builds a SegmentToMergeHolder for the holder's first (and assumed only)
// chunk, resolving the segment's local file from the segments map.
// Trailing brace closes the enclosing anonymous Function class.
@Override
public SegmentToMergeHolder apply(TimelineObjectHolder<String, DataSegment> input)
{
  final DataSegment dataSegment = input.getObject().getChunk(0).getObject();
  final File segmentFile = Preconditions.checkNotNull(
      segments.get(dataSegment),
      "File for segment %s",
      dataSegment.getIdentifier()
  );
  return new SegmentToMergeHolder(dataSegment, input.getInterval(), segmentFile);
}
}
input.getObject(), (PartitionChunk<SegmentLoadInfo> chunk) -> chunk.getObject().toImmutableSegmentLoadInfo()
throw new ISE("Unable to find an underlying interval"); Interval underlyingInterval = firstChunk.getObject().getInterval(); if (!(segment.getObject().getShardSpec() instanceof NoneShardSpec)) { return false; segments.add(segment.getObject()); if (segments.count(segment.getObject()) == 1) { byteCount += segment.getObject().getSize();
private Set<ServerToSegment> computeSegmentsToQuery(TimelineLookup<String, ServerSelector> timeline) { final List<TimelineObjectHolder<String, ServerSelector>> serversLookup = toolChest.filterSegments( query, query.getIntervals().stream().flatMap(i -> timeline.lookup(i).stream()).collect(Collectors.toList()) ); final Set<ServerToSegment> segments = Sets.newLinkedHashSet(); final Map<String, Optional<RangeSet<String>>> dimensionRangeCache = Maps.newHashMap(); // Filter unneeded chunks based on partition dimension for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) { final Set<PartitionChunk<ServerSelector>> filteredChunks = DimFilterUtils.filterShards( query.getFilter(), holder.getObject(), partitionChunk -> partitionChunk.getObject().getSegment().getShardSpec(), dimensionRangeCache ); for (PartitionChunk<ServerSelector> chunk : filteredChunks) { ServerSelector server = chunk.getObject(); final SegmentDescriptor segment = new SegmentDescriptor( holder.getInterval(), holder.getVersion(), chunk.getChunkNumber() ); segments.add(new ServerToSegment(server, segment)); } } return segments; }
// Resolves the QueryRunner for one segment descriptor. If either the timeline
// entry or the requested partition chunk is missing, reports the missing
// segment via a ReportTimelineMissingSegmentQueryRunner instead of failing.
// Trailing brace closes the enclosing anonymous Function class.
@Override
@SuppressWarnings("unchecked")
public Iterable<QueryRunner<T>> apply(SegmentDescriptor input)
{
  final PartitionHolder<ReferenceCountingSegment> entry = timeline.findEntry(
      input.getInterval(),
      input.getVersion()
  );
  if (entry == null) {
    // Consistency fix: was Arrays.<QueryRunner<T>>asList(...); use the same
    // Collections.singletonList form as the other two return paths.
    return Collections.singletonList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
  }

  final PartitionChunk<ReferenceCountingSegment> chunk = entry.getChunk(input.getPartitionNumber());
  if (chunk == null) {
    return Collections.singletonList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
  }

  final ReferenceCountingSegment adapter = chunk.getObject();
  return Collections.singletonList(
      buildAndDecorateQueryRunner(factory, toolChest, adapter, input, cpuTimeAccumulator)
  );
}
}
throw new ISE("Unable to find an underlying interval"); Interval underlyingInterval = firstChunk.getObject().getInterval(); if (!(segment.getObject().getShardSpec() instanceof NoneShardSpec)) { return false; segments.add(segment.getObject()); if (segments.count(segment.getObject()) == 1) { byteCount += segment.getObject().getSize();
// Resolves the QueryRunner for one segment descriptor. Unlike the variant that
// returns a ReportTimelineMissingSegmentQueryRunner, this one signals a missing
// timeline entry or partition chunk by returning null.
// NOTE(review): returning a null Iterable is an anti-pattern — confirm that the
// surrounding iteration machinery tolerates null before changing this contract.
// Trailing brace closes the enclosing anonymous Function class.
@Override @SuppressWarnings("unchecked") public Iterable<QueryRunner<T>> apply(SegmentDescriptor input) { final PartitionHolder<ReferenceCountingSegment> entry = timeline.findEntry( input.getInterval(), input.getVersion() ); if (entry == null) { return null; } final PartitionChunk<ReferenceCountingSegment> chunk = entry.getChunk(input.getPartitionNumber()); if (chunk == null) { return null; } final ReferenceCountingSegment adapter = chunk.getObject(); return Arrays.asList( buildAndDecorateQueryRunner(factory, toolChest, adapter, input) ); } }
/**
 * Finds, for every segment overlapping the given intervals, up to
 * numCandidates servers that can serve it, returning one
 * LocatedSegmentDescriptor per segment chunk.
 *
 * @return an empty list when the datasource has no timeline
 */
public static List<LocatedSegmentDescriptor> getTargetLocations(
    TimelineServerView serverView,
    DataSource datasource,
    List<Interval> intervals,
    int numCandidates
)
{
  final TimelineLookup<String, ServerSelector> timeline = serverView.getTimeline(datasource);
  if (timeline == null) {
    // No timeline means the datasource is not served anywhere.
    return Collections.emptyList();
  }

  final List<LocatedSegmentDescriptor> located = Lists.newArrayList();
  for (Interval interval : intervals) {
    for (TimelineObjectHolder<String, ServerSelector> holder : timeline.lookup(interval)) {
      for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
        final ServerSelector selector = chunk.getObject();
        final SegmentDescriptor descriptor = new SegmentDescriptor(
            holder.getInterval(),
            holder.getVersion(),
            chunk.getChunkNumber()
        );
        final List<DruidServerMetadata> candidates = selector.getCandidates(numCandidates);
        located.add(new LocatedSegmentDescriptor(descriptor, selector.getSegment().getSize(), candidates));
      }
    }
  }
  return located;
}
}
@Override public QueryRunner<T> apply(TimelineObjectHolder<String, Sink> holder) final Sink theSink = holder.getObject().getChunk(0).getObject(); return new SpecificSegmentQueryRunner<T>( new MetricsEmittingQueryRunner<T>(
final Sink theSink = chunk.getObject(); final String sinkSegmentIdentifier = theSink.getSegment().getIdentifier();
segment.getShardSpec().createChunk((ReferenceCountingSegment) null) ); ReferenceCountingSegment oldQueryable = (removed == null) ? null : removed.getObject();
segment.getShardSpec().createChunk(null) ); final ReferenceCountingSegment oldQueryable = (removed == null) ? null : removed.getObject();