/**
 * Builds a per-segment optimization context for the given interval, using a
 * fixed version of "0" and partition number 0.
 */
private PerSegmentQueryOptimizationContext getOptimizationContext(Interval segmentInterval)
{
  final SegmentDescriptor descriptor = new SegmentDescriptor(segmentInterval, "0", 0);
  return new PerSegmentQueryOptimizationContext(descriptor);
}
}
/**
 * Convenience constructor: unpacks the descriptor's interval, version, and
 * partition number and delegates to the canonical constructor.
 */
public LocatedSegmentDescriptor(SegmentDescriptor descriptor, long size, List<DruidServerMetadata> candidates)
{
  this(
      descriptor.getInterval(),
      descriptor.getVersion(),
      descriptor.getPartitionNumber(),
      size,
      candidates
  );
}
/**
 * Equality is defined solely by the wrapped {@code descriptor}; comparison is
 * null-safe on both sides.
 */
@Override
public boolean equals(Object o)
{
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  final SpecificSegmentSpec that = (SpecificSegmentSpec) o;
  // Collapses the redundant if/return-false/return-true dance into a single
  // null-safe comparison (equivalent to Objects.equals(descriptor, that.descriptor)).
  return descriptor == null ? that.descriptor == null : descriptor.equals(that.descriptor);
}
/**
 * Resolves the descriptor against the timeline and wraps each partition
 * chunk's segment in a runner scoped to exactly that segment.
 */
@Override
public Iterable<QueryRunner<T>> apply(final SegmentDescriptor descriptor)
{
  // NOTE(review): assumes findEntry never returns null for descriptors handed
  // to this function — confirm against callers.
  final PartitionHolder<Segment> holder = timeline.findEntry(
      descriptor.getInterval(),
      descriptor.getVersion()
  );
  return Iterables.transform(
      holder,
      chunk -> new SpecificSegmentQueryRunner<T>(
          factory.createRunner(chunk.getObject()),
          new SpecificSegmentSpec(descriptor)
      )
  );
}
}
/** Projects a segment descriptor down to just its time interval. */
@Override
public Interval apply(SegmentDescriptor input)
{
  final Interval interval = input.getInterval();
  return interval;
}
}
/**
 * Converts raw per-segment timeseries results into bySegment-style results.
 *
 * For each descriptor in the query's MultipleSpecificSegmentSpec, rebuilds the
 * dummy SegmentId of the form "&lt;intervalIndex&gt;_&lt;partitionNumber&gt;" that the
 * segment was registered under, locates the matching results, and wraps them in
 * a BySegmentResultValueClass timestamped by the first row.
 *
 * @throws ISE if a descriptor has no matching entry in {@code segmentIds}
 */
private Sequence<Result<TimeseriesResultValue>> toFilteredQueryableTimeseriesResults(
    TimeseriesQuery query,
    List<SegmentId> segmentIds,
    List<Interval> queryIntervals,
    List<Iterable<Result<TimeseriesResultValue>>> results
)
{
  MultipleSpecificSegmentSpec spec = (MultipleSpecificSegmentSpec) query.getQuerySegmentSpec();
  List<Result<TimeseriesResultValue>> ret = new ArrayList<>();
  for (SegmentDescriptor descriptor : spec.getDescriptors()) {
    // Reconstruct the dummy id: index of the descriptor's interval within the
    // query intervals, joined with its partition number.
    SegmentId id = SegmentId.dummy(
        StringUtils.format("%s_%s", queryIntervals.indexOf(descriptor.getInterval()), descriptor.getPartitionNumber())
    );
    int index = segmentIds.indexOf(id);
    if (index != -1) {
      // Timestamp comes from the first result row of this segment; all rows are
      // bundled into a single bySegment value.
      Result result = new Result(
          results.get(index).iterator().next().getTimestamp(),
          new BySegmentResultValueClass(
              Lists.newArrayList(results.get(index)),
              id.toString(),
              descriptor.getInterval()
          )
      );
      ret.add(result);
    } else {
      throw new ISE("Descriptor %s not found in server", id);
    }
  }
  return Sequences.simple(ret);
}
/**
 * Routes the query to the chief owning this descriptor's partition; partitions
 * with no registered chief produce no results.
 */
@Override
public QueryRunner<T> apply(SegmentDescriptor spec)
{
  final FireChief chief = partitionChiefs.get(spec.getPartitionNumber());
  if (chief == null) {
    return new NoopQueryRunner<T>();
  }
  // Narrow the query to exactly this segment before handing it to the chief.
  return chief.getQueryRunner(query.withQuerySegmentSpec(new SpecificSegmentSpec(spec)));
}
}
/** This spec targets exactly one segment, so it exposes that segment's single interval. */
@Override
public List<Interval> getIntervals()
{
  final Interval interval = descriptor.getInterval();
  return Collections.singletonList(interval);
}
/**
 * Finds the chief responsible for the descriptor's partition. An unknown
 * partition yields an empty runner; otherwise the query is restricted to this
 * specific segment before being handed to the chief's runner.
 */
@Override
public QueryRunner<T> apply(SegmentDescriptor spec)
{
  final FireChief partitionChief = partitionChiefs.get(spec.getPartitionNumber());
  return partitionChief == null
         ? new NoopQueryRunner<T>()
         : partitionChief.getQueryRunner(query.withQuerySegmentSpec(new SpecificSegmentSpec(spec)));
}
}
/** Builds the (interval, version, partition) descriptor identifying this segment. */
public SegmentDescriptor toDescriptor()
{
  final int partitionNum = shardSpec.getPartitionNum();
  return new SegmentDescriptor(getInterval(), getVersion(), partitionNum);
}
/**
 * Runs the query against a timeline filtered down to only the chunks named by
 * {@code specs}; descriptors no longer present in the timeline are skipped.
 */
@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final Map<String, Object> responseContext)
{
  return CachingClusteredClient.this.run(
      queryPlus,
      responseContext,
      timeline -> {
        final VersionedIntervalTimeline<String, ServerSelector> filtered =
            new VersionedIntervalTimeline<>(Ordering.natural());
        for (SegmentDescriptor spec : specs) {
          final PartitionHolder<ServerSelector> entry =
              timeline.findEntry(spec.getInterval(), spec.getVersion());
          if (entry == null) {
            // Interval/version pair is gone from the timeline; skip it.
            continue;
          }
          final PartitionChunk<ServerSelector> chunk = entry.getChunk(spec.getPartitionNumber());
          if (chunk == null) {
            // Requested partition is missing; skip it.
            continue;
          }
          filtered.add(spec.getInterval(), spec.getVersion(), chunk);
        }
        return filtered;
      }
  );
}
};
/**
 * Pulls already-cached per-segment results and prunes those segments from the
 * set still needing to be queried.
 *
 * Side effect: segments whose results are found in the cache are REMOVED from
 * the {@code segments} set passed in by the caller.
 *
 * @param queryCacheKey cache key for the query; null disables caching entirely
 * @param segments      mutable set of candidate segments; shrunk in place
 * @return (interval, cached-bytes) pairs for every segment served from cache;
 *         empty when {@code queryCacheKey} is null
 */
private List<Pair<Interval, byte[]>> pruneSegmentsWithCachedResults(
    final byte[] queryCacheKey,
    final Set<ServerToSegment> segments
)
{
  if (queryCacheKey == null) {
    return Collections.emptyList();
  }
  final List<Pair<Interval, byte[]>> alreadyCachedResults = new ArrayList<>();
  Map<ServerToSegment, Cache.NamedKey> perSegmentCacheKeys = computePerSegmentCacheKeys(segments, queryCacheKey);
  // Pull cached segments from cache and remove from set of segments to query
  final Map<Cache.NamedKey, byte[]> cachedValues = computeCachedValues(perSegmentCacheKeys);
  perSegmentCacheKeys.forEach((segment, segmentCacheKey) -> {
    final Interval segmentQueryInterval = segment.getSegmentDescriptor().getInterval();
    final byte[] cachedValue = cachedValues.get(segmentCacheKey);
    if (cachedValue != null) {
      // remove cached segment from set of segments to query
      segments.remove(segment);
      alreadyCachedResults.add(Pair.of(segmentQueryInterval, cachedValue));
    } else if (populateCache) {
      // otherwise, if populating cache, add segment to list of segments to cache
      final SegmentId segmentId = segment.getServer().getSegment().getId();
      addCachePopulatorKey(segmentCacheKey, segmentId, segmentQueryInterval);
    }
  });
  return alreadyCachedResults;
}
/**
 * Equality is defined solely by the wrapped {@code descriptor}; comparison is
 * null-safe on both sides.
 */
@Override
public boolean equals(Object o)
{
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  final SpecificSegmentSpec that = (SpecificSegmentSpec) o;
  // Collapses the redundant if/return-false/return-true dance into a single
  // null-safe comparison (equivalent to Objects.equals(descriptor, that.descriptor)).
  return descriptor == null ? that.descriptor == null : descriptor.equals(that.descriptor);
}
/**
 * Test stub: reports a single fixed descriptor as missing via the response
 * context, then returns no rows.
 */
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  final List missingSegments = (List) context.get(Result.MISSING_SEGMENTS_KEY);
  missingSegments.add(new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1));
  return Sequences.empty();
}
},
/**
 * Builds the cache key for one segment of a query. Key layout:
 * [startMillis(8) | endMillis(8) | versionBytes | partition(4) | queryCacheKey].
 */
public static Cache.NamedKey computeSegmentCacheKey(
    String segmentId,
    SegmentDescriptor descriptor,
    byte[] queryCacheKey
)
{
  final Interval interval = descriptor.getInterval();
  final byte[] versionBytes = StringUtils.toUtf8(descriptor.getVersion());
  // 16 bytes for the two interval longs, 4 for the partition-number int.
  final int capacity = 16 + versionBytes.length + 4 + queryCacheKey.length;
  final ByteBuffer key = ByteBuffer.allocate(capacity);
  key.putLong(interval.getStartMillis());
  key.putLong(interval.getEndMillis());
  key.put(versionBytes);
  key.putInt(descriptor.getPartitionNumber());
  key.put(queryCacheKey);
  return new Cache.NamedKey(segmentId, key.array());
}
// Interval covered by the segment currently being optimized.
// NOTE(review): assumes optimizationContext is non-null here — confirm upstream guard.
Interval segmentInterval = optimizationContext.getSegmentDescriptor().getInterval();
// Intervals referenced by the interval filter, to be compared against the segment's interval.
List<Interval> filterIntervals = intervalDimFilter.getIntervals();
// Collects filter intervals classified as excluded — populated below (outside this view).
List<Interval> excludedFilterIntervals = new ArrayList<>();
/**
 * Test stub: records one fixed descriptor in the response context's missing
 * segments list and emits an empty sequence.
 */
@Override
public Sequence<Result<TimeseriesResultValue>> run(QueryPlus queryPlus, Map context)
{
  final SegmentDescriptor missing = new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1);
  ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(missing);
  return Sequences.empty();
}
},
/**
 * Resolves a descriptor to a single decorated query runner over its segment.
 * If either the timeline entry or the partition chunk is absent, a runner that
 * reports the segment as missing is returned instead.
 */
@Override
@SuppressWarnings("unchecked")
public Iterable<QueryRunner<T>> apply(SegmentDescriptor input)
{
  final PartitionHolder<ReferenceCountingSegment> entry = timeline.findEntry(
      input.getInterval(),
      input.getVersion()
  );
  final PartitionChunk<ReferenceCountingSegment> chunk =
      entry == null ? null : entry.getChunk(input.getPartitionNumber());
  if (chunk == null) {
    // Either the interval/version pair or the partition itself is gone.
    return Collections.singletonList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
  }
  final ReferenceCountingSegment segment = chunk.getObject();
  return Collections.singletonList(
      buildAndDecorateQueryRunner(factory, toolChest, segment, input, cpuTimeAccumulator)
  );
}
}
// NOTE(review): fragment of a call's argument list — the runner registered for this
// descriptor's interval, paired with a spec restricted to that specific segment.
runners.get(descriptor.getInterval()), new SpecificSegmentSpec(descriptor) );
// Load rules for this datasource, falling back to the cluster defaults.
final List<Rule> rules = databaseRuleManager.getRulesWithDefault(dataSourceName);
final Interval theInterval = Intervals.of(interval);
// Descriptor identifying the exact segment (interval + version + partition) under consideration.
final SegmentDescriptor descriptor = new SegmentDescriptor(theInterval, version, partitionNumber);
final DateTime now = DateTimes.nowUtc();