/**
 * Returns true when the source interval shares at least one instant with the
 * target interval, i.e. the segment is relevant to the target window.
 */
public static boolean eligibleForLoad(Interval src, Interval target)
{
  final boolean sharesTime = src.overlaps(target);
  return sharesTime;
}
/**
 * Keeps only inputs whose interval intersects {@code targetInterval}.
 */
@Override
public boolean apply(T input)
{
  final Interval candidate = input.getInterval();
  return candidate.overlaps(targetInterval);
}
}
/**
 * True when the given segment lies (at least partly) inside the interval this
 * calculation covers.
 */
boolean inCalculationInterval(DataSegment dataSegment)
{
  final Interval segmentInterval = dataSegment.getInterval();
  return calculationInterval.overlaps(segmentInterval);
}
/**
 * Orders intervals by position on the timeline, treating equal or overlapping
 * intervals as interchangeable.
 */
@Override
public int compare(Interval o1, Interval o2)
{
  // Equal or overlapping intervals compare as "same".
  if (o1.equals(o2) || o1.overlaps(o2)) {
    return 0;
  }
  // NOTE(review): overlap-as-equal makes this ordering non-transitive for
  // chains of partially overlapping intervals — presumably callers only apply
  // it to non-overlapping sets; verify before reusing in a sorted collection.
  return o1.isBefore(o2) ? -1 : 1;
}
}
/**
 * Keeps only inputs overlapping the interval of {@code max}; rejects
 * everything when {@code max} is absent.
 */
@Override
public boolean apply(T input)
{
  if (max == null) {
    return false;
  }
  return input.getInterval().overlaps(max.getInterval());
}
}
public int deletePendingSegments(String dataSource, Interval deleteInterval) { // Check the given interval overlaps the interval(minCreatedDateOfActiveTasks, MAX) final Optional<DateTime> minCreatedDateOfActiveTasks = taskStorageQueryAdapter .getActiveTaskInfo(dataSource) .stream() .map(TaskInfo::getCreatedTime) .min(Comparator.naturalOrder()); final Interval activeTaskInterval = new Interval( minCreatedDateOfActiveTasks.orElse(DateTimes.MAX), DateTimes.MAX ); Preconditions.checkArgument( !deleteInterval.overlaps(activeTaskInterval), "Cannot delete pendingSegments because there is at least one active task created at %s", activeTaskInterval.getStart() ); return indexerMetadataStorageCoordinator.deletePendingSegments(dataSource, deleteInterval); } }
if (interval.overlaps(skipFromLatest)) { overlapIntervals.add(interval); } else {
.filter(java.util.Objects::nonNull) .flatMap(sortedMap -> sortedMap.entrySet().stream()) .filter(entry -> entry.getKey().overlaps(interval)) .flatMap(entry -> entry.getValue().stream()) .collect(Collectors.toList());
/**
 * Returns the union of metric names across all segments of the datasource
 * whose interval overlaps the query interval.
 *
 * @param dataSourceName datasource to inspect
 * @param interval       optional interval string; when absent, the configured
 *                       default history window ending now is used
 */
@Deprecated
@GET
@Path("/{dataSourceName}/metrics")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Iterable<String> getDataSourceMetrics(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("interval") String interval
)
{
  final Set<DataSegment> segments = getAllSegmentsForDataSource(dataSourceName);

  // Default to [now - defaultHistory, now] when no interval was supplied.
  final Interval theInterval;
  if (interval == null || interval.isEmpty()) {
    final DateTime now = getCurrentTime();
    theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
  } else {
    theInterval = Intervals.of(interval);
  }

  final Set<String> metricNames = new HashSet<>();
  for (DataSegment segment : segments) {
    if (theInterval.overlaps(segment.getInterval())) {
      metricNames.addAll(segment.getMetrics());
    }
  }
  return metricNames;
}
/**
 * Returns the union of dimension names across all segments of the datasource
 * whose interval overlaps the query interval.
 *
 * @param dataSourceName datasource to inspect
 * @param interval       optional interval string; when absent, the configured
 *                       default history window ending now is used
 */
@Deprecated
@GET
@Path("/{dataSourceName}/dimensions")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Iterable<String> getDataSourceDimensions(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("interval") String interval
)
{
  final Set<DataSegment> segments = getAllSegmentsForDataSource(dataSourceName);

  // Default to [now - defaultHistory, now] when no interval was supplied.
  final Interval theInterval;
  if (interval == null || interval.isEmpty()) {
    final DateTime now = getCurrentTime();
    theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
  } else {
    theInterval = Intervals.of(interval);
  }

  final Set<String> dimensionNames = new HashSet<>();
  for (DataSegment segment : segments) {
    if (theInterval.overlaps(segment.getInterval())) {
      dimensionNames.addAll(segment.getDimensions());
    }
  }
  return dimensionNames;
}
private double rightCost(DataSegment dataSegment, double t0, double t1, int index) { double rightCost = 0.0; // add all right-overlapping segments int rightIndex = index; while (rightIndex < sortedSegments.size() && sortedSegments.get(rightIndex).getInterval().overlaps(dataSegment.getInterval())) { double start = convertStart(sortedSegments.get(rightIndex), interval); double end = convertEnd(sortedSegments.get(rightIndex), interval); rightCost += CostBalancerStrategy.intervalCost(t1 - t0, start - t0, end - t0); ++rightIndex; } // add right-non-overlapping segments if (rightIndex < sortedSegments.size()) { rightCost += rightSum[rightIndex] * (FastMath.exp(t0) - FastMath.exp(t1)); } return rightCost; }
@JsonCreator public ArbitraryGranularitySpec( @JsonProperty("queryGranularity") Granularity queryGranularity, @JsonProperty("rollup") Boolean rollup, @JsonProperty("intervals") List<Interval> inputIntervals ) { this.queryGranularity = queryGranularity == null ? Granularities.NONE : queryGranularity; this.rollup = rollup == null ? Boolean.TRUE : rollup; this.intervals = new TreeSet<>(Comparators.intervalsByStartThenEnd()); if (inputIntervals == null) { inputIntervals = new ArrayList<>(); } // Insert all intervals for (final Interval inputInterval : inputIntervals) { intervals.add(inputInterval); } // Ensure intervals are non-overlapping (but they may abut each other) final PeekingIterator<Interval> intervalIterator = Iterators.peekingIterator(intervals.iterator()); while (intervalIterator.hasNext()) { final Interval currentInterval = intervalIterator.next(); if (intervalIterator.hasNext()) { final Interval nextInterval = intervalIterator.peek(); if (currentInterval.overlaps(nextInterval)) { throw new IAE("Overlapping intervals: %s, %s", currentInterval, nextInterval); } } } }
} else if (currInterval.overlaps(next)) { DateTime nextEnd = next.getEnd(); DateTime currEnd = currInterval.getEnd();
private double addLeftCost(DataSegment dataSegment, double t0, double t1, int index) { double leftCost = 0.0; // add to cost all left-overlapping segments int leftIndex = index - 1; while (leftIndex >= 0 && sortedSegments.get(leftIndex).getInterval().overlaps(dataSegment.getInterval())) { double start = convertStart(sortedSegments.get(leftIndex), interval); double end = convertEnd(sortedSegments.get(leftIndex), interval); leftCost += CostBalancerStrategy.intervalCost(end - start, t0 - start, t1 - start); --leftIndex; } // add left-non-overlapping segments if (leftIndex >= 0) { leftCost += leftSum[leftIndex] * (FastMath.exp(-t1) - FastMath.exp(-t0)); } return leftCost; }
); if (lower == null || !lower.overlaps(interval)) { return false;
TimelineEntry val = entry.getValue(); if (timelineInterval.overlaps(interval)) { retVal.add( new TimelineObjectHolder<VersionType, ObjectType>( if (interval.overlaps(firstEntry.getInterval()) && interval.getStart() .isAfter(firstEntry.getInterval().getStart())) { retVal.set( if (interval.overlaps(lastEntry.getInterval()) && interval.getEnd().isBefore(lastEntry.getInterval().getEnd())) { retVal.set( retVal.size() - 1,
/**
 * Produces one cursor per granularity bucket of the requested interval,
 * clipped to the interval actually covered by the index. Returns an empty
 * sequence when the index is empty or the intervals are disjoint.
 */
@Override
public Sequence<Cursor> makeCursors(
    @Nullable final Filter filter,
    final Interval interval,
    final VirtualColumns virtualColumns,
    final Granularity gran,
    final boolean descending,
    @Nullable QueryMetrics<?> queryMetrics
)
{
  if (index.isEmpty()) {
    return Sequences.empty();
  }

  // Clip the request to the data we actually hold; disjoint means no work.
  final Interval dataInterval = new Interval(getMinTime(), gran.bucketEnd(getMaxTime()));
  if (!interval.overlaps(dataInterval)) {
    return Sequences.empty();
  }
  final Interval actualInterval = interval.overlap(dataInterval);

  Iterable<Interval> buckets = gran.getIterable(actualInterval);
  if (descending) {
    buckets = Lists.reverse(ImmutableList.copyOf(buckets));
  }

  return Sequences
      .simple(buckets)
      .map(i -> new IncrementalIndexCursor(virtualColumns, descending, filter, i, actualInterval, gran));
}
if (overlaps(interval) == false) { return null;
/**
 * Loads the pending segment identifiers of {@code dataSource} whose intervals
 * overlap {@code interval}, using the given DB handle.
 *
 * <p>The SQL predicate ({@code start <= :end AND end >= :start}) is a coarse
 * pre-filter; the precise {@code Interval.overlaps} check is applied in Java.
 *
 * @throws IOException if a payload cannot be deserialized
 */
private List<SegmentIdWithShardSpec> getPendingSegmentsForIntervalWithHandle(
    final Handle handle,
    final String dataSource,
    final Interval interval
) throws IOException
{
  final List<SegmentIdWithShardSpec> identifiers = new ArrayList<>();
  final ResultIterator<byte[]> dbSegments = handle.createQuery(
      StringUtils.format(
          "SELECT payload FROM %1$s WHERE dataSource = :dataSource AND start <= :end and %2$send%2$s >= :start",
          dbTables.getPendingSegmentsTable(),
          connector.getQuoteString()
      )
  )
      .bind("dataSource", dataSource)
      .bind("start", interval.getStart().toString())
      .bind("end", interval.getEnd().toString())
      .map(ByteArrayMapper.FIRST)
      .iterator();
  // Fix: close the result iterator even when deserialization throws —
  // previously an exception in the loop leaked the underlying DB resources.
  try {
    while (dbSegments.hasNext()) {
      final byte[] payload = dbSegments.next();
      final SegmentIdWithShardSpec identifier = jsonMapper.readValue(payload, SegmentIdWithShardSpec.class);
      if (interval.overlaps(identifier.getInterval())) {
        identifiers.add(identifier);
      }
    }
  } finally {
    dbSegments.close();
  }
  return identifiers;
}