@Override
public int compare(Interval lhs, Interval rhs)
{
  if (lhs.getChronology().equals(rhs.getChronology())) {
    // Same chronology: compare the underlying millis directly, start first, then end.
    int compare = Long.compare(lhs.getStartMillis(), rhs.getStartMillis());
    if (compare == 0) {
      return Long.compare(lhs.getEndMillis(), rhs.getEndMillis());
    }
    return compare;
  }
  // Different chronologies: fall back to the DateTime comparator.
  int retVal = dateTimeComp.compare(lhs.getStart(), rhs.getStart());
  if (retVal == 0) {
    retVal = dateTimeComp.compare(lhs.getEnd(), rhs.getEnd());
  }
  return retVal;
}
};
/**
 * Removes {@code smallInterval} from {@code largeInterval}. The end of both intervals should be the same.
 *
 * @return an interval of {@code largeInterval} - {@code smallInterval}.
 */
static Interval removeIntervalFromEnd(Interval largeInterval, Interval smallInterval)
{
  Preconditions.checkArgument(
      largeInterval.getEnd().equals(smallInterval.getEnd()),
      "end should be same. largeInterval[%s] smallInterval[%s]",
      largeInterval,
      smallInterval
  );
  return new Interval(largeInterval.getStart(), smallInterval.getStart());
}
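A minimal usage sketch of the method above, assuming Druid's Intervals.of helper is imported and the method is visible from the call site; the dates are illustrative only.

  // Hypothetical illustration: both intervals end at 2020-01-05, so the result
  // is everything in largeInterval before smallInterval starts.
  Interval largeInterval = Intervals.of("2020-01-01/2020-01-05");
  Interval smallInterval = Intervals.of("2020-01-03/2020-01-05");

  Interval remainder = removeIntervalFromEnd(largeInterval, smallInterval);
  // remainder covers 2020-01-01/2020-01-03.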
@Override
public int compare(Interval lhs, Interval rhs)
{
  if (lhs.getChronology().equals(rhs.getChronology())) {
    // Same chronology: compare the underlying millis directly, end first, then start.
    int compare = Long.compare(lhs.getEndMillis(), rhs.getEndMillis());
    if (compare == 0) {
      return Long.compare(lhs.getStartMillis(), rhs.getStartMillis());
    }
    return compare;
  }
  // Different chronologies: fall back to the DateTime comparator.
  int retVal = dateTimeComp.compare(lhs.getEnd(), rhs.getEnd());
  if (retVal == 0) {
    retVal = dateTimeComp.compare(lhs.getStart(), rhs.getStart());
  }
  return retVal;
}
};
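A short sketch covering both comparators in these results, assuming Druid's Intervals.of helper and that the two anonymous comparators are exposed as Comparator<Interval> constants; START_THEN_END and END_THEN_START are placeholder names, not the real field names.

  // Hypothetical illustration: START_THEN_END orders by start and breaks ties on end;
  // END_THEN_START orders by end and breaks ties on start.
  List<Interval> intervals = new ArrayList<>(Arrays.asList(
      Intervals.of("2020-01-02/2020-01-04"),
      Intervals.of("2020-01-01/2020-01-05"),
      Intervals.of("2020-01-01/2020-01-03")
  ));

  intervals.sort(START_THEN_END);
  // 2020-01-01/2020-01-03, 2020-01-01/2020-01-05, 2020-01-02/2020-01-04

  intervals.sort(END_THEN_START);
  // 2020-01-01/2020-01-03, 2020-01-02/2020-01-04, 2020-01-01/2020-01-05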
Bucket(Interval interval, ArrayList<DataSegment> sortedSegments, double[] leftSum, double[] rightSum)
{
  this.interval = Preconditions.checkNotNull(interval, "interval");
  this.sortedSegments = Preconditions.checkNotNull(sortedSegments, "sortedSegments");
  this.leftSum = Preconditions.checkNotNull(leftSum, "leftSum");
  this.rightSum = Preconditions.checkNotNull(rightSum, "rightSum");
  // Every segment must have a matching entry in both sum arrays, and the segments must already be sorted.
  Preconditions.checkArgument(sortedSegments.size() == leftSum.length && sortedSegments.size() == rightSum.length);
  Preconditions.checkArgument(SEGMENT_ORDERING.isOrdered(sortedSegments));
  // The interval used for calculations is the bucket interval widened by LIFE_THRESHOLD on both sides.
  this.calculationInterval = new Interval(
      interval.getStart().minus(LIFE_THRESHOLD),
      interval.getEnd().plus(LIFE_THRESHOLD)
  );
}
private static Iterable<Interval> splitInterval(Interval interval, Period period)
{
  if (interval.getEndMillis() == interval.getStartMillis()) {
    // Zero-length intervals cannot be split any further.
    return Collections.singletonList(interval);
  }

  List<Interval> intervals = new ArrayList<>();
  Iterator<Interval> timestamps = new PeriodGranularity(period, null, null).getIterable(interval).iterator();
  // The granularity buckets may begin before the interval does; clamp the first start to the interval start.
  DateTime start = DateTimes.max(timestamps.next().getStart(), interval.getStart());
  while (timestamps.hasNext()) {
    DateTime end = timestamps.next().getStart();
    intervals.add(new Interval(start, end));
    start = end;
  }
  // Add the trailing partial interval, if any.
  if (start.compareTo(interval.getEnd()) < 0) {
    intervals.add(new Interval(start, interval.getEnd()));
  }
  return intervals;
}
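For orientation, a sketch of the expected output, assuming the private method above were reachable from the call site and that Intervals.of and Joda's Period are imported; the values are illustrative only.

  // Hypothetical illustration: splitting three days by a one-day period yields one interval per day.
  Interval interval = Intervals.of("2020-01-01/2020-01-04");
  Iterable<Interval> parts = splitInterval(interval, Period.days(1));
  // parts: 2020-01-01/2020-01-02, 2020-01-02/2020-01-03, 2020-01-03/2020-01-04.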
private QueueEntry(List<DataSegment> segments)
{
  Preconditions.checkArgument(segments != null && !segments.isEmpty());
  Collections.sort(segments);
  // After sorting, the entry's interval spans from the first segment's start to the last segment's end.
  this.interval = new Interval(
      segments.get(0).getInterval().getStart(),
      segments.get(segments.size() - 1).getInterval().getEnd()
  );
  this.segments = segments;
}
static String getOrMakeId(String id, final String typeName, String dataSource, @Nullable Interval interval)
{
  if (id != null) {
    return id;
  }

  // No explicit id: build one from the type name, datasource, optional interval endpoints, and the current time.
  final List<Object> objects = new ArrayList<>();
  objects.add(typeName);
  objects.add(dataSource);
  if (interval != null) {
    objects.add(interval.getStart());
    objects.add(interval.getEnd());
  }
  objects.add(DateTimes.nowUtc().toString());

  return joinId(objects);
}
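A sketch of how this might be called; the separator used by joinId is not shown in this snippet, so the generated id below assumes an underscore-joined form and is illustrative only.

  // Hypothetical illustration: a null id is replaced by a generated one.
  String id = getOrMakeId(null, "index_parallel", "wikipedia", Intervals.of("2020-01-01/2020-01-02"));
  // Assuming joinId concatenates its parts with '_', the result resembles:
  // index_parallel_wikipedia_2020-01-01T00:00:00.000Z_2020-01-02T00:00:00.000Z_<current UTC timestamp>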
public static Interval umbrellaInterval(Iterable<Interval> intervals)
{
  ArrayList<DateTime> startDates = new ArrayList<>();
  ArrayList<DateTime> endDates = new ArrayList<>();

  for (Interval interval : intervals) {
    startDates.add(interval.getStart());
    endDates.add(interval.getEnd());
  }

  DateTime minStart = minDateTime(startDates.toArray(new DateTime[0]));
  DateTime maxEnd = maxDateTime(endDates.toArray(new DateTime[0]));

  if (minStart == null || maxEnd == null) {
    throw new IllegalArgumentException("Empty list of intervals");
  }
  return new Interval(minStart, maxEnd);
}
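A minimal usage sketch, assuming the method is called through the utility class it lives on (JodaUtils in Druid) and that Intervals.of is available; the dates are illustrative only.

  // Hypothetical illustration: the umbrella interval spans from the earliest start
  // to the latest end, covering any gap between the inputs.
  Interval umbrella = JodaUtils.umbrellaInterval(Arrays.asList(
      Intervals.of("2020-01-05/2020-01-06"),
      Intervals.of("2020-01-01/2020-01-02")
  ));
  // umbrella covers 2020-01-01/2020-01-06.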
private static Interval computeMergedInterval(final List<DataSegment> segments)
{
  Preconditions.checkArgument(segments.size() > 0, "segments.size() > 0");

  DateTime start = null;
  DateTime end = null;

  for (final DataSegment segment : segments) {
    if (start == null || segment.getInterval().getStart().isBefore(start)) {
      start = segment.getInterval().getStart();
    }
    if (end == null || segment.getInterval().getEnd().isAfter(end)) {
      end = segment.getInterval().getEnd();
    }
  }

  return new Interval(start, end);
}
DateTime remainingStart = totalInterval.getStart();
DateTime remainingEnd = totalInterval.getEnd();

for (Interval skipInterval : skipIntervals) {
  if (skipInterval.getStart().isBefore(remainingStart) && skipInterval.getEnd().isAfter(remainingStart)) {
    // The skip interval overlaps the left edge of the remaining range: move the start forward.
    remainingStart = skipInterval.getEnd();
  } else if (skipInterval.getStart().isBefore(remainingEnd) && skipInterval.getEnd().isAfter(remainingEnd)) {
    // The skip interval overlaps the right edge of the remaining range: move the end backward.
    remainingEnd = skipInterval.getStart();
  } else if (!remainingStart.isAfter(skipInterval.getStart()) && !remainingEnd.isBefore(skipInterval.getEnd())) {
    // The skip interval sits entirely inside the remaining range: emit the part before it
    // and continue from its end.
    filteredIntervals.add(new Interval(remainingStart, skipInterval.getStart()));
    remainingStart = skipInterval.getEnd();
  } else {
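A hedged worked example of the filtering above; the interval values are made up, and since the snippet is cut off before its final branch, this only exercises the three branches shown.

  // Illustrative values only:
  //   totalInterval = 2020-01-01/2020-01-10
  //   skipIntervals = [2019-12-31/2020-01-02, 2020-01-04/2020-01-05]
  // The first skip interval overlaps the left edge, so remainingStart moves to 2020-01-02.
  // The second skip interval lies fully inside, so 2020-01-02/2020-01-04 is added to
  // filteredIntervals and remainingStart moves to 2020-01-05.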
/**
 * Due to https://issues.apache.org/jira/browse/HDFS-13 ":" are not allowed in
 * path names. So we format paths differently for HDFS.
 */
@Override
public String getStorageDir(DataSegment segment, boolean useUniquePath)
{
  // This is only called by HdfsDataSegmentPusher.push(), which will always set useUniquePath to false since any
  // 'uniqueness' will be applied not to the directory but to the filename along with the shard number. This is done
  // to avoid performance issues due to excessive HDFS directories. Hence useUniquePath is ignored here and we
  // expect it to be false.
  Preconditions.checkArgument(
      !useUniquePath,
      "useUniquePath must be false for HdfsDataSegmentPusher.getStorageDir()"
  );

  return JOINER.join(
      segment.getDataSource(),
      StringUtils.format(
          "%s_%s",
          segment.getInterval().getStart().toString(ISODateTimeFormat.basicDateTime()),
          segment.getInterval().getEnd().toString(ISODateTimeFormat.basicDateTime())
      ),
      segment.getVersion().replace(':', '_')
  );
}
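For orientation, a sketch of the kind of path this produces; the snippet does not show how JOINER is configured, so the example assumes it joins its parts with '/', and the datasource, interval, and version values are illustrative only.

  // Hypothetical illustration: for a segment of datasource "wikipedia" covering
  // 2020-01-01/2020-01-02 with version "2020-01-03T12:34:56.000Z", the storage dir
  // would look roughly like:
  //
  //   wikipedia/20200101T000000.000+0000_20200102T000000.000+0000/2020-01-03T12_34_56.000Z
  //
  // basicDateTime() omits ':' from the interval bounds and replace(':', '_') strips it
  // from the version, per the HDFS restriction noted above.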
@Override
public void publishSegment(final DataSegment segment) throws IOException
{
  publishSegment(
      segment.getId().toString(),
      segment.getDataSource(),
      DateTimes.nowUtc().toString(),
      segment.getInterval().getStart().toString(),
      segment.getInterval().getEnd().toString(),
      !(segment.getShardSpec() instanceof NoneShardSpec),
      segment.getVersion(),
      true,
      jsonMapper.writeValueAsBytes(segment)
  );
}
    timelineObjectInterval.getStart().getMillis() >=
    timelineObjects.get(timelineObjects.size() - 1).lhs.getInterval().getEnd().getMillis(),
    "timeline objects must be provided in order"
);
timelineObjects.add(Pair.of(timelineObject, underlyingInterval));
} else {
  // Extend the merged underlying interval so it covers the new underlying interval on both ends.
  final DateTime start = underlyingInterval.getStart().isBefore(mergedUnderlyingInterval.getStart())
                         ? underlyingInterval.getStart()
                         : mergedUnderlyingInterval.getStart();

  final DateTime end = underlyingInterval.getEnd().isAfter(mergedUnderlyingInterval.getEnd())
                       ? underlyingInterval.getEnd()
                       : mergedUnderlyingInterval.getEnd();