/**
 * Comparator ordering {@link Interval}s by start instant, then end instant.
 *
 * @deprecated use {@link #intervalsByStartThenEnd()} directly; this method is a
 *             thin alias kept for backward compatibility.
 */
@Deprecated
public static Comparator<Interval> intervals()
{
  return intervalsByStartThenEnd();
}
// Natural ordering with nulls sorted first.
// NOTE(review): raw Comparator return type — presumably required to match the
// overridden interface signature; confirm before parameterizing.
@Override
public Comparator getComparator()
{
  return Comparators.naturalNullsFirst();
}
.filter(identifier -> SegmentId.tryParse(dataSource, identifier) != null) .map(SegmentId.makeIntervalExtractor(dataSource)) .sorted(query.isDescending() ? Comparators.intervalsByEndThenStart() : Comparators.intervalsByStartThenEnd()) .forEach(interval -> { if (query.isDescending()) {
/**
 * Returns a JSON map from each segment-covered interval to per-datasource
 * segment properties, restricted to the datasources the caller is authorized
 * to see. Intervals are ordered newest-first (reverse start-then-end).
 *
 * @param req the HTTP request, used for authorization of datasources
 * @return 200 response whose entity is interval -> datasource name -> properties
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getIntervals(@Context final HttpServletRequest req)
{
  // Reverse chronological ordering: most recent intervals first in the response.
  final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
  final Set<ImmutableDruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
      req,
      serverInventoryView,
      authorizerMapper
  );

  final Map<Interval, Map<String, Map<String, Object>>> retVal = Maps.newTreeMap(comparator);
  for (ImmutableDruidDataSource dataSource : datasources) {
    for (DataSegment dataSegment : dataSource.getSegments()) {
      // Ensure a bucket exists for this interval. The original get/null-check/put
      // left an unused local behind; computeIfAbsent expresses the intent directly.
      retVal.computeIfAbsent(dataSegment.getInterval(), interval -> Maps.newHashMap());
      setProperties(retVal, dataSource, dataSegment);
    }
  }

  return Response.ok(retVal).build();
}
public class DruidCoordinator public static Comparator<DataSegment> SEGMENT_COMPARATOR = Ordering.from(Comparators.intervalsByEndThenStart()) .onResultOf(DataSegment::getInterval) .compound(Ordering.<DataSegment>natural())
// Treats every pair of values as equal, so downstream merge logic considers
// all results mergeable.
// NOTE(review): raw Comparator return type is kept to match the overridden signature.
@Override
public Comparator getComparator()
{
  return Comparators.alwaysEqual();
}
@Override public Comparator getComparator( final List<AggregatorFactory> aggregatorSpecs, final List<PostAggregator> postAggregatorSpecs ) { return Comparators.inverse( new Comparator() { @Override public int compare(Object o1, Object o2) { // nulls last if (o1 == null) { return 1; } if (o2 == null) { return -1; } return delegate.getComparator(aggregatorSpecs, postAggregatorSpecs).compare(o1, o2); } } ); }
// Reverse start-then-end ordering: the most recent intervals sort first.
final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
@Test public void testIntervalsByEndThenStart() Comparator<Interval> comp = Comparators.intervalsByEndThenStart();
private Ordering<SegmentAnalysis> makeOrdering(SegmentMetadataQuery query) { if (query.isMerge()) { // Merge everything always return Comparators.alwaysEqual(); } return query.getResultOrdering(); // No two elements should be equal, so it should never merge }
// Builds the bounded priority queue used for paging; a non-positive threshold
// means "effectively unbounded" (capped at Integer.MAX_VALUE).
@Override
protected Queue<EventHolder> instantiatePQueue()
{
  final int threshold = pagingSpec.getThreshold();
  final int maximumSize = threshold > 0 ? threshold : Integer.MAX_VALUE;
  return MinMaxPriorityQueue
      .orderedBy(descending ? Comparators.inverse(comparator) : comparator)
      .maximumSize(maximumSize)
      .create();
}
// Delegates readiness to IndexTask.isReady with the single interval this
// task's segment provider covers, wrapped in the sorted set the API expects.
@Override
public boolean isReady(TaskActionClient taskActionClient) throws Exception
{
  final SortedSet<Interval> lockIntervals = new TreeSet<>(Comparators.intervalsByStartThenEnd());
  lockIntervals.add(segmentProvider.interval);
  return IndexTask.isReady(taskActionClient, lockIntervals);
}
/**
 * Merges the given iterables into one sorted, de-duplicated list using
 * natural ordering with nulls first.
 */
static <T extends Comparable<? super T>> ArrayList<T> mergeIndexed(List<Iterable<T>> indexedLists)
{
  final Set<T> merged = new TreeSet<>(Comparators.naturalNullsFirst());
  for (Iterable<T> indexedList : indexedLists) {
    indexedList.forEach(merged::add);
  }
  return Lists.newArrayList(merged);
}
final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd()); if (full != null) { final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator); final Set<String> retVal = Sets.newTreeSet(Comparators.inverse(String.CASE_INSENSITIVE_ORDER)); for (DataSegment dataSegment : dataSource.getSegments()) { if (theInterval.contains(dataSegment.getInterval())) {
); Collections.sort( intervals, query.isDescending() ? Comparators.intervalsByEndThenStart() : Comparators.intervalsByStartThenEnd() );
public class DruidCoordinator public static Comparator<DataSegment> SEGMENT_COMPARATOR = Ordering.from(Comparators.intervalsByEndThenStart()) .onResultOf( (Function<DataSegment, Interval>) segment -> segment
// Collapses the whole sequence into a single value: the always-equal
// comparator marks every adjacent pair as mergeable, and "plus" combines them.
private Sequence<Integer> combine(Sequence<Integer> sequence)
{
  final Comparator<Integer> mergeAll = Comparators.alwaysEqual();
  return CombiningSequence.create(sequence, mergeAll, plus);
}
/**
 * Collapses runs of abutting intervals into single umbrella intervals.
 * Input is expected to be sorted; the result is sorted by start, then end.
 *
 * @param intervals sorted intervals to compact
 * @return sorted set of umbrella intervals, one per run of abutting inputs
 */
public static SortedSet<Interval> computeCompactIntervals(SortedSet<Interval> intervals)
{
  final SortedSet<Interval> compactIntervals = new TreeSet<>(Comparators.intervalsByStartThenEnd());
  final List<Interval> run = new ArrayList<>();
  for (Interval interval : intervals) {
    // A gap between the current run and this interval ends the run: flush it.
    if (!run.isEmpty() && !run.get(run.size() - 1).abuts(interval)) {
      compactIntervals.add(JodaUtils.umbrellaInterval(run));
      run.clear();
    }
    run.add(interval);
  }
  if (!run.isEmpty()) {
    compactIntervals.add(JodaUtils.umbrellaInterval(run));
  }
  return compactIntervals;
}
}
@Override
public int compare(String o1, String o2)
{
  // Natural String ordering with nulls sorted first.
  // NOTE(review): a fresh comparator is built on every call — presumably cheap,
  // but it could be hoisted to a constant; confirm call frequency.
  return Comparators.<String>naturalNullsFirst().compare(o1, o2);
}
};
// presumably lhs holds interval -> derivative segment version — TODO confirm
Map<Interval, String> derivativeVersion = derivativeSegmentsSnapshot.lhs;
// Reverse start-then-end ordering so the newest intervals are processed first.
SortedMap<Interval, String> sortedToBuildInterval = Maps.newTreeMap(
    Comparators.inverse(Comparators.intervalsByStartThenEnd())
);