// Truncates the row sequence to at most 'limit' rows.
// NOTE(review): 'limit' is captured from the enclosing scope, which is not visible
// in this chunk — presumably validated non-negative by the owner; confirm at the call site.
@Override
public Sequence<Row> apply(Sequence<Row> input)
{
  return input.limit(limit);
}
}
/**
 * Runs the union of the underlying rels lazily: {@code Sequences.concat} only starts each
 * sub-query as the previous one is consumed, so the queries do not all run at once.
 *
 * Semantics of the captured {@code limit} field as exercised here:
 * 0 means return nothing; positive means truncate the concatenated results;
 * negative means unlimited.
 */
@Override
@SuppressWarnings("unchecked")
public Sequence<Object[]> runQuery()
{
  // Lazy: run each query in sequence, not all at once.
  if (limit == 0) {
    return Sequences.empty();
  } else {
    // Raw Sequence is intentional here; the unchecked cast is confined to this method
    // via the @SuppressWarnings above.
    final Sequence baseSequence = Sequences.concat(
        FluentIterable.from(rels).transform(rel -> ((DruidRel) rel).runQuery())
    );
    return limit > 0 ? baseSequence.limit(limit) : baseSequence;
  }
}
/**
 * Transitions this statement from PREPARED to RUNNING: executes the SQL lifecycle,
 * applies {@code maxRowCount} when it fits in an int, and opens a yielder over the results.
 *
 * All state transitions are serialized under {@code lock}. On any failure the throwable is
 * recorded, the statement is closed (close-time failures are attached as suppressed), and
 * the original throwable is rethrown.
 *
 * @return this statement, now in RUNNING state
 */
public DruidStatement execute()
{
  synchronized (lock) {
    ensure(State.PREPARED);
    try {
      // Submit to the dedicated executor so yielder open and close happen on the same
      // thread; get() blocks until the base sequence is available.
      final Sequence<Object[]> baseSequence = yielderOpenCloseExecutor.submit(
          sqlLifecycle::execute
      ).get();

      // We can't apply limits greater than Integer.MAX_VALUE, ignore them.
      final Sequence<Object[]> retSequence =
          maxRowCount >= 0 && maxRowCount <= Integer.MAX_VALUE
          ? baseSequence.limit((int) maxRowCount)
          : baseSequence;

      yielder = Yielders.each(retSequence);
      state = State.RUNNING;
    } catch (Throwable t) {
      this.throwable = t;
      try {
        close();
      } catch (Throwable t1) {
        t.addSuppressed(t1);
      }
      throw Throwables.propagate(t);
    }
    return this;
  }
}
/**
 * Finds the boundary timestamp of the rows matching the query's filter and intervals:
 * the earliest when scanning ascending, the latest when {@code descending} is true.
 *
 * @return the boundary timestamp, or null when no rows match
 */
private DateTime getTimeBoundary(StorageAdapter adapter, TimeBoundaryQuery legacyQuery, boolean descending)
{
  final Sequence<Result<DateTime>> results = QueryRunnerHelper.makeCursorBasedQuery(
      adapter,
      legacyQuery.getQuerySegmentSpec().getIntervals(),
      Filters.toFilter(legacyQuery.getFilter()),
      VirtualColumns.EMPTY,
      descending,
      Granularities.ALL,
      this.skipToFirstMatching
  );

  // Only the first result is relevant; materialize at most one element.
  final List<Result<DateTime>> firstOnly = results.limit(1).toList();
  return firstOnly.isEmpty() ? null : firstOnly.get(0).getValue();
}
@Override public Sequence<Result<TimeseriesResultValue>> doRun( QueryRunner<Result<TimeseriesResultValue>> baseRunner, QueryPlus<Result<TimeseriesResultValue>> queryPlus, Map<String, Object> context ) { int limit = ((TimeseriesQuery) queryPlus.getQuery()).getLimit(); Sequence<Result<TimeseriesResultValue>> result = super.doRun( baseRunner, // Don't do post aggs until makePostComputeManipulatorFn() is called queryPlus.withQuery(((TimeseriesQuery) queryPlus.getQuery()).withPostAggregatorSpecs(ImmutableList.of())), context ); if (limit < Integer.MAX_VALUE) { return result.limit(limit); } return result; }
// Limiting a 10-element sequence to 2 must behave exactly like taking the first 2 elements.
@Test
public void testTwo() throws Exception
{
  final int threshold = 2;
  final List<Integer> nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

  SequenceTestHelper.testAll(
      Sequences.simple(nums).limit(threshold),
      Lists.newArrayList(nums.subList(0, threshold))
  );
}
// Limiting a 10-element sequence to 1 must behave exactly like taking the first element only.
@Test
public void testOne() throws Exception
{
  final int threshold = 1;
  final List<Integer> nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

  SequenceTestHelper.testAll(
      Sequences.simple(nums).limit(threshold),
      Lists.newArrayList(nums.subList(0, threshold))
  );
}
// Limiting a 10-element sequence to 5 must behave exactly like taking the first 5 elements.
@Test
public void testSanityAccumulate() throws Exception
{
  final int threshold = 5;
  final List<Integer> nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

  SequenceTestHelper.testAll(
      Sequences.simple(nums).limit(threshold),
      Lists.newArrayList(nums.subList(0, threshold))
  );
}
/**
 * Computes per-segment timeseries results: applies the query's dimension filter (converted
 * to CNF when the query context requests it) and truncates to the query's row limit.
 *
 * @throws SegmentMissingException when the storage adapter is null (segment unmapped)
 */
public Sequence<Result<TimeseriesResultValue>> process(final TimeseriesQuery query, final StorageAdapter adapter)
{
  if (adapter == null) {
    throw new SegmentMissingException(
        "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
    );
  }

  final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
  final Sequence<Result<TimeseriesResultValue>> results = generateTimeseriesResult(adapter, query, filter);

  // Integer.MAX_VALUE acts as the "no limit" sentinel.
  final int rowLimit = query.getLimit();
  return rowLimit == Integer.MAX_VALUE ? results : results.limit(rowLimit);
}
/**
 * JMH benchmark: scans every row of the incremental index through four string dimension
 * selectors, consuming the first value of each row via the Blackhole so the JIT cannot
 * eliminate the reads.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void read(Blackhole blackhole)
{
  IncrementalIndexStorageAdapter sa = new IncrementalIndexStorageAdapter(incIndex);
  Sequence<Cursor> cursors = makeCursors(sa, null);
  // Takes the first cursor only — assumes makeCursors yields at least one (TODO confirm).
  Cursor cursor = cursors.limit(1).toList().get(0);

  List<DimensionSelector> selectors = new ArrayList<>();
  selectors.add(makeDimensionSelector(cursor, "dimSequential"));
  selectors.add(makeDimensionSelector(cursor, "dimZipf"));
  selectors.add(makeDimensionSelector(cursor, "dimUniform"));
  selectors.add(makeDimensionSelector(cursor, "dimSequentialHalfNull"));

  cursor.reset();
  while (!cursor.isDone()) {
    for (DimensionSelector selector : selectors) {
      IndexedInts row = selector.getRow();
      // Only the first value of each (possibly multi-valued) row is consumed.
      blackhole.consume(selector.lookupName(row.get(0)));
    }
    cursor.advance();
  }
}
@Test public void testConsistentCloseOrder() { final AtomicInteger closed1 = new AtomicInteger(); final AtomicInteger closed2 = new AtomicInteger(); final AtomicInteger counter = new AtomicInteger(); Sequence<Integer> sequence = Sequences .simple(Arrays.asList(1, 2, 3)) .withBaggage(() -> closed1.set(counter.incrementAndGet())) .withBaggage(() -> closed2.set(counter.incrementAndGet())); // Run sequence via accumulate sequence.toList(); Assert.assertEquals(1, closed1.get()); Assert.assertEquals(2, closed2.get()); // Ensure sequence runs via Yielder, because LimitedSequence extends YieldingSequenceBase Sequence<Integer> yieldingSequence = sequence.limit(1); yieldingSequence.toList(); Assert.assertEquals(3, closed1.get()); Assert.assertEquals(4, closed2.get()); } }
/**
 * Verifies that limit(5) makes accumulation stop after exactly 5 elements
 * (0+1+2+3+4 = 10), and that re-running the sequence re-applies the transform —
 * the side-effect counter doubles to 20 after the second pass, proving no caching.
 */
@Test
public void testNoSideEffects()
{
  final List<Integer> nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
  // Tracks the total of all elements the transform has actually seen, across runs.
  final AtomicLong accumulated = new AtomicLong(0);
  final Sequence<Integer> seq = Sequences.simple(
      Iterables.transform(
          nums,
          input -> {
            accumulated.addAndGet(input);
            return input;
          }
      )
  ).limit(5);

  Assert.assertEquals(10, seq.accumulate(0, new IntAdditionAccumulator()).intValue());
  Assert.assertEquals(10, accumulated.get());

  // Second pass: same result, but the transform's side effect runs again.
  Assert.assertEquals(10, seq.accumulate(0, new IntAdditionAccumulator()).intValue());
  Assert.assertEquals(20, accumulated.get());
}
/**
 * Effects attached via withEffect must fire in attachment order, and that order must be
 * the same whether the sequence runs via accumulate or via a Yielder.
 */
@Test
public void testConsistentEffectApplicationOrder()
{
  final AtomicInteger effect1 = new AtomicInteger();
  final AtomicInteger effect2 = new AtomicInteger();
  final AtomicInteger counter = new AtomicInteger();
  Sequence<Integer> sequence = Sequences
      .simple(Arrays.asList(1, 2, 3))
      .withEffect(
          () -> effect1.set(counter.incrementAndGet()),
          Execs.directExecutor()
      )
      .withEffect(
          () -> effect2.set(counter.incrementAndGet()),
          Execs.directExecutor()
      );

  // Run sequence via accumulate
  sequence.toList();
  Assert.assertEquals(1, effect1.get());
  Assert.assertEquals(2, effect2.get());

  // Ensure sequence runs via Yielder, because LimitedSequence extends YieldingSequenceBase which
  // implements accumulate() via yielder().
  // "Limiting" a sequence of 3 elements with 4 to let effects be executed. If e. g. limit with 1 or 2, effects are
  // not executed.
  Sequence<Integer> yieldingSequence = sequence.limit(4);
  yieldingSequence.toList();
  Assert.assertEquals(3, effect1.get());
  Assert.assertEquals(4, effect2.get());
}
Cursor cursor = cursors.limit(1).toList().get(0);
); Cursor cursor = cursorSequence.limit(1).toList().get(0); DimensionSelector dimSelector;
// Truncates the row sequence to at most 'limit' rows.
// NOTE(review): 'limit' is captured from the enclosing scope, which is not visible
// in this chunk — presumably validated non-negative by the owner; confirm at the call site.
@Override
public Sequence<Row> apply(Sequence<Row> input)
{
  return input.limit(limit);
}
}
/**
 * Runs the union of the underlying rels lazily: {@code Sequences.concat} only starts each
 * sub-query as the previous one is consumed, so the queries do not all run at once.
 *
 * Semantics of the captured {@code limit} field as exercised here:
 * 0 means return nothing; positive means truncate the concatenated results;
 * negative means unlimited.
 */
@Override
@SuppressWarnings("unchecked")
public Sequence<Object[]> runQuery()
{
  // Lazy: run each query in sequence, not all at once.
  if (limit == 0) {
    return Sequences.empty();
  } else {
    // Raw Sequence is intentional here; the unchecked cast is confined to this method
    // via the @SuppressWarnings above.
    final Sequence baseSequence = Sequences.concat(
        FluentIterable.from(rels).transform(rel -> ((DruidRel) rel).runQuery())
    );
    return limit > 0 ? baseSequence.limit(limit) : baseSequence;
  }
}
/**
 * Finds the boundary timestamp of the rows matching the query's filter and intervals:
 * the earliest when scanning ascending, the latest when {@code descending} is true.
 *
 * @return the boundary timestamp, or null when no rows match
 */
private DateTime getTimeBoundary(StorageAdapter adapter, TimeBoundaryQuery legacyQuery, boolean descending)
{
  final Sequence<Result<DateTime>> results = QueryRunnerHelper.makeCursorBasedQuery(
      adapter,
      legacyQuery.getQuerySegmentSpec().getIntervals(),
      Filters.toFilter(legacyQuery.getFilter()),
      VirtualColumns.EMPTY,
      descending,
      Granularities.ALL,
      this.skipToFirstMatching
  );

  // Only the first result is relevant; materialize at most one element.
  final List<Result<DateTime>> firstOnly = results.limit(1).toList();
  return firstOnly.isEmpty() ? null : firstOnly.get(0).getValue();
}
@Override public Sequence<Result<TimeseriesResultValue>> doRun( QueryRunner<Result<TimeseriesResultValue>> baseRunner, QueryPlus<Result<TimeseriesResultValue>> queryPlus, Map<String, Object> context ) { int limit = ((TimeseriesQuery) queryPlus.getQuery()).getLimit(); Sequence<Result<TimeseriesResultValue>> result = super.doRun( baseRunner, // Don't do post aggs until makePostComputeManipulatorFn() is called queryPlus.withQuery(((TimeseriesQuery) queryPlus.getQuery()).withPostAggregatorSpecs(ImmutableList.of())), context ); if (limit < Integer.MAX_VALUE) { return result.limit(limit); } return result; }
/**
 * Computes per-segment timeseries results: applies the query's dimension filter (converted
 * to CNF when the query context requests it) and truncates to the query's row limit.
 *
 * @throws SegmentMissingException when the storage adapter is null (segment unmapped)
 */
public Sequence<Result<TimeseriesResultValue>> process(final TimeseriesQuery query, final StorageAdapter adapter)
{
  if (adapter == null) {
    throw new SegmentMissingException(
        "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
    );
  }

  final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
  final Sequence<Result<TimeseriesResultValue>> results = generateTimeseriesResult(adapter, query, filter);

  // Integer.MAX_VALUE acts as the "no limit" sentinel.
  final int rowLimit = query.getLimit();
  return rowLimit == Integer.MAX_VALUE ? results : results.limit(rowLimit);
}