@Override
public void advance()
{
  cursorOffset.increment();
  // Must call BaseQuery.checkInterrupted() after cursorOffset.increment(), not before: unlike all other
  // subclasses of Offset, FilteredOffset.increment() is a potentially long-running (not "instant")
  // operation, and it returns early on interruption, leaving itself in an illegal state. We should not
  // let aggregators, etc. access this illegal state; instead, throw a QueryInterruptedException by
  // calling BaseQuery.checkInterrupted() here.
  BaseQuery.checkInterrupted();
}
@Override
protected long scanAndAggregate(
    final PooledTopNParams params,
    final int[] positions,
    final BufferAggregator[] theAggregators
)
{
  // Try each specialized implementation in order; a non-negative row count means it handled the scan.
  long processedRows = -1;
  for (ScanAndAggregate candidate : specializedScanAndAggregateImplementations) {
    processedRows = candidate.scanAndAggregate(params, positions, theAggregators);
    if (processedRows >= 0) {
      break;
    }
  }
  // No specialized implementation applied; fall back to the generic scan.
  if (processedRows < 0) {
    processedRows = scanAndAggregateDefault(params, positions, theAggregators);
  }
  // The scan may have been long-running; surface any interruption before returning.
  BaseQuery.checkInterrupted();
  return processedRows;
}
private <T> File spill(Iterator<T> iterator) throws IOException
{
  // Stream the iterator's elements to an LZ4-compressed, JSON-serialized temporary file.
  try (
      final LimitedTemporaryStorage.LimitedOutputStream out = temporaryStorage.createFile();
      final LZ4BlockOutputStream lz4Out = new LZ4BlockOutputStream(out);
      final JsonGenerator generator = spillMapper.getFactory().createGenerator(lz4Out)
  ) {
    while (iterator.hasNext()) {
      // Spilling can take a while; honor query cancellation between elements.
      BaseQuery.checkInterrupted();
      generator.writeObject(iterator.next());
    }
    return out.getFile();
  }
}
private void incrementIfNeededOnCreationOrReset() { if (baseOffset.withinBounds()) { if (!filterMatcher.matches()) { increment(); // increment() returns early if it detects the current Thread is interrupted. It will leave this // FilteredOffset in an illegal state, because it may point to an offset that should be filtered. So must to // call BaseQuery.checkInterrupted() and thereby throw a QueryInterruptedException. BaseQuery.checkInterrupted(); } } }
@Override
public void advance()
{
  // Walk forward until a row passes both the max-row-index bound and the filter.
  // If the iterator is (or becomes) exhausted without a match, mark the cursor done.
  while (baseIter.hasNext()) {
    // Honor query cancellation on every step; this loop can be long under selective filters.
    BaseQuery.checkInterrupted();
    final IncrementalIndexRow row = baseIter.next();
    if (!beyondMaxRowIndex(row.getRowIndex())) {
      currEntry.set(row);
      if (filterMatcher.matches()) {
        return;
      }
    }
  }
  done = true;
}
@Override
public void reset()
{
  // Re-create the base iterator, then fast-forward past the rows this cursor had already
  // consumed before reset (tracked in numAdvanced; -1 means this is the first reset).
  baseIter = cursorIterable.iterator();

  if (numAdvanced == -1) {
    numAdvanced = 0;
  } else {
    Iterators.advance(baseIter, numAdvanced);
  }

  // Iterators.advance() may have been a long operation; surface any interruption now,
  // before scanning for the first matching row.
  BaseQuery.checkInterrupted();

  // Scan forward to the first row that is within the max-row-index bound and passes the
  // filter. numAdvanced is incremented for every skipped row so a subsequent reset()
  // fast-forwards to the same position.
  boolean foundMatched = false;
  while (baseIter.hasNext()) {
    IncrementalIndexRow entry = baseIter.next();
    if (beyondMaxRowIndex(entry.getRowIndex())) {
      numAdvanced++;
      continue;
    }
    currEntry.set(entry);
    if (filterMatcher.matches()) {
      foundMatched = true;
      break;
    }
    numAdvanced++;
  }

  // The cursor is done only if nothing matched AND there is nothing left to try:
  // either the range was empty to begin with or the iterator is exhausted.
  done = !foundMatched && (emptyRange || !baseIter.hasNext());
}
@Override public AggregateResult accumulate( final AggregateResult priorResult, final Row row ) { BaseQuery.checkInterrupted(); if (priorResult != null && !priorResult.isOk()) { // Pass-through error returns without doing more work. return priorResult; } if (!grouper.isInitialized()) { grouper.init(); } columnSelectorRow.set(row); final Comparable[] key = new Comparable[keySize]; valueExtractFn.apply(row, key); final AggregateResult aggregateResult = grouper.aggregate(new RowBasedKey(key)); columnSelectorRow.set(null); return aggregateResult; } };
@Override
public void advance()
{
  cursorOffset.increment();
  // Must call BaseQuery.checkInterrupted() after cursorOffset.increment(), not before: unlike all other
  // subclasses of Offset, FilteredOffset.increment() is a potentially long-running (not "instant")
  // operation, and it returns early on interruption, leaving itself in an illegal state. We should not
  // let aggregators, etc. access this illegal state; instead, throw a QueryInterruptedException by
  // calling BaseQuery.checkInterrupted() here.
  BaseQuery.checkInterrupted();
}
@Override
protected long scanAndAggregate(
    final PooledTopNParams params,
    final int[] positions,
    final BufferAggregator[] theAggregators
)
{
  // Try each specialized implementation in order; a non-negative row count means it handled the scan.
  long processedRows = -1;
  for (ScanAndAggregate candidate : specializedScanAndAggregateImplementations) {
    processedRows = candidate.scanAndAggregate(params, positions, theAggregators);
    if (processedRows >= 0) {
      break;
    }
  }
  // No specialized implementation applied; fall back to the generic scan.
  if (processedRows < 0) {
    processedRows = scanAndAggregateDefault(params, positions, theAggregators);
  }
  // The scan may have been long-running; surface any interruption before returning.
  BaseQuery.checkInterrupted();
  return processedRows;
}
private <T> File spill(Iterator<T> iterator) throws IOException
{
  // Stream the iterator's elements to an LZ4-compressed, JSON-serialized temporary file.
  try (
      final LimitedTemporaryStorage.LimitedOutputStream out = temporaryStorage.createFile();
      final LZ4BlockOutputStream lz4Out = new LZ4BlockOutputStream(out);
      final JsonGenerator generator = spillMapper.getFactory().createGenerator(lz4Out)
  ) {
    while (iterator.hasNext()) {
      // Spilling can take a while; honor query cancellation between elements.
      BaseQuery.checkInterrupted();
      generator.writeObject(iterator.next());
    }
    return out.getFile();
  }
}
private void incrementIfNeededOnCreationOrReset() { if (baseOffset.withinBounds()) { if (!filterMatcher.matches()) { increment(); // increment() returns early if it detects the current Thread is interrupted. It will leave this // FilteredOffset in an illegal state, because it may point to an offset that should be filtered. So must to // call BaseQuery.checkInterrupted() and thereby throw a QueryInterruptedException. BaseQuery.checkInterrupted(); } } }
@Override
public void advance()
{
  // Walk forward until a row passes both the max-row-index bound and the filter.
  // If the iterator is (or becomes) exhausted without a match, mark the cursor done.
  while (baseIter.hasNext()) {
    // Honor query cancellation on every step; this loop can be long under selective filters.
    BaseQuery.checkInterrupted();
    final IncrementalIndexRow row = baseIter.next();
    if (!beyondMaxRowIndex(row.getRowIndex())) {
      currEntry.set(row);
      if (filterMatcher.matches()) {
        return;
      }
    }
  }
  done = true;
}
@Override
public void reset()
{
  // Re-create the base iterator, then fast-forward past the rows this cursor had already
  // consumed before reset (tracked in numAdvanced; -1 means this is the first reset).
  baseIter = cursorIterable.iterator();

  if (numAdvanced == -1) {
    numAdvanced = 0;
  } else {
    Iterators.advance(baseIter, numAdvanced);
  }

  // Iterators.advance() may have been a long operation; surface any interruption now,
  // before scanning for the first matching row.
  BaseQuery.checkInterrupted();

  // Scan forward to the first row that is within the max-row-index bound and passes the
  // filter. numAdvanced is incremented for every skipped row so a subsequent reset()
  // fast-forwards to the same position.
  boolean foundMatched = false;
  while (baseIter.hasNext()) {
    IncrementalIndexRow entry = baseIter.next();
    if (beyondMaxRowIndex(entry.getRowIndex())) {
      numAdvanced++;
      continue;
    }
    currEntry.set(entry);
    if (filterMatcher.matches()) {
      foundMatched = true;
      break;
    }
    numAdvanced++;
  }

  // The cursor is done only if nothing matched AND there is nothing left to try:
  // either the range was empty to begin with or the iterator is exhausted.
  done = !foundMatched && (emptyRange || !baseIter.hasNext());
}
@Override public AggregateResult accumulate( final AggregateResult priorResult, final Row row ) { BaseQuery.checkInterrupted(); if (priorResult != null && !priorResult.isOk()) { // Pass-through error returns without doing more work. return priorResult; } if (!grouper.isInitialized()) { grouper.init(); } columnSelectorRow.set(row); final Comparable[] key = new Comparable[keySize]; valueExtractFn.apply(row, key); final AggregateResult aggregateResult = grouper.aggregate(new RowBasedKey(key)); columnSelectorRow.set(null); return aggregateResult; } };