@Override
public Object apply(final OutputStream out)
{
  // Serialize every SegmentAnalysis produced by the query to "out" as JSON.
  // The mapped sequence is evaluated purely for this side effect; the mapper
  // returns null for every element and this method always returns null.
  evaluateSequenceForSideEffects(
      Sequences.map(
          executeQuery(injector, index, query),
          analysis -> {
            try {
              objectMapper.writeValue(out, analysis);
            }
            catch (IOException e) {
              // Throwables.propagate(Throwable) is deprecated in Guava; for an
              // IOException it simply wrapped in RuntimeException, so do that
              // directly (matches the error handling used elsewhere in this file).
              throw new RuntimeException(e);
            }
            return null;
          }
      )
  );
  return null;
}
}
@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final Map<String, Object> responseContext)
{
  // Run the query on each underlying runner and lazily concatenate their
  // result sequences, preserving runner order.
  return Sequences.concat(
      Sequences.map(queryRunners, runner -> runner.run(queryPlus, responseContext))
  );
}
}
@Override
public Sequence<T> run(
    final QueryPlus<T> queryPlus,
    final Map<String, Object> responseContext
)
{
  // Round-trip each result through the Smile ObjectMapper (write to bytes,
  // read back as "clazz"), exercising/normalizing serialization of results.
  return Sequences.map(
      baseRunner.run(queryPlus, responseContext),
      result -> {
        try {
          return smileMapper.readValue(smileMapper.writeValueAsBytes(result), clazz);
        }
        catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
  );
}
}
/**
 * Builds a cursor-based result sequence: makes cursors over the (single)
 * query interval, applies {@code mapFn} to each cursor, and drops null results.
 */
public static <T> Sequence<Result<T>> makeCursorBasedQuery(
    final StorageAdapter adapter,
    List<Interval> queryIntervals,
    Filter filter,
    VirtualColumns virtualColumns,
    boolean descending,
    Granularity granularity,
    final Function<Cursor, Result<T>> mapFn
)
{
  // Only a single query interval is supported by this code path.
  Preconditions.checkArgument(
      queryIntervals.size() == 1,
      "Can only handle a single interval, got[%s]",
      queryIntervals
  );
  // mapFn already has the exact Function type Sequences.map expects, so it is
  // passed through directly rather than wrapped in a delegating adapter.
  return Sequences.filter(
      Sequences.map(
          adapter.makeCursors(filter, queryIntervals.get(0), virtualColumns, granularity, descending, null),
          mapFn
      ),
      Predicates.notNull()
  );
}
final Sequence<T> wrappedSequence = Sequences.map( sequence, input -> {
@Override public Sequence<ScanResultValue> run( final QueryPlus<ScanResultValue> queryPlus, final Map<String, Object> responseContext ) { // Note: this variable is effective only when queryContext has a timeout. // See the comment of CTX_TIMEOUT_AT. final long timeoutAt = System.currentTimeMillis() + QueryContexts.getTimeout(queryPlus.getQuery()); responseContext.put(CTX_TIMEOUT_AT, timeoutAt); return Sequences.concat( Sequences.map( Sequences.simple(queryRunners), new Function<QueryRunner<ScanResultValue>, Sequence<ScanResultValue>>() { @Override public Sequence<ScanResultValue> apply(final QueryRunner<ScanResultValue> input) { return input.run(queryPlus, responseContext); } } ) ); } };
/**
 * Runs a GroupBy query and converts each result Row into an Object[] laid out
 * according to the output row type, coercing each value to its SQL type.
 */
private Sequence<Object[]> executeGroupBy(
    final DruidQuery druidQuery,
    final GroupByQuery query
)
{
  final List<RelDataTypeField> fieldList = druidQuery.getOutputRowType().getFieldList();
  return Sequences.map(
      runQuery(query),
      row -> {
        final Object[] rowArray = new Object[fieldList.size()];
        for (RelDataTypeField field : fieldList) {
          rowArray[field.getIndex()] = coerce(
              row.getRaw(druidQuery.getOutputRowSignature().getRowOrder().get(field.getIndex())),
              field.getType().getSqlTypeName()
          );
        }
        return rowArray;
      }
  );
}
listOfSequences.add(Sequences.map(cachedSequence, pullFromCacheFunction));
@Override public Sequence<Object[]> get() { if (root.isRefTrivial()) { return druidRel.runQuery(); } else { // Add a mapping on top to accommodate root.fields. return Sequences.map( druidRel.runQuery(), new Function<Object[], Object[]>() { @Override public Object[] apply(final Object[] input) { final Object[] retVal = new Object[root.fields.size()]; for (int i = 0; i < root.fields.size(); i++) { retVal[i] = input[root.fields.get(i).getKey()]; } return retVal; } } ); } } };
/**
 * Runs a TopN query and flattens each per-timestamp result (a list of rows)
 * into a single sequence of Object[] laid out per the output row type.
 */
private Sequence<Object[]> executeTopN(
    final DruidQuery druidQuery,
    final TopNQuery query
)
{
  final List<RelDataTypeField> fieldList = druidQuery.getOutputRowType().getFieldList();
  return Sequences.concat(
      Sequences.map(
          runQuery(query),
          result -> {
            final List<DimensionAndMetricValueExtractor> rows = result.getValue().getValue();
            final List<Object[]> converted = new ArrayList<>(rows.size());
            for (DimensionAndMetricValueExtractor row : rows) {
              final Object[] values = new Object[fieldList.size()];
              for (final RelDataTypeField field : fieldList) {
                // Look up the column's output name, then coerce to its SQL type.
                final String outputName = druidQuery.getOutputRowSignature().getRowOrder().get(field.getIndex());
                values[field.getIndex()] = coerce(row.getMetric(outputName), field.getType().getSqlTypeName());
              }
              converted.add(values);
            }
            return Sequences.simple(converted);
          }
      )
  );
}
/**
 * Applies the query's post-aggregators to the index's rows and rewrites each
 * row's timestamp onto the query granularity's bucket.
 */
public static Sequence<Row> postAggregate(final GroupByQuery query, IncrementalIndex index)
{
  return Sequences.map(
      Sequences.simple(index.iterableWithPostAggregations(query.getPostAggregatorSpecs(), query.isDescending())),
      input -> {
        final MapBasedRow row = (MapBasedRow) input;
        // Re-bucket the timestamp; the event map is reused as-is.
        return new MapBasedRow(
            query.getGranularity().toDateTime(row.getTimestampFromEpoch()),
            row.getEvent()
        );
      }
  );
}
private Sequence<T> deserializeResults(final byte[] cachedResult, CacheStrategy strategy, String resultSetId) { if (cachedResult == null) { log.error("Cached result set is null"); } final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache(true); final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz(); //Skip the resultsetID and its length bytes Sequence<T> cachedSequence = Sequences.simple(() -> { try { int resultOffset = Integer.BYTES + resultSetId.length(); return objectMapper.readValues( objectMapper.getFactory().createParser( cachedResult, resultOffset, cachedResult.length - resultOffset ), cacheObjectClazz ); } catch (IOException e) { throw new RE(e, "Failed to retrieve results from cache for query ID [%s]", query.getId()); } }); return Sequences.map(cachedSequence, pullFromCacheFunction); }
Sequences.map( runQuery(query), scanResult -> {
.getOutputName(); return Sequences.map( runQuery(query), new Function<Result<TimeseriesResultValue>, Object[]>()
Sequences.map( adapter.makeCursors( filter,
private void readCursors(Sequence<Cursor> cursors, Blackhole blackhole)
{
  // For every row of each cursor, read the "dimSequential" dimension and feed
  // the decoded value to the blackhole so the JIT cannot eliminate the work.
  final Sequence<Void> consumed = Sequences.map(
      cursors,
      cursor -> {
        final DimensionSelector selector = cursor.getColumnSelectorFactory().makeDimensionSelector(
            new DefaultDimensionSpec("dimSequential", null)
        );
        for (; !cursor.isDone(); cursor.advance()) {
          final IndexedInts row = selector.getRow();
          blackhole.consume(selector.lookupName(row.get(0)));
        }
        return null;
      }
  );
  // Materialize the sequence so the per-cursor work actually runs.
  blackhole.consume(consumed.toList());
}
/**
 * Selects elements from "selectColumn" from rows matching a filter. selectColumn must be a single valued dimension.
 */
private List<String> selectColumnValuesMatchingFilter(final DimFilter filter, final String selectColumn)
{
  final Sequence<Cursor> cursors = makeCursorSequence(makeFilter(filter));
  final Sequence<List<String>> perCursorValues = Sequences.map(
      cursors,
      cursor -> {
        final DimensionSelector selector = cursor
            .getColumnSelectorFactory()
            .makeDimensionSelector(new DefaultDimensionSpec(selectColumn, selectColumn));
        final List<String> values = new ArrayList<>();
        for (; !cursor.isDone(); cursor.advance()) {
          final IndexedInts row = selector.getRow();
          // The dimension must be single-valued; fail loudly otherwise.
          Preconditions.checkState(row.size() == 1);
          values.add(selector.lookupName(row.get(0)));
        }
        return values;
      }
  );
  // A single cursor is expected, so return its value list.
  return perCursorValues.toList().get(0);
}
private void readCursorsLong(Sequence<Cursor> cursors, final Blackhole blackhole)
{
  // Read the "sumLongSequential" long column from every row of each cursor,
  // consuming each value so the benchmark work cannot be dead-code eliminated.
  final Sequence<Void> consumed = Sequences.map(
      cursors,
      cursor -> {
        final BaseLongColumnValueSelector selector =
            cursor.getColumnSelectorFactory().makeColumnValueSelector("sumLongSequential");
        for (; !cursor.isDone(); cursor.advance()) {
          blackhole.consume(selector.getLong());
        }
        return null;
      }
  );
  // Materialize the sequence so the per-cursor work actually runs.
  blackhole.consume(consumed.toList());
}
/**
 * Counts rows matching {@code filter} by running a filtered count aggregator
 * over every row of the (single expected) cursor.
 */
private long selectCountUsingFilteredAggregator(final DimFilter filter)
{
  final Sequence<Cursor> cursors = makeCursorSequence(makeFilter(filter));
  final Sequence<Aggregator> aggregators = Sequences.map(
      cursors,
      cursor -> {
        final Aggregator agg = new FilteredAggregatorFactory(
            new CountAggregatorFactory("count"),
            maybeOptimize(filter)
        ).factorize(cursor.getColumnSelectorFactory());
        while (!cursor.isDone()) {
          agg.aggregate();
          cursor.advance();
        }
        return agg;
      }
  );
  return aggregators.toList().get(0).getLong();
}
return Sequences.map( baseRunner.run(queryPlus.withQuery(queryToRun), responseContext), finalizerFn