/**
 * Convert a list of DimFilters to a list of Filters.
 *
 * @param dimFilters list of DimFilters, should all be non-null
 *
 * @return list of Filters
 */
public static List<Filter> toFilters(List<DimFilter> dimFilters)
{
  // DimFilter::toFilter satisfies the Guava Function contract expected by transform().
  return ImmutableList.copyOf(
      FunctionalIterable.create(dimFilters).transform(DimFilter::toFilter)
  );
}
/**
 * Flatten the segments referenced by {@code timelineObjects} into a single list.
 * Duplicate segments (e.g. a segment appearing in multiple timeline entries) are
 * collapsed by first collecting into an ImmutableSet.
 *
 * @return deduplicated list of all segments held by the timeline objects
 */
public List<DataSegment> getSegments()
{
  return ImmutableSet.copyOf(
      FunctionalIterable.create(timelineObjects).transformCat(
          (Pair<TimelineObjectHolder<String, DataSegment>, Interval> holderAndInterval) ->
              Iterables.transform(
                  holderAndInterval.lhs.getObject(),
                  PartitionChunk::getObject
              )
      )
  ).asList();
}
return Optional.of( (Iterable<Bucket>) FunctionalIterable .create(intervals.get()) .transformCat( new Function<Interval, Iterable<Bucket>>()
private Iterable<BytesMessageWithOffset> filterAndDecode(Iterable<MessageAndOffset> kafkaMessages, final long offset) { return FunctionalIterable .create(kafkaMessages) .filter( new Predicate<MessageAndOffset>() { @Override public boolean apply(MessageAndOffset msgAndOffset) { return msgAndOffset.offset() >= offset; } } ) .transform( new Function<MessageAndOffset, BytesMessageWithOffset>() { @Override public BytesMessageWithOffset apply(MessageAndOffset msgAndOffset) { ByteBuffer bb = msgAndOffset.message().payload(); byte[] payload = new byte[bb.remaining()]; bb.get(payload); // add nextOffset here, thus next fetch will use nextOffset instead of current offset return new BytesMessageWithOffset(payload, msgAndOffset.nextOffset(), partitionId); } } ); }
@Override public Iterable<Bucket> apply(Interval input) { final DateTime bucketTime = input.getStart(); final List<HadoopyShardSpec> specs = schema.getTuningConfig().getShardSpecs().get(bucketTime.getMillis()); if (specs == null) { return ImmutableList.of(); } return FunctionalIterable .create(specs) .transform( new Function<HadoopyShardSpec, Bucket>() { int i = 0; @Override public Bucket apply(HadoopyShardSpec input) { return new Bucket(input.getShardNum(), bucketTime, i++); } } ); } }
.create(lookup).transformCat( (TimelineObjectHolder<String, SegmentLoadInfo> input) -> Iterables.transform(
@Test
public void testDrop()
{
  // assertEquals takes (expected, actual); the expected literal goes first so a
  // failure message reports the values the right way around.
  Assert.assertEquals(
      Collections.singletonList("3"),
      Lists.newArrayList(
          FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                            .drop(2)
      )
  );
}
}
.create(intervals) .transformCat( new Function<Interval, Iterable<TimelineObjectHolder<String, Sink>>>()
@Test
public void testTransformCat()
{
  // assertEquals takes (expected, actual); expected literal first for correct
  // failure messages.
  Assert.assertEquals(
      Arrays.asList("1", "2", "3", "4", "5", "6"),
      Lists.newArrayList(
          FunctionalIterable.create(Arrays.asList("1,2", "3,4", "5,6"))
                            .transformCat((String input) -> Splitter.on(",").split(input))
      )
  );
}
@Test
public void testTransform()
{
  // assertEquals takes (expected, actual); expected literal first for correct
  // failure messages.
  Assert.assertEquals(
      Arrays.asList(1, 2, 3),
      Lists.newArrayList(
          FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                            .transform(Integer::parseInt)
      )
  );
}
@Test
public void testFilter()
{
  // assertEquals takes (expected, actual); expected literal first for correct
  // failure messages.
  Assert.assertEquals(
      Arrays.asList("1", "3"),
      Lists.newArrayList(
          FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                            .filter((String input) -> !"2".equals(input))
      )
  );
}
@Test
public void testKeep()
{
  // assertEquals takes (expected, actual); expected literal first for correct
  // failure messages. keep() drops elements mapped to null.
  Assert.assertEquals(
      Arrays.asList(1, 3),
      Lists.newArrayList(
          FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                            .keep((String input) -> "2".equals(input) ? null : Integer.parseInt(input))
      )
  );
}
.create(segmentIds) .trinaryTransform( intervals,
.create(segmentIds) .trinaryTransform( intervals,
.create(segmentIds) .trinaryTransform( intervals,
.create(segmentIds) .trinaryTransform( intervals,
return Sequences.simple( FunctionalIterable .create(segmentIds) .trinaryTransform( intervals,
.create(segmentIds) .trinaryTransform( intervals,
factory, FunctionalIterable .create(intervals) .transformCat( new Function<Interval, Iterable<TimelineObjectHolder<String, Segment>>>()
/**
 * Run a query against the given indexes on the calling thread and materialize
 * the results.
 *
 * @param query   the query to run
 * @param factory factory used to create, merge, and finalize per-index runners
 * @param indexes indexes to query; each is wrapped in a dummy-id segment
 *
 * @return all result rows as a list
 */
public static <T, QueryType extends Query<T>> List<T> runQuery(
    final QueryType query,
    final QueryRunnerFactory<T, QueryType> factory,
    final List<QueryableIndex> indexes
)
{
  final QueryToolChest<T, QueryType> toolChest = factory.getToolchest();

  // One runner per index, merged into a single runner executing on the caller's thread.
  final QueryRunner<T> mergedRunner = factory.mergeRunners(
      Execs.directExecutor(),
      FunctionalIterable
          .create(indexes)
          .transform(
              index -> factory.createRunner(new QueryableIndexSegment(index, SegmentId.dummy("xxx")))
          )
  );

  final QueryRunner<T> finalizingRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(mergedRunner),
      (QueryToolChest<T, Query<T>>) toolChest
  );

  return finalizingRunner.run(QueryPlus.wrap(query), new HashMap<>()).toList();
}