InputProvider ret = new InputProvider(); final UnaryFunctor mapper = flatMap.get().getFunctor(); final ExtractEventTime eventTimeFn = flatMap.get().getEventTimeExtractor(); for (Supplier s : suppliers) { final BlockingQueue<Datum> out = new ArrayBlockingQueue(5000);
InputProvider ret = new InputProvider(); final UnaryFunctor mapper = flatMap.get().getFunctor(); final ExtractEventTime eventTimeFn = flatMap.get().getEventTimeExtractor(); for (Supplier s : suppliers) { final BlockingQueue<Datum> out = new ArrayBlockingQueue(5000);
/**
 * Translates a {@code FlatMap} operator into a Spark {@code flatMap} transformation.
 * When the operator carries an event-time extractor, timestamp assignment and the
 * user-defined functor are applied in a single pass; otherwise only the functor runs.
 */
@Override
@SuppressWarnings("unchecked")
public JavaRDD<?> translate(FlatMap operator, SparkExecutorContext context) {
  final JavaRDD<?> input = context.getSingleInput(operator);
  final UnaryFunctor<?, ?> functor = operator.getFunctor();
  final ExtractEventTime<?> eventTimeFn = operator.getEventTimeExtractor();
  // Accumulators must be materialized lazily on the executors, hence the provider wrapper.
  final LazyAccumulatorProvider accumulators =
      new LazyAccumulatorProvider(context.getAccumulatorFactory(), context.getSettings());

  if (eventTimeFn == null) {
    // No event-time extraction requested: just apply the UDF.
    return input
        .flatMap(new UnaryFunctorWrapper(functor, accumulators))
        .setName(operator.getName() + "::apply-udf");
  }

  // Assign event time and apply the UDF in one combined step.
  return input
      .flatMap(new EventTimeAssigningUnaryFunctor(functor, eventTimeFn, accumulators))
      .setName(operator.getName() + "::event-time-and-apply-udf");
}
}
/**
 * Translates a {@code FlatMap} operator into a Flink streaming {@code flatMap}.
 * If an event-time extractor is present, elements are first stamped with event
 * time and watermarks before the user-defined functor is applied.
 */
@Override
@SuppressWarnings("unchecked")
public DataStream<?> translate(FlinkOperator<FlatMap> operator, StreamingExecutorContext context) {
  final Settings settings = context.getSettings();
  final FlinkAccumulatorFactory accumulatorFactory = context.getAccumulatorFactory();
  final UnaryFunctor functor = operator.getOriginalOperator().getFunctor();
  final ExtractEventTime eventTimeFn = operator.getOriginalOperator().getEventTimeExtractor();

  DataStream input = context.getSingleInputStream(operator);
  if (eventTimeFn != null) {
    // Stamp elements with event time and emit watermarks before applying the UDF.
    input =
        input
            .assignTimestampsAndWatermarks(
                new EventTimeAssigner(context.getAllowedLateness(), eventTimeFn))
            .returns((Class) StreamingElement.class);
  }

  return input
      .flatMap(new StreamingUnaryFunctorWrapper(functor, accumulatorFactory, settings))
      .returns((Class) StreamingElement.class)
      .name(operator.getName())
      .setParallelism(operator.getParallelism());
}
}
/**
 * Translates a {@code FlatMap} operator into a Flink batch {@code flatMap}.
 * If an event-time extractor is present, each element is re-stamped with the
 * extracted timestamp before the user-defined functor runs.
 */
@Override
@SuppressWarnings("unchecked")
public DataSet<?> translate(FlinkOperator<FlatMap> operator, BatchExecutorContext context) {
  final Settings settings = context.getSettings();
  final FlinkAccumulatorFactory accumulatorFactory = context.getAccumulatorFactory();
  final UnaryFunctor functor = operator.getOriginalOperator().getFunctor();
  final ExtractEventTime timeAssigner = operator.getOriginalOperator().getEventTimeExtractor();

  DataSet<?> input = context.getSingleInputStream(operator);
  if (timeAssigner != null) {
    // Re-stamp each element in place with its extracted event time.
    input =
        input
            .map(element -> {
              final BatchElement batchElement = (BatchElement) element;
              batchElement.setTimestamp(timeAssigner.extractTimestamp(batchElement.getElement()));
              return batchElement;
            })
            .returns((Class) BatchElement.class);
  }

  return input
      .flatMap(new BatchUnaryFunctorWrapper(functor, accumulatorFactory, settings))
      .returns((Class) BatchElement.class)
      .setParallelism(operator.getParallelism())
      .name(operator.getName());
}
}
/** Verifies that a named FlatMap without an event-time extractor is wired into the flow. */
@Test
public void testBuild() {
  final Flow flow = Flow.create("TEST");
  final Dataset<String> dataset = Util.createMockDataset(flow, 1);

  final Dataset<String> mapped =
      FlatMap.named("FlatMap1")
          .of(dataset)
          .using((String s, Collector<String> c) -> c.collect(s))
          .output();

  assertEquals(flow, mapped.getFlow());
  assertEquals(1, flow.size());

  final FlatMap map = (FlatMap) flow.operators().iterator().next();
  assertEquals(flow, map.getFlow());
  assertEquals("FlatMap1", map.getName());
  assertNotNull(map.getFunctor());
  assertEquals(mapped, map.output());
  // No eventTimeBy(...) was supplied, so the extractor must remain unset.
  assertNull(map.getEventTimeExtractor());
}
@Test public void testBuild_EventTimeExtractor() { Flow flow = Flow.create("TEST"); Dataset<String> dataset = Util.createMockDataset(flow, 1); Dataset<BigDecimal> mapped = FlatMap.named("FlatMap2") .of(dataset) .using((String s, Collector<BigDecimal> c) -> c.collect(null)) .eventTimeBy(Long::parseLong) // ~ consuming the original input elements .output(); assertEquals(flow, mapped.getFlow()); assertEquals(1, flow.size()); FlatMap map = (FlatMap) flow.operators().iterator().next(); assertEquals(flow, map.getFlow()); assertEquals("FlatMap2", map.getName()); assertNotNull(map.getFunctor()); assertEquals(mapped, map.output()); assertNotNull(map.getEventTimeExtractor()); }