@Override
public JavaRDD<RecordInfo<Object>> call(JavaRDD<T> input, Time batchTime) throws Exception {
  // Lazily construct the transform on first batch and cache it for reuse.
  if (function == null) {
    FlatMapFunc<T, RecordInfo<Object>> transform;
    if (isMultiOutput) {
      transform = new MultiOutputTransformFunction<T>(dynamicDriverContext.getPluginFunctionContext());
    } else {
      transform = new TransformFunction<T>(dynamicDriverContext.getPluginFunctionContext());
    }
    function = Compat.convert(transform);
  }
  return input.flatMap(function);
}
}
@Override
public JavaPairRDD<JOIN_KEY, T> call(JavaRDD<T> input, Time batchTime) throws Exception {
  // First invocation: build the key-extraction function for this input stage and cache it.
  if (function == null) {
    JoinOnFunction<JOIN_KEY, T> joinOn =
      new JoinOnFunction<JOIN_KEY, T>(dynamicDriverContext.getPluginFunctionContext(), inputStageName);
    function = Compat.convert(joinOn);
  }
  return input.flatMapToPair(function);
}
}
@Override
public JavaRDD<RecordInfo<Object>> call(JavaPairRDD<GROUP_KEY, Iterable<GROUP_VAL>> input,
                                        Time batchTime) throws Exception {
  // Lazily create the aggregation function once, then reuse it for every batch.
  if (function == null) {
    AggregatorAggregateFunction<GROUP_KEY, GROUP_VAL, OUT> aggregate =
      new AggregatorAggregateFunction<GROUP_KEY, GROUP_VAL, OUT>(dynamicDriverContext.getPluginFunctionContext());
    function = Compat.convert(aggregate);
  }
  return input.flatMap(function);
}
}
@Override
public JavaPairRDD<GROUP_KEY, GROUP_VAL> call(JavaRDD<GROUP_VAL> input, Time batchTime) throws Exception {
  // Lazily create the group-by key function once, then reuse it for every batch.
  if (function == null) {
    AggregatorGroupByFunction<GROUP_KEY, GROUP_VAL> groupBy =
      new AggregatorGroupByFunction<GROUP_KEY, GROUP_VAL>(dynamicDriverContext.getPluginFunctionContext());
    function = Compat.convert(groupBy);
  }
  return input.flatMapToPair(function);
}
}
@Override
public JavaRDD<OUT> call(JavaPairRDD<JOIN_KEY, List<JoinElement<INPUT_RECORD>>> input,
                         Time batchTime) throws Exception {
  // Lazily create the merge function once, then reuse it for every batch.
  if (function == null) {
    JoinMergeFunction<JOIN_KEY, INPUT_RECORD, OUT> merge =
      new JoinMergeFunction<JOIN_KEY, INPUT_RECORD, OUT>(dynamicDriverContext.getPluginFunctionContext());
    function = Compat.convert(merge);
  }
  return input.flatMap(function);
}
/**
 * Creates and initializes the delegate plugin on first use; subsequent calls are no-ops.
 * Initialization runs via {@code Transactionals.execute} so the plugin's initialize hook
 * has dataset access through the provided {@code DatasetContext}.
 */
private void lazyInit(final JavaSparkContext jsc) throws Exception {
  if (delegate != null) {
    // Already initialized on a previous call; nothing to do.
    return;
  }
  PluginFunctionContext pluginFunctionContext = dynamicDriverContext.getPluginFunctionContext();
  delegate = pluginFunctionContext.createPlugin();
  final StageSpec stageSpec = pluginFunctionContext.getStageSpec();
  final JavaSparkExecutionContext sec = dynamicDriverContext.getSparkExecutionContext();
  Transactionals.execute(sec, new TxRunnable() {
    @Override
    public void run(DatasetContext datasetContext) throws Exception {
      PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
      SparkExecutionPluginContext pluginContext =
        new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
      delegate.initialize(pluginContext);
    }
  }, Exception.class);
}
}