@Override
public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
        ChannelInstance[] inputs,
        ChannelInstance[] outputs,
        SparkExecutor sparkExecutor,
        OptimizationContext.OperatorContext operatorContext) {
    // This operator is a source: it takes at most one (collection) input and
    // produces exactly its declared number of outputs.
    assert inputs.length <= 1;
    assert outputs.length == this.getNumOutputs();

    // Prefer the statically configured collection; otherwise obtain the data
    // from the single input channel instance.
    Collection<Type> data = this.collection;
    if (data == null) {
        final CollectionChannel.Instance inputChannel = (CollectionChannel.Instance) inputs[0];
        data = inputChannel.provideCollection();
        assert data != null : String.format(
                "Instance of %s is not providing a collection.", inputChannel.getChannel());
    }

    // Parallelize the data into an RDD and hand it to the output channel.
    final RddChannel.Instance outputChannel = (RddChannel.Instance) outputs[0];
    final JavaRDD<Type> parallelized = sparkExecutor.sc.parallelize(
            RheemCollections.asList(data),
            sparkExecutor.getNumDefaultPartitions());
    this.name(parallelized);
    outputChannel.accept(parallelized, sparkExecutor);

    return ExecutionOperator.modelLazyExecution(inputs, outputs, operatorContext);
}