/** * Launch application using the application DAG. * Notice that we launch the DAG one at a time, as the result of a DAG has to be immediately returned to the * Java variable before the application can be resumed. * * @param dag the application DAG. */ // When modifying the signature of this method, see CompilerTestUtil#compileDAG and make corresponding changes public static void launchDAG(final DAG dag) { launchDAG(dag, Collections.emptyMap(), ""); }
/**
 * Launch an application using the given application DAG under the specified job ID,
 * with no broadcast variables.
 *
 * @param dag the application DAG.
 * @param jobId job ID.
 */
public static void launchDAG(final DAG dag, final String jobId) {
  launchDAG(dag, Collections.emptyMap(), jobId);
}
/**
 * Runs the given Pipeline by converting it to a Nemo DAG and launching that DAG.
 *
 * @param pipeline the Pipeline to run.
 * @return the result of the pipeline.
 */
public NemoPipelineResult run(final Pipeline pipeline) {
  // Convert the Beam pipeline into a Nemo DAG via a topological traversal.
  final PipelineVisitor visitor = new PipelineVisitor(pipeline, nemoPipelineOptions);
  pipeline.traverseTopologically(visitor);

  // NOTE(review): the result object is constructed before the launch in the
  // original code — this ordering is preserved deliberately.
  final NemoPipelineResult result = new NemoPipelineResult();
  JobLauncher.launchDAG(visitor.getConvertedPipeline(), nemoPipelineOptions.getJobName());
  return result;
}
}
/** * Collect data by running the DAG. * * @param dag the DAG to execute. * @param loopVertexStack loop vertex stack. * @param lastVertex last vertex added to the dag. * @param serializer serializer for the edges. * @param <T> type of the return data. * @return the data collected. */ public static <T> List<T> collect(final DAG<IRVertex, IREdge> dag, final Stack<LoopVertex> loopVertexStack, final IRVertex lastVertex, final Serializer serializer) { final DAGBuilder<IRVertex, IREdge> builder = new DAGBuilder<>(dag); final IRVertex collectVertex = new OperatorVertex(new CollectTransform<>()); builder.addVertex(collectVertex, loopVertexStack); final IREdge newEdge = new IREdge(getEdgeCommunicationPattern(lastVertex, collectVertex), lastVertex, collectVertex); newEdge.setProperty(EncoderProperty.of(new SparkEncoderFactory(serializer))); newEdge.setProperty(DecoderProperty.of(new SparkDecoderFactory(serializer))); newEdge.setProperty(SPARK_KEY_EXTRACTOR_PROP); builder.connectVertices(newEdge); // launch DAG JobLauncher.launchDAG(builder.build(), SparkBroadcastVariables.getAll(), ""); return (List<T>) JobLauncher.getCollectedData(); }