/**
 * Translates the pipeline by traversing it topologically, with this
 * translator instance acting as the visitor.
 *
 * @param pipeline The pipeline to be translated
 */
public void translate(Pipeline pipeline) {
  pipeline.traverseTopologically(this);
}
/**
 * Checks the pipeline's transforms by walking it topologically with the
 * shared validation {@code Visitor} instance.
 *
 * @param pipeline the pipeline whose transforms are validated
 */
private static void validateTransforms(Pipeline pipeline) {
  pipeline.traverseTopologically(Visitor.INSTANCE);
}
/**
 * Runs the translation by visiting every node of the pipeline in
 * topological order; this object serves as the visitor.
 *
 * @param pipeline The pipeline to be translated
 */
public void translate(Pipeline pipeline) {
  pipeline.traverseTopologically(this);
}
/**
 * Performs the pipeline translation: the pipeline is traversed
 * topologically and this instance receives the visit callbacks.
 *
 * @param pipeline The pipeline to be translated
 */
public void translate(Pipeline pipeline) {
  pipeline.traverseTopologically(this);
}
/**
 * Validates the transforms of {@code pipeline} via a topological walk
 * using the singleton validation visitor.
 *
 * @param pipeline the pipeline to validate
 */
private static void validateTransforms(Pipeline pipeline) {
  pipeline.traverseTopologically(Visitor.INSTANCE);
}
/**
 * Collects every node of the pipeline in topological visit order.
 *
 * @param pipeline the pipeline whose nodes are recorded
 * @return the nodes visited during the traversal
 */
private List<TransformHierarchy.Node> recordPipelineNodes(final Pipeline pipeline) {
  final NodeRecorder recorder = new NodeRecorder();
  pipeline.traverseTopologically(recorder);
  return recorder.visited;
}
/**
 * Updates/populates the cache candidates by visiting the pipeline with a
 * {@link CacheVisitor} built from the given translator and context.
 *
 * @param pipeline the pipeline to inspect for cache candidates
 * @param translator the Spark pipeline translator used by the visitor
 * @param evaluationContext the evaluation context the visitor populates
 */
public static void updateCacheCandidates(
    Pipeline pipeline, SparkPipelineTranslator translator, EvaluationContext evaluationContext) {
  final CacheVisitor visitor = new CacheVisitor(translator, evaluationContext);
  pipeline.traverseTopologically(visitor);
}
/**
 * Translates the pipeline (with this instance as visitor) and then fills
 * the supplied DAG from the accumulated translation context.
 *
 * @param pipeline the pipeline to translate
 * @param dag the DAG to populate with the translation result
 */
public void translate(Pipeline pipeline, DAG dag) {
  pipeline.traverseTopologically(this);
  translationContext.populateDAG(dag);
}
/**
 * Counts the PAssert occurrences in a pipeline.
 *
 * @param pipeline the pipeline to scan
 * @return the number of PAsserts found by the counting visitor
 */
public static int countAsserts(Pipeline pipeline) {
  final AssertionCountingVisitor counter = new AssertionCountingVisitor();
  pipeline.traverseTopologically(counter);
  return counter.getPAssertCount();
}
/**
 * Determines whether the pipeline contains no transforms.
 *
 * @param pipeline the pipeline to inspect
 * @return {@code true} if the traversal found the pipeline to be empty
 */
private boolean isEmptyPipeline(final Pipeline pipeline) {
  final IsEmptyVisitor visitor = new IsEmptyVisitor();
  pipeline.traverseTopologically(visitor);
  return visitor.isEmpty();
}
/** Visit the pipeline to determine the translation mode (batch/streaming). */ private void detectTranslationMode(Pipeline pipeline) { TranslationModeDetector detector = new TranslationModeDetector(); pipeline.traverseTopologically(detector); if (detector.getTranslationMode().equals(TranslationMode.STREAMING)) { // set streaming mode if it's a streaming pipeline this.mOptions.setStreaming(true); } }
/**
 * Builds the direct-runner graph for a pipeline.
 *
 * @param p the pipeline to convert
 * @return the graph produced by a topological traversal
 */
public static DirectGraph getGraph(Pipeline p) {
  final DirectGraphVisitor graphVisitor = new DirectGraphVisitor();
  p.traverseTopologically(graphVisitor);
  return graphVisitor.getGraph();
}
/**
 * Gathers the primitive display data found under {@code root} in the pipeline.
 *
 * @param pipeline the pipeline to traverse
 * @param root the transform whose primitives' display data is collected
 * @return the display data recorded by the visitor
 */
private static Set<DisplayData> displayDataForPipeline(Pipeline pipeline, PTransform<?, ?> root) {
  final PrimitiveDisplayDataPTransformVisitor collector =
      new PrimitiveDisplayDataPTransformVisitor(root);
  pipeline.traverseTopologically(collector);
  return collector.getPrimitivesDisplayData();
}
/**
 * Locates the batch stateful DoFn in a pipeline.
 *
 * @param p the pipeline to search
 * @return the stateful DoFn found by the visitor, cast to the dummy type
 */
private static DummyStatefulDoFn findBatchStatefulDoFn(Pipeline p) {
  final FindBatchStatefulDoFnVisitor finder = new FindBatchStatefulDoFnVisitor();
  p.traverseTopologically(finder);
  return (DummyStatefulDoFn) finder.getStatefulDoFn();
}
/**
 * Method to run the Pipeline.
 *
 * @param pipeline the Pipeline to run.
 * @return The result of the pipeline.
 */
public NemoPipelineResult run(final Pipeline pipeline) {
  final PipelineVisitor visitor = new PipelineVisitor(pipeline, nemoPipelineOptions);
  pipeline.traverseTopologically(visitor);
  final NemoPipelineResult result = new NemoPipelineResult();
  // Hand the converted DAG off to the Nemo job launcher.
  JobLauncher.launchDAG(visitor.getConvertedPipeline(), nemoPipelineOptions.getJobName());
  return result;
}
}
@Test
public void testProtoDirectly() {
  // Round-trip: convert to proto, then verify it against the original pipeline.
  final RunnerApi.Pipeline proto = PipelineTranslation.toProto(pipeline, false);
  pipeline.traverseTopologically(new PipelineProtoVerificationVisitor(proto, false));
}
@Test
public void testProtoDirectlyWithViewTransform() {
  // Same round-trip verification, with the view-transform flag enabled.
  final RunnerApi.Pipeline proto = PipelineTranslation.toProto(pipeline, true);
  pipeline.traverseTopologically(new PipelineProtoVerificationVisitor(proto, true));
}
@Test
public void testNetworkConfigMissing() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(pipeline, DataflowRunner.fromOptions(options), Collections.emptyList())
          .getJob();

  // With no network configured, the single worker pool must have a null network.
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertNull(job.getEnvironment().getWorkerPools().get(0).getNetwork());
}
@Test
public void testSubnetworkConfigMissing() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(pipeline, DataflowRunner.fromOptions(options), Collections.emptyList())
          .getJob();

  // With no subnetwork configured, the single worker pool must have a null subnetwork.
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertNull(job.getEnvironment().getWorkerPools().get(0).getSubnetwork());
}
@Test
public void testSubnetworkConfig() throws IOException {
  final String testSubnetwork = "regions/REGION/subnetworks/SUBNETWORK";
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setSubnetwork(testSubnetwork);
  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(pipeline, DataflowRunner.fromOptions(options), Collections.emptyList())
          .getJob();

  // The configured subnetwork must be propagated to the single worker pool.
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertEquals(testSubnetwork, job.getEnvironment().getWorkerPools().get(0).getSubnetwork());
}