/**
 * Verifies that the {@code upload_graph} experiment causes the job graph to be staged in GCS:
 * the submitted job carries no inline steps and instead points at a steps location under the
 * staging directory.
 */
@Test
public void testUploadGraph() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setExperiments(Arrays.asList("upload_graph"));

  Pipeline pipeline = buildDataflowPipeline(options);
  // The cast doubles as a check that run() produced a DataflowPipelineJob.
  DataflowPipelineJob job = (DataflowPipelineJob) pipeline.run();

  // Capture the Job object that the runner handed to the Dataflow service stub.
  ArgumentCaptor<Job> jobCaptor = ArgumentCaptor.forClass(Job.class);
  Mockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());

  Job submittedJob = jobCaptor.getValue();
  assertValidJob(submittedJob);
  // With upload_graph enabled, steps are uploaded rather than embedded in the request body.
  assertTrue(submittedJob.getSteps().isEmpty());
  assertTrue(
      submittedJob
          .getStepsLocation()
          .startsWith("gs://valid-bucket/temp/staging/dataflow_graph"));
}
@Test public void testWorkerHarnessContainerImage() { DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class); // default image set options.setWorkerHarnessContainerImage("some-container"); assertThat(getContainerImageForJob(options), equalTo("some-container")); // batch, legacy options.setWorkerHarnessContainerImage("gcr.io/IMAGE/foo"); options.setExperiments(null); options.setStreaming(false); assertThat(getContainerImageForJob(options), equalTo("gcr.io/beam-java-batch/foo")); // streaming, legacy options.setStreaming(true); assertThat(getContainerImageForJob(options), equalTo("gcr.io/beam-java-streaming/foo")); // streaming, fnapi options.setExperiments(ImmutableList.of("experiment1", "beam_fn_api")); assertThat(getContainerImageForJob(options), equalTo("gcr.io/java/foo")); }
: new ArrayList<>(dataflowOptions.getExperiments()); experiments.add("use_staged_dataflow_worker_jar"); dataflowOptions.setExperiments(experiments);
DataflowRunner runner = DataflowRunner.fromOptions(options); options.setStreaming(true); options.setExperiments(Arrays.asList("beam_fn_api")); DataflowPipelineTranslator translator = DataflowPipelineTranslator.fromOptions(options);