/**
 * Static factory: wraps the {@link Dataflow} stub carried by {@code options} in a
 * {@code DataflowClient}.
 *
 * @param options pipeline options supplying the underlying Dataflow service stub
 * @return a new {@code DataflowClient} bound to the stub and options
 */
public static DataflowClient create(DataflowPipelineOptions options) {
  Dataflow dataflow = options.getDataflowClient();
  return new DataflowClient(dataflow, options);
}
public static String getGcloudCancelCommand(DataflowPipelineOptions options, String jobId) { // If using a different Dataflow API than default, prefix command with an API override. String dataflowApiOverridePrefix = ""; String apiUrl = options.getDataflowClient().getBaseUrl(); if (!apiUrl.equals(Dataflow.DEFAULT_BASE_URL)) { dataflowApiOverridePrefix = String.format("%s=%s ", ENDPOINT_OVERRIDE_ENV_VAR, apiUrl); } // Assemble cancel command from optional prefix and project/job parameters. return String.format( "%s%s jobs --project=%s cancel --region=%s %s", dataflowApiOverridePrefix, GCLOUD_DATAFLOW_PREFIX, options.getProject(), options.getRegion(), jobId); }
@Test public void testRunReturnDifferentRequestId() throws IOException { DataflowPipelineOptions options = buildPipelineOptions(); Dataflow mockDataflowClient = options.getDataflowClient(); Dataflow.Projects.Locations.Jobs.Create mockRequest = mock(Dataflow.Projects.Locations.Jobs.Create.class); when(mockDataflowClient .projects() .locations() .jobs() .create(eq(PROJECT_ID), eq(REGION_ID), any(Job.class))) .thenReturn(mockRequest); Job resultJob = new Job(); resultJob.setId("newid"); // Return a different request id. resultJob.setClientRequestId("different_request_id"); when(mockRequest.execute()).thenReturn(resultJob); Pipeline p = buildDataflowPipeline(options); try { p.run(); fail("Expected DataflowJobAlreadyExistsException"); } catch (DataflowJobAlreadyExistsException expected) { assertThat( expected.getMessage(), containsString( "If you want to submit a second job, try again by setting a " + "different name using --jobName.")); assertEquals(expected.getJob().getJobId(), resultJob.getId()); } }
options.setUpdate(true); options.setJobName("oldJobName"); Dataflow mockDataflowClient = options.getDataflowClient(); Dataflow.Projects.Locations.Jobs.Create mockRequest = mock(Dataflow.Projects.Locations.Jobs.Create.class);