/**
 * Verifies that {@link DataflowRunner#fromOptions} rejects malformed job names with an
 * {@link IllegalArgumentException} whose message identifies the invalid name.
 */
@Test
public void testInvalidJobName() throws IOException {
  List<String> invalidNames = Arrays.asList("invalid_name", "0invalid", "invalid-");
  List<String> expectedReason =
      Arrays.asList("JobName invalid", "JobName invalid", "JobName invalid");
  int index = 0;
  for (String badName : invalidNames) {
    DataflowPipelineOptions options = buildPipelineOptions();
    options.setJobName(badName);
    try {
      DataflowRunner.fromOptions(options);
      fail("Expected IllegalArgumentException for jobName " + options.getJobName());
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage(), containsString(expectedReason.get(index)));
    }
    ++index;
  }
}
/** Verifies that well-formed job names are accepted by {@link DataflowRunner#fromOptions}. */
@Test
public void testValidJobName() throws IOException {
  String[] acceptableNames = {
    "ok", "Ok", "A-Ok", "ok-123", "this-one-is-fairly-long-01234567890123456789"
  };
  for (String candidate : acceptableNames) {
    DataflowPipelineOptions options = buildPipelineOptions();
    options.setJobName(candidate);
    assertNotNull(DataflowRunner.fromOptions(options));
  }
}
/**
 * Verifies that running with {@code --update} submits a create request for the replacement
 * job and surfaces the new job id on the returned {@link DataflowPipelineJob}.
 */
@Test
public void testUpdate() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setUpdate(true);
  options.setJobName("oldJobName");

  DataflowPipelineJob result = (DataflowPipelineJob) buildDataflowPipeline(options).run();
  assertEquals("newid", result.getJobId());

  // The replacement job must have been submitted via a well-formed create request.
  ArgumentCaptor<Job> createdJob = ArgumentCaptor.forClass(Job.class);
  Mockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), createdJob.capture());
  assertValidJob(createdJob.getValue());
}
/** Verifies that {@link DataflowRunner#fromOptions} normalizes a mixed-case job name to lowercase. */
@Test
public void testFromOptionsWithUppercaseConvertsToLowercase() throws Exception {
  String originalName = "ThisJobNameHasMixedCase";
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setJobName(originalName);
  DataflowRunner.fromOptions(options);
  assertThat(options.getJobName(), equalTo(originalName.toLowerCase()));
}
dataflowOptions.getJobName()); dataflowOptions.setJobName(jobName);
options.setZone(zone); options.setStreaming(isStreaming); options.setJobName(pipelineName); Pipeline pipeline = Pipeline.create(options);
/** Verifies that a job name set on the options is read back unchanged. */
@Test
public void testJobNameIsSet() {
  DataflowPipelineOptions opts = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  opts.setJobName("TestJobName");
  assertEquals("TestJobName", opts.getJobName());
}
/**
 * Builds standard {@link DataflowPipelineOptions} for tests, first parsing any supplied
 * command-line {@code args} and then overriding with fixed test values.
 *
 * <p>The returned options carry a mocked {@link GcsUtil} whose {@code expand} echoes back the
 * requested path and whose {@code bucketAccessible} always reports {@code true}, so no real
 * GCS access occurs during the test.
 */
private static DataflowPipelineOptions buildPipelineOptions(String... args) throws IOException {
  GcsUtil mockGcsUtil = mock(GcsUtil.class);
  // expand() returns the queried path itself; bucketAccessible() always succeeds.
  when(mockGcsUtil.expand(any(GcsPath.class)))
      .then(invocation -> ImmutableList.of((GcsPath) invocation.getArguments()[0]));
  when(mockGcsUtil.bucketAccessible(any(GcsPath.class))).thenReturn(true);
  DataflowPipelineOptions options =
      PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setGcpCredential(new TestCredential());
  options.setJobName("some-job-name");
  options.setProject("some-project");
  options.setRegion("some-region");
  options.setTempLocation(GcsPath.fromComponents("somebucket", "some/path").toString());
  options.setFilesToStage(new ArrayList<>());
  options.setGcsUtil(mockGcsUtil);
  return options;
}
}
private static DataflowPipelineOptions buildPipelineOptions() throws IOException { GcsUtil mockGcsUtil = mock(GcsUtil.class); when(mockGcsUtil.expand(any(GcsPath.class))) .then(invocation -> ImmutableList.of((GcsPath) invocation.getArguments()[0])); when(mockGcsUtil.bucketAccessible(any(GcsPath.class))).thenReturn(true); DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class); options.setRunner(DataflowRunner.class); options.setGcpCredential(new TestCredential()); options.setJobName("some-job-name"); options.setProject("some-project"); options.setRegion("some-region"); options.setTempLocation(GcsPath.fromComponents("somebucket", "some/path").toString()); options.setFilesToStage(new ArrayList<>()); options.setDataflowClient(buildMockDataflow(new IsValidCreateRequest())); options.setGcsUtil(mockGcsUtil); // Enable the FileSystems API to know about gs:// URIs in this test. FileSystems.setDefaultPipelineOptions(options); return options; }
DataflowPipelineOptions options = buildPipelineOptions(); options.setUpdate(true); options.setJobName("oldJobName"); Dataflow mockDataflowClient = options.getDataflowClient(); Dataflow.Projects.Locations.Jobs.Create mockRequest =
/**
 * Verifies that updating a job that is not running fails with an
 * {@link IllegalArgumentException} naming the (lowercased) job name.
 */
@Test
public void testUpdateNonExistentPipeline() throws IOException {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("Could not find running job named badjobname");

  DataflowPipelineOptions updateOptions = buildPipelineOptions();
  updateOptions.setUpdate(true);
  updateOptions.setJobName("badJobName");
  buildDataflowPipeline(updateOptions).run();
}
options.setProject(PROJECT_ID); options.setRegion(REGION_ID); options.setJobName("job"); options.setDataflowClient(buildMockDataflow()); options.setGcsUtil(mockGcsUtil);
/**
 * Tests that the {@link DataflowRunner} with {@code --templateLocation} returns normally when the
 * runner is successfully run.
 */
@Test
public void testTemplateRunnerFullCompletion() throws Exception {
  File templateFile = tmpFolder.newFile();

  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setProject("test-project");
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation(templateFile.getPath());
  options.setTempLocation(tmpFolder.getRoot().getPath());

  Pipeline.create(options).run();
  expectedLogs.verifyInfo("Template successfully created");
}
/**
 * Runs a one-shot batch pipeline that reads {@code options.getInputFile()} and publishes each
 * non-empty line to the configured Pub/Sub topic, blocking until the injection completes.
 */
private static void injectMessages(BigtablePubsubOptions options) {
  String sourceFile = options.getInputFile();
  String targetTopic = options.getPubsubTopic();

  // Run the injector as a separate batch job so it does not interfere with the main pipeline.
  DataflowPipelineOptions injectorOptions = options.as(DataflowPipelineOptions.class);
  injectorOptions.setStreaming(false);
  injectorOptions.setNumWorkers(INJECTORNUMWORKERS);
  injectorOptions.setJobName(injectorOptions.getJobName() + "-injector");

  Pipeline injector = Pipeline.create(injectorOptions);
  injector
      .apply(TextIO.read().from(sourceFile))
      .apply(ParDo.of(new FilterEmptyStringsFn()))
      .apply(PubsubIO.writeStrings().to(targetTopic));
  injector.run().waitUntilFinish();
}
options.setJobName("some-job-name");
/** Verifies the runner's {@code toString} includes the lowercased job name. */
@Test
public void testToString() {
  DataflowPipelineOptions opts = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  opts.setJobName("TestJobName");
  opts.setProject("test-project");
  opts.setTempLocation("gs://test/temp/location");
  opts.setGcpCredential(new TestCredential());
  opts.setPathValidatorClass(NoopPathValidator.class);
  opts.setRunner(DataflowRunner.class);
  assertEquals("DataflowRunner#testjobname", DataflowRunner.fromOptions(opts).toString());
}
/**
 * Tests that the {@link DataflowRunner} with {@code --templateLocation} throws the appropriate
 * exception when an output file is not writable.
 */
@Test
public void testTemplateRunnerLoggedErrorForFile() throws Exception {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation("//bad/path");
  options.setProject("test-project");
  options.setTempLocation(tmpFolder.getRoot().getPath());
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);

  thrown.expect(RuntimeException.class);
  thrown.expectMessage("Cannot create output file at");
  Pipeline.create(options).run();
}