() -> {
  Job content = new Job();
  content.setProjectId(getProjectId());
  content.setId(jobId);
  content.setRequestedState("JOB_STATE_CANCELLED");
  try {
    Job job = dataflowClient.updateJob(jobId, content);
    return MonitoringUtil.toState(job.getCurrentState());
  } catch (IOException e) {
    State state = getState();
    if (state.isTerminal()) {
      LOG.warn("Cancel failed because job is already terminated. State is {}", state);
      return state;
    }
    String errorMsg =
        String.format(
            "Failed to cancel job in state %s, "
                + "please go to the Developers Console to cancel it manually: %s",
            state,
            MonitoringUtil.getJobMonitoringPageURL(getProjectId(), getRegion(), getJobId()));
    LOG.warn(errorMsg);
    throw new IOException(errorMsg, e);
  }
}
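For orientation, a hypothetical call site for cancel() might look like this (the job variable and log wording are assumptions, not code from this file):

// Hypothetical caller: request cancellation and report the state the service
// returned, falling back to the monitoring console on failure.
try {
  State state = job.cancel();
  LOG.info("Cancellation requested; job {} reported state {}.", job.getJobId(), state);
} catch (IOException e) {
  LOG.warn("Cancel failed; the job can still be cancelled from the monitoring console.", e);
}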
terminalState = currentState;
replacedByJob =
    new DataflowPipelineJob(
        dataflowClient, job.getReplacedByJobId(), dataflowOptions, transformStepNames);

LOG.debug("Exception information:", exn);
if (!nextBackOff(sleeper, backoff)) {
  throw exn;
}
@Override
public Void call() throws Exception {
  while (true) {
    State jobState = job.getState();
    // If we see an error, cancel the job and note the failure.
    if (messageHandler.hasSeenError() && !job.getState().isTerminal()) {
      LOG.info("Cancelling Dataflow job {}", job.getJobId());
      job.cancel();
      return null;
    }
    if (jobState.isTerminal()) {
      return null;
    }
    Thread.sleep(3000L);
  }
}
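The loop above depends on the handler's hasSeenError() flag. Below is a minimal sketch of such a handler, assuming MonitoringUtil.JobMessagesHandler's process(List<JobMessage>) callback; the class and member names are illustrative, not the actual ErrorMonitorMessagesHandler:

import com.google.api.services.dataflow.model.JobMessage;
import java.util.List;
import org.apache.beam.runners.dataflow.util.MonitoringUtil;

// Minimal sketch of an error-tracking messages handler (illustrative names).
class ErrorCapturingMessagesHandler implements MonitoringUtil.JobMessagesHandler {
  private volatile boolean seenError = false;
  private volatile String errorMessage = "";

  @Override
  public void process(List<JobMessage> messages) {
    for (JobMessage message : messages) {
      // The service marks fatal problems with JOB_MESSAGE_ERROR importance.
      if ("JOB_MESSAGE_ERROR".equals(message.getMessageImportance())) {
        seenError = true;
        errorMessage = message.getMessageText();
      }
    }
  }

  boolean hasSeenError() {
    return seenError;
  }

  String getErrorMessage() {
    return errorMessage;
  }
}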
/** Returns {@code true} if the job terminated in {@code State.DONE}, {@code false} otherwise. */
private boolean waitForBatchJobTermination(
    DataflowPipelineJob job, ErrorMonitorMessagesHandler messageHandler) {
  try {
    job.waitUntilFinish(Duration.standardSeconds(-1), messageHandler);
  } catch (IOException e) {
    throw new RuntimeException(e);
  } catch (InterruptedException e) {
    // Restore the interrupt flag rather than clearing it.
    Thread.currentThread().interrupt();
    return false;
  }
  return job.getState() == State.DONE;
}
private static String errorMessage(
    DataflowPipelineJob job, ErrorMonitorMessagesHandler messageHandler) {
  return Strings.isNullOrEmpty(messageHandler.getErrorMessage())
      ? String.format(
          "Dataflow job %s terminated in state %s but did not return a failure reason.",
          job.getJobId(), job.getState())
      : messageHandler.getErrorMessage();
}
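An illustrative call site for this helper, assuming the job has already terminated (not code from this file):

// Illustrative: fail the test with the captured error text, or with the
// fallback message when the service gave no failure reason.
if (job.getState() != State.DONE) {
  throw new AssertionError(errorMessage(job, messageHandler));
}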
@Test
public void testRunStreamingJobNotUsingPAssertThatSucceeds() throws Exception {
  options.setStreaming(true);
  Pipeline p = TestPipeline.create(options);
  p.apply(Create.of(1, 2, 3));

  DataflowPipelineJob mockJob = Mockito.mock(DataflowPipelineJob.class);
  when(mockJob.getState()).thenReturn(State.DONE);
  when(mockJob.waitUntilFinish(any(Duration.class), any(JobMessagesHandler.class)))
      .thenReturn(State.DONE);
  when(mockJob.getProjectId()).thenReturn("test-project");
  when(mockJob.getJobId()).thenReturn("test-job");

  DataflowRunner mockRunner = Mockito.mock(DataflowRunner.class);
  when(mockRunner.run(any(Pipeline.class))).thenReturn(mockJob);

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockStreamingMetricResponse(ImmutableMap.of()));
  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  runner.run(p, mockRunner);
}
@Test
public void testRunBatchJobThatSucceeds() throws Exception {
  Pipeline p = Pipeline.create(options);
  PCollection<Integer> pc = p.apply(Create.of(1, 2, 3));
  PAssert.that(pc).containsInAnyOrder(1, 2, 3);

  DataflowPipelineJob mockJob = Mockito.mock(DataflowPipelineJob.class);
  when(mockJob.getState()).thenReturn(State.DONE);
  when(mockJob.getProjectId()).thenReturn("test-project");
  when(mockJob.getJobId()).thenReturn("test-job");

  DataflowRunner mockRunner = Mockito.mock(DataflowRunner.class);
  when(mockRunner.run(any(Pipeline.class))).thenReturn(mockJob);

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true /* tentative */));
  assertEquals(mockJob, runner.run(p, mockRunner));
}
@Test
public void testEmptyMetricUpdates() throws IOException {
  Job modelJob = new Job();
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(false);
  when(job.getDataflowOptions()).thenReturn(options);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;

  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(null /* this is how the APIs represent empty metrics */);
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.queryMetrics(null);
  assertThat(ImmutableList.copyOf(result.getCounters()), is(empty()));
  assertThat(ImmutableList.copyOf(result.getDistributions()), is(empty()));
}
State state =
    getStateWithRetries(
        BackOffAdapter.toGcpBackOff(STATUS_BACKOFF_FACTORY.withMaxRetries(0).backoff()),
        sleeper);
switch (state) {
  case DONE:
  case CANCELLED:
    LOG.info("Job {} finished with status {}.", getJobId(), state);
    break;
  case UPDATED:
    LOG.info(
        "Job {} has been updated and is running as the new job with id {}. "
            + "To access the updated job on the Dataflow monitoring console, "
            + "please navigate to {}",
        getJobId(),
        getReplacedByJob().getJobId(),
        MonitoringUtil.getJobMonitoringPageURL(
            getReplacedByJob().getProjectId(), getRegion(), getReplacedByJob().getJobId()));
    break;
  default:
    LOG.info("Job {} failed with status {}.", getJobId(), state);
}
public State mockWaitToFinishInState(State state) throws Exception {
  Dataflow.Projects.Locations.Jobs.Get statusRequest =
      mock(Dataflow.Projects.Locations.Jobs.Get.class);

  Job statusResponse = new Job();
  statusResponse.setCurrentState("JOB_STATE_" + state.name());
  if (state == State.UPDATED) {
    statusResponse.setReplacedByJobId(REPLACEMENT_JOB_ID);
  }
  when(mockJobs.get(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(statusRequest);
  when(statusRequest.execute()).thenReturn(statusResponse);

  DataflowPipelineJob job =
      new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, ImmutableMap.of());
  return job.waitUntilFinish(Duration.standardMinutes(1), null, fastClock, fastClock);
}
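Built on this helper, an individual test reduces to a single assertion; a sketch (the test name is illustrative):

@Test
public void testWaitToFinishUpdated() throws Exception {
  // An UPDATED job should surface State.UPDATED from waitUntilFinish.
  assertEquals(State.UPDATED, mockWaitToFinishInState(State.UPDATED));
}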
@Nullable
@VisibleForTesting
JobMetrics getJobMetrics(DataflowPipelineJob job) {
  JobMetrics metrics = null;
  try {
    metrics = dataflowClient.getJobMetrics(job.getJobId());
  } catch (IOException e) {
    LOG.warn("Failed to get job metrics: ", e);
  }
  return metrics;
}
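Since this accessor swallows the IOException and returns null, callers have to guard against both a failed RPC and an empty response; a sketch with assumed variable names (uses java.util.Collections and the Dataflow MetricUpdate model class):

// Guard (illustrative): treat a failed RPC (null JobMetrics) and an empty
// response the same way, as "no metric updates".
JobMetrics metrics = getJobMetrics(job);
List<MetricUpdate> updates =
    (metrics == null || metrics.getMetrics() == null)
        ? Collections.<MetricUpdate>emptyList()
        : metrics.getMetrics();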
try {
  finalState =
      job.waitUntilFinish(
          Duration.standardSeconds(options.getTestTimeoutSeconds()), messageHandler);
} catch (IOException e) {
  throw new RuntimeException(e);
}
if (finalState == null || finalState == State.RUNNING) {
  LOG.info(
      "Dataflow job {} took longer than {} seconds to complete, cancelling.",
      job.getJobId(),
      options.getTestTimeoutSeconds());
  try {
    job.cancel();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
@Test
public void testCancelUnterminatedJobThatSucceeds() throws IOException {
  Dataflow.Projects.Locations.Jobs.Update update =
      mock(Dataflow.Projects.Locations.Jobs.Update.class);
  when(mockJobs.update(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID), any(Job.class)))
      .thenReturn(update);
  when(update.execute()).thenReturn(new Job().setCurrentState("JOB_STATE_CANCELLED"));

  DataflowPipelineJob job =
      new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, null);

  assertEquals(State.CANCELLED, job.cancel());
  Job content = new Job();
  content.setProjectId(PROJECT_ID);
  content.setId(JOB_ID);
  content.setRequestedState("JOB_STATE_CANCELLED");
  verify(mockJobs).update(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID), eq(content));
  verifyNoMoreInteractions(mockJobs);
}
@Test
public void testCheckingForSuccessSkipsNonTentativeMetrics() throws Exception {
  DataflowPipelineJob job = spy(new DataflowPipelineJob(mockClient, "test-job", options, null));
  Pipeline p = TestPipeline.create(options);
  PCollection<Integer> pc = p.apply(Create.of(1, 2, 3));
  PAssert.that(pc).containsInAnyOrder(1, 2, 3);

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(buildJobMetrics(generateMockMetrics(true /* success */, false /* tentative */)));

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  runner.updatePAssertCount(p);
  doReturn(State.RUNNING).when(job).getState();
  assertThat(runner.checkForPAssertSuccess(job), equalTo(Optional.<Boolean>absent()));
}
"Job is already running in Google Cloud Platform, Ctrl-C will not cancel it.\n" + "To cancel the job in the cloud, run:\n> {}", MonitoringUtil.getGcloudCancelCommand(dataflowOptions, getJobId()))); return waitUntilFinish( duration, messageHandler,
@Test
public void testGetStateReturnsServiceState() throws Exception {
  Dataflow.Projects.Locations.Jobs.Get statusRequest =
      mock(Dataflow.Projects.Locations.Jobs.Get.class);

  Job statusResponse = new Job();
  statusResponse.setCurrentState("JOB_STATE_" + State.RUNNING.name());

  when(mockJobs.get(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(statusRequest);
  when(statusRequest.execute()).thenReturn(statusResponse);

  DataflowPipelineJob job =
      new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, ImmutableMap.of());

  assertEquals(
      State.RUNNING,
      job.getStateWithRetries(
          BackOffAdapter.toGcpBackOff(DataflowPipelineJob.STATUS_BACKOFF_FACTORY.backoff()),
          fastClock));
}
@Nullable
@VisibleForTesting
State waitUntilFinish(
    Duration duration,
    @Nullable MonitoringUtil.JobMessagesHandler messageHandler,
    Sleeper sleeper,
    NanoClock nanoClock)
    throws IOException, InterruptedException {
  return waitUntilFinish(
      duration, messageHandler, sleeper, nanoClock, new MonitoringUtil(dataflowClient));
}
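A typical invocation of this testing overload might look as follows; the five-minute bound is arbitrary, and MonitoringUtil.LoggingHandler is assumed as a convenient stand-in handler:

// Block for up to five minutes, logging service messages as they arrive;
// a null result means the job was still running when the bound elapsed.
State finalState =
    job.waitUntilFinish(
        Duration.standardMinutes(5),
        new MonitoringUtil.LoggingHandler(),
        Sleeper.DEFAULT,
        NanoClock.SYSTEM);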
@Test
public void testGetJobMetricsThatFailsForException() throws Exception {
  DataflowPipelineJob job = spy(new DataflowPipelineJob(mockClient, "test-job", options, null));
  Pipeline p = TestPipeline.create(options);
  p.apply(Create.of(1, 2, 3));

  when(mockClient.getJobMetrics(anyString())).thenThrow(new IOException());
  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  assertNull(runner.getJobMetrics(job));
}
@Override
public State getState() {
  if (terminalState != null) {
    return terminalState;
  }
  return getStateWithRetries(
      BackOffAdapter.toGcpBackOff(STATUS_BACKOFF_FACTORY.backoff()), Sleeper.DEFAULT);
}
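Because getState() short-circuits on the cached terminal state, polling it is cheap; an illustrative loop (the sleep interval is arbitrary):

// Illustrative polling loop: wait for a terminal state without waitUntilFinish.
while (!job.getState().isTerminal()) {
  Thread.sleep(10_000L);
}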
private JobMetrics getJobMetrics() throws IOException {
  if (cachedMetricResults != null) {
    // Metric results have been cached after the job reached a terminal state.
    return cachedMetricResults;
  }
  JobMetrics result = dataflowClient.getJobMetrics(dataflowPipelineJob.jobId);
  if (dataflowPipelineJob.getState().isTerminal()) {
    // Add the current query result to the cache.
    cachedMetricResults = result;
  }
  return result;
}
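The intended flow around this cache, sketched against the public PipelineResult API (the catch-all MetricsFilter is illustrative):

// Once the job is terminal, the first queryMetrics() call fills
// cachedMetricResults; subsequent calls are served without an RPC.
job.waitUntilFinish();
MetricQueryResults results =
    job.metrics().queryMetrics(MetricsFilter.builder().build());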