/**
 * Builds a predicate that classifies a failure as a connection problem when its
 * cause chain contains a connect failure, a socket/connect timeout, or a generic
 * I/O error.
 */
private static Predicate<Throwable> isConnectionProblemException() {
    return (throwable) -> {
        // Check each candidate exception type in turn, preserving the original
        // short-circuit evaluation order.
        if (ExceptionUtils.findThrowable(throwable, java.net.ConnectException.class).isPresent()) {
            return true;
        }
        if (ExceptionUtils.findThrowable(throwable, java.net.SocketTimeoutException.class).isPresent()) {
            return true;
        }
        if (ExceptionUtils.findThrowable(throwable, ConnectTimeoutException.class).isPresent()) {
            return true;
        }
        return ExceptionUtils.findThrowable(throwable, IOException.class).isPresent();
    };
}
@Test public void testTriggerSavepointForNonExistingJob() throws Exception { // Config final int numTaskManagers = 1; final int numSlotsPerTaskManager = 1; final Configuration config = new Configuration(); config.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString()); final MiniClusterWithClientResource cluster = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(config) .setNumberTaskManagers(numTaskManagers) .setNumberSlotsPerTaskManager(numSlotsPerTaskManager) .build()); cluster.before(); final ClusterClient<?> client = cluster.getClusterClient(); final JobID jobID = new JobID(); try { client.triggerSavepoint(jobID, null).get(); fail(); } catch (ExecutionException e) { assertTrue(ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class).isPresent()); assertTrue(ExceptionUtils.findThrowableWithMessage(e, jobID.toString()).isPresent()); } finally { cluster.after(); } }
@Override public void onFailure(ActionRequest action, Throwable failure, int restStatusCode, RequestIndexer indexer) throws Throwable { if (ExceptionUtils.findThrowable(failure, EsRejectedExecutionException.class).isPresent()) { indexer.add(action); } else { // rethrow all other failures throw failure; } }
// NOTE(review): fragment of a larger test method — `e` and `graph` are defined above this view.
// The failure chain must contain an IllegalStateException that names the job id and
// explains that the submitted job "is not a streaming job".
assertTrue(ExceptionUtils.findThrowable(e, IllegalStateException.class).isPresent()); assertTrue(ExceptionUtils.findThrowableWithMessage(e, graph.getJobID().toString()).isPresent()); assertTrue(ExceptionUtils.findThrowableWithMessage(e, "is not a streaming job").isPresent());
/**
 * Builds a predicate that is true when the cause chain contains a
 * {@link RestClientException} whose HTTP response status code satisfies
 * {@code statusCodePredicate}; false when no such exception is present.
 */
private static Predicate<Throwable> httpExceptionCodePredicate(Predicate<Integer> statusCodePredicate) {
    return (throwable) ->
        ExceptionUtils.findThrowable(throwable, RestClientException.class)
            // Collapse: extract the status code and test it in a single step.
            .map(restClientException ->
                statusCodePredicate.test(restClientException.getHttpResponseStatus().code()))
            .orElse(false);
}
/** findThrowable must locate a cause of the requested type inside the cause chain. */
@Test
public void testFindThrowableByType() {
    final Throwable wrapped = new RuntimeException(new IllegalStateException());
    assertTrue(ExceptionUtils.findThrowable(wrapped, IllegalStateException.class).isPresent());
}
@Override
public void run() {
    try {
        // Submit in attached mode; this call blocks until the job terminates.
        client.setDetached(false);
        client.submitJob(jobGraph, KafkaConsumerTestBase.class.getClassLoader());
    } catch (Throwable t) {
        // The job is expected to be cancelled by the test, so a
        // JobCancellationException is the normal outcome; record only
        // failures that are NOT the expected cancellation.
        if (!ExceptionUtils.findThrowable(t, JobCancellationException.class).isPresent()) {
            error.set(t);
        }
    }
    // NOTE(review): the trailing tokens close the enclosing anonymous class and
    // the statement it is an argument of (not visible in this view).
} });
@Override
public void run() {
    try {
        // Submit in attached mode; this call blocks until the job terminates.
        client.setDetached(false);
        client.submitJob(jobGraph, KafkaConsumerTestBase.class.getClassLoader());
    } catch (Throwable t) {
        // Cancellation is the expected way this job ends; only log and record
        // unexpected failures (error is presumably a Tuple holding the first
        // observed throwable in field f0 — confirm against the caller).
        if (!ExceptionUtils.findThrowable(t, JobCancellationException.class).isPresent()) {
            LOG.warn("Got exception during execution", t);
            error.f0 = t;
        }
    }
    // NOTE(review): the trailing tokens close the enclosing anonymous class declaration.
} };
/** Submitting a job graph with no vertices must be rejected with an "empty" error message. */
@Test
public void testSubmitEmptyJobGraph() throws Exception {
    final JobGraph emptyJobGraph = new JobGraph("Testing job");
    // The rejection message (anywhere in the cause chain) must mention "empty".
    final Predicate<Throwable> mentionsEmpty = throwable ->
        throwable.getMessage() != null && throwable.getMessage().contains("empty");
    runJobSubmissionTest(
        emptyJobGraph,
        e -> ExceptionUtils.findThrowable(e, mentionsEmpty).isPresent());
}
/**
 * Requesting the result of a job id unknown to the cluster must fail with a
 * "Could not find Flink job" cause; any other failure is rethrown to fail the test.
 */
@Test
public void testNonExistingJobRetrieval() throws Exception {
    final JobID unknownJobId = new JobID();
    try {
        client.requestJobResult(unknownJobId).get();
        fail();
    } catch (Exception exception) {
        final boolean hasExpectedCause = ExceptionUtils.findThrowable(
            exception,
            candidate -> candidate.getMessage() != null
                && candidate.getMessage().contains("Could not find Flink job"))
            .isPresent();
        if (!hasExpectedCause) {
            // Not the failure we were waiting for -> surface it.
            throw exception;
        }
    }
}
@Test public void testDoNotCancelJobIfSavepointFails() throws Exception { setUpWithCheckpointInterval(10L); try { Files.setPosixFilePermissions(savepointDirectory, Collections.emptySet()); } catch (IOException e) { Assume.assumeNoException(e); } try { cancelWithSavepoint(); } catch (Exception e) { assertThat(ExceptionUtils.findThrowable(e, CheckpointTriggerException.class).isPresent(), equalTo(true)); } final JobStatus jobStatus = clusterClient.getJobStatus(jobGraph.getJobID()).get(60, TimeUnit.SECONDS); assertThat(jobStatus, equalTo(JobStatus.RUNNING)); // assert that checkpoints are continued to be triggered triggerCheckpointLatch = new CountDownLatch(1); assertThat(triggerCheckpointLatch.await(60L, TimeUnit.SECONDS), equalTo(true)); }
/** Asserts that reading all accumulators from the given result fails with a {@link CustomException} cause. */
private static void assertAccumulatorsShouldFail(JobExecutionResult result) {
    try {
        result.getAllAccumulatorResults();
        fail("Should have failed");
    } catch (Exception ex) {
        // The failure chain must contain the user-defined CustomException.
        final boolean causedByCustomException =
            ExceptionUtils.findThrowable(ex, CustomException.class).isPresent();
        assertTrue(causedByCustomException);
    }
}
}
/** A job graph referencing a blob key that was never uploaded must fail submission with an I/O error. */
@Test
public void testMissingJarBlob() throws Exception {
    final JobGraph graphWithMissingBlob = getJobGraphWithMissingBlobKey();
    runJobSubmissionTest(
        graphWithMissingBlob,
        e -> ExceptionUtils.findThrowable(e, IOException.class).isPresent());
}
/**
 * Runs the given consumer and verifies that exactly the expected exception instance is
 * thrown out of open()/run(); the consumer is closed afterwards.
 */
private void testFailingConsumerLifecycle(FlinkKafkaConsumerBase<String> testKafkaConsumer, @Nonnull Exception expectedException) throws Exception {
    try {
        setupConsumer(testKafkaConsumer);
        testKafkaConsumer.run(new TestSourceContext<>());

        // Reaching this point means neither open() nor run() threw.
        fail("Exception should have been thrown from open / run method of FlinkKafkaConsumerBase.");
    } catch (Exception e) {
        // Match by equality against the exact expected instance, anywhere in the chain.
        final boolean threwExpected =
            ExceptionUtils.findThrowable(e, throwable -> throwable.equals(expectedException)).isPresent();
        assertThat(threwExpected, is(true));
    }
    testKafkaConsumer.close();
}
/**
 * A vertex that throws during master-side initialization must fail job submission,
 * surfacing the vertex's "Test exception." message in the cause chain.
 */
@Test
public void testExceptionInInitializeOnMaster() throws Exception {
    final JobVertex vertexFailingOnMaster = new FailingJobVertex("Failing job vertex");
    vertexFailingOnMaster.setInvokableClass(NoOpInvokable.class);

    final JobGraph graph = new JobGraph("Failing testing job", vertexFailingOnMaster);
    runJobSubmissionTest(
        graph,
        e -> ExceptionUtils.findThrowable(
            e, candidate -> "Test exception.".equals(candidate.getMessage())).isPresent());
}
// NOTE(review): fragment — `e` is defined above this view. The cause chain is searched
// by fully-qualified class NAME rather than by Class object; presumably because
// SuccessException is loaded by the user-code classloader of the submitted jar, so an
// instanceof / Class-based check against a locally loaded class would not match — confirm.
Optional<Throwable> exception = ExceptionUtils.findThrowable(e, candidate -> candidate.getClass().getCanonicalName().equals("org.apache.flink.test.classloading.jar.CheckpointedStreamingProgram.SuccessException"));
/**
 * Disposing a savepoint must fail with a {@link FlinkRuntimeException} when the job
 * manager answers with an unexpected (wrong-typed) response.
 */
@Test
public void testDisposeSavepointUnknownResponse() throws Exception {
    final Configuration configuration = new Configuration();
    // FIX: removed unused local `Time timeout` — it was declared but never read.
    final String savepointPath = "foobar";

    final ActorGateway jobManagerGateway = new TestDisposeWithWrongResponseActorGateway();
    final TestClusterClient clusterClient = new TestClusterClient(configuration, jobManagerGateway);
    // FIX: disposeSavepoint() now happens inside try/finally so the client is shut
    // down even if the call itself throws synchronously (previously it ran before
    // the try block and a synchronous failure leaked the client).
    try {
        final CompletableFuture<Acknowledge> acknowledgeCompletableFuture =
            clusterClient.disposeSavepoint(savepointPath);
        acknowledgeCompletableFuture.get();
        fail("Dispose operation should have failed.");
    } catch (ExecutionException e) {
        // The wrong-typed response must surface as a FlinkRuntimeException cause.
        assertTrue(ExceptionUtils.findThrowable(e, FlinkRuntimeException.class).isPresent());
    } finally {
        clusterClient.shutdown();
    }
}
// NOTE(review): ragged fragment — the enclosing try opens before this view and the
// if-body continues past it.
client.submitJob(jobGraph, SavepointITCase.class.getClassLoader());
} catch (Exception e) {
    // Expected failure mode: the chain must contain BOTH a JobExecutionException
    // wrapper AND a FileNotFoundException cause (presumably a missing savepoint
    // path — confirm against the surrounding test); anything else is rethrown.
    Optional<JobExecutionException> expectedJobExecutionException = ExceptionUtils.findThrowable(e, JobExecutionException.class);
    Optional<FileNotFoundException> expectedFileNotFoundException = ExceptionUtils.findThrowable(e, FileNotFoundException.class);
    if (!(expectedJobExecutionException.isPresent() && expectedFileNotFoundException.isPresent())) {
        throw e;
// NOTE(review): fragment — `mockEnvironment` and `expectedException` are defined above
// this view. The externally reported failure cause must contain the expected exception;
// both Optionals are unwrapped with get(), so the caller presumably guarantees they are
// present before reaching this assertion — confirm.
assertTrue(ExceptionUtils.findThrowable( mockEnvironment.getActualExternalFailureCause().get(), expectedException.get()).isPresent());
// NOTE(review): fragment — `e` is caught above this view. The job is expected to
// terminate via the test's SuccessException marker in the failure chain.
assertTrue(ExceptionUtils.findThrowable(e, SuccessException.class).isPresent());