// Runs in a concurrent checker thread; triggers a snapshot while the operator is busy.
// NOTE(review): the enclosing anonymous class header (presumably a CheckedThread) starts
// before this chunk, so only the method body plus its closers are visible here.
@Override
public void go() throws Exception {
    testHarness.snapshot(1L, 1000L);
}
};
// Runs in a concurrent checker thread; triggers a snapshot while the operator is busy.
// NOTE(review): the enclosing anonymous class header (presumably a CheckedThread) starts
// before this chunk, so only the method body plus its closers are visible here.
@Override
public void go() throws Exception {
    testHarness.snapshot(1L, 1000L);
}
};
// Runs in a concurrent checker thread; triggers a snapshot while the operator is busy.
// NOTE(review): the enclosing anonymous class header (presumably a CheckedThread) starts
// before this chunk, so only the method body plus its closers are visible here.
@Override
public void go() throws Exception {
    testHarness.snapshot(1L, 1000L);
}
};
@Override public void go() throws Exception { // this should block at first, since there are still two pending records that needs to be flushed testHarness.snapshot(123L, 123L); } };
@Override public void go() throws Exception { // this should block until all records are flushed; // if the snapshot implementation returns before pending records are flushed, testHarness.snapshot(123L, 123L); } };
/**
 * Verifies that completing checkpoint 1 commits the records of checkpoints 0 and 1
 * ("42", "43") exactly once, while the data of the later, not-yet-notified checkpoint
 * stays pending on disk.
 */
@Test
public void testNotifyOfCompletedCheckpoint() throws Exception {
    harness.open();
    harness.processElement("42", 0);
    harness.snapshot(0, 1);
    harness.processElement("43", 2);
    harness.snapshot(1, 3);
    harness.processElement("44", 4);
    harness.snapshot(2, 5);
    // notify only checkpoint 1 — everything up to and including "43" must be committed
    harness.notifyOfCompletedCheckpoint(1);
    assertExactlyOnce(Arrays.asList("42", "43"));
    assertEquals(2, tmpDirectory.listFiles().size()); // one for checkpointId 2 and second for the currentTransaction
}
/**
 * Verifies that a warning is logged on commit when the open transaction has been
 * running for longer than warningRatio * transactionTimeout (here: 502 ms of a
 * 1000 ms timeout with a 0.5 warning ratio).
 */
@Test
public void testLogTimeoutAlmostReachedWarningDuringCommit() throws Exception {
    clock.setEpochMilli(0);
    final long transactionTimeout = 1000;
    final double warningRatio = 0.5;
    sinkFunction.setTransactionTimeout(transactionTimeout);
    sinkFunction.enableTransactionTimeoutWarnings(warningRatio);
    harness.open();
    harness.snapshot(0, 1);
    // advance time just past the warning threshold (500 ms) before the commit happens
    final long elapsedTime = (long) ((double) transactionTimeout * warningRatio + 2);
    clock.setEpochMilli(elapsedTime);
    harness.notifyOfCompletedCheckpoint(1);
    final List<String> logMessages =
        loggingEvents.stream().map(LoggingEvent::getRenderedMessage).collect(Collectors.toList());
    assertThat(
        logMessages,
        hasItem(containsString("has been open for 502 ms. "
            + "This is close to or even exceeding the transaction timeout of 1000 ms.")));
}
/** Tests that any item failure in the listener callbacks is rethrown on an immediately following checkpoint. */ @Test public void testItemFailureRethrownOnCheckpoint() throws Throwable { final DummyElasticsearchSink<String> sink = new DummyElasticsearchSink<>( new HashMap<String, String>(), new SimpleSinkFunction<String>(), new NoOpFailureHandler()); final OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink)); testHarness.open(); // setup the next bulk request, and its mock item failures sink.setMockItemFailuresListForNextBulkItemResponses(Collections.singletonList(new Exception("artificial failure for record"))); testHarness.processElement(new StreamRecord<>("msg")); verify(sink.getMockBulkProcessor(), times(1)).add(any(IndexRequest.class)); // manually execute the next bulk request sink.manualBulkRequestWithAllPendingRequests(); try { testHarness.snapshot(1L, 1000L); } catch (Exception e) { // the snapshot should have failed with the failure Assert.assertTrue(e.getCause().getCause().getMessage().contains("artificial failure for record")); // test succeeded return; } Assert.fail(); }
/**
 * Verifies that when a snapshot fails (non-writable directory) after a successful
 * earlier snapshot, the job can be restored from that earlier snapshot and still
 * delivers "42" and "43" exactly once, leaving no leftover transaction files.
 */
@Test
public void testFailBeforeNotify() throws Exception {
    harness.open();
    harness.processElement("42", 0);
    harness.snapshot(0, 1);
    harness.processElement("43", 2);
    OperatorSubtaskState snapshot = harness.snapshot(1, 3);
    // make the target directory unwritable so the next snapshot fails
    tmpDirectory.setWritable(false);
    try {
        harness.processElement("44", 4);
        harness.snapshot(2, 5);
        fail("something should fail");
    } catch (Exception ex) {
        if (!(ex.getCause() instanceof ContentDump.NotWritableException)) {
            throw ex;
        }
        // ignore
    }
    closeTestHarness();
    tmpDirectory.setWritable(true);
    // restore from the last successful snapshot
    setUpTestHarness();
    harness.initializeState(snapshot);
    assertExactlyOnce(Arrays.asList("42", "43"));
    closeTestHarness();
    assertEquals(0, tmpDirectory.listFiles().size());
}
/** Tests that any bulk failure in the listener callbacks is rethrown on an immediately following checkpoint. */ @Test public void testBulkFailureRethrownOnCheckpoint() throws Throwable { final DummyElasticsearchSink<String> sink = new DummyElasticsearchSink<>( new HashMap<String, String>(), new SimpleSinkFunction<String>(), new NoOpFailureHandler()); final OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink)); testHarness.open(); // setup the next bulk request, and let the whole bulk request fail sink.setFailNextBulkRequestCompletely(new Exception("artificial failure for bulk request")); testHarness.processElement(new StreamRecord<>("msg")); verify(sink.getMockBulkProcessor(), times(1)).add(any(IndexRequest.class)); // manually execute the next bulk request sink.manualBulkRequestWithAllPendingRequests(); try { testHarness.snapshot(1L, 1000L); } catch (Exception e) { // the snapshot should have failed with the bulk request failure Assert.assertTrue(e.getCause().getCause().getMessage().contains("artificial failure for bulk request")); // test succeeded return; } Assert.fail(); }
/** * This test is meant to assure that testAtLeastOnceSink is valid by testing that if flushing is disabled, * the snapshot method does indeed finishes without waiting for pending requests; * we set a timeout because the test will not finish if the logic is broken. */ @Test(timeout = 5000) public void testDoesNotWaitForPendingRequestsIfFlushingDisabled() throws Exception { final DummyElasticsearchSink<String> sink = new DummyElasticsearchSink<>( new HashMap<String, String>(), new SimpleSinkFunction<String>(), new DummyRetryFailureHandler()); sink.disableFlushOnCheckpoint(); // disable flushing final OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink)); testHarness.open(); // setup the next bulk request, and let bulk request succeed sink.setMockItemFailuresListForNextBulkItemResponses(Collections.singletonList(new Exception("artificial failure for record"))); testHarness.processElement(new StreamRecord<>("msg-1")); verify(sink.getMockBulkProcessor(), times(1)).add(any(IndexRequest.class)); // the snapshot should not block even though we haven't flushed the bulk request testHarness.snapshot(1L, 1000L); testHarness.close(); }
/**
 * Verifies that a warning is logged during state recovery when the recovered
 * transaction has already been open for longer than warningRatio * transactionTimeout.
 */
@Test
public void testLogTimeoutAlmostReachedWarningDuringRecovery() throws Exception {
    clock.setEpochMilli(0);
    final long transactionTimeout = 1000;
    final double warningRatio = 0.5;
    sinkFunction.setTransactionTimeout(transactionTimeout);
    sinkFunction.enableTransactionTimeoutWarnings(warningRatio);
    harness.open();
    final OperatorSubtaskState snapshot = harness.snapshot(0, 1);
    // advance time past the warning threshold (500 ms) before recovery happens
    final long elapsedTime = (long) ((double) transactionTimeout * warningRatio + 2);
    clock.setEpochMilli(elapsedTime);
    closeTestHarness();
    // restart with a fresh harness and restore the old transaction state
    setUpTestHarness();
    sinkFunction.setTransactionTimeout(transactionTimeout);
    sinkFunction.enableTransactionTimeoutWarnings(warningRatio);
    harness.initializeState(snapshot);
    harness.open();
    final List<String> logMessages =
        loggingEvents.stream().map(LoggingEvent::getRenderedMessage).collect(Collectors.toList());
    closeTestHarness();
    assertThat(
        logMessages,
        hasItem(containsString("has been open for 502 ms. "
            + "This is close to or even exceeding the transaction timeout of 1000 ms.")));
}
/**
 * Verifies that data belonging to a snapshot whose completion notification was missed
 * (snapshot 1 below) is still persisted when a later checkpoint (snapshot 2) is notified.
 */
@Test
public void testDataPersistenceUponMissedNotify() throws Exception {
    S sink = createSink();
    OneInputStreamOperatorTestHarness<IN, IN> testHarness =
        new OneInputStreamOperatorTestHarness<>(sink);
    testHarness.open();
    int elementCounter = 1;
    int snapshotCount = 0;
    // batch 0: snapshot and notify normally
    for (int x = 0; x < 20; x++) {
        testHarness.processElement(new StreamRecord<>(generateValue(elementCounter, 0)));
        elementCounter++;
    }
    testHarness.snapshot(snapshotCount++, 0);
    testHarness.notifyOfCompletedCheckpoint(snapshotCount - 1);
    // batch 1: snapshot taken but its notification is deliberately skipped
    for (int x = 0; x < 20; x++) {
        testHarness.processElement(new StreamRecord<>(generateValue(elementCounter, 1)));
        elementCounter++;
    }
    testHarness.snapshot(snapshotCount++, 0);
    // batch 2: snapshot and notify — must also cover the missed batch 1
    for (int x = 0; x < 20; x++) {
        testHarness.processElement(new StreamRecord<>(generateValue(elementCounter, 2)));
        elementCounter++;
    }
    testHarness.snapshot(snapshotCount++, 0);
    testHarness.notifyOfCompletedCheckpoint(snapshotCount - 1);
    verifyResultsDataPersistenceUponMissedNotify(sink);
}
/** * This test is meant to assure that testAtLeastOnceProducer is valid by testing that if flushing is disabled, * the snapshot method does indeed finishes without waiting for pending records; * we set a timeout because the test will not finish if the logic is broken. */ @SuppressWarnings("unchecked") @Test(timeout = 5000) public void testDoesNotWaitForPendingRecordsIfFlushingDisabled() throws Throwable { final DummyFlinkKafkaProducer<String> producer = new DummyFlinkKafkaProducer<>( FakeStandardProducerConfig.get(), new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()), null); producer.setFlushOnCheckpoint(false); final KafkaProducer<?, ?> mockProducer = producer.getMockKafkaProducer(); final OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(producer)); testHarness.open(); testHarness.processElement(new StreamRecord<>("msg")); // make sure that all callbacks have not been completed verify(mockProducer, times(1)).send(any(ProducerRecord.class), any(Callback.class)); // should return even if there are pending records testHarness.snapshot(123L, 123L); testHarness.close(); }
/** * Test ensuring that if a snapshot call happens right after an async exception is caught, it should be rethrown. */ @Test public void testAsyncErrorRethrownOnCheckpoint() throws Throwable { final DummyFlinkKafkaProducer<String> producer = new DummyFlinkKafkaProducer<>( FakeStandardProducerConfig.get(), new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()), null); OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(producer)); testHarness.open(); testHarness.processElement(new StreamRecord<>("msg-1")); // let the message request return an async exception producer.getPendingCallbacks().get(0).onCompletion(null, new Exception("artificial async exception")); try { testHarness.snapshot(123L, 123L); } catch (Exception e) { // the next invoke should rethrow the async exception Assert.assertTrue(e.getCause().getMessage().contains("artificial async exception")); // test succeeded return; } Assert.fail(); }
/**
 * Verifies that a commit failure during recovery is rethrown while the transaction is
 * still within its timeout, but is ignored once the transaction timeout has elapsed
 * and ignoreFailuresAfterTransactionTimeout() is set.
 */
@Test
public void testIgnoreCommitExceptionDuringRecovery() throws Exception {
    clock.setEpochMilli(0);
    harness.open();
    harness.processElement("42", 0);
    final OperatorSubtaskState snapshot = harness.snapshot(0, 1);
    harness.notifyOfCompletedCheckpoint(1);
    // make subsequent commits fail
    throwException.set(true);
    closeTestHarness();
    setUpTestHarness();
    final long transactionTimeout = 1000;
    sinkFunction.setTransactionTimeout(transactionTimeout);
    sinkFunction.ignoreFailuresAfterTransactionTimeout();
    // within the timeout the commit failure must still surface
    try {
        harness.initializeState(snapshot);
        fail("Expected exception not thrown");
    } catch (RuntimeException e) {
        assertEquals("Expected exception", e.getMessage());
    }
    // past the timeout the same failure must be ignored
    clock.setEpochMilli(transactionTimeout + 1);
    harness.initializeState(snapshot);
    assertExactlyOnce(Collections.singletonList("42"));
}
@Test public void testSnapshotAndRestore() throws Exception { LegacyKeyedProcessOperator<Integer, Integer, String> operator = new LegacyKeyedProcessOperator<>(new BothTriggeringFlatMapFunction()); OneInputStreamOperatorTestHarness<Integer, String> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO); testHarness.setup(); testHarness.open(); testHarness.processElement(new StreamRecord<>(5, 12L)); // snapshot and restore from scratch OperatorSubtaskState snapshot = testHarness.snapshot(0, 0); testHarness.close(); operator = new LegacyKeyedProcessOperator<>(new BothTriggeringFlatMapFunction()); testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO); testHarness.setup(); testHarness.initializeState(snapshot); testHarness.open(); testHarness.setProcessingTime(5); testHarness.processWatermark(new Watermark(6)); ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>(); expectedOutput.add(new StreamRecord<>("PROC:1777")); expectedOutput.add(new StreamRecord<>("EVENT:1777", 6L)); expectedOutput.add(new Watermark(6)); TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput()); testHarness.close(); }
@Test public void testSnapshotAndRestore() throws Exception { final int expectedKey = 5; KeyedProcessOperator<Integer, Integer, String> operator = new KeyedProcessOperator<>(new BothTriggeringFlatMapFunction(expectedKey)); OneInputStreamOperatorTestHarness<Integer, String> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO); testHarness.setup(); testHarness.open(); testHarness.processElement(new StreamRecord<>(expectedKey, 12L)); // snapshot and restore from scratch OperatorSubtaskState snapshot = testHarness.snapshot(0, 0); testHarness.close(); operator = new KeyedProcessOperator<>(new BothTriggeringFlatMapFunction(expectedKey)); testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO); testHarness.setup(); testHarness.initializeState(snapshot); testHarness.open(); testHarness.setProcessingTime(5); testHarness.processWatermark(new Watermark(6)); ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>(); expectedOutput.add(new StreamRecord<>("PROC:1777")); expectedOutput.add(new StreamRecord<>("EVENT:1777", 6L)); expectedOutput.add(new Watermark(6)); TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput()); testHarness.close(); }
@Test public void testSnapshotAndRestoreWrappedCheckpointedFunction() throws Exception { StreamMap<Integer, Integer> operator = new StreamMap<>( new WrappingTestFun(new WrappingTestFun(new InnerTestFun()))); OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(operator); testHarness.setup(); testHarness.open(); testHarness.processElement(new StreamRecord<>(5, 12L)); // snapshot and restore from scratch OperatorSubtaskState snapshot = testHarness.snapshot(0, 0); testHarness.close(); InnerTestFun innerTestFun = new InnerTestFun(); operator = new StreamMap<>(new WrappingTestFun(new WrappingTestFun(innerTestFun))); testHarness = new OneInputStreamOperatorTestHarness<>(operator); testHarness.setup(); testHarness.initializeState(snapshot); testHarness.open(); Assert.assertTrue(innerTestFun.wasRestored); testHarness.close(); }
@Test public void testSnapshotAndRestoreWrappedListCheckpointed() throws Exception { StreamMap<Integer, Integer> operator = new StreamMap<>( new WrappingTestFun(new WrappingTestFun(new InnerTestFunList()))); OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(operator); testHarness.setup(); testHarness.open(); testHarness.processElement(new StreamRecord<>(5, 12L)); // snapshot and restore from scratch OperatorSubtaskState snapshot = testHarness.snapshot(0, 0); testHarness.close(); InnerTestFunList innerTestFun = new InnerTestFunList(); operator = new StreamMap<>(new WrappingTestFun(new WrappingTestFun(innerTestFun))); testHarness = new OneInputStreamOperatorTestHarness<>(operator); testHarness.setup(); testHarness.initializeState(snapshot); testHarness.open(); Assert.assertTrue(innerTestFun.wasRestored); testHarness.close(); }