/**
 * Snapshots a fresh harness running the given user function, then verifies that a
 * second harness initialized from that snapshot reports the function as restored.
 *
 * @param userFunction the user function under test; queried via {@code isRestored()}
 * @throws Exception if harness setup, snapshotting, or restore fails
 */
private static void testUDF(TestUserFunction userFunction) throws Exception {
    OperatorSubtaskState capturedState;

    // First run: take a snapshot before any restore has happened.
    try (AbstractStreamOperatorTestHarness<Integer> harness = createTestHarness(userFunction)) {
        harness.open();
        capturedState = harness.snapshot(0L, 0L);
        assertFalse(userFunction.isRestored());
    }

    // Second run: initialize from the captured state and expect the restore flag to flip.
    try (AbstractStreamOperatorTestHarness<Integer> harness = createTestHarness(userFunction)) {
        harness.initializeState(capturedState);
        harness.open();
        assertTrue(userFunction.isRestored());
    }
}
/**
 * Verifies that calling {@code initializeState(...)} after {@code open()} is rejected
 * with an {@link IllegalStateException} carrying the expected message.
 */
@Test
public void testInitializeAfterOpenning() throws Throwable {
    expectedException.expect(IllegalStateException.class);
    expectedException.expectMessage(containsString("TestHarness has already been initialized."));

    final AbstractStreamOperatorTestHarness<Integer> harness =
            new AbstractStreamOperatorTestHarness<>(
                    new AbstractStreamOperator<Integer>() {}, 1, 1, 0);
    harness.setup();
    harness.open();

    // Initializing after open() must fail with the exception expected above.
    harness.initializeState(new OperatorSubtaskState());
}
}
"kafka-consumer-migration-test-flink" + testMigrateVersion + "-empty-state-snapshot")); testHarness.open();
testHarness.open();
/** * Test restoring from an legacy empty state, when no partitions could be found for topics. */ @Test public void testRestoreFromEmptyStateNoPartitions() throws Exception { final DummyFlinkKafkaConsumer<String> consumerFunction = new DummyFlinkKafkaConsumer<>( Collections.<KafkaTopicPartition>emptyList(), FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED); StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator = new StreamSource<>(consumerFunction); final AbstractStreamOperatorTestHarness<String> testHarness = new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0); testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime); testHarness.setup(); // restore state from binary snapshot file testHarness.initializeState( OperatorSnapshotUtil.getResourceFilename( "kafka-consumer-migration-test-flink" + testMigrateVersion + "-empty-state-snapshot")); testHarness.open(); // assert that no partitions were found and is empty assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets() != null); assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty()); // assert that no state was restored assertTrue(consumerFunction.getRestoredState().isEmpty()); consumerOperator.close(); consumerOperator.cancel(); }
"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot")); testHarness.open();
@Override public void run() { try { testHarness.open(); sourceOperator.run(checkpointLock, new TestStreamStatusMaintainer(),
() -> { try { testHarness.open(); sourceOperator.run( checkpointLock,
testHarness.open();
testHarness.open(); sourceOperator.run( testHarness.getCheckpointLock(),
testHarness.open(); sourceOperator.run(checkpointLock, new TestStreamStatusMaintainer(),
testHarness.open(); sourceOperator.run( testHarness.getCheckpointLock(),