/**
 * Verifies that when a checkpoint already exists (pointing at PARTITION_2),
 * the calculator emits exactly one work unit for the later partition
 * (PARTITION_3) and advances the next-run checkpoint state to it.
 */
@Test
public void testComputeWorkUnitsWithExistentCheckpoint() throws IOException {
    // Create two data partitions on either side of the stored checkpoint.
    this.fileSystem.mkdirs(new Path(this.dataPath, PARTITION_1));
    this.fileSystem.mkdirs(new Path(this.dataPath, PARTITION_3));
    this.partitionManager =
        new HDFSPartitionManager(JOB_NAME, this.metadataPath, this.dataPath, this.fileSystem);
    this.metadataManager =
        new HDFSMetadataManager(
            this.fileSystem,
            new Path(this.metadataPath, JOB_NAME).toString(),
            new AtomicBoolean(true));

    // Persist a pre-existing checkpoint at PARTITION_2 before the calculator runs.
    final StringValue checkpointValue = new StringValue(PARTITION_2);
    this.metadataManager.set(MetadataConstants.CHECKPOINT_KEY, checkpointValue);
    this.metadataManager.saveChanges();

    final ParquetWorkUnitCalculator calculator =
        new ParquetWorkUnitCalculator(this.hiveConfig, this.fileSystem);
    calculator.initPreviousRunState(this.metadataManager);
    final IWorkUnitCalculator.IWorkUnitCalculatorResult rawResult = calculator.computeWorkUnits();
    Assert.assertTrue(rawResult instanceof ParquetWorkUnitCalculatorResult);
    final ParquetWorkUnitCalculatorResult typedResult = (ParquetWorkUnitCalculatorResult) rawResult;

    // Only the partition beyond the checkpoint should be scheduled, and the
    // next-run state should advance to that same partition.
    final List<String> workUnits = typedResult.getWorkUnits();
    Assert.assertEquals(1, workUnits.size());
    Assert.assertEquals(PARTITION_3, workUnits.get(0));
    Assert.assertTrue(typedResult.getNextRunState().getPartition().isPresent());
    Assert.assertEquals(PARTITION_3, typedResult.getNextRunState().getPartition().get());
}