@Override
public void apply() throws Exception {
  // Register every key in the first batch as a partition of the dataset.
  for (PartitionKey key : partitionKeys1) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Add a partition for each of the prepared keys.
  for (PartitionKey key : partitionKeys) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Create one partition per key in the collection.
  for (PartitionKey key : partitionKeys) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Add a single partition for the captured key.
  dataset.getPartitionOutput(partitionKey).addPartition();
}
});
@Override
public void apply() throws Exception {
  // Register each key of the first batch as a new partition.
  for (PartitionKey key : partitionKeys1) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Create the partition for the captured key.
  dataset.getPartitionOutput(partitionKey).addPartition();
}
});
@Override
public void apply() throws Exception {
  // Add the first captured key as a partition.
  dataset.getPartitionOutput(partitionKey1).addPartition();
}
});
@Override public void apply() throws Exception { // add 2 more partitions after the first 3. We do not need to keep track of these, because they will be dropped // and not consumed for (int i = 0; i < 2; i++) { dataset.getPartitionOutput(generateUniqueKey()).addPartition(); } } });
@Override
public void apply() throws Exception {
  // Add the second captured key as a partition.
  dataset.getPartitionOutput(partitionKey2).addPartition();
}
});
@Override
public void apply() throws Exception {
  // Register every key in the second batch as a partition.
  for (PartitionKey key : partitionKeys2) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Create one partition per key of the second batch.
  for (PartitionKey key : partitionKeys2) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Add the first batch of keys as partitions...
  for (PartitionKey first : partitionKeys1) {
    dataset.getPartitionOutput(first).addPartition();
  }
  // ...then the second batch.
  for (PartitionKey second : partitionKeys2) {
    dataset.getPartitionOutput(second).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Add a partition for each key in the second batch.
  for (PartitionKey key : partitionKeys2) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Add a partition for each key in the first batch.
  for (PartitionKey key : partitionKeys1) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Register the captured key as a partition.
  dataset.getPartitionOutput(partitionKey).addPartition();
}
});
@Override
public void apply() throws Exception {
  // Create one partition per prepared key.
  for (PartitionKey key : partitionKeys) {
    dataset.getPartitionOutput(key).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  // Add the captured key as a new partition.
  dataset.getPartitionOutput(partitionKey).addPartition();
}
});
@Override
public BasicPartition call() throws Exception {
  // Create the partition for the captured key, then wrap its path and key
  // in a BasicPartition for the caller.
  PartitionOutput output = dataset.getPartitionOutput(key);
  output.addPartition();
  return new BasicPartition((PartitionedFileSetDataset) dataset,
                            output.getRelativePath(), output.getPartitionKey());
}
});
@Override public void apply() throws Exception { // drop all existing partitions (2 of which are not consumed) for (PartitionDetail partitionDetail : dataset.getPartitions(PartitionFilter.ALWAYS_MATCH)) { dataset.dropPartition(partitionDetail.getPartitionKey()); } // add 5 new ones for (PartitionKey partitionKey : partitionKeys2) { dataset.getPartitionOutput(partitionKey).addPartition(); } } });
/**
 * Creates a partition for the given key, writing a single byte into a file at the
 * partition's output location, and asserts the file did not exist before and does after.
 *
 * @param pfs the partitioned file set to add the partition to
 * @param key the partition key to register
 * @param fileName name of the file to create inside the partition directory
 * @param intToWrite the single byte value written to the file
 * @return the location of the file that was written
 * @throws IOException if writing to the location fails
 */
private Location createPartition(PartitionedFileSet pfs, PartitionKey key, String fileName,
                                 int intToWrite) throws IOException {
  PartitionOutput output = pfs.getPartitionOutput(key);
  Location location = output.getLocation().append(fileName);
  Assert.assertFalse(location.exists());
  try (OutputStream stream = location.getOutputStream()) {
    stream.write(intToWrite);
  }
  Assert.assertTrue(location.exists());
  // Only register the partition once its file has been fully written.
  output.addPartition();
  return location;
}