public void deleteMatchingPartitionsByTime(long upperLimit) throws IOException {
  // Guard against degenerate limits: a non-positive bound matches nothing useful,
  // and Long.MAX_VALUE would drop every partition.
  if (upperLimit > 0 && upperLimit < Long.MAX_VALUE) {
    // Match all partitions whose snapshot time is strictly below the limit.
    PartitionFilter filter = PartitionFilter.builder()
      .addRangeCondition(SNAPSHOT_FIELD, null, upperLimit)
      .build();
    Set<PartitionDetail> partitions = files.getPartitions(filter);
    for (PartitionDetail partition : partitions) {
      files.dropPartition(partition.getPartitionKey());
    }
  }
}
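// A hedged usage sketch for the method above (not from the source): compute a
// cutoff timestamp and drop every partition whose SNAPSHOT_FIELD value falls
// strictly below it. RETENTION_MILLIS, cleanupOldSnapshots, and the use of
// java.util.concurrent.TimeUnit are illustrative assumptions.
private static final long RETENTION_MILLIS = TimeUnit.DAYS.toMillis(30);

public void cleanupOldSnapshots() throws IOException {
  long cutoff = System.currentTimeMillis() - RETENTION_MILLIS;
  // The guard inside deleteMatchingPartitionsByTime ignores non-positive
  // cutoffs and Long.MAX_VALUE, so this is safe to call unconditionally.
  deleteMatchingPartitionsByTime(cutoff);
}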
@Override
public TimeseriesTable getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  return new TimeseriesTable(spec, this.<Table>getDataset(datasetContext, "ts", spec, arguments, classLoader));
}
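// For context, a hedged sketch of the configure() counterpart such a definition
// typically pairs with; the 'tableDef' field is an assumption, not from the
// source. It creates the embedded "ts" Table spec that getDataset above
// resolves by the same name.
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  return DatasetSpecification.builder(instanceName, getName())
    .properties(properties.getProperties())
    .datasets(tableDef.configure("ts", properties))
    .build();
}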
@Override
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                        DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  TimeseriesTableDefinition.validateNewIntervalSize(newProperties, currentSpec);
  return super.reconfigure(instanceName, newProperties, currentSpec);
}
@Test(expected = IllegalStateException.class)
public void testBuilderMissingField() {
  // The partitioning declares three fields, but "z" is never set, so build() must fail.
  PartitionKey.builder(
      Partitioning.builder().addIntField("x").addLongField("y").addStringField("z").build())
    .addField("x", 10)
    .addField("y", 10L)
    .build();
}
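// For contrast, a sketch of the passing case (the value "ten" is illustrative):
// once all three declared fields are set, build() succeeds.
PartitionKey key = PartitionKey.builder(
    Partitioning.builder().addIntField("x").addLongField("y").addStringField("z").build())
  .addField("x", 10)
  .addField("y", 10L)
  .addField("z", "ten")
  .build();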
@Override
public void apply() throws Exception {
  dataset.getPartitionOutput(partitionKey).addPartition();
}
});
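// These apply() overrides read like bodies of in-transaction subroutines. A
// hedged sketch of how such a block is typically driven; the 'executor'
// variable (a TransactionExecutor bound to the dataset) is an assumption,
// not from the source.
executor.execute(new TransactionExecutor.Subroutine() {
  @Override
  public void apply() throws Exception {
    dataset.getPartitionOutput(partitionKey).addPartition();
  }
});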
@Override
public void apply() throws Exception {
  // drop all existing partitions (2 of which are not consumed)
  for (PartitionDetail partitionDetail : dataset.getPartitions(PartitionFilter.ALWAYS_MATCH)) {
    dataset.dropPartition(partitionDetail.getPartitionKey());
  }
  // add 5 new ones
  for (PartitionKey partitionKey : partitionKeys2) {
    dataset.getPartitionOutput(partitionKey).addPartition();
  }
}
});
@Override
public PartitionKey getPartitionKey(String key, String value) {
  return PartitionKey.builder()
    .addStringField("x", key)
    .build();
}
}
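// A hedged usage sketch for the key function above (the loop and the 'dataset'
// variable are illustrative): each string key maps to a single-field partition,
// and the 'value' argument is ignored by this particular mapping.
for (String key : Arrays.asList("a", "b", "c")) {
  PartitionKey partitionKey = getPartitionKey(key, "unused");
  dataset.getPartitionOutput(partitionKey).addPartition();
}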
@Override
public void apply() throws Exception {
  for (PartitionKey partitionKey : partitionKeys1) {
    dataset.getPartitionOutput(partitionKey).addPartition();
  }
}
});
@Override
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                        DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  validateNewIntervalSize(newProperties, currentSpec);
  return super.reconfigure(instanceName, newProperties, currentSpec);
}
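// A hedged sketch of what a check like validateNewIntervalSize plausibly does
// (the property name, message, and accessor usage are assumptions, not the
// library source): the per-row time interval is encoded into existing row
// keys, so changing it on reconfigure would orphan previously written data.
static void validateNewIntervalSize(DatasetProperties newProperties,
                                    DatasetSpecification currentSpec)
  throws IncompatibleUpdateException {
  String oldInterval = currentSpec.getProperty("timeIntervalToStorePerRow");
  String newInterval = newProperties.getProperties().get("timeIntervalToStorePerRow");
  if (newInterval != null && !newInterval.equals(oldInterval)) {
    throw new IncompatibleUpdateException(
      "Attempt to change time interval stored per row from " + oldInterval + " to " + newInterval);
  }
}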
@Override
public void apply() throws Exception {
  dataset.getPartitionOutput(partitionKey1).addPartition();
}
});
@Override
public void apply() throws Exception {
  for (PartitionKey partitionKey : partitionKeys) {
    dataset.getPartitionOutput(partitionKey).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  for (PartitionKey partitionKey : partitionKeys2) {
    dataset.getPartitionOutput(partitionKey).addPartition();
  }
}
});
@Override
public void apply() throws Exception {
  for (final PartitionKey partitionKey : partitionKeys) {
    dataset.getPartitionOutput(partitionKey).addPartition();
  }
}
});