/**
 * Builds the file splits for the given index of a dataset: one split per cluster partition
 * the dataset is placed on (restricted to {@code nodes} when non-null, per
 * {@code getDatasetPartitions}).
 *
 * @param clusterStateManager cluster state used to resolve the dataset's partitions
 * @param dataset the dataset owning the index
 * @param indexName the index whose on-disk splits are requested
 * @param nodes the nodes to consider when resolving partitions
 * @return one {@link FileSplit} per resolved cluster partition
 */
public static FileSplit[] getIndexSplits(IClusterStateManager clusterStateManager, Dataset dataset, String indexName,
        List<String> nodes) {
    // Path of the index relative to each storage partition root; includes the rebalance
    // count so rebalanced datasets get distinct directories.
    String indexRelPath = StoragePathUtil.prepareDataverseIndexName(dataset.getDataverseName(),
            dataset.getDatasetName(), indexName, dataset.getRebalanceCount());
    List<ClusterPartition> partitions = getDatasetPartitions(clusterStateManager, dataset, nodes);
    // Fill the result array directly — size is known up front.
    FileSplit[] splits = new FileSplit[partitions.size()];
    int i = 0;
    for (ClusterPartition partition : partitions) {
        File indexFile =
                new File(StoragePathUtil.prepareStoragePartitionPath(partition.getPartitionId()), indexRelPath);
        splits[i++] = StoragePathUtil.getFileSplitForClusterPartition(partition, indexFile.getPath());
    }
    return splits;
}
private void writeRebalanceCount(Dataset dataset) throws HyracksDataException { if (dataset.getRebalanceCount() > 0) { // Adds the field rebalanceCount. fieldName.reset(); aString.setValue(MetadataRecordTypes.DATASET_ARECORD_REBALANCE_FIELD_NAME); stringSerde.serialize(aString, fieldName.getDataOutput()); fieldValue.reset(); aBigInt.setValue(dataset.getRebalanceCount()); aBigIntSerde.serialize(aBigInt, fieldValue.getDataOutput()); recordBuilder.addField(fieldName, fieldValue); } }
// NOTE(review): continuation of an argument list whose call starts above this chunk.
// Passes the NEXT rebalance generation (current count + 1) — presumably creating the
// node group / target replicas for a rebalance of sourceDataset; confirm against the full file.
sourceDataset.getDatasetName(), sourceDataset.getRebalanceCount() + 1, targetNcNames, metadataProvider);
// Rebalance target is a single node ("asterix_nc1"); build its node set, then create the
// node group for the next rebalance generation (current rebalance count + 1) of the source
// dataset. NOTE(review): these statements sit inside a method whose header is outside this
// chunk — presumably a rebalance test/driver.
final Set<String> rebalanceToNodes = Stream.of("asterix_nc1").collect(Collectors.toSet()); DatasetUtil.createNodeGroupForNewDataset(sourceDataset.getDataverseName(), sourceDataset.getDatasetName(), sourceDataset.getRebalanceCount() + 1, rebalanceToNodes, metadataProvider);