/**
 * Creates a compiled statement describing a compaction of one index of a dataset.
 *
 * @param dataset the dataset owning the index to be compacted
 * @param index the index to be compacted
 */
public CompiledIndexCompactStatement(Dataset dataset, Index index) {
    super(dataset.getDataverseName(), dataset.getDatasetName());
    this.index = index;
    this.dataset = dataset;
}
/**
 * Discards the access-manager entry registered for the given dataset.
 *
 * @param dataset the dataset whose bookkeeping entry should be removed
 */
public void removeDatasetInfo(Dataset dataset) {
    // Registry entries are keyed by "<dataverse>.<dataset>".
    final String registryKey = dataset.getDataverseName() + "." + dataset.getDatasetName();
    globalRegister.remove(registryKey);
}
/**
 * Resolves the file splits and partition constraints for the dataset's primary
 * index, whose name by convention equals the dataset name.
 *
 * @param ds the dataset to resolve splits for
 * @return the split provider paired with its partition constraint
 * @throws AlgebricksException if split resolution fails
 */
public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> getSplitProviderAndConstraints(Dataset ds)
        throws AlgebricksException {
    final String primaryIndexName = ds.getDatasetName();
    return getSplitProviderAndConstraints(ds, primaryIndexName);
}
/**
 * Processes a single file-system entry: directories are recursed into via
 * {@code listSubFiles}, while regular files are appended to {@code files} as
 * external-file records with no pending operation.
 *
 * @param dataset the external dataset the files belong to
 * @param files accumulator for discovered files (mutated)
 * @param fs the file system being listed
 * @param fileStatus the entry to process
 * @param nextFileNumber the file number assigned to a regular file
 * @throws IOException if listing the file system fails
 */
private static void handleFile(Dataset dataset, List<ExternalFile> files, FileSystem fs, FileStatus fileStatus,
        int nextFileNumber) throws IOException {
    if (!fileStatus.isDirectory()) {
        // Regular file: record its path, modification time, and size.
        files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
                fileStatus.getPath().toUri().getPath(), new Date(fileStatus.getModificationTime()),
                fileStatus.getLen(), ExternalFilePendingOp.NO_OP));
        return;
    }
    // Directory: descend and collect the files it contains.
    listSubFiles(dataset, fs, fileStatus, files);
}
private List<String> getDatasetNamesPartitionedOnThisNodeGroup(TxnId txnId, String nodegroup) throws AlgebricksException, RemoteException { // this needs to scan the datasets and return the datasets that use this // nodegroup List<String> nodeGroupDatasets = new ArrayList<>(); List<Dataset> datasets = getAllDatasets(txnId); for (Dataset set : datasets) { if (set.getNodeGroupName().equals(nodegroup)) { nodeGroupDatasets.add(set.getDatasetName()); } } return nodeGroupDatasets; }
/**
 * Signals the end of an index build for the given dataset, releasing the lock
 * taken when the build began.
 *
 * @param dataset the dataset whose index build finished
 * @param firstIndex true when the finished build was the dataset's first index
 */
public void buildIndexEnd(Dataset dataset, boolean firstIndex) {
    // Registry entries are keyed by "<dataverse>.<dataset>".
    final String registryKey = dataset.getDataverseName() + "." + dataset.getDatasetName();
    globalRegister.get(registryKey).buildIndexEnd(firstIndex);
}
public void buildIndexBegin(Dataset dataset, boolean firstIndex) { String key = dataset.getDataverseName() + "." + dataset.getDatasetName(); ExternalDatasetAccessManager datasetAccessMgr = globalRegister.get(key); if (datasetAccessMgr == null) { globalRegister.putIfAbsent(key, new ExternalDatasetAccessManager()); datasetAccessMgr = globalRegister.get(key); } // aquire the correct lock datasetAccessMgr.buildIndexBegin(firstIndex); }
/**
 * Signals the end of a refresh operation on the given dataset, releasing the
 * refresh lock and reporting whether the refresh succeeded.
 *
 * @param dataset the dataset whose refresh finished
 * @param success true when the refresh completed successfully
 */
public void refreshEnd(Dataset dataset, boolean success) {
    // Registry entries are keyed by "<dataverse>.<dataset>".
    final String registryKey = dataset.getDataverseName() + "." + dataset.getDatasetName();
    globalRegister.get(registryKey).refreshEnd(success);
}
public void refreshBegin(Dataset dataset) { String key = dataset.getDataverseName() + "." + dataset.getDatasetName(); ExternalDatasetAccessManager datasetAccessMgr = globalRegister.get(key); if (datasetAccessMgr == null) { datasetAccessMgr = globalRegister.put(key, new ExternalDatasetAccessManager()); } // aquire the correct lock datasetAccessMgr.refreshBegin(); }
/**
 * Removes the dataset and, in addition to the superclass cleanup, drops every
 * feed connection that targets it.
 *
 * @param dataset the dataset being removed
 * @throws HyracksDataException if the superclass removal fails
 */
@Override
public synchronized void remove(Dataset dataset) throws HyracksDataException {
    super.remove(dataset);
    final String dataverse = dataset.getDataverseName();
    final String name = dataset.getDatasetName();
    // Purge feed connections pointing at the removed dataset.
    feedConnections.removeIf(
            conn -> conn.getDataverseName().equals(dataverse) && conn.getDatasetName().equals(name));
}
/**
 * Removes the on-disk files of a dataset by building a drop-job specification
 * for each of its indexes and then running every job synchronously.
 *
 * @param dataset the dataset whose files are dropped
 * @param metadataProvider provides index metadata and job-building context
 * @param hcc connection used to execute the drop jobs
 * @throws Exception if building or running any drop job fails
 */
private static void dropDatasetFiles(Dataset dataset, MetadataProvider metadataProvider,
        IHyracksClientConnection hcc) throws Exception {
    List<Index> indexes =
            metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
    // Phase 1: build every drop spec before executing any of them.
    List<JobSpecification> dropJobs = new ArrayList<>();
    for (Index idx : indexes) {
        dropJobs.add(IndexUtil.buildDropIndexJobSpec(idx, metadataProvider, dataset,
                EnumSet.of(DropOption.IF_EXISTS, DropOption.WAIT_ON_IN_USE), null));
    }
    // Phase 2: run each spec, waiting for completion.
    for (JobSpecification spec : dropJobs) {
        JobUtils.runJob(hcc, spec, true);
    }
}
/**
 * Computes one file split per cluster partition for the given index of a
 * dataset, restricted to the supplied nodes.
 *
 * @param clusterStateManager source of cluster partition information
 * @param dataset the dataset owning the index
 * @param indexName the index whose splits are computed
 * @param nodes the nodes to consider for partitions
 * @return a file split for every matching cluster partition
 */
public static FileSplit[] getIndexSplits(IClusterStateManager clusterStateManager, Dataset dataset,
        String indexName, List<String> nodes) {
    // Relative path shared by all partitions of this index.
    final String relPath = StoragePathUtil.prepareDataverseIndexName(dataset.getDataverseName(),
            dataset.getDatasetName(), indexName, dataset.getRebalanceCount());
    final List<FileSplit> splits = new ArrayList<>();
    for (ClusterPartition partition : getDatasetPartitions(clusterStateManager, dataset, nodes)) {
        File indexDir =
                new File(StoragePathUtil.prepareStoragePartitionPath(partition.getPartitionId()), relPath);
        splits.add(StoragePathUtil.getFileSplitForClusterPartition(partition, indexDir.getPath()));
    }
    return splits.toArray(new FileSplit[0]);
}
/**
 * Looks up a secondary index by name on the dataset backing the given data
 * source.
 *
 * @param indexId the name of the index to find
 * @param dataSourceId identifies the data source whose dataset is searched
 * @return the matching index wrapper, or null when no such index exists
 * @throws AlgebricksException if the data source or index lookup fails
 */
@Override
public IDataSourceIndex<String, DataSourceId> findDataSourceIndex(String indexId, DataSourceId dataSourceId)
        throws AlgebricksException {
    DataSource source = findDataSource(dataSourceId);
    Dataset dataset = ((DatasetDataSource) source).getDataset();
    Index secondaryIndex = getIndex(dataset.getDataverseName(), dataset.getDatasetName(), indexId);
    if (secondaryIndex == null) {
        return null;
    }
    return new DataSourceIndex(secondaryIndex, dataset.getDataverseName(), dataset.getDatasetName(), this);
}
private void confirmDatatypeIsUnusedByDatasets(TxnId txnId, String dataverseName, String datatypeName) throws AlgebricksException, RemoteException { // If any dataset uses this type, throw an error List<Dataset> datasets = getAllDatasets(txnId); for (Dataset set : datasets) { if (set.getItemTypeName().equals(datatypeName) && set.getItemTypeDataverseName().equals(dataverseName)) { throw new AlgebricksException("Cannot drop type " + dataverseName + "." + datatypeName + " being used by dataset " + set.getDataverseName() + "." + set.getDatasetName()); } } }
/**
 * Builds the metadata entity describing the dataset's primary BTree index.
 * By convention the primary index carries the same name as the dataset.
 *
 * @param dataset the internal dataset whose primary index is described
 * @return the primary index entity
 */
public static Index getPrimaryIndex(Dataset dataset) {
    InternalDatasetDetails details = (InternalDatasetDetails) dataset.getDatasetDetails();
    String datasetName = dataset.getDatasetName();
    return new Index(dataset.getDataverseName(), datasetName, datasetName, DatasetConfig.IndexType.BTREE,
            details.getPartitioningKey(), details.getKeySourceIndicator(), details.getPrimaryKeyType(), false,
            false, true, dataset.getPendingOp());
}
/**
 * Derives the partition number of every file split of this dataset's index.
 *
 * @param metadataProvider provides the transaction context and split resolution
 * @return one partition number per file split, in split order
 * @throws AlgebricksException if split resolution fails
 */
protected int[] getDatasetPartitions(MetadataProvider metadataProvider) throws AlgebricksException {
    FileSplit[] splits =
            metadataProvider.splitsForIndex(metadataProvider.getMetadataTxnContext(), this, getDatasetName());
    int[] partitions = new int[splits.length];
    for (int idx = 0; idx < splits.length; idx++) {
        // The partition number is encoded in each split's relative path.
        partitions[idx] = StoragePathUtil.getPartitionNumFromRelativePath(splits[idx].getPath());
    }
    return partitions;
}
@Override public void updateDataset(MetadataTransactionContext ctx, Dataset dataset) throws AlgebricksException { try { metadataNode.updateDataset(ctx.getTxnId(), dataset); } catch (RemoteException e) { throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e); } // reflect the dataset into the cache ctx.dropDataset(dataset.getDataverseName(), dataset.getDatasetName()); ctx.addDataset(dataset); }
/**
 * Test fixture setup: registers the shared test dataset, describes its
 * secondary index, and creates a primary/secondary index pair per partition.
 *
 * @throws Exception if index creation on any partition fails
 */
private void createIndex() throws Exception {
    dataset = StorageTestUtils.DATASET;
    secondaryIndexEntity = new Index(dataset.getDataverseName(), dataset.getDatasetName(), SECONDARY_INDEX_NAME,
            SECONDARY_INDEX_TYPE, SECONDARY_INDEX_FIELD_NAMES, SECONDARY_INDEX_FIELD_INDICATORS,
            SECONDARY_INDEX_FIELD_TYPES, false, false, false, 0);
    primaryIndexInfos = new PrimaryIndexInfo[NUM_PARTITIONS];
    secondaryIndexInfo = new SecondaryIndexInfo[NUM_PARTITIONS];
    for (int partition = 0; partition < NUM_PARTITIONS; partition++) {
        // Each partition gets its own primary index plus the secondary built on it.
        primaryIndexInfos[partition] = StorageTestUtils.createPrimaryIndex(nc, partition);
        secondaryIndexInfo[partition] = nc.createSecondaryIndex(primaryIndexInfos[partition],
                secondaryIndexEntity, StorageTestUtils.STORAGE_MANAGER, partition);
    }
}
/**
 * Builds a Hyracks job that drops the files index of an external dataset.
 *
 * @param metadataProvider provides application context and split resolution
 * @param dataset the external dataset whose files index is dropped
 * @return the ready-to-run drop job specification
 * @throws AlgebricksException if split resolution fails
 */
public static JobSpecification buildDropFilesIndexJobSpec(MetadataProvider metadataProvider, Dataset dataset)
        throws AlgebricksException {
    String filesIndexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
    JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset, filesIndexName);
    IIndexDataflowHelperFactory helperFactory = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
    // Single drop operator rooted in the spec, constrained to the index's partitions.
    IndexDropOperatorDescriptor dropOp = new IndexDropOperatorDescriptor(jobSpec, helperFactory);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, dropOp,
            splitsAndConstraint.second);
    jobSpec.addRoot(dropOp);
    return jobSpec;
}
/**
 * Clones an external dataset with its transaction state set to BEGIN, leaving
 * every other property of the original unchanged.
 *
 * @param dataset the external dataset to clone
 * @return a copy of {@code dataset} whose details carry {@code TransactionState.BEGIN}
 */
public static Dataset createTransactionDataset(Dataset dataset) {
    ExternalDatasetDetails sourceDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
    // Same adapter/properties/timestamp, but flagged as an in-progress transaction.
    ExternalDatasetDetails beginDetails = new ExternalDatasetDetails(sourceDetails.getAdapter(),
            sourceDetails.getProperties(), sourceDetails.getTimestamp(), TransactionState.BEGIN);
    return new Dataset(dataset.getDataverseName(), dataset.getDatasetName(),
            dataset.getItemTypeDataverseName(), dataset.getItemTypeName(), dataset.getNodeGroupName(),
            dataset.getCompactionPolicy(), dataset.getCompactionPolicyProperties(), beginDetails,
            dataset.getHints(), DatasetType.EXTERNAL, dataset.getDatasetId(), dataset.getPendingOp());
}