/**
 * Resolves the cluster partitions that hold a dataset's data.
 *
 * @param clusterStateManager cluster state used to map nodes to partitions
 * @param dataset the dataset whose partitions are requested
 * @param nodes the nodes of the dataset's node group
 * @return the metadata partition for the metadata dataset; otherwise all
 *         partitions hosted by the given nodes
 */
private static List<ClusterPartition> getDatasetPartitions(IClusterStateManager clusterStateManager,
        Dataset dataset, List<String> nodes) {
    // Constant-first equals() instead of compareTo(...) == 0: idiomatic string equality
    // and no NPE if the node group name is ever null.
    if (MetadataConstants.METADATA_NODEGROUP_NAME.equals(dataset.getNodeGroupName())) {
        // The metadata dataset lives exclusively in the dedicated metadata partition.
        return Collections.singletonList(clusterStateManager.getMetadataPartition());
    }
    // Collect every partition hosted by each node of the dataset's node group.
    final List<ClusterPartition> datasetPartitions = new ArrayList<>();
    for (String node : nodes) {
        final ClusterPartition[] nodePartitions = clusterStateManager.getNodePartitions(node);
        datasetPartitions.addAll(Arrays.asList(nodePartitions));
    }
    return datasetPartitions;
}
}
private List<String> getDatasetNamesPartitionedOnThisNodeGroup(TxnId txnId, String nodegroup) throws AlgebricksException, RemoteException { // this needs to scan the datasets and return the datasets that use this // nodegroup List<String> nodeGroupDatasets = new ArrayList<>(); List<Dataset> datasets = getAllDatasets(txnId); for (Dataset set : datasets) { if (set.getNodeGroupName().equals(nodegroup)) { nodeGroupDatasets.add(set.getDatasetName()); } } return nodeGroupDatasets; }
/**
 * Computes the file splits of an index by resolving the dataset's node group
 * and delegating to the node-list overload.
 *
 * @param dataset the dataset owning the index
 * @param indexName the index whose splits are requested
 * @param mdTxnCtx metadata transaction used for the node group lookup
 * @param csm cluster state used to map nodes to storage locations
 * @throws AlgebricksException if the node group is missing or metadata access fails
 */
public static FileSplit[] getIndexSplits(Dataset dataset, String indexName, MetadataTransactionContext mdTxnCtx,
        IClusterStateManager csm) throws AlgebricksException {
    try {
        final NodeGroup nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName());
        if (nodeGroup == null) {
            throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
        }
        final List<String> nodeList = nodeGroup.getNodeNames();
        return getIndexSplits(csm, dataset, indexName, nodeList);
    } catch (MetadataException me) {
        // Surface metadata failures to the compiler layer, preserving the cause.
        throw new AlgebricksException(me);
    }
}
/**
 * Calculate an estimate size of the bloom filter. Note that this is an
 * estimation which assumes that the data is going to be uniformly distributed
 * across all partitions.
 *
 * @param dataset
 * @return Number of elements that will be used to create a bloom filter per
 *         dataset per partition
 * @throws AlgebricksException if the cardinality hint is not a valid long
 */
public long getCardinalityPerPartitionHint(Dataset dataset) throws AlgebricksException {
    String numElementsHintString = dataset.getHints().get(DatasetCardinalityHint.NAME);
    long numElementsHint;
    if (numElementsHintString == null) {
        numElementsHint = DatasetCardinalityHint.DEFAULT;
    } else {
        try {
            numElementsHint = Long.parseLong(numElementsHintString);
        } catch (NumberFormatException e) {
            // User-supplied hint: report it as a compilation problem instead of
            // leaking an unchecked NumberFormatException.
            throw new AlgebricksException("Invalid cardinality hint: " + numElementsHintString, e);
        }
    }
    int numPartitions = 0;
    List<String> nodeGroup =
            MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName()).getNodeNames();
    IClusterStateManager csm = appCtx.getClusterStateManager();
    for (String nd : nodeGroup) {
        numPartitions += csm.getNodePartitionsCount(nd);
    }
    // Guard against divide-by-zero when the node group contributes no partitions
    // (e.g. nodes not yet registered); fall back to the unscaled hint.
    if (numPartitions == 0) {
        return numElementsHint;
    }
    return numElementsHint / numPartitions;
}
/**
 * Attempts to drop the node group of the given dataset after taking a write
 * lock on it.
 *
 * @param source the dataset whose node group should be dropped
 * @param metadataProvider provides the lock list and metadata transaction
 */
private static void tryDropDatasetNodegroup(Dataset source, MetadataProvider metadataProvider) throws Exception {
    final ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    final String nodeGroupName = source.getNodeGroupName();
    // Exclusive access to the node group entry before mutating it.
    appCtx.getMetadataLockManager().acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroupName);
    // NOTE(review): the trailing boolean presumably makes the drop best-effort /
    // conditional — confirm against MetadataManager.dropNodegroup's signature.
    MetadataManager.INSTANCE.dropNodegroup(metadataProvider.getMetadataTxnContext(), nodeGroupName, true);
}
/**
 * Switches a dataset's metadata entry from {@code source} to the rebalanced {@code target}:
 * upgrades the dataset lock to write, updates the dataset record, re-points any active-entity
 * controllers at the new object, commits the metadata transaction, and finally downgrades
 * the lock back to exclusive-modify.
 *
 * @param source the pre-rebalance dataset (same qualified name as {@code target})
 * @param target the post-rebalance dataset carrying the new node group
 * @param metadataProvider supplies the metadata transaction and lock list
 */
private static void rebalanceSwitch(Dataset source, Dataset target, MetadataProvider metadataProvider)
        throws AlgebricksException, RemoteException {
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    // upgrade lock
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    ActiveNotificationHandler activeNotificationHandler =
            (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
    IMetadataLockManager lockManager = appCtx.getMetadataLockManager();
    lockManager.upgradeDatasetLockToWrite(metadataProvider.getLocks(), DatasetUtil.getFullyQualifiedName(source));
    LOGGER.info("Updating dataset {} node group from {} to {}", source.getDatasetName(), source.getNodeGroupName(),
            target.getNodeGroupName());
    try {
        // Updates the dataset entry in the metadata storage
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, target);
        // Re-point every active-entity controller at the new dataset object so in-flight
        // activity tracks the rebalanced target rather than the stale source.
        for (IActiveEntityEventsListener listener : activeNotificationHandler.getEventListeners()) {
            if (listener instanceof IActiveEntityController) {
                IActiveEntityController controller = (IActiveEntityController) listener;
                controller.replace(target);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        LOGGER.info("dataset {} node group updated to {}", target.getDatasetName(), target.getNodeGroupName());
    } finally {
        // Always restore the lock level, even if the update or commit above failed.
        lockManager.downgradeDatasetLockToExclusiveModify(metadataProvider.getLocks(),
                DatasetUtil.getFullyQualifiedName(target));
    }
}
// Snapshot of the nodes currently in the source dataset's node group.
Set<String> sourceNodes = new HashSet<>(metadataProvider.findNodes(sourceDataset.getNodeGroupName()));
// NOTE(review): the line below is a bare argument list whose enclosing call begins outside
// this view — presumably a LOGGER call describing the rebalance; confirm against the full file.
sourceDataset.getDatasetName(), sourceDataset.getNodeGroupName(), sourceNodes, targetDataset.getNodeGroupName(), targetNcNames);
/**
 * Builds a copy of an external dataset whose details carry transaction state
 * {@code BEGIN}; every other field is taken unchanged from the input.
 *
 * @param dataset the external dataset to copy
 * @return a new Dataset identical to the input except for the BEGIN state
 */
public static Dataset createTransactionDataset(Dataset dataset) {
    final ExternalDatasetDetails currentDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
    // Same adapter/properties/timestamp, but reset to the BEGIN transaction state.
    final ExternalDatasetDetails beginDetails = new ExternalDatasetDetails(currentDetails.getAdapter(),
            currentDetails.getProperties(), currentDetails.getTimestamp(), TransactionState.BEGIN);
    return new Dataset(dataset.getDataverseName(), dataset.getDatasetName(), dataset.getItemTypeDataverseName(),
            dataset.getItemTypeName(), dataset.getNodeGroupName(), dataset.getCompactionPolicy(),
            dataset.getCompactionPolicyProperties(), beginDetails, dataset.getHints(), DatasetType.EXTERNAL,
            dataset.getDatasetId(), dataset.getPendingOp());
}
/**
 * Resolves a {@link DataSourceId} to a {@link DatasetDataSource} by looking up
 * the dataset, its item/meta types, and the node domain of its node group.
 *
 * @param clusterStateManager cluster state used to resolve the node domain
 * @param mdTxnCtx metadata transaction for the lookups
 * @param aqlId the data source id (dataverse + dataset name)
 * @throws AlgebricksException if no dataset with the given id exists
 */
public static DataSource lookupSourceInMetadata(IClusterStateManager clusterStateManager,
        MetadataTransactionContext mdTxnCtx, DataSourceId aqlId) throws AlgebricksException {
    Dataset dataset = findDataset(mdTxnCtx, aqlId.getDataverseName(), aqlId.getDatasourceName());
    if (dataset == null) {
        throw new AlgebricksException("Datasource with id " + aqlId + " was not found.");
    }
    IAType itemType = findType(mdTxnCtx, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    IAType metaItemType = findType(mdTxnCtx, dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    INodeDomain domain = findNodeDomain(clusterStateManager, mdTxnCtx, dataset.getNodeGroupName());
    // Enum constants are singletons: compare with == rather than equals().
    byte datasourceType = dataset.getDatasetType() == DatasetType.EXTERNAL ? DataSource.Type.EXTERNAL_DATASET
            : DataSource.Type.INTERNAL_DATASET;
    return new DatasetDataSource(aqlId, dataset, itemType, metaItemType, datasourceType,
            dataset.getDatasetDetails(), domain);
}
}
/**
 * Validates that the named dataset exists and is internal, then materializes a
 * {@link DatasetDataSource} for it (item type, meta type, and node domain).
 *
 * @param metadataProvider metadata access for dataset/type/domain lookups
 * @param dataverseName dataverse containing the dataset
 * @param datasetName name of the dataset to validate
 * @param sourceLoc source location used in compilation errors
 * @throws AlgebricksException if the dataset is missing or is external
 */
private DatasetDataSource validateDatasetInfo(MetadataProvider metadataProvider, String dataverseName,
        String datasetName, SourceLocation sourceLoc) throws AlgebricksException {
    final Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    // Guard clauses: unknown dataset, then unsupported (external) dataset.
    if (dataset == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
                dataverseName);
    }
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                "Cannot write output to an external dataset.");
    }
    final DataSourceId sourceId = new DataSourceId(dataverseName, datasetName);
    final IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    final IAType metaItemType =
            metadataProvider.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    final INodeDomain domain = metadataProvider.findNodeDomain(dataset.getNodeGroupName());
    return new DatasetDataSource(sourceId, dataset, itemType, metaItemType, DataSource.Type.INTERNAL_DATASET,
            dataset.getDatasetDetails(), domain);
}
// NOTE(review): fragment — two alternate statements re-registering the dataset with
// pendingOp = PENDING_DROP; presumably each belongs to a different branch of the enclosing
// method (start/end outside this view) — confirm against the full file.
// Variant using the long Dataset constructor (meta item type + compression scheme).
MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(),
        new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(),
                getMetaItemTypeDataverseName(), getMetaItemTypeName(), getNodeGroupName(), getCompactionPolicy(),
                getCompactionPolicyProperties(), getDatasetDetails(), getHints(), getDatasetType(), getDatasetId(),
                MetadataUtil.PENDING_DROP_OP, getCompressionScheme()));
// Variant using the shorter Dataset constructor (no meta item type, no compression scheme).
MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(),
        new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(), getNodeGroupName(),
                getCompactionPolicy(), getCompactionPolicyProperties(), getDatasetDetails(), getHints(),
                getDatasetType(), getDatasetId(), MetadataUtil.PENDING_DROP_OP));
// NOTE(review): fragment — take a write lock on the dataset's node group, then check that
// the group still exists in metadata; the body of the if is outside this view.
String nodeGroup = dataset.getNodeGroupName();
lockManager.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
// NOTE(review): fragment — resolve the data-source index and the node domain backing the
// dataset's node group; the throw below is truncated (its message continues past this view).
IDataSourceIndex<String, DataSourceId> dsi = mp.findDataSourceIndex(jobGenParams.getIndexName(), dataSourceId);
INodeDomain storageDomain = mp.findNodeDomain(dataset.getNodeGroupName());
if (dsi == null) {
    throw new CompilationException(ErrorCode.COMPILATION_ERROR, op.getSourceLocation(),
/**
 * Creates a dataset named {@code "ds_" + datasetPostfix} modeled on {@code source},
 * registers it in a fresh metadata transaction, and either commits or aborts.
 *
 * @param appCtx application context used to build the MetadataProvider
 * @param source template dataset supplying dataverse, details, hints, and node group
 * @param datasetPostfix numeric suffix used for both the name and the dataset id
 * @param abort when true, abort the transaction instead of committing it
 */
private void addDataset(ICcApplicationContext appCtx, Dataset source, int datasetPostfix, boolean abort)
        throws Exception {
    final Dataset newDataset = new Dataset(source.getDataverseName(), "ds_" + datasetPostfix,
            source.getDataverseName(), source.getDatasetType().name(), source.getNodeGroupName(),
            NoMergePolicyFactory.NAME, null, source.getDatasetDetails(), source.getHints(),
            DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0);
    final MetadataProvider provider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext writeTxn = MetadataManager.INSTANCE.beginTransaction();
    provider.setMetadataTxnContext(writeTxn);
    try {
        MetadataManager.INSTANCE.addDataset(writeTxn, newDataset);
        // Either outcome ends the transaction; the caller controls which via `abort`.
        if (abort) {
            MetadataManager.INSTANCE.abortTransaction(writeTxn);
        } else {
            MetadataManager.INSTANCE.commitTransaction(writeTxn);
        }
    } finally {
        // Release all metadata locks regardless of the transaction outcome.
        provider.getLocks().unlock();
    }
}
}
// NOTE(review): fragment — serialize the dataset's node group name into the GROUPNAME
// field of the DATASET metadata record being assembled by the enclosing method.
aString.setValue(dataset.getNodeGroupName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX, fieldValue);