/**
 * Drops the on-disk files of the rebalance source dataset and then commits the metadata
 * transaction carried by {@code metadataProvider}.
 *
 * @param source           the (already detached) source dataset of the rebalance
 * @param metadataProvider provider holding the active metadata transaction context
 * @param hcc              Hyracks client connection used to run the file-drop jobs
 * @throws Exception if dropping files/node group or committing the transaction fails
 */
private static void dropSourceDataset(Dataset source, MetadataProvider metadataProvider,
        IHyracksClientConnection hcc) throws Exception {
    // Drops the source dataset files. No need to lock the dataset entity here because the source dataset has
    // been detached at this point.
    dropDatasetFiles(source, metadataProvider, hcc);
    // Best-effort removal of the dataset's node group.
    // NOTE(review): exact semantics (e.g. skipping shared node groups) inferred from the
    // helper's name -- confirm against tryDropDatasetNodegroup's implementation.
    tryDropDatasetNodegroup(source, metadataProvider);
    // The txn context is fetched here (not earlier) in case the helpers above replaced it.
    MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
}
/**
 * Collects all datasets of one dataverse that are eligible for rebalance, using a dedicated
 * read-only metadata transaction (committed on success, aborted on failure).
 *
 * @param dataverseName the dataverse to scan
 * @return the datasets to rebalance
 * @throws Exception if the metadata read fails; the transaction is aborted first
 */
private List<Dataset> getAllDatasetsForRebalance(String dataverseName) throws Exception {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        List<Dataset> result = getDatasetsInDataverseForRebalance(dataverseName, txnCtx);
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
        return result;
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    }
}
/**
 * Collects the rebalance-eligible datasets of every dataverse, using a dedicated read-only
 * metadata transaction (committed on success, aborted on failure).
 *
 * @return all datasets to rebalance across all dataverses
 * @throws Exception if the metadata read fails; the transaction is aborted first
 */
private List<Dataset> getAllDatasetsForRebalance() throws Exception {
    List<Dataset> allDatasets = new ArrayList<>();
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(txnCtx)) {
            allDatasets.addAll(getDatasetsInDataverseForRebalance(dataverse.getDataverseName(), txnCtx));
        }
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    }
    return allDatasets;
}
/** * Perform recovery of DDL operations metadata records */ public static void startDDLRecovery() throws AlgebricksException { // #. clean up any record which has pendingAdd/DelOp flag // as traversing all records from DATAVERSE_DATASET to DATASET_DATASET, and then // to INDEX_DATASET. MetadataTransactionContext mdTxnCtx = null; LOGGER.info("Starting DDL recovery ..."); try { mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx); for (Dataverse dataverse : dataverses) { recoverDataverse(mdTxnCtx, dataverse); } MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); LOGGER.info("Completed DDL recovery."); } catch (Exception e) { try { LOGGER.error("Failure during DDL recovery", e); MetadataManager.INSTANCE.abortTransaction(mdTxnCtx); } catch (Exception e2) { e.addSuppressed(e2); } throw MetadataException.create(e); } }
/**
 * Runs global recovery: initializes the metadata manager, executes {@code doRecovery} inside a
 * metadata transaction, and on success marks recovery complete and refreshes cluster state.
 *
 * @param appCtx the CC application context
 * @throws HyracksDataException wrapping any failure; the in-flight metadata transaction (if
 *         any) is aborted first, with abort failures attached as suppressed exceptions
 */
protected void recover(ICcApplicationContext appCtx) throws HyracksDataException {
    MetadataTransactionContext mdTxnCtx = null;
    try {
        LOGGER.info("Starting Global Recovery");
        MetadataManager.INSTANCE.init();
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // doRecovery may commit/begin internally and hand back the latest context.
        mdTxnCtx = doRecovery(appCtx, mdTxnCtx);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        recoveryCompleted = true;
        // NOTE(review): 'recovering' is deliberately left true on failure, matching the
        // original behavior -- confirm that callers expect this.
        recovering = false;
        LOGGER.info("Global Recovery Completed. Refreshing cluster state...");
        appCtx.getClusterStateManager().refreshState();
    } catch (Exception e) {
        // Fix: previously a failure left the metadata transaction neither committed nor
        // aborted; abort it so it is not leaked (same pattern as DDL recovery).
        if (mdTxnCtx != null) {
            try {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
            }
        }
        throw HyracksDataException.create(e);
    }
}
/**
 * Permanently removes {@code dataset}: first its files, then its metadata entry, then
 * (best effort) its node group. The whole sequence is retried on interrupt via
 * {@code runWithRetryAfterInterrupt}, so each step must tolerate being re-executed.
 *
 * @param dataset          the dataset to purge
 * @param metadataProvider provider whose metadata transaction context is (re)used; each
 *                         {@code runMetadataTransaction} call installs a fresh context on it
 * @param hcc              Hyracks client connection used to run the file-drop jobs
 * @throws Exception if any step ultimately fails
 */
private static void purgeDataset(Dataset dataset, MetadataProvider metadataProvider, IHyracksClientConnection hcc)
        throws Exception {
    runWithRetryAfterInterrupt(() -> {
        // drop dataset files
        dropDatasetFiles(dataset, metadataProvider, hcc);
        // drop dataset entry from metadata
        runMetadataTransaction(metadataProvider,
                () -> MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(),
                        dataset.getDataverseName(), dataset.getDatasetName()));
        // commit the txn installed by the runMetadataTransaction call above
        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
        // try to drop the dataset's node group
        runMetadataTransaction(metadataProvider, () -> tryDropDatasetNodegroup(dataset, metadataProvider));
        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
    });
}
// closes the enclosing class (brace present in the original source line)
}
/**
 * Creates a dataverse from a CREATE DATAVERSE statement and commits the given transaction.
 *
 * @return true if the dataverse was created; false if it already existed and IF NOT EXISTS
 *         was specified (the transaction is committed in both cases)
 * @throws CompilationException with {@code DATAVERSE_EXISTS} if it exists without IF NOT EXISTS
 */
@SuppressWarnings("squid:S00112")
protected boolean doCreateDataverseStatement(MetadataTransactionContext mdTxnCtx, MetadataProvider metadataProvider,
        CreateDataverseStatement stmtCreateDataverse) throws Exception {
    String dvName = stmtCreateDataverse.getDataverseName().getValue();
    Dataverse existing = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
    if (existing != null) {
        if (!stmtCreateDataverse.getIfNotExists()) {
            throw new CompilationException(ErrorCode.DATAVERSE_EXISTS, stmtCreateDataverse.getSourceLocation(),
                    dvName);
        }
        // IF NOT EXISTS: nothing to do, but the transaction still gets committed.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return false;
    }
    MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(),
            new Dataverse(dvName, stmtCreateDataverse.getFormat(), MetadataUtil.PENDING_NO_OP));
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    return true;
}
/**
 * Handles DROP FEED POLICY: removes the named policy if present; with IF EXISTS, a missing
 * policy is a no-op. Aborts the metadata transaction on failure, always releases locks.
 */
protected void handleDropFeedPolicyStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    FeedPolicyDropStatement stmtFeedPolicyDrop = (FeedPolicyDropStatement) stmt;
    SourceLocation sourceLoc = stmtFeedPolicyDrop.getSourceLocation();
    String dataverseName = getActiveDataverse(stmtFeedPolicyDrop.getDataverseName());
    String policyName = stmtFeedPolicyDrop.getPolicyName().getValue();
    MetadataLockUtil.dropFeedPolicyBegin(lockManager, metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + policyName);
    try {
        FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(txnCtx, dataverseName, policyName);
        if (feedPolicy != null) {
            MetadataManager.INSTANCE.dropFeedPolicy(txnCtx, dataverseName, policyName);
        } else if (!stmtFeedPolicyDrop.getIfExists()) {
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                    "Unknown policy " + policyName + " in dataverse " + dataverseName);
        }
        // Committed on both the drop path and the IF-EXISTS no-op path.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        abort(e, e, txnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Handles DROP FEED: drops the named feed if present; with IF EXISTS, a missing feed is a
 * no-op. Aborts the metadata transaction on failure, always releases locks.
 */
protected void handleDropFeedStatement(MetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    FeedDropStatement stmtFeedDrop = (FeedDropStatement) stmt;
    SourceLocation sourceLoc = stmtFeedDrop.getSourceLocation();
    String dataverseName = getActiveDataverse(stmtFeedDrop.getDataverseName());
    String feedName = stmtFeedDrop.getFeedName().getValue();
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    MetadataLockUtil.dropFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + feedName);
    try {
        Feed feed = MetadataManager.INSTANCE.getFeed(txnCtx, dataverseName, feedName);
        if (feed == null && !stmtFeedDrop.getIfExists()) {
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                    "There is no feed with this name " + feedName + ".");
        }
        if (feed != null) {
            doDropFeed(hcc, metadataProvider, feed, sourceLoc);
        }
        // Committed on both the drop path and the IF-EXISTS no-op path.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        abort(e, e, txnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Handles USE dataverse: resolves the named dataverse under a read lock.
 *
 * @return the resolved dataverse
 * @throws MetadataException with {@code UNKNOWN_DATAVERSE} if it does not exist, or with
 *         {@code METADATA_ERROR} wrapping any other failure; the transaction is aborted on
 *         every failure path and locks are always released
 */
protected Dataverse handleUseDataverseStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    DataverseDecl dvd = (DataverseDecl) stmt;
    SourceLocation sourceLoc = dvd.getSourceLocation();
    String dvName = dvd.getDataverseName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
        if (dv == null) {
            throw new MetadataException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dvName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return dv;
    } catch (MetadataException e) {
        // Fix: the UNKNOWN_DATAVERSE exception thrown above was previously caught below and
        // re-wrapped as a generic METADATA_ERROR, hiding the specific error code from callers.
        abort(e, e, mdTxnCtx);
        throw e;
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw new MetadataException(ErrorCode.METADATA_ERROR, e, sourceLoc, e.toString());
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Switches the metadata of a rebalanced dataset from {@code source} to {@code target}:
 * upgrades the dataset lock to write, updates the dataset record, notifies active-entity
 * controllers, commits, and finally downgrades the lock back to exclusive-modify.
 *
 * @param source           the pre-rebalance dataset (current metadata entry)
 * @param target           the post-rebalance dataset (new node group)
 * @param metadataProvider provider holding the active metadata transaction and lock list
 * @throws AlgebricksException if the metadata update/commit or lock transition fails
 * @throws RemoteException    if a remote metadata call fails
 */
private static void rebalanceSwitch(Dataset source, Dataset target, MetadataProvider metadataProvider)
        throws AlgebricksException, RemoteException {
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    // upgrade lock
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    ActiveNotificationHandler activeNotificationHandler =
            (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
    IMetadataLockManager lockManager = appCtx.getMetadataLockManager();
    lockManager.upgradeDatasetLockToWrite(metadataProvider.getLocks(), DatasetUtil.getFullyQualifiedName(source));
    LOGGER.info("Updating dataset {} node group from {} to {}", source.getDatasetName(), source.getNodeGroupName(),
            target.getNodeGroupName());
    try {
        // Updates the dataset entry in the metadata storage
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, target);
        // Point any active-entity controllers (e.g. feed listeners) at the new dataset instance.
        for (IActiveEntityEventsListener listener : activeNotificationHandler.getEventListeners()) {
            if (listener instanceof IActiveEntityController) {
                IActiveEntityController controller = (IActiveEntityController) listener;
                controller.replace(target);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        LOGGER.info("dataset {} node group updated to {}", target.getDatasetName(), target.getNodeGroupName());
    } finally {
        // Always restore the exclusive-modify lock level expected by the caller,
        // even if the update or commit above failed.
        lockManager.downgradeDatasetLockToExclusiveModify(metadataProvider.getLocks(),
                DatasetUtil.getFullyQualifiedName(target));
    }
}
/**
 * Gets the file splits of {@code dataset}.
 *
 * @param integrationUtil the integration-test cluster handle
 * @param dataset         the dataset whose splits are requested
 * @return the file splits of the dataset
 * @throws RemoteException     if a remote metadata call fails
 * @throws AlgebricksException if resolving the splits fails
 */
public static FileSplit[] getDatasetSplits(AsterixHyracksIntegrationUtil integrationUtil, Dataset dataset)
        throws RemoteException, AlgebricksException {
    final ICcApplicationContext appCtx =
            (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
    final MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        return SplitsAndConstraintsUtil.getIndexSplits(dataset, dataset.getDatasetName(), txnCtx,
                appCtx.getClusterStateManager());
    } finally {
        // Read-only transaction: committing in finally is this file's cleanup convention.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    }
}
/**
 * Handles DROP NODEGROUP: drops the named node group if present; with IF EXISTS, a missing
 * node group is a no-op. Aborts the metadata transaction on failure, always releases locks.
 */
protected void handleNodegroupDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    NodeGroupDropStatement stmtDelete = (NodeGroupDropStatement) stmt;
    SourceLocation sourceLoc = stmtDelete.getSourceLocation();
    String nodegroupName = stmtDelete.getNodeGroupName().getValue();
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    lockManager.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodegroupName);
    try {
        NodeGroup nodeGroup = MetadataManager.INSTANCE.getNodegroup(txnCtx, nodegroupName);
        if (nodeGroup != null) {
            MetadataManager.INSTANCE.dropNodegroup(txnCtx, nodegroupName, false);
        } else if (!stmtDelete.getIfExists()) {
            throw new CompilationException(ErrorCode.UNKNOWN_NODEGROUP, sourceLoc, nodegroupName);
        }
        // Committed on both the drop path and the IF-EXISTS no-op path.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        abort(e, e, txnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
@Test public void abortMetadataTxn() throws Exception { ICcApplicationContext appCtx = (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext(); final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null); final MetadataTransactionContext mdTxn = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxn); final String nodeGroupName = "ng"; try { final List<String> ngNodes = Arrays.asList("asterix_nc1"); MetadataManager.INSTANCE.addNodegroup(mdTxn, new NodeGroup(nodeGroupName, ngNodes)); MetadataManager.INSTANCE.abortTransaction(mdTxn); } finally { metadataProvider.getLocks().unlock(); } // ensure that the node group was not added final MetadataTransactionContext readMdTxn = MetadataManager.INSTANCE.beginTransaction(); try { final NodeGroup nodegroup = MetadataManager.INSTANCE.getNodegroup(readMdTxn, nodeGroupName); if (nodegroup != null) { throw new AssertionError("nodegroup was found after metadata txn was aborted"); } } finally { MetadataManager.INSTANCE.commitTransaction(readMdTxn); } }
/**
 * Handles DROP TYPE: drops the named datatype if present; with IF EXISTS, a missing type is
 * a no-op. Aborts the metadata transaction on failure, always releases locks.
 */
protected void handleTypeDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
    SourceLocation sourceLoc = stmtTypeDrop.getSourceLocation();
    String dataverseName = getActiveDataverse(stmtTypeDrop.getDataverseName());
    String typeName = stmtTypeDrop.getTypeName().getValue();
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    MetadataLockUtil.dropTypeBegin(lockManager, metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + typeName);
    try {
        Datatype datatype = MetadataManager.INSTANCE.getDatatype(txnCtx, dataverseName, typeName);
        if (datatype != null) {
            MetadataManager.INSTANCE.dropDatatype(txnCtx, dataverseName, typeName);
        } else if (!stmtTypeDrop.getIfExists()) {
            throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc, typeName);
        }
        // Committed on both the drop path and the IF-EXISTS no-op path.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        abort(e, e, txnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Gets the reference of dataset {@code datasetName} (in the default dataverse) from metadata.
 *
 * @param integrationUtil the integration-test cluster handle
 * @param datasetName     the dataset to look up
 * @return the dataset reference if found. Otherwise null.
 * @throws AlgebricksException if the metadata lookup fails
 * @throws RemoteException     if a remote metadata call fails
 */
public static Dataset getDataset(AsterixHyracksIntegrationUtil integrationUtil, String datasetName)
        throws AlgebricksException, RemoteException {
    final ICcApplicationContext appCtx =
            (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
    final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    try {
        return metadataProvider.findDataset(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
    } finally {
        // Read-only transaction: committing in finally is this file's cleanup convention.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
        metadataProvider.getLocks().unlock();
    }
}
private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception { MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); // Retrieves file splits of the dataset. MetadataProvider metadataProvider = new MetadataProvider( (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null); try { metadataProvider.setMetadataTxnContext(mdTxnCtx); Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName); ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName()); // Metadata transaction commits. MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); return recordType; } finally { metadataProvider.getLocks().unlock(); } } }
/**
 * Creates and builds a secondary index for the given primary index in the test storage layer.
 *
 * @param primaryIndexInfo         the primary index the secondary index belongs to
 * @param secondaryIndex           the secondary index metadata entity
 * @param storageComponentProvider provider of storage managers/factories
 * @param partition                the partition to build the index on
 * @return info describing the created secondary index
 */
public SecondaryIndexInfo createSecondaryIndex(PrimaryIndexInfo primaryIndexInfo, Index secondaryIndex,
        IStorageComponentProvider storageComponentProvider, int partition)
        throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy;
    try {
        mergePolicy = DatasetUtil.getMergePolicyFactory(primaryIndexInfo.dataset, mdTxnCtx);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // Fix: previously a failure here left the metadata transaction open; abort it
        // instead of leaking it.
        try {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        } catch (Exception e2) {
            e.addSuppressed(e2);
        }
        throw e;
    }
    Dataverse dataverse = new Dataverse(primaryIndexInfo.dataset.getDataverseName(),
            NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
    MetadataProvider mdProvider = new MetadataProvider(
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), dataverse);
    SecondaryIndexInfo secondaryIndexInfo = new SecondaryIndexInfo(primaryIndexInfo, secondaryIndex);
    try {
        IResourceFactory resourceFactory = primaryIndexInfo.dataset.getResourceFactory(mdProvider, secondaryIndex,
                primaryIndexInfo.recordType, primaryIndexInfo.metaType, mergePolicy.first, mergePolicy.second);
        IndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(
                storageComponentProvider.getStorageManager(), secondaryIndexInfo.fileSplitProvider, resourceFactory,
                true);
        IHyracksTaskContext ctx = createTestContext(newJobId(), partition, false);
        IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
        indexBuilder.build();
    } finally {
        mdProvider.getLocks().unlock();
    }
    return secondaryIndexInfo;
}
/**
 * Creates and builds the primary index of {@code dataset} in the test storage layer.
 *
 * @param dataset                  the dataset to create a primary index for
 * @param primaryKeyTypes          types of the primary key fields
 * @param recordType               the dataset record type
 * @param metaType                 the meta record type (may be null)
 * @param filterFields             filter field indexes (may be null)
 * @param storageComponentProvider provider of storage managers/factories
 * @param primaryKeyIndexes        positions of primary key fields
 * @param primaryKeyIndicators     source indicators for primary key fields
 * @param partition                the partition to build the index on
 * @return info describing the created primary index
 */
public PrimaryIndexInfo createPrimaryIndex(Dataset dataset, IAType[] primaryKeyTypes, ARecordType recordType,
        ARecordType metaType, int[] filterFields, IStorageComponentProvider storageComponentProvider,
        int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators, int partition)
        throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy;
    try {
        mergePolicy = DatasetUtil.getMergePolicyFactory(dataset, mdTxnCtx);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // Fix: previously a failure here left the metadata transaction open; abort it
        // instead of leaking it.
        try {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        } catch (Exception e2) {
            e.addSuppressed(e2);
        }
        throw e;
    }
    PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
            mergePolicy.first, mergePolicy.second, filterFields, primaryKeyIndexes, primaryKeyIndicators);
    Dataverse dataverse =
            new Dataverse(dataset.getDataverseName(), NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
    MetadataProvider mdProvider = new MetadataProvider(
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), dataverse);
    try {
        IResourceFactory resourceFactory = dataset.getResourceFactory(mdProvider, primaryIndexInfo.index, recordType,
                metaType, mergePolicy.first, mergePolicy.second);
        IndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(
                storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider(), resourceFactory,
                true);
        IHyracksTaskContext ctx = createTestContext(newJobId(), partition, false);
        IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
        indexBuilder.build();
    } finally {
        mdProvider.getLocks().unlock();
    }
    return primaryIndexInfo;
}
/**
 * Adds a synthetic dataset (cloned from {@code source}) to metadata, then either commits or
 * aborts the transaction depending on {@code abort}. Used to exercise txn outcomes in tests.
 */
private void addDataset(ICcApplicationContext appCtx, Dataset source, int datasetPostfix, boolean abort)
        throws Exception {
    Dataset dataset = new Dataset(source.getDataverseName(), "ds_" + datasetPostfix, source.getDataverseName(),
            source.getDatasetType().name(), source.getNodeGroupName(), NoMergePolicyFactory.NAME, null,
            source.getDatasetDetails(), source.getHints(), DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0);
    MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    try {
        MetadataManager.INSTANCE.addDataset(txnCtx, dataset);
        // either outcome is deliberate: callers test both committed and aborted adds
        if (abort) {
            MetadataManager.INSTANCE.abortTransaction(txnCtx);
        } else {
            MetadataManager.INSTANCE.commitTransaction(txnCtx);
        }
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
// closes the enclosing class (brace present in the original source line)
}