public static Pair<ExternalScanOperatorDescriptor, AlgebricksPartitionConstraint> createExternalIndexingOp(
        JobSpecification spec, MetadataProvider metadataProvider, Dataset dataset, ARecordType itemType,
        RecordDescriptor indexerDesc, List<ExternalFile> files, SourceLocation sourceLoc)
        throws HyracksDataException, AlgebricksException {
    return getIndexingOperator(metadataProvider, spec, itemType, dataset,
            files == null
                    ? MetadataManager.INSTANCE.getDatasetExternalFiles(metadataProvider.getMetadataTxnContext(),
                            dataset)
                    : files,
            indexerDesc, sourceLoc);
}
private static void dropSourceDataset(Dataset source, MetadataProvider metadataProvider,
        IHyracksClientConnection hcc) throws Exception {
    // Drops the source dataset files. No need to lock the dataset entity here because the source dataset has
    // been detached at this point.
    dropDatasetFiles(source, metadataProvider, hcc);
    tryDropDatasetNodegroup(source, metadataProvider);
    MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
}
private static void purgeDataset(Dataset dataset, MetadataProvider metadataProvider, IHyracksClientConnection hcc)
        throws Exception {
    runWithRetryAfterInterrupt(() -> {
        // drop dataset files
        dropDatasetFiles(dataset, metadataProvider, hcc);
        // drop dataset entry from metadata
        runMetadataTransaction(metadataProvider,
                () -> MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(),
                        dataset.getDataverseName(), dataset.getDatasetName()));
        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
        // try to drop the dataset's node group
        runMetadataTransaction(metadataProvider, () -> tryDropDatasetNodegroup(dataset, metadataProvider));
        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
    });
}
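// Why this sketch: purgeDataset above leans on runWithRetryAfterInterrupt to survive
// thread interrupts (e.g. during shutdown). This is a hedged illustration of the idiom
// only, not the actual helper's implementation: re-run the work once after clearing
// the interrupt flag, then restore the flag for the caller.
@FunctionalInterface
interface InterruptibleAction {
    void run() throws Exception;
}

static void runWithRetryAfterInterruptSketch(InterruptibleAction action) throws Exception {
    boolean interrupted = Thread.interrupted(); // record and clear any pending interrupt
    try {
        try {
            action.run();
        } catch (InterruptedException e) {
            interrupted = true;
            Thread.interrupted(); // clear the flag so the retry can run to completion
            action.run(); // single retry; the real helper's policy may differ
        }
    } finally {
        if (interrupted) {
            Thread.currentThread().interrupt(); // restore the caller's interrupt status
        }
    }
}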
protected int[] getDatasetPartitions(MetadataProvider metadataProvider) throws AlgebricksException {
    FileSplit[] splitsForDataset = metadataProvider.splitsForIndex(metadataProvider.getMetadataTxnContext(), this,
            getDatasetName());
    int[] partitions = new int[splitsForDataset.length];
    for (int i = 0; i < partitions.length; i++) {
        partitions[i] = StoragePathUtil.getPartitionNumFromRelativePath(splitsForDataset[i].getPath());
    }
    return partitions;
}
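// For intuition about StoragePathUtil.getPartitionNumFromRelativePath: assuming split
// paths embed the partition as a "partition_<num>" segment (the layout and the method
// name "partitionNumFromRelativePathSketch" are assumptions for illustration, not taken
// from this code), the extraction amounts to:
static int partitionNumFromRelativePathSketch(String relativePath) {
    final String marker = "partition_"; // hypothetical layout, e.g. "storage/partition_3/dv/ds/0/ds"
    int start = relativePath.indexOf(marker) + marker.length();
    int end = relativePath.indexOf('/', start);
    return Integer.parseInt(end < 0 ? relativePath.substring(start) : relativePath.substring(start, end));
}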
private static void tryDropDatasetNodegroup(Dataset source, MetadataProvider metadataProvider) throws Exception {
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    String sourceNodeGroup = source.getNodeGroupName();
    appCtx.getMetadataLockManager().acquireNodeGroupWriteLock(metadataProvider.getLocks(), sourceNodeGroup);
    MetadataManager.INSTANCE.dropNodegroup(metadataProvider.getMetadataTxnContext(), sourceNodeGroup, true);
}
/**
 * Creates a node group that is associated with a new dataset.
 *
 * @param dataverseName
 *            the dataverse name of the dataset
 * @param datasetName
 *            the name of the dataset
 * @param rebalanceCount
 *            the rebalance count of the dataset
 * @param ncNames
 *            the set of node names
 * @param metadataProvider
 *            the metadata provider
 * @return the name of the created node group
 * @throws Exception
 */
public static String createNodeGroupForNewDataset(String dataverseName, String datasetName, long rebalanceCount,
        Set<String> ncNames, MetadataProvider metadataProvider) throws Exception {
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    String nodeGroup = dataverseName + "." + datasetName + (rebalanceCount == 0L ? "" : "_" + rebalanceCount);
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    appCtx.getMetadataLockManager().acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
    NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup);
    if (ng != null) {
        nodeGroup = nodeGroup + "_" + UUID.randomUUID().toString();
        appCtx.getMetadataLockManager().acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
    }
    MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(nodeGroup, new ArrayList<>(ncNames)));
    return nodeGroup;
}
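// Worked example of the naming scheme above; the dataverse, dataset, and node names
// are hypothetical:
//   rebalanceCount == 0 -> "tpch.Orders"
//   rebalanceCount == 2 -> "tpch.Orders_2"
//   name already taken  -> "tpch.Orders_2_<random UUID>"
Set<String> ncNames = new HashSet<>(Arrays.asList("nc1", "nc2"));
String ngName = createNodeGroupForNewDataset("tpch", "Orders", 2L, ncNames, metadataProvider);
// ngName is "tpch.Orders_2" unless a node group with that name already existed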
@SuppressWarnings("squid:S00112") protected boolean doCreateDataverseStatement(MetadataTransactionContext mdTxnCtx, MetadataProvider metadataProvider, CreateDataverseStatement stmtCreateDataverse) throws Exception { String dvName = stmtCreateDataverse.getDataverseName().getValue(); Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName); if (dv != null) { if (stmtCreateDataverse.getIfNotExists()) { MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); return false; } else { throw new CompilationException(ErrorCode.DATAVERSE_EXISTS, stmtCreateDataverse.getSourceLocation(), dvName); } } MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName, stmtCreateDataverse.getFormat(), MetadataUtil.PENDING_NO_OP)); MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); return true; }
Function function =
        MetadataManager.INSTANCE.getFunction(metadataProvider.getMetadataTxnContext(), signature);
if (function == null) {
    return null;
}
if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_JAVA)) {
    IFunctionInfo finfo = ExternalFunctionCompilerUtil
            .getExternalFunctionInfo(metadataProvider.getMetadataTxnContext(), function);
    f = new ScalarFunctionCallExpression(finfo, args);
    f.setSourceLocation(sourceLoc);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
        DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
protected Dataverse handleUseDataverseStatement(MetadataProvider metadataProvider, Statement stmt)
        throws Exception {
    DataverseDecl dvd = (DataverseDecl) stmt;
    SourceLocation sourceLoc = dvd.getSourceLocation();
    String dvName = dvd.getDataverseName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
        if (dv == null) {
            throw new MetadataException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dvName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return dv;
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw new MetadataException(ErrorCode.METADATA_ERROR, e, sourceLoc, e.toString());
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
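// handleUseDataverseStatement shows the metadata-transaction idiom that recurs across
// these handlers. Stripped to its skeleton (only names that appear above are used):
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
try {
    // ... reads and writes through MetadataManager.INSTANCE ...
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
    abort(e, e, mdTxnCtx); // roll back the metadata transaction
    throw e;
} finally {
    metadataProvider.getLocks().unlock(); // locks are released on commit and abort alike
}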
private static void rebalanceSwitch(Dataset source, Dataset target, MetadataProvider metadataProvider)
        throws AlgebricksException, RemoteException {
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    // upgrade lock
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    ActiveNotificationHandler activeNotificationHandler =
            (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
    IMetadataLockManager lockManager = appCtx.getMetadataLockManager();
    lockManager.upgradeDatasetLockToWrite(metadataProvider.getLocks(), DatasetUtil.getFullyQualifiedName(source));
    LOGGER.info("Updating dataset {} node group from {} to {}", source.getDatasetName(),
            source.getNodeGroupName(), target.getNodeGroupName());
    try {
        // Updates the dataset entry in the metadata storage
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, target);
        for (IActiveEntityEventsListener listener : activeNotificationHandler.getEventListeners()) {
            if (listener instanceof IActiveEntityController) {
                IActiveEntityController controller = (IActiveEntityController) listener;
                controller.replace(target);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        LOGGER.info("dataset {} node group updated to {}", target.getDatasetName(), target.getNodeGroupName());
    } finally {
        lockManager.downgradeDatasetLockToExclusiveModify(metadataProvider.getLocks(),
                DatasetUtil.getFullyQualifiedName(target));
    }
}
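// The essential part of rebalanceSwitch is its lock choreography: the caller is assumed
// to already hold an exclusive-modify lock on the dataset, which is upgraded to a write
// lock only for the metadata swap and then downgraded (not released), so the caller
// still owns the lock afterwards. Reduced to the bare pattern:
lockManager.upgradeDatasetLockToWrite(metadataProvider.getLocks(), DatasetUtil.getFullyQualifiedName(source));
try {
    // swap the source entry for the target entry while holding the write lock
} finally {
    lockManager.downgradeDatasetLockToExclusiveModify(metadataProvider.getLocks(),
            DatasetUtil.getFullyQualifiedName(target));
}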
numElementsHint = metadataProvider.getCardinalityPerPartitionHint(dataset);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
        DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
mergePolicyFactory = compactionInfo.first;
mergePolicyProperties = compactionInfo.second;
protected void doDropFeed(IHyracksClientConnection hcc, MetadataProvider metadataProvider, Feed feed,
        SourceLocation sourceLoc) throws Exception {
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    EntityId feedId = feed.getFeedId();
    ActiveNotificationHandler activeNotificationHandler =
            (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
    ActiveEntityEventsListener listener =
            (ActiveEntityEventsListener) activeNotificationHandler.getListener(feedId);
    if (listener != null && listener.getState() != ActivityState.STOPPED) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                "Feed " + feedId + " is currently active and connected to the following dataset(s) \n"
                        + listener.toString());
    } else if (listener != null) {
        listener.unregister();
    }
    JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
            MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getEntityName()));
    runJob(hcc, spec);
    MetadataManager.INSTANCE.dropFeed(mdTxnCtx, feed.getDataverseName(), feed.getFeedName());
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Removed feed " + feedId);
    }
}
try {
    Feed feed = MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName,
            feedName);
    if (feed != null) {
        if (cfs.getIfNotExists()) {
            // The feed already exists; with IF NOT EXISTS this is a no-op.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        }
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, cfs.getSourceLocation(),
                "A feed with this name " + feedName + " already exists.");
    }
    // ... construction of the new feed elided in this excerpt ...
    MetadataManager.INSTANCE.addFeed(metadataProvider.getMetadataTxnContext(), feed);
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
Dataverse dataverse =
        MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dataverseName);
jobsToExecute.add(DatasetUtil.compactDatasetJobSpec(dataverse, datasetName, metadataProvider));
IAType itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
        externalDataset.getItemTypeDataverseName(), itemTypeName).getDatatype();
String dataverseName = id.getDataverseName();
String datasetName = id.getDatasourceName();
// The primary index shares its name with the dataset, hence datasetName is passed twice.
Index primaryIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
        datasetName, datasetName);
public static JobSpecification buildFilesIndexCreateJobSpec(Dataset dataset,
        List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider) throws AlgebricksException {
    IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
            DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
    ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
    Map<String, String> mergePolicyProperties = compactionInfo.second;
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset,
                    IndexingConstants.getFilesIndexName(dataset.getDatasetName()));
    IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
    String fileIndexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
    Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
            dataset.getDataverseName(), dataset.getDatasetName(), fileIndexName);
    ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
            dataset.getItemTypeName());
    IResourceFactory resourceFactory = dataset.getResourceFactory(metadataProvider, fileIndex, recordType, null,
            mergePolicyFactory, mergePolicyProperties);
    IIndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(
            storageComponentProvider.getStorageManager(), secondaryFileSplitProvider, resourceFactory, true);
    IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(
            storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
    ExternalFilesIndexCreateOperatorDescriptor externalFilesOp = new ExternalFilesIndexCreateOperatorDescriptor(
            spec, indexBuilderFactory, dataflowHelperFactory, externalFilesSnapshot);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
            secondarySplitsAndConstraint.second);
    spec.addRoot(externalFilesOp);
    spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return spec;
}
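// Hedged usage sketch for the builder above: assuming a live IHyracksClientConnection
// named hcc and an externalFilesSnapshot list are in scope (both assumptions here), the
// returned job spec is submitted like any other Hyracks job:
JobSpecification filesIndexJob = buildFilesIndexCreateJobSpec(dataset, externalFilesSnapshot, metadataProvider);
JobId jobId = hcc.startJob(filesIndexJob); // submits the job asynchronously
hcc.waitForCompletion(jobId); // blocks until the index-create job finishes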
IAObject simThresh = ((AsterixConstantValue) similarityThreshold).getObject();
int numPrimaryKeys = dataset.getPrimaryKeys().size();
Index secondaryIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
        dataset.getDataverseName(), dataset.getDatasetName(), indexName);
if (secondaryIndex == null) {
ARecordType itemType =
        (ARecordType) MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                dataset.getDataverseName(), dataset.getItemTypeName()).getDatatype();
ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
FeedConnection fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(),
        dataverseName, feedName, datasetName);
Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);