/**
 * Returns the name of the default dataverse.
 * NOTE(review): assumes {@code defaultDataverse} is non-null — confirm callers guarantee this.
 */
public String getDefaultDataverseName() {
    return defaultDataverse.getDataverseName();
}
/**
 * Two dataverses are equal iff their names match; all other fields are ignored.
 */
@Override
public boolean equals(Object o) {
    if (!(o instanceof Dataverse)) {
        return false;
    }
    Dataverse that = (Dataverse) o;
    return dataverseName.equals(that.getDataverseName());
}
/**
 * Resolves the active dataverse name: the argument itself when it is non-null and
 * non-empty, otherwise the name of the currently active dataverse.
 */
@Override
public String getActiveDataverseName(String dataverse) {
    if (dataverse == null || dataverse.isEmpty()) {
        return activeDataverse.getDataverseName();
    }
    return dataverse;
}
/**
 * Registers a dataverse in this cache if it is not already present, initializing the
 * per-dataverse maps for datasets, datatypes and adapters.
 *
 * @param dataverse the dataverse to add
 * @return the value previously cached under this name (always {@code null} for a fresh
 *         insert), or {@code null} if the dataverse already existed and nothing changed
 */
public Dataverse addDataverseIfNotExists(Dataverse dataverse) {
    synchronized (dataverses) {
        synchronized (datasets) {
            synchronized (datatypes) {
                // BUGFIX: the cache maps are keyed by dataverse *name* (a String), but the
                // original checked containsKey(dataverse) with the Dataverse object itself.
                // A String key never equals a Dataverse, so the check was always false and
                // the per-dataverse maps were wiped and re-created on every call.
                String name = dataverse.getDataverseName();
                if (!dataverses.containsKey(name)) {
                    datasets.put(name, new HashMap<String, Dataset>());
                    datatypes.put(name, new HashMap<String, Datatype>());
                    adapters.put(name, new HashMap<String, DatasourceAdapter>());
                    return dataverses.put(name, dataverse);
                }
                return null;
            }
        }
    }
}
synchronized (feeds) { synchronized (compactionPolicies) { datasets.remove(dataverse.getDataverseName()); indexes.remove(dataverse.getDataverseName()); datatypes.remove(dataverse.getDataverseName()); adapters.remove(dataverse.getDataverseName()); compactionPolicies.remove(dataverse.getDataverseName()); List<FunctionSignature> markedFunctionsForRemoval = new ArrayList<>(); for (FunctionSignature signature : functions.keySet()) { if (signature.getNamespace().equals(dataverse.getDataverseName())) { markedFunctionsForRemoval.add(signature); functions.remove(signature); libraries.remove(dataverse.getDataverseName()); feeds.remove(dataverse.getDataverseName()); return dataverses.remove(dataverse.getDataverseName());
/**
 * Splits a dataset argument of the form {@code dataverse.dataset} into its two parts.
 * When no dot separates two non-empty components, the default dataverse name (possibly
 * {@code null}) is paired with the full argument as the dataset name.
 */
public static Pair<String, String> getDatasetInfo(MetadataProvider metadata, String datasetArg) {
    int dot = datasetArg.indexOf('.');
    boolean qualified = dot > 0 && dot < datasetArg.length() - 1;
    if (qualified) {
        return new Pair<>(datasetArg.substring(0, dot), datasetArg.substring(dot + 1));
    }
    Dataverse defaultDv = metadata.getDefaultDataverse();
    String dvName = defaultDv == null ? null : defaultDv.getDataverseName();
    return new Pair<>(dvName, datasetArg);
}
}
private static void recoverDataverse(MetadataTransactionContext mdTxnCtx, Dataverse dataverse) throws AlgebricksException { if (dataverse.getPendingOp() != MetadataUtil.PENDING_NO_OP) { // drop pending dataverse MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverse.getDataverseName()); if (LOGGER.isInfoEnabled()) { LOGGER.info("Dropped a pending dataverse: " + dataverse.getDataverseName()); } } else { List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName()); for (Dataset dataset : datasets) { recoverDataset(mdTxnCtx, dataset); } } }
/**
 * Checks whether the given function is referenced by any feed connection in a
 * dataverse other than {@code currentDataverse}.
 *
 * @return {@code true} if some feed connection outside the current dataverse uses the function
 */
protected boolean isFunctionUsed(MetadataTransactionContext ctx, FunctionSignature signature,
        String currentDataverse) throws AlgebricksException {
    for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(ctx)) {
        String dvName = dataverse.getDataverseName();
        if (dvName.equals(currentDataverse)) {
            // The caller deals with its own dataverse separately.
            continue;
        }
        for (Feed feed : MetadataManager.INSTANCE.getFeeds(ctx, dvName)) {
            List<FeedConnection> connections =
                    MetadataManager.INSTANCE.getFeedConections(ctx, dvName, feed.getFeedName());
            for (FeedConnection connection : connections) {
                if (connection.containsFunction(signature)) {
                    return true;
                }
            }
        }
    }
    return false;
}
protected MetadataTransactionContext doRecovery(ICcApplicationContext appCtx, MetadataTransactionContext mdTxnCtx) throws Exception { // Loop over datasets for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(mdTxnCtx)) { mdTxnCtx = recoverDatasets(appCtx, mdTxnCtx, dataverse); // Fixes ASTERIXDB-2386 by caching the dataverse during recovery MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse.getDataverseName()); } return mdTxnCtx; }
/**
 * Resolves a dataset reference of the form {@code [dataverse.]dataset}, falling back
 * to the default dataverse when no qualifier is present.
 * NOTE(review): a reference with more than one dot silently ignores everything after
 * the second component — confirm this is intended.
 *
 * @throws AlgebricksException if the reference is unqualified and no default dataverse is set
 */
private Pair<String, String> parseDatasetReference(MetadataProvider metadataProvider, String datasetArg)
        throws AlgebricksException {
    String[] parts = datasetArg.split("\\.");
    if (parts.length != 1) {
        return new Pair<>(parts[0], parts[1]);
    }
    Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
    if (defaultDataverse == null) {
        throw new AlgebricksException("Unresolved dataset " + datasetArg + " Dataverse not specified.");
    }
    return new Pair<>(defaultDataverse.getDataverseName(), parts[0]);
}
/**
 * Collects every dataset across all dataverses for rebalancing, within a single
 * metadata transaction that is committed on success and aborted on any failure.
 */
private List<Dataset> getAllDatasetsForRebalance() throws Exception {
    final List<Dataset> result = new ArrayList<>();
    final MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        for (Dataverse dv : MetadataManager.INSTANCE.getDataverses(mdTxnCtx)) {
            result.addAll(getDatasetsInDataverseForRebalance(dv.getDataverseName(), mdTxnCtx));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // Do not leave the metadata transaction dangling on failure.
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    }
    return result;
}
/**
 * Looks up a dataset, falling back to the default dataverse when {@code dataverse} is
 * {@code null}. Acquires read locks on the dataverse and the fully-qualified dataset
 * before performing the lookup.
 *
 * @return the dataset, or {@code null} when no dataverse name could be resolved
 */
public Dataset findDataset(String dataverse, String dataset) throws AlgebricksException {
    String dv;
    if (dataverse != null) {
        dv = dataverse;
    } else {
        dv = defaultDataverse == null ? null : defaultDataverse.getDataverseName();
    }
    if (dv == null) {
        return null;
    }
    String fqName = dv + '.' + dataset;
    appCtx.getMetadataLockManager().acquireDataverseReadLock(locks, dv);
    appCtx.getMetadataLockManager().acquireDatasetReadLock(locks, fqName);
    return MetadataManagerUtil.findDataset(mdTxnCtx, dv, dataset);
}
/**
 * Persists a dataverse record into the metadata primary index, translating a
 * duplicate-key failure into a descriptive exception.
 */
@Override
public void addDataverse(TxnId txnId, Dataverse dataverse) throws AlgebricksException, RemoteException {
    try {
        DataverseTupleTranslator translator = tupleTranslatorProvider.getDataverseTupleTranslator(true);
        ITupleReference tuple = translator.getTupleFromMetadataEntity(dataverse);
        insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
    } catch (HyracksDataException e) {
        // A Hyracks duplicate-key error means a dataverse with this name is already stored.
        boolean duplicate =
                e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY;
        if (duplicate) {
            throw new AlgebricksException(
                    "A dataverse with this name " + dataverse.getDataverseName() + " already exists.", e);
        }
        throw new AlgebricksException(e);
    }
}
/** Builds a job specification that removes the file splits backing the given dataverse. */
public static JobSpecification dropDataverseJobSpec(Dataverse dataverse, MetadataProvider metadata) {
    JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadata.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadata.splitAndConstraints(dataverse.getDataverseName());
    FileRemoveOperatorDescriptor frod =
            new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first, false);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, frod,
            splitsAndConstraint.second);
    jobSpec.addRoot(frod);
    return jobSpec;
}
}
/**
 * Builds a job specification that compacts the LSM components of the given dataset.
 *
 * @throws AsterixException if the dataset does not exist in the dataverse
 */
public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
        MetadataProvider metadataProvider) throws AlgebricksException {
    String dataverseName = dataverse.getDataverseName();
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
    }
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset);
    IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
    LSMTreeIndexCompactOperatorDescriptor compactOp =
            new LSMTreeIndexCompactOperatorDescriptor(spec, indexHelperFactory);
    // BUGFIX: the partition constraint was set twice in a row for the same operator;
    // setting it once is sufficient.
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
            splitsAndConstraint.second);
    spec.addRoot(compactOp);
    return spec;
}
aString.setValue(instance.getDataverseName()); stringSerde.serialize(aString, tupleBuilder.getDataOutput()); tupleBuilder.addFieldEndOffset(); aString.setValue(instance.getDataverseName()); stringSerde.serialize(aString, fieldValue.getDataOutput()); recordBuilder.addField(MetadataRecordTypes.DATAVERSE_ARECORD_NAME_FIELD_INDEX, fieldValue);
/**
 * Rewrites the body of a stored function by wrapping it in a non-top-level Query and
 * running the query rewriter against it.
 *
 * The rewriter resolves names against the metadata provider's *default* dataverse, so
 * this method temporarily switches the default dataverse to the function's own
 * dataverse and restores the previous default in a finally block.
 *
 * NOTE(review): if {@code fnNamespace} is non-null while no default dataverse is set,
 * the equals() call below dereferences a null {@code defaultDataverse} — confirm
 * callers guarantee a default dataverse in that case.
 *
 * @param fnDecl the declared function whose body is to be rewritten
 * @return the rewritten body expression
 * @throws CompilationException if the function's dataverse cannot be resolved, or the rewrite fails
 */
private Expression rewriteFunctionBody(FunctionDecl fnDecl) throws CompilationException {
    SourceLocation sourceLoc = fnDecl.getSourceLocation();
    // Wrap the body in a query shell so the rewriter can process it.
    Query wrappedQuery = new Query(false);
    wrappedQuery.setSourceLocation(sourceLoc);
    wrappedQuery.setBody(fnDecl.getFuncBody());
    wrappedQuery.setTopLevel(false);
    String fnNamespace = fnDecl.getSignature().getNamespace();
    Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
    Dataverse fnDataverse;
    if (fnNamespace == null || fnNamespace.equals(defaultDataverse.getDataverseName())) {
        // No namespace, or same as the current default: no switch needed.
        fnDataverse = defaultDataverse;
    } else {
        try {
            fnDataverse = metadataProvider.findDataverse(fnNamespace);
        } catch (AlgebricksException e) {
            throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, e, sourceLoc, fnNamespace);
        }
    }
    // Rewrite the body relative to the function's own dataverse.
    metadataProvider.setDefaultDataverse(fnDataverse);
    try {
        IQueryRewriter queryRewriter = rewriterFactory.createQueryRewriter();
        queryRewriter.rewrite(declaredFunctions, wrappedQuery, metadataProvider, context, true,
                fnDecl.getParamList());
        return wrappedQuery.getBody();
    } finally {
        // Always restore the caller's default dataverse, even on rewrite failure.
        metadataProvider.setDefaultDataverse(defaultDataverse);
    }
}
private MetadataTransactionContext recoverDatasets(ICcApplicationContext appCtx, MetadataTransactionContext mdTxnCtx, Dataverse dataverse) throws Exception { if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) { MetadataProvider metadataProvider = new MetadataProvider(appCtx, dataverse); try { List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName()); for (Dataset dataset : datasets) { if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
@Test public void reoveryAfterRebalance() throws Exception { String datasetName = "ds"; TestDataUtil.createIdOnlyDataset(datasetName); TestDataUtil.upsertData(datasetName, 10); final long countBeforeRebalance = TestDataUtil.getDatasetCount(datasetName); // rebalance dataset to single nc TestDataUtil.rebalanceDataset(integrationUtil, MetadataBuiltinEntities.DEFAULT_DATAVERSE.getDataverseName(), datasetName, new String[] { "asterix_nc2" }); // check data after rebalance final long countAfterRebalance = TestDataUtil.getDatasetCount(datasetName); Assert.assertEquals(countBeforeRebalance, countAfterRebalance); // insert data after rebalance TestDataUtil.upsertData(datasetName, 20); final long countBeforeRecovery = TestDataUtil.getDatasetCount(datasetName); // do ungraceful shutdown to enforce recovery integrationUtil.deinit(false); integrationUtil.init(false, TEST_CONFIG_FILE_PATH); final long countAfterRecovery = TestDataUtil.getDatasetCount(datasetName); Assert.assertEquals(countBeforeRecovery, countAfterRecovery); } }
MetadataProvider metadataProver = new MetadataProvider(appCtx, null); metadataProver.setMetadataTxnContext(mdTxn); final String defaultDv = MetadataBuiltinEntities.DEFAULT_DATAVERSE.getDataverseName(); final Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxn, defaultDv, datasetName); MetadataManager.INSTANCE.commitTransaction(mdTxn);