/**
 * Constructs a translator between Datatype metadata entities and tuples of the
 * Datatype metadata dataset.
 *
 * @param txnId        transaction id stored for use by translation methods
 * @param metadataNode metadata node stored for use by translation methods
 * @param getTuple     forwarded to the base translator — presumably controls whether
 *                     a reusable output tuple is created; confirm in the superclass
 */
protected DatatypeTupleTranslator(TxnId txnId, MetadataNode metadataNode, boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.DATATYPE_DATASET.getFieldCount());
    // Assignment order of these independent fields is irrelevant.
    this.metadataNode = metadataNode;
    this.txnId = txnId;
}
/**
 * Inserts a metadata dataset to the physical dataset index. Should be performed
 * on a bootstrap of a new universe.
 *
 * @param mdTxnCtx metadata transaction context under which the datasets are added
 * @param indexes  metadata primary indexes to register as datasets
 * @throws AlgebricksException if adding a dataset fails
 */
public static void insertMetadataDatasets(MetadataTransactionContext mdTxnCtx, IMetadataIndex[] indexes)
        throws AlgebricksException {
    for (IMetadataIndex index : indexes) {
        // Metadata datasets are internal BTree datasets, hash-partitioned on their
        // partitioning expressions; the same expressions serve as the primary key.
        IDatasetDetails details = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH,
                index.getPartitioningExpr(), index.getPartitioningExpr(), null, index.getPartitioningExprType(),
                false, null);
        Dataset dataset = new Dataset(index.getDataverseName(), index.getIndexedDatasetName(),
                index.getDataverseName(), index.getPayloadRecordType().getTypeName(), index.getNodeGroupName(),
                GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME, GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES,
                details, new HashMap<String, String>(), DatasetType.INTERNAL, index.getDatasetId().getId(),
                MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(mdTxnCtx, dataset);
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Finished inserting initial datasets.");
    }
}
// NOTE(review): this span is a fragment of a larger method (its enclosing braces are not
// visible in this chunk) and appears garbled by extraction: the if-block opened after the
// reserve() check is never closed in this view — making the statements after the throw
// unreachable as written — and the ternary below is used as a bare expression statement,
// which is not valid Java. Presumably the original assigned it to an operation-tracker
// factory local. TODO: confirm against the full file before editing this code.
final int datasetId = index.getDatasetId().getId();
// Reserve the memory budget for this metadata dataset; fail hard if it cannot be reserved.
if (!appContext.getDatasetMemoryManager().reserve(index.getDatasetId().getId())) {
    throw new IllegalStateException("Failed to reserve memory for metadata dataset (" + datasetId + ")");
// Resolve the index's on-disk file relative to the metadata partition path.
String resourceName = metadataPartitionPath + File.separator + index.getFileNameRelativePath();
FileReference file = ioManager.resolve(resourceName);
index.setFile(file);
ITypeTraits[] typeTraits = index.getTypeTraits();
IBinaryComparatorFactory[] cmpFactories = index.getKeyBinaryComparatorFactory();
int[] bloomFilterKeyFields = index.getBloomFilterKeyFields();
// NOTE(review): result of this ternary is discarded as written — see note above.
index.isPrimaryIndex() ? new PrimaryIndexOperationTrackerFactory(datasetId)
        : new SecondaryIndexOperationTrackerFactory(datasetId);
ILSMComponentIdGeneratorFactory idGeneratorProvider = new DatasetLSMComponentIdGeneratorFactory(datasetId);
// Sanity check: the index's recorded resource id must match the local resource's id.
if (index.getResourceId() != resource.getId()) {
    throw new HyracksDataException("Resource Id doesn't match expected metadata index resource id");
/**
 * Creates the modification callback to attach when applying {@code indexOp} to the
 * given metadata index under transaction {@code txnCtx}.
 *
 * @param indexOp       the operation being performed (INSERT, DELETE, or UPSERT)
 * @param txnCtx        transaction context of the modifying transaction
 * @param metadataIndex metadata index being modified
 * @return the callback to register with the index accessor
 * @throws IllegalStateException for any operation other than INSERT/DELETE/UPSERT
 */
private IModificationOperationCallback createIndexModificationCallback(Operation indexOp,
        ITransactionContext txnCtx, IMetadataIndex metadataIndex) {
    if (indexOp == Operation.INSERT || indexOp == Operation.DELETE) {
        /*
         * Regardless of the index type (primary or secondary index), secondary index modification
         * callback is given. This is still correct since metadata index operation doesn't require
         * any lock from ConcurrentLockMgr.
         */
        return new SecondaryIndexModificationOperationCallback(metadataIndex.getDatasetId(),
                metadataIndex.getPrimaryKeyIndexes(), txnCtx, transactionSubsystem.getLockManager(),
                transactionSubsystem, metadataIndex.getResourceId(), metadataStoragePartition,
                ResourceType.LSM_BTREE, indexOp);
    }
    if (indexOp == Operation.UPSERT) {
        return new UpsertOperationCallback(metadataIndex.getDatasetId(), metadataIndex.getPrimaryKeyIndexes(),
                txnCtx, transactionSubsystem.getLockManager(), transactionSubsystem,
                metadataIndex.getResourceId(), metadataStoragePartition, ResourceType.LSM_BTREE, indexOp);
    }
    throw new IllegalStateException("Unknown operation type: " + indexOp);
}
@Test public void fullMergeTest() throws Exception { String datasetName = "ds"; TestDataUtil.createIdOnlyDataset(datasetName); INcApplicationContext appCtx = (INcApplicationContext) (integrationUtil.ncs[0].getApplicationContext()); IDatasetLifecycleManager dlcm = appCtx.getDatasetLifecycleManager(); IMetadataIndex dsIdx = MetadataPrimaryIndexes.DATASET_DATASET; DatasetInfo datasetInfo = dlcm.getDatasetInfo(dsIdx.getDatasetId().getId()); // flush to ensure multiple disk components dlcm.flushAllDatasets(); datasetInfo.waitForIO(); AbstractLSMIndex index = (AbstractLSMIndex) dlcm.getIndex(dsIdx.getDatasetId().getId(), dsIdx.getResourceId()); Assert.assertTrue(index.getDiskComponents().size() > 1); // trigger full merge and ensure we have a single disk component when merge completes testExecutor.executeSqlppUpdateOrDdl("COMPACT DATASET Metadata.`Dataset`;", TestCaseContext.OutputFormat.CLEAN_JSON); datasetInfo.waitForIO(); Assert.assertTrue(index.getDiskComponents().size() == 1); } }
/**
 * Searches the given metadata index for tuples matching {@code searchKey} (or scans the
 * whole index when {@code searchKey} is null) and collects extracted values into
 * {@code results}.
 *
 * @param txnId          transaction id under which the search runs
 * @param index          metadata index to search
 * @param searchKey      exact-match key, or null to scan all tuples
 * @param valueExtractor extractor producing a result value per matching tuple
 * @param results        output list receiving the extracted values
 * @throws AlgebricksException  if the index has no backing file
 * @throws HyracksDataException on storage-level failures
 * @throws RemoteException      propagated from remote metadata calls
 */
private <T> void searchIndex(TxnId txnId, IMetadataIndex index, ITupleReference searchKey,
        IValueExtractor<T> valueExtractor, List<T> results)
        throws AlgebricksException, HyracksDataException, RemoteException {
    IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
    if (index.getFile() == null) {
        throw new AlgebricksException("No file for Index " + index.getDataverseName() + "." + index.getIndexName());
    }
    String resourceName = index.getFile().getRelativePath();
    IIndex indexInstance = datasetLifecycleManager.get(resourceName);
    datasetLifecycleManager.open(resourceName);
    try {
        // BUGFIX: close the index in a finally block. Previously close() was called after the
        // try/finally, so any exception thrown during the search leaked the opened index.
        IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpIndexAccessParameters.INSTANCE);
        try {
            IBinaryComparator[] searchCmps = null;
            MultiComparator searchCmp = null;
            if (searchKey != null) {
                searchCmps = new IBinaryComparator[searchKey.getFieldCount()];
                for (int i = 0; i < searchKey.getFieldCount(); i++) {
                    searchCmps[i] = comparatorFactories[i].createBinaryComparator();
                }
                searchCmp = new MultiComparator(searchCmps);
            }
            // Inclusive point/range predicate: low key == high key == searchKey.
            RangePredicate rangePred = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp);
            search(indexAccessor, rangePred, results, valueExtractor, txnId);
        } finally {
            indexAccessor.destroy();
        }
    } finally {
        datasetLifecycleManager.close(resourceName);
    }
}
/**
 * Applies the given operation to a metadata index under transaction {@code txnId}.
 *
 * NOTE(review): only the beginning of this method is visible in this chunk — it is cut
 * off mid-{@code switch}, and {@code txnCtx}/{@code modCallback}/{@code iap} are used
 * without visible declarations (presumably declared in the elided portion or as fields).
 * TODO: confirm against the full file before editing.
 */
private void modifyMetadataIndex(Operation op, TxnId txnId, IMetadataIndex metadataIndex, ITupleReference tuple)
        throws HyracksDataException {
    // The lifecycle manager is keyed by the index file's relative path.
    String resourceName = metadataIndex.getFile().getRelativePath();
    ILSMIndex lsmIndex = (ILSMIndex) datasetLifecycleManager.get(resourceName);
    datasetLifecycleManager.open(resourceName);
    ILSMIndexAccessor indexAccessor = lsmIndex.createAccessor(iap);
    // Mark the transaction as a writer and register this index with it for commit/abort handling.
    txnCtx.setWriteTxn(true);
    txnCtx.register(metadataIndex.getResourceId(),
            StoragePathUtil.getPartitionNumFromRelativePath(resourceName), lsmIndex, modCallback,
            metadataIndex.isPrimaryIndex());
    // Ensure the LSM index's first LSN is set before logging the modification.
    LSMIndexUtil.checkAndSetFirstLSN((AbstractLSMIndex) lsmIndex, transactionSubsystem.getLogManager());
    switch (op) {
/**
 * Initializes the {@code DatasetIdFactory} with the most recent dataset id found in the
 * stored metadata Dataset dataset, so newly created datasets get fresh ids.
 *
 * @param txnId transaction id used to read the stored dataset index
 * @throws AlgebricksException wrapping any storage-level failure
 * @throws RemoteException    propagated from remote metadata calls
 */
@Override
public void initializeDatasetIdFactory(TxnId txnId) throws AlgebricksException, RemoteException {
    int latestDatasetId;
    try {
        String datasetsResource = MetadataPrimaryIndexes.DATASET_DATASET.getFile().getRelativePath();
        IIndex datasetsIndex = datasetLifecycleManager.get(datasetsResource);
        datasetLifecycleManager.open(datasetsResource);
        try {
            latestDatasetId = getMostRecentDatasetIdFromStoredDatasetIndex(datasetsIndex, txnId);
        } finally {
            // Always release the opened index, even if the scan fails.
            datasetLifecycleManager.close(datasetsResource);
        }
    } catch (HyracksDataException e) {
        // Wrap storage-level failures in the compiler-facing exception type.
        throw new AlgebricksException(e);
    }
    DatasetIdFactory.initialize(latestDatasetId);
}
// NOTE(review): fragment of a larger test method — the enclosing method's signature and
// the later uses of opTracker/mdTxn2 are outside this view.
// Flush all datasets so the Nodegroup metadata dataset has durable disk components.
dlcm.flushAllDatasets();
IMetadataIndex idx = MetadataPrimaryIndexes.NODEGROUP_DATASET;
DatasetInfo datasetInfo = dlcm.getDatasetInfo(idx.getDatasetId().getId());
AbstractLSMIndex index = (AbstractLSMIndex) appCtx.getDatasetLifecycleManager()
        .getIndex(idx.getDatasetId().getId(), idx.getResourceId());
// Grab the primary-index operation tracker to observe flush/merge activity below.
PrimaryIndexOperationTracker opTracker = (PrimaryIndexOperationTracker) index.getOperationTracker();
// Start a second metadata transaction (presumably to run concurrently with an earlier one).
final MetadataTransactionContext mdTxn2 = MetadataManager.INSTANCE.beginTransaction();
// Fragment opening the Dataverse metadata dataset for a range scan (rangePred is
// populated by code outside this view).
RangePredicate rangePred;
IMetadataIndex index = MetadataPrimaryIndexes.DATAVERSE_DATASET;
// BUGFIX: key the lifecycle manager by the file's relative path, consistent with
// searchIndex/modifyMetadataIndex/initializeDatasetIdFactory. FileReference.toString()
// may yield a non-relative form, causing the lookup to miss the registered resource.
String resourceName = index.getFile().getRelativePath();
IIndex indexInstance = datasetLifecycleManager.get(resourceName);
datasetLifecycleManager.open(resourceName);
/**
 * Constructs a translator between Index metadata entities and tuples of the
 * Index metadata dataset.
 *
 * @param txnId        transaction id stored for use by translation methods
 * @param metadataNode metadata node stored for use by translation methods
 * @param getTuple     forwarded to the base translator — presumably controls whether
 *                     a reusable output tuple is created; confirm in the superclass
 */
protected IndexTupleTranslator(TxnId txnId, MetadataNode metadataNode, boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.INDEX_DATASET.getFieldCount());
    // Assignment order of these independent fields is irrelevant.
    this.metadataNode = metadataNode;
    this.txnId = txnId;
}
/**
 * Constructs a translator between NodeGroup metadata entities and tuples of the
 * NodeGroup metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected NodeGroupTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.NODEGROUP_DATASET.getFieldCount());
}
/**
 * Constructs a translator between ExternalFile metadata entities and tuples of the
 * ExternalFile metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected ExternalFileTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET.getFieldCount());
}
/**
 * Constructs a translator between Node metadata entities and tuples of the
 * Node metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected NodeTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.NODE_DATASET.getFieldCount());
}
/**
 * Constructs a translator between FeedConnection metadata entities and tuples of the
 * FeedConnection metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
public FeedConnectionTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET.getFieldCount());
}
/**
 * Constructs a translator between CompactionPolicy metadata entities and tuples of the
 * CompactionPolicy metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected CompactionPolicyTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET.getFieldCount());
}
/**
 * Constructs a translator between Library metadata entities and tuples of the
 * Library metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected LibraryTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.LIBRARY_DATASET.getFieldCount());
}
/**
 * Constructs a translator between Feed metadata entities and tuples of the
 * Feed metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected FeedTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.FEED_DATASET.getFieldCount());
}
/**
 * Constructs a translator between DatasourceAdapter metadata entities and tuples of the
 * DatasourceAdapter metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected DatasourceAdapterTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET.getFieldCount());
}
/**
 * Constructs a translator between Function metadata entities and tuples of the
 * Function metadata dataset.
 *
 * @param getTuple forwarded to the base translator along with this dataset's field count
 */
protected FunctionTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.FUNCTION_DATASET.getFieldCount());
}