@Override
public void initialize() throws HyracksDataException {
    // Open the index and schedule a full merge of all of its disk components
    indexHelper.open();
    ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance();
    ILSMIndexAccessor accessor = index.createAccessor(NoOpIndexAccessParameters.INSTANCE);
    accessor.scheduleFullMerge();
}

@Override
protected void initializeBulkLoader() throws HyracksDataException {
    ILSMIndex targetIndex = (ILSMIndex) index;
    Map<String, Object> parameters = new HashMap<>();
    parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID);
    if (usage.equals(BulkLoadUsage.LOAD)) {
        // Loading a dataset: the bulk-loaded component keeps the default component id
        bulkLoader = targetIndex.createBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
                parameters);
    } else {
        // Creating an index on existing data: derive the component id from the primary index's disk components
        primaryIndexHelper.open();
        primaryIndex = (ILSMIndex) primaryIndexHelper.getIndexInstance();
        List<ILSMDiskComponent> primaryComponents = primaryIndex.getDiskComponents();
        if (!primaryComponents.isEmpty()) {
            // Cover the id range spanned by the primary index's existing disk components
            ILSMComponentId bulkloadId = LSMComponentIdUtils.union(primaryComponents.get(0).getId(),
                    primaryComponents.get(primaryComponents.size() - 1).getId());
            parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, bulkloadId);
        } else {
            parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID,
                    LSMComponentId.EMPTY_INDEX_LAST_COMPONENT_ID);
        }
        bulkLoader = targetIndex.createBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
                parameters);
    }
}

@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(recDesc);
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        writer.open();
        initializeBulkLoader();
    } catch (Exception e) {
        throw HyracksDataException.create(e);
    }
}

@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(recDesc);
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        writer.open();
        // Transactional bulk loader
        Map<String, Object> parameters = new HashMap<>();
        bulkLoader = ((ITwoPCIndex) index).createTransactionBulkLoader(fillFactor, verifyInput, deletedFiles.length,
                parameters);
        // Delete files
        for (int i = 0; i < deletedFiles.length; i++) {
            fileNumber.setValue(deletedFiles[i]);
            FilesIndexDescription.getBuddyBTreeTupleFromFileNumber(deleteTuple, buddyBTreeTupleBuilder, fileNumber);
            ((ITwoPCIndexBulkLoader) bulkLoader).delete(deleteTuple);
        }
    } catch (Throwable e) {
        throw HyracksDataException.create(e);
    }
}

private void dropInUse(IHyracksTaskContext ctx, IndexDataflowHelperFactory helperFactory,
        IIndexDataflowHelper dataflowHelper) throws Exception {
    dropFailed.set(false);
    // open the index to make it in-use
    dataflowHelper.open();
    // try to drop in-use index (should fail)
    IndexDropOperatorNodePushable dropInUseOp =
            new IndexDropOperatorNodePushable(helperFactory, EnumSet.noneOf(DropOption.class), ctx, 0);
    try {
        dropInUseOp.initialize();
    } catch (HyracksDataException e) {
        e.printStackTrace();
        Assert.assertEquals(ErrorCode.CANNOT_DROP_IN_USE_INDEX, e.getErrorCode());
        dropFailed.set(true);
    }
    Assert.assertTrue(dropFailed.get());
}

@Override
public void initialize() throws HyracksDataException {
    IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
    IIndexDataflowHelper indexHelper =
            dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    FileIndexTupleTranslator filesTupleTranslator = new FileIndexTupleTranslator();
    // Build the index
    indexBuilder.build();
    // Open the index
    indexHelper.open();
    try {
        ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance();
        Map<String, Object> parameters = new HashMap<>();
        parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID);
        // Create bulk loader
        IIndexBulkLoader bulkLoader =
                index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false, parameters);
        // Load files
        for (ExternalFile file : files) {
            bulkLoader.add(filesTupleTranslator.getTupleFromFile(file));
        }
        bulkLoader.end();
    } finally {
        indexHelper.close();
    }
}

@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(inputRecDesc);
    writeBuffer = new VSizeFrame(ctx);
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        writer.open();
        LocalResource resource = indexHelper.getResource();
        // Create the modification callback and the accessor used to apply incoming tuples to the index
        modCallback = modOpCallbackFactory.createModificationOperationCallback(resource, ctx, this);
        IIndexAccessParameters iap = new IndexAccessParameters(modCallback, NoOpOperationCallback.INSTANCE);
        indexAccessor = index.createAccessor(iap);
        if (tupleFilterFactory != null) {
            tupleFilter = tupleFilterFactory.createTupleFilter(ctx);
            frameTuple = new FrameTupleReference();
        }
    } catch (Exception e) {
        throw HyracksDataException.create(e);
    }
}

@Override
public void initialize() throws HyracksDataException {
    Throwable failure = null;
    treeIndexHelper.open();
    try {
        writer.open();
        FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
        scan(appender);
        appender.write(writer, true);
    } catch (Throwable th) { // NOSONAR: Must call writer.fail
        failure = th;
        try {
            writer.fail();
        } catch (Throwable failFailure) { // NOSONAR: Must maintain all stacks
            failure = ExceptionUtils.suppress(failure, failFailure);
        }
    } finally {
        failure = CleanupUtils.close(writer, failure);
    }
    if (failure != null) {
        throw HyracksDataException.create(failure);
    }
}

private void createSecondaryIndex()
        throws HyracksDataException, RemoteException, ACIDException, AlgebricksException {
    SecondaryIndexInfo secondaryIndexInfo =
            nc.createSecondaryIndex(primaryIndexInfo, secondaryIndex, storageManager, 0);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), secondaryIndexInfo.getFileSplitProvider());
    secondaryIndexDataflowHelper = iHelperFactory.create(taskCtx.getJobletContext().getServiceContext(), 0);
    secondaryIndexDataflowHelper.open();
    secondaryLsmBtree = (TestLsmBtree) secondaryIndexDataflowHelper.getIndexInstance();
    secondaryIndexDataflowHelper.close();
}

public void open() throws HyracksDataException {
    // Open the index and get the instance
    indexDataflowHelper.open();
    index = (ExternalBTree) indexDataflowHelper.getIndexInstance();
    // Create search key and search predicate objects
    searchKey = new ArrayTupleReference();
    searchKeyTupleBuilder = new ArrayTupleBuilder(FilesIndexDescription.FILE_KEY_SIZE);
    searchKeyTupleBuilder.reset();
    searchKeyTupleBuilder.addField(intSerde, currentFileNumber);
    searchKey.reset(searchKeyTupleBuilder.getFieldEndOffsets(), searchKeyTupleBuilder.getByteArray());
    MultiComparator searchCmp = BTreeUtils.getSearchMultiComparator(index.getComparatorFactories(), searchKey);
    searchPredicate = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp);
    // create the accessor and the cursor using the passed version
    ISearchOperationCallback searchCallback = searchCallbackFactory
            .createSearchOperationCallback(indexDataflowHelper.getResource().getId(), ctx, null);
    fileIndexAccessor = index.createAccessor(searchCallback, version);
    fileIndexSearchCursor = fileIndexAccessor.createSearchCursor(false);
}

@Before
public void createIndex() throws Exception {
    List<List<String>> partitioningKeys = new ArrayList<>();
    partitioningKeys.add(Collections.singletonList("key"));
    dataset = new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME,
            NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null, PartitioningStrategy.HASH,
                    partitioningKeys, null, null, null, false, null),
            null, DatasetType.INTERNAL, DATASET_ID, 0);
    PrimaryIndexInfo primaryIndexInfo = nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, null,
            storageManager, KEY_INDEXES, KEY_INDICATORS_LIST, 0);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, 0, false);
    indexDataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(), 0);
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
    txnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(ctx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    insertOp = nc.getInsertPipeline(ctx, dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, null, KEY_INDEXES,
            KEY_INDICATORS_LIST, storageManager, null).getLeft();
}

private void readIndex() throws HyracksDataException {
    primaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    primaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        IIndexDataflowHelperFactory factory = new IndexDataflowHelperFactory(nc.getStorageManager(),
                primaryIndexInfos[i].getFileSplitProvider());
        primaryIndexDataflowHelpers[i] = factory.create(testCtxs[i].getJobletContext().getServiceContext(), i);
        primaryIndexDataflowHelpers[i].open();
        primaryIndexes[i] = (TestLsmBtree) primaryIndexDataflowHelpers[i].getIndexInstance();
        primaryIndexDataflowHelpers[i].close();
    }
    secondaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    secondaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        IIndexDataflowHelperFactory factory = new IndexDataflowHelperFactory(nc.getStorageManager(),
                secondaryIndexInfo[i].getFileSplitProvider());
        secondaryIndexDataflowHelpers[i] = factory.create(testCtxs[i].getJobletContext().getServiceContext(), i);
        secondaryIndexDataflowHelpers[i].open();
        secondaryIndexes[i] = (TestLsmBtree) secondaryIndexDataflowHelpers[i].getIndexInstance();
        secondaryIndexDataflowHelpers[i].close();
    }
}

@Before
public void createIndex() throws Exception {
    PrimaryIndexInfo primaryIndexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, PARTITION, false);
    indexDataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
}

@Test
public void testFlushMetadataOnlyComponent() throws Exception {
    // allow all operations
    StorageTestUtils.allowAllOps(lsmBtree);
    // ensure there are no disk components and the memory components have not been allocated
    Assert.assertEquals(0, lsmBtree.getDiskComponents().size());
    Assert.assertFalse(lsmBtree.isMemoryComponentsAllocated());
    MutableArrayValueReference key = new MutableArrayValueReference("FlushMetadataOnlyTestKey".getBytes());
    MutableArrayValueReference value = new MutableArrayValueReference("FlushMetadataOnlyTestValue".getBytes());
    indexDataflowHelper.open();
    ILSMIndexAccessor accessor = lsmBtree.createAccessor(NoOpIndexAccessParameters.INSTANCE);
    accessor.updateMeta(key, value);
    Assert.assertTrue(lsmBtree.isMemoryComponentsAllocated());
    Assert.assertTrue(lsmBtree.getCurrentMemoryComponent().isModified());
    indexDataflowHelper.close();
    // flush synchronously
    StorageTestUtils.flush(dsLifecycleMgr, lsmBtree, false);
    // assert one disk component
    Assert.assertEquals(1, lsmBtree.getDiskComponents().size());
    ArrayBackedValueStorage pointable = new ArrayBackedValueStorage();
    ComponentUtils.get(lsmBtree, key, pointable);
    Assert.assertTrue(DataUtils.equals(pointable, value));
    // ensure that we can search this component
    StorageTestUtils.searchAndAssertCount(nc, PARTITION, 0);
}

IndexDataflowHelperFactory iHelperFactory =
        new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
primaryIndexDataflowHelper = iHelperFactory.create(taskCtx.getJobletContext().getServiceContext(), 0);
primaryIndexDataflowHelper.open();
primaryLsmBtree = (TestLsmBtree) primaryIndexDataflowHelper.getIndexInstance();
primaryIndexDataflowHelper.close();

writeBuffer = new VSizeFrame(ctx);
appender = new FrameTupleAppender(writeBuffer);
indexHelper.open();
lsmIndex = (AbstractLSMIndex) indexHelper.getIndexInstance();
try {

writer.open();
accessor = new FrameTupleAccessor(inputRecDesc);
indexHelper.open();
index = indexHelper.getIndexInstance();
if (retainMissing) {

@Before
public void createIndex() throws Exception {
    PrimaryIndexInfo primaryIndexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, PARTITION, false);
    indexDataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
    txnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(ctx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    insertOp = StorageTestUtils.getInsertPipeline(nc, ctx);
    indexPath = indexDataflowHelper.getResource().getPath();
}

@Before
public void createIndex() throws Exception {
    PrimaryIndexInfo primaryIndexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, PARTITION, false);
    indexDataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
    txnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(ctx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    insertOp = StorageTestUtils.getInsertPipeline(nc, ctx, null);
    JobId abortJobId = nc.newJobId();
    abortCtx = nc.createTestContext(abortJobId, PARTITION, false);
    abortTxnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(abortCtx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    // abortOp is initialized by each test separately
    tupleGenerator = StorageTestUtils.getTupleGenerator();
}