@Override
public void deinitialize() throws HyracksDataException {
    // Counterpart of the operator's initialize(): release the tree-index handle
    // acquired there. close() may throw if the underlying index cannot be
    // released cleanly.
    treeIndexHelper.close();
}
}
@Override
public void open() throws HyracksDataException {
    // Open downstream operator state first.
    super.open();
    // Acquire both index instances for the lifetime of this operator; they are
    // presumably released in the matching close/deinitialize path — not visible
    // in this chunk, confirm.
    primaryIndexHelper.open();
    primaryIndex = (ILSMIndex) primaryIndexHelper.getIndexInstance();
    secondaryIndexHelper.open();
    secondaryIndex = (ILSMIndex) secondaryIndexHelper.getIndexInstance();
}
@Override protected void performOpOnIndex(IIndexDataflowHelper indexHelper, IHyracksTaskContext ctx) throws HyracksDataException { String path = indexHelper.getResource().getPath(); IIOManager ioManager = ctx.getIoManager(); FileReference file = ioManager.resolve(path); LOGGER.warn("performing the operation on " + file.getFile().getAbsolutePath()); // Get index IIndex index = indexHelper.getIndexInstance(); // commit transaction ((ITwoPCIndex) index).commitTransaction(); LOGGER.warn("operation on " + file.getFile().getAbsolutePath() + " Succeded"); } }
@Override public void initialize() throws HyracksDataException { IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition); IIndexDataflowHelper indexHelper = dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition); FileIndexTupleTranslator filesTupleTranslator = new FileIndexTupleTranslator(); // Build the index indexBuilder.build(); // Open the index indexHelper.open(); try { ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance(); Map<String, Object> parameters = new HashMap<>(); parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID); // Create bulk loader IIndexBulkLoader bulkLoader = index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false, parameters); // Load files for (ExternalFile file : files) { bulkLoader.add(filesTupleTranslator.getTupleFromFile(file)); } bulkLoader.end(); } finally { indexHelper.close(); } }
@Override
public void open() throws HyracksDataException {
    // Frame plumbing for incoming tuples.
    accessor = new FrameTupleAccessor(inputRecDesc);
    writeBuffer = new VSizeFrame(ctx);
    // Acquire the index before opening downstream state so a failure here
    // surfaces first.
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        writer.open();
        LocalResource resource = indexHelper.getResource();
        modCallback = modOpCallbackFactory.createModificationOperationCallback(resource, ctx, this);
        IIndexAccessParameters accessParams =
                new IndexAccessParameters(modCallback, NoOpOperationCallback.INSTANCE);
        indexAccessor = index.createAccessor(accessParams);
        // Optional per-job tuple filtering.
        if (tupleFilterFactory != null) {
            tupleFilter = tupleFilterFactory.createTupleFilter(ctx);
            frameTuple = new FrameTupleReference();
        }
    } catch (Exception failure) {
        // Normalize any setup failure into a HyracksDataException.
        throw HyracksDataException.create(failure);
    }
}
@Before
public void createIndex() throws Exception {
    // Build the primary index and a dataflow helper bound to the test partition.
    PrimaryIndexInfo indexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory helperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), indexInfo.getFileSplitProvider());
    JobId newJobId = nc.newJobId();
    ctx = nc.createTestContext(newJobId, PARTITION, false);
    indexDataflowHelper = helperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    // Open just long enough to grab the TestLsmBtree instance the tests poke at.
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
    // Entity-level transaction plus an insert pipeline for the test body.
    txnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(ctx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    insertOp = StorageTestUtils.getInsertPipeline(nc, ctx);
    indexPath = indexDataflowHelper.getResource().getPath();
}
// Verifies that a component whose only modification is a metadata update (no
// record inserts) is still flushed to disk and remains readable/searchable.
@Test
public void testFlushMetadataOnlyComponent() throws Exception {
    // allow all operations
    StorageTestUtils.allowAllOps(lsmBtree);
    // ensure no disk component and memory component is empty
    Assert.assertEquals(0, lsmBtree.getDiskComponents().size());
    Assert.assertFalse(lsmBtree.isMemoryComponentsAllocated());
    // NOTE(review): getBytes() uses the platform default charset — fine for this
    // ASCII test key/value, but worth confirming against project conventions.
    MutableArrayValueReference key = new MutableArrayValueReference("FlushMetadataOnlyTestKey".getBytes());
    MutableArrayValueReference value = new MutableArrayValueReference("FlushMetadataOnlyTestValue".getBytes());
    indexDataflowHelper.open();
    ILSMIndexAccessor accessor = lsmBtree.createAccessor(NoOpIndexAccessParameters.INSTANCE);
    // A metadata-only update must allocate and dirty the memory component.
    accessor.updateMeta(key, value);
    Assert.assertTrue(lsmBtree.isMemoryComponentsAllocated());
    Assert.assertTrue(lsmBtree.getCurrentMemoryComponent().isModified());
    indexDataflowHelper.close();
    // flush synchronously
    StorageTestUtils.flush(dsLifecycleMgr, lsmBtree, false);
    // assert one disk component
    Assert.assertEquals(1, lsmBtree.getDiskComponents().size());
    // The flushed metadata value must be retrievable from the disk component.
    ArrayBackedValueStorage pointable = new ArrayBackedValueStorage();
    ComponentUtils.get(lsmBtree, key, pointable);
    Assert.assertTrue(DataUtils.equals(pointable, value));
    // ensure that we can search this component
    StorageTestUtils.searchAndAssertCount(nc, PARTITION, 0);
}
private void dropInUse(IHyracksTaskContext ctx, IndexDataflowHelperFactory helperFactory, IIndexDataflowHelper dataflowHelper) throws Exception { dropFailed.set(false); // open the index to make it in-use dataflowHelper.open(); // try to drop in-use index (should fail) IndexDropOperatorNodePushable dropInUseOp = new IndexDropOperatorNodePushable(helperFactory, EnumSet.noneOf(DropOption.class), ctx, 0); try { dropInUseOp.initialize(); } catch (HyracksDataException e) { e.printStackTrace(); Assert.assertEquals(ErrorCode.CANNOT_DROP_IN_USE_INDEX, e.getErrorCode()); dropFailed.set(true); } Assert.assertTrue(dropFailed.get()); }
@SuppressWarnings({ "squid:S1181", "squid:S1166" }) private void traceLastRecordIn() { try { if (tracer.isEnabled(traceCategory) && lastRecordInTimeStamp > 0 && indexHelper != null && indexHelper.getIndexInstance() != null) { tracer.instant("UpsertClose", traceCategory, Scope.t, "{\"last-record-in\":\"" + DATE_FORMAT.get().format(new Date(lastRecordInTimeStamp)) + "\", \"index\":" + indexHelper.getIndexInstance().toString() + "}"); } } catch (Throwable traceFailure) { try { LOGGER.warn("Tracing last record in failed", traceFailure); } catch (Throwable ignore) { // Ignore logging failure } } }
@Override
protected void performOpOnIndex(IIndexDataflowHelper indexDataflowHelper, IHyracksTaskContext ctx)
        throws HyracksDataException {
    // Recover the in-flight transaction's files for the index at this resource path.
    IIOManager ioManager = ctx.getIoManager();
    String resourcePath = indexDataflowHelper.getResource().getPath();
    FileReference indexRoot = ioManager.resolve(resourcePath);
    AbortRecoverLSMIndexFileManager recoveryManager =
            new AbortRecoverLSMIndexFileManager(ioManager, indexRoot);
    recoveryManager.recoverTransaction();
}
}
// Creates the secondary index on partition 0 and captures its TestLsmBtree handle.
private void createSecondaryIndex()
        throws HyracksDataException, RemoteException, ACIDException, AlgebricksException {
    SecondaryIndexInfo indexInfo =
            nc.createSecondaryIndex(primaryIndexInfo, secondaryIndex, storageManager, 0);
    IndexDataflowHelperFactory helperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), indexInfo.getFileSplitProvider());
    secondaryIndexDataflowHelper = helperFactory.create(taskCtx.getJobletContext().getServiceContext(), 0);
    // Open just long enough to grab the index instance; tests reopen as needed.
    secondaryIndexDataflowHelper.open();
    secondaryLsmBtree = (TestLsmBtree) secondaryIndexDataflowHelper.getIndexInstance();
    secondaryIndexDataflowHelper.close();
}
// Opens the external files index and prepares a point-lookup on the current
// file number: builds the search key, range predicate, accessor and cursor
// pinned to the requested index version.
public void open() throws HyracksDataException {
    // Open the index and get the instance
    indexDataflowHelper.open();
    index = (ExternalBTree) indexDataflowHelper.getIndexInstance();
    // Create search key and search predicate objects
    searchKey = new ArrayTupleReference();
    searchKeyTupleBuilder = new ArrayTupleBuilder(FilesIndexDescription.FILE_KEY_SIZE);
    searchKeyTupleBuilder.reset();
    searchKeyTupleBuilder.addField(intSerde, currentFileNumber);
    searchKey.reset(searchKeyTupleBuilder.getFieldEndOffsets(), searchKeyTupleBuilder.getByteArray());
    // Equality lookup: same key on both ends, both bounds inclusive.
    MultiComparator searchCmp = BTreeUtils.getSearchMultiComparator(index.getComparatorFactories(), searchKey);
    searchPredicate = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp);
    // create the accessor and the cursor using the passed version
    ISearchOperationCallback searchCallback = searchCallbackFactory
            .createSearchOperationCallback(indexDataflowHelper.getResource().getId(), ctx, null);
    fileIndexAccessor = index.createAccessor(searchCallback, version);
    fileIndexSearchCursor = fileIndexAccessor.createSearchCursor(false);
}
// NOTE(review): this method is truncated in the visible chunk — the closing of
// the finally block and the method lie beyond it.
@Override
public void initialize() throws HyracksDataException {
    treeIndexHelper.open();
    // NOTE(review): treeIndex is resolved but unused in the visible span —
    // presumably consumed past this chunk; confirm.
    ITreeIndex treeIndex = (ITreeIndex) treeIndexHelper.getIndexInstance();
    try {
        writer.open();
        // NOTE(review): bufferCache and fileRef are likewise unused in the
        // visible span; the writer is closed with nothing visibly written —
        // verify against the full method body.
        IBufferCache bufferCache = storageManager.getBufferCache(ctx.getJobletContext().getServiceContext());
        LocalResource resource = treeIndexHelper.getResource();
        IIOManager ioManager = ctx.getIoManager();
        FileReference fileRef = ioManager.resolve(resource.getPath());
        writer.close();
    } finally {
        treeIndexHelper.close();
// Scans the tree index and pushes all tuples to the downstream writer, carefully
// preserving the first failure while still honoring the Hyracks writer contract
// (fail() on error, close() always).
@Override
public void initialize() throws HyracksDataException {
    Throwable failure = null;
    // NOTE(review): treeIndexHelper is opened but not closed in this method —
    // presumably released in a deinitialize/close path outside this chunk; confirm.
    treeIndexHelper.open();
    try {
        writer.open();
        FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
        scan(appender);
        // Flush any tuples still buffered in the appender.
        appender.write(writer, true);
    } catch (Throwable th) { // NOSONAR: Must call writer.fail
        failure = th;
        try {
            writer.fail();
        } catch (Throwable failFailure) {// NOSONAR: Must maintain all stacks
            // Keep the original failure as primary; attach the fail() failure.
            failure = ExceptionUtils.suppress(failure, failFailure);
        }
    } finally {
        // close() runs regardless; any close failure is folded into `failure`.
        failure = CleanupUtils.close(writer, failure);
    }
    if (failure != null) {
        throw HyracksDataException.create(failure);
    }
}
@Override protected void performOpOnIndex(IIndexDataflowHelper indexDataflowHelper, IHyracksTaskContext ctx) throws HyracksDataException { String path = indexDataflowHelper.getResource().getPath(); IIOManager ioManager = ctx.getIoManager(); FileReference file = ioManager.resolve(path); AbortRecoverLSMIndexFileManager fileManager = new AbortRecoverLSMIndexFileManager(ctx.getIoManager(), file); fileManager.deleteTransactionFiles(); }
// Destroys the index, retrying on transient failures; ignorable failures are
// logged and treated as success, anything else is rethrown.
private void dropIndex() throws HyracksDataException {
    for (;;) {
        try {
            indexHelper.destroy();
            return;
        } catch (HyracksDataException ex) {
            if (isIgnorable(ex)) {
                LOGGER.debug("Ignoring exception on drop", ex);
                return;
            }
            if (!canRetry(ex)) {
                throw ex;
            }
            LOGGER.info("Retrying drop on exception", ex);
        }
    }
}
// Captures the TestLsmBtree instance behind every primary and secondary
// partition, opening each helper only long enough to read the instance.
private void readIndex() throws HyracksDataException {
    primaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    primaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int partition = 0; partition < NUM_PARTITIONS; partition++) {
        IIndexDataflowHelperFactory helperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
                primaryIndexInfos[partition].getFileSplitProvider());
        primaryIndexDataflowHelpers[partition] =
                helperFactory.create(testCtxs[partition].getJobletContext().getServiceContext(), partition);
        primaryIndexDataflowHelpers[partition].open();
        primaryIndexes[partition] = (TestLsmBtree) primaryIndexDataflowHelpers[partition].getIndexInstance();
        primaryIndexDataflowHelpers[partition].close();
    }
    // Same procedure for the secondary indexes.
    secondaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    secondaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int partition = 0; partition < NUM_PARTITIONS; partition++) {
        IIndexDataflowHelperFactory helperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
                secondaryIndexInfo[partition].getFileSplitProvider());
        secondaryIndexDataflowHelpers[partition] =
                helperFactory.create(testCtxs[partition].getJobletContext().getServiceContext(), partition);
        secondaryIndexDataflowHelpers[partition].open();
        secondaryIndexes[partition] =
                (TestLsmBtree) secondaryIndexDataflowHelpers[partition].getIndexInstance();
        secondaryIndexDataflowHelpers[partition].close();
    }
}
// NOTE(review): fragment — the enclosing method's header and the remainder of
// this if-branch lie outside the visible chunk; documentation below is limited
// to what this span shows.
writer.open();
accessor = new FrameTupleAccessor(inputRecDesc);
indexHelper.open();
index = indexHelper.getIndexInstance();
// When missing keys are retained, prepare an appender and a search callback;
// presumably the branch continues past this chunk — confirm against full file.
if (retainMissing) {
    int fieldCount = getFieldCount();
    appender = new FrameTupleAppender(new VSizeFrame(ctx));
    ISearchOperationCallback searchCallback =
            searchCallbackFactory.createSearchOperationCallback(indexHelper.getResource().getId(), ctx, null);
@Override
public void initialize() throws HyracksDataException {
    // Kick off a full merge of this LSM index's disk components.
    indexHelper.open();
    ILSMIndex lsmIndex = (ILSMIndex) indexHelper.getIndexInstance();
    ILSMIndexAccessor mergeAccessor = lsmIndex.createAccessor(NoOpIndexAccessParameters.INSTANCE);
    mergeAccessor.scheduleFullMerge();
    // NOTE(review): indexHelper is not closed here — presumably handled in a
    // deinitialize/close path outside this chunk; confirm.
}