// Best-effort trace of the last record processed by this operator. Tracing must
// never fail the operator itself, hence the intentionally broad Throwable
// handling (Sonar rules S1181/S1166 suppressed on purpose).
@SuppressWarnings({ "squid:S1181", "squid:S1166" })
private void traceLastRecordIn() {
    try {
        // Emit only when tracing is enabled, at least one record was seen
        // (timestamp > 0), and the index instance is actually available.
        if (tracer.isEnabled(traceCategory) && lastRecordInTimeStamp > 0 && indexHelper != null
                && indexHelper.getIndexInstance() != null) {
            // DATE_FORMAT.get() suggests a thread-confined formatter (ThreadLocal) —
            // presumably because the underlying format is not thread-safe; TODO confirm.
            tracer.instant("UpsertClose", traceCategory, Scope.t,
                    "{\"last-record-in\":\"" + DATE_FORMAT.get().format(new Date(lastRecordInTimeStamp))
                            + "\", \"index\":" + indexHelper.getIndexInstance().toString() + "}");
        }
    } catch (Throwable traceFailure) {
        try {
            LOGGER.warn("Tracing last record in failed", traceFailure);
        } catch (Throwable ignore) {
            // Ignore logging failure
        }
    }
}
/**
 * Opens the primary and secondary index handles used by this operator.
 * If opening the secondary index fails after the primary helper was already
 * opened, the primary helper is closed so its handle is not leaked; the close
 * failure (if any) is attached as a suppressed exception.
 *
 * @throws HyracksDataException if either index cannot be opened
 */
@Override
public void open() throws HyracksDataException {
    super.open();
    primaryIndexHelper.open();
    primaryIndex = (ILSMIndex) primaryIndexHelper.getIndexInstance();
    try {
        secondaryIndexHelper.open();
        secondaryIndex = (ILSMIndex) secondaryIndexHelper.getIndexInstance();
    } catch (Exception e) {
        // do not leak the already-opened primary helper when the secondary fails
        try {
            primaryIndexHelper.close();
        } catch (Exception closeFailure) {
            e.addSuppressed(closeFailure);
        }
        throw HyracksDataException.create(e);
    }
}
@Override public void initialize() throws HyracksDataException { indexHelper.open(); ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance(); ILSMIndexAccessor accessor = index.createAccessor(NoOpIndexAccessParameters.INSTANCE); accessor.scheduleFullMerge(); }
@Override protected void performOpOnIndex(IIndexDataflowHelper indexHelper, IHyracksTaskContext ctx) throws HyracksDataException { String path = indexHelper.getResource().getPath(); IIOManager ioManager = ctx.getIoManager(); FileReference file = ioManager.resolve(path); LOGGER.warn("performing the operation on " + file.getFile().getAbsolutePath()); // Get index IIndex index = indexHelper.getIndexInstance(); // commit transaction ((ITwoPCIndex) index).commitTransaction(); LOGGER.warn("operation on " + file.getFile().getAbsolutePath() + " Succeded"); } }
/**
 * Prepares the LSM bulk loader for this operator.
 * <p>
 * For {@code BulkLoadUsage.LOAD} the flushed-component id stays at the default.
 * For any other usage, the id is derived from the primary index's on-disk
 * components: the union of the oldest and newest component ids, or the
 * empty-index sentinel when there are no components yet.
 * <p>
 * The bulk loader itself is now created at a single point after the id has been
 * decided (previously duplicated in both branches).
 *
 * @throws HyracksDataException if the primary index cannot be opened or the
 *             bulk loader cannot be created
 */
@Override
protected void initializeBulkLoader() throws HyracksDataException {
    ILSMIndex targetIndex = (ILSMIndex) index;
    Map<String, Object> parameters = new HashMap<>();
    // Default flushed-component id; used as-is for plain LOAD.
    parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID);
    if (!usage.equals(BulkLoadUsage.LOAD)) {
        // Non-LOAD usage: derive the component id from the primary index.
        primaryIndexHelper.open();
        primaryIndex = (ILSMIndex) primaryIndexHelper.getIndexInstance();
        List<ILSMDiskComponent> primaryComponents = primaryIndex.getDiskComponents();
        if (!primaryComponents.isEmpty()) {
            // Union of the oldest and newest ids covers the whole component range.
            ILSMComponentId bulkloadId = LSMComponentIdUtils.union(primaryComponents.get(0).getId(),
                    primaryComponents.get(primaryComponents.size() - 1).getId());
            parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, bulkloadId);
        } else {
            parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID,
                    LSMComponentId.EMPTY_INDEX_LAST_COMPONENT_ID);
        }
    }
    // Single creation point for both usages.
    bulkLoader = targetIndex.createBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
            parameters);
}
/**
 * Opens the operator: wires up the frame accessor, opens the index handle,
 * opens the downstream writer, and initializes the bulk loader.
 * Any failure is surfaced as a HyracksDataException.
 */
@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(recDesc);
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        writer.open();
        initializeBulkLoader();
    } catch (Exception openFailure) {
        throw HyracksDataException.create(openFailure);
    }
}
/**
 * Scans the tree index and hands matching tuples to {@code doScan} via the
 * supplied appender. The index accessor is always destroyed afterwards, even
 * when the scan fails.
 *
 * @param appender sink for the scanned tuples
 * @throws IOException if the scan or the callback creation fails
 */
private void scan(FrameTupleAppender appender) throws IOException {
    ITreeIndex treeIndex = (ITreeIndex) treeIndexHelper.getIndexInstance();
    LocalResource resource = treeIndexHelper.getResource();
    ISearchOperationCallback searchCallback =
            searchCallbackFactory.createSearchOperationCallback(resource.getId(), ctx, null);
    IIndexAccessParameters accessParams =
            new IndexAccessParameters(NoOpOperationCallback.INSTANCE, searchCallback);
    ITreeIndexAccessor treeAccessor = (ITreeIndexAccessor) treeIndex.createAccessor(accessParams);
    try {
        doScan(treeIndex, treeAccessor, appender);
    } finally {
        treeAccessor.destroy();
    }
}
@Override public void open() throws HyracksDataException { accessor = new FrameTupleAccessor(recDesc); indexHelper.open(); index = indexHelper.getIndexInstance(); try { writer.open(); // Transactional BulkLoader Map<String, Object> parameters = new HashMap<>(); bulkLoader = ((ITwoPCIndex) index).createTransactionBulkLoader(fillFactor, verifyInput, deletedFiles.length, parameters); // Delete files for (int i = 0; i < deletedFiles.length; i++) { fileNumber.setValue(deletedFiles[i]); FilesIndexDescription.getBuddyBTreeTupleFromFileNumber(deleteTuple, buddyBTreeTupleBuilder, fileNumber); ((ITwoPCIndexBulkLoader) bulkLoader).delete(deleteTuple); } } catch (Throwable e) { throw HyracksDataException.create(e); } }
@Override public void initialize() throws HyracksDataException { IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition); IIndexDataflowHelper indexHelper = dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition); FileIndexTupleTranslator filesTupleTranslator = new FileIndexTupleTranslator(); // Build the index indexBuilder.build(); // Open the index indexHelper.open(); try { ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance(); Map<String, Object> parameters = new HashMap<>(); parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID); // Create bulk loader IIndexBulkLoader bulkLoader = index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false, parameters); // Load files for (ExternalFile file : files) { bulkLoader.add(filesTupleTranslator.getTupleFromFile(file)); } bulkLoader.end(); } finally { indexHelper.close(); } }
/**
 * Opens the modification operator: sets up the frame machinery, opens the index
 * and downstream writer, and creates the index accessor with the modification
 * callback. Tuple filtering is only wired up when a filter factory was supplied.
 */
@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(inputRecDesc);
    writeBuffer = new VSizeFrame(ctx);
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        writer.open();
        LocalResource resource = indexHelper.getResource();
        modCallback = modOpCallbackFactory.createModificationOperationCallback(resource, ctx, this);
        IIndexAccessParameters accessParams =
                new IndexAccessParameters(modCallback, NoOpOperationCallback.INSTANCE);
        indexAccessor = index.createAccessor(accessParams);
        // Optional tuple filter: only created when a factory is configured.
        if (tupleFilterFactory != null) {
            tupleFilter = tupleFilterFactory.createTupleFilter(ctx);
            frameTuple = new FrameTupleReference();
        }
    } catch (Exception openFailure) {
        throw HyracksDataException.create(openFailure);
    }
}
/**
 * Creates the secondary index and captures its {@link TestLsmBtree} instance
 * for direct use by the test. The dataflow helper is closed in a finally block
 * so a failure in {@code getIndexInstance} cannot leak an open helper.
 *
 * @throws HyracksDataException if the index cannot be created or opened
 */
private void createSecondaryIndex()
        throws HyracksDataException, RemoteException, ACIDException, AlgebricksException {
    SecondaryIndexInfo secondaryIndexInfo =
            nc.createSecondaryIndex(primaryIndexInfo, secondaryIndex, storageManager, 0);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), secondaryIndexInfo.getFileSplitProvider());
    secondaryIndexDataflowHelper = iHelperFactory.create(taskCtx.getJobletContext().getServiceContext(), 0);
    secondaryIndexDataflowHelper.open();
    try {
        secondaryLsmBtree = (TestLsmBtree) secondaryIndexDataflowHelper.getIndexInstance();
    } finally {
        secondaryIndexDataflowHelper.close();
    }
}
public void open() throws HyracksDataException { // Open the index and get the instance indexDataflowHelper.open(); index = (ExternalBTree) indexDataflowHelper.getIndexInstance(); // Create search key and search predicate objects searchKey = new ArrayTupleReference(); searchKeyTupleBuilder = new ArrayTupleBuilder(FilesIndexDescription.FILE_KEY_SIZE); searchKeyTupleBuilder.reset(); searchKeyTupleBuilder.addField(intSerde, currentFileNumber); searchKey.reset(searchKeyTupleBuilder.getFieldEndOffsets(), searchKeyTupleBuilder.getByteArray()); MultiComparator searchCmp = BTreeUtils.getSearchMultiComparator(index.getComparatorFactories(), searchKey); searchPredicate = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp); // create the accessor and the cursor using the passed version ISearchOperationCallback searchCallback = searchCallbackFactory .createSearchOperationCallback(indexDataflowHelper.getResource().getId(), ctx, null); fileIndexAccessor = index.createAccessor(searchCallback, version); fileIndexSearchCursor = fileIndexAccessor.createSearchCursor(false); }
// Test fixture: builds a TestDataset hash-partitioned on "key", creates its
// primary index, captures the TestLsmBtree instance for direct test access,
// starts an entity-level transaction, and builds the insert pipeline.
@Before
public void createIndex() throws Exception {
    List<List<String>> partitioningKeys = new ArrayList<>();
    partitioningKeys.add(Collections.singletonList("key"));
    dataset = new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME,
            NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null, PartitioningStrategy.HASH,
                    partitioningKeys, null, null, null, false, null),
            null, DatasetType.INTERNAL, DATASET_ID, 0);
    PrimaryIndexInfo primaryIndexInfo = nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, null,
            storageManager, KEY_INDEXES, KEY_INDICATORS_LIST, 0);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, 0, false);
    indexDataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(), 0);
    // Open just long enough to grab the index instance for the test.
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
    txnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(ctx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    insertOp = nc.getInsertPipeline(ctx, dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, null, KEY_INDEXES,
            KEY_INDICATORS_LIST, storageManager, null).getLeft();
}
/**
 * Captures the {@link TestLsmBtree} instance of every primary and secondary
 * partition index. The repeated open/read/close sequence is factored into
 * {@code readIndexInstance}, which closes the helper in a finally block so a
 * failing {@code getIndexInstance} cannot leak an open helper (the original
 * skipped the close on failure).
 *
 * @throws HyracksDataException if any helper cannot be created, opened, or closed
 */
private void readIndex() throws HyracksDataException {
    primaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    primaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        IIndexDataflowHelperFactory factory = new IndexDataflowHelperFactory(nc.getStorageManager(),
                primaryIndexInfos[i].getFileSplitProvider());
        primaryIndexDataflowHelpers[i] = factory.create(testCtxs[i].getJobletContext().getServiceContext(), i);
        primaryIndexes[i] = readIndexInstance(primaryIndexDataflowHelpers[i]);
    }
    secondaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    secondaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        IIndexDataflowHelperFactory factory = new IndexDataflowHelperFactory(nc.getStorageManager(),
                secondaryIndexInfo[i].getFileSplitProvider());
        secondaryIndexDataflowHelpers[i] = factory.create(testCtxs[i].getJobletContext().getServiceContext(), i);
        secondaryIndexes[i] = readIndexInstance(secondaryIndexDataflowHelpers[i]);
    }
}

// Opens the helper, reads its TestLsmBtree instance, and always closes the helper.
private TestLsmBtree readIndexInstance(IIndexDataflowHelper helper) throws HyracksDataException {
    helper.open();
    try {
        return (TestLsmBtree) helper.getIndexInstance();
    } finally {
        helper.close();
    }
}
/**
 * Test fixture: creates the primary index for the test partition and opens it
 * once to capture the TestLsmBtree instance for direct test access.
 */
@Before
public void createIndex() throws Exception {
    PrimaryIndexInfo primaryIndexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory helperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, PARTITION, false);
    indexDataflowHelper = helperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    // Open just long enough to grab the index instance, then release the handle.
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
}
// NOTE(review): fragment — the enclosing method's signature is outside this chunk.
// Open the primary helper just long enough to capture the TestLsmBtree instance.
primaryIndexDataflowHelper = iHelperFactory.create(taskCtx.getJobletContext().getServiceContext(), 0);
primaryIndexDataflowHelper.open();
primaryLsmBtree = (TestLsmBtree) primaryIndexDataflowHelper.getIndexInstance();
primaryIndexDataflowHelper.close();
// NOTE(review): fragment — enclosing method signature and the rest of the try
// body are outside this chunk.
appender = new FrameTupleAppender(writeBuffer);
indexHelper.open();
lsmIndex = (AbstractLSMIndex) indexHelper.getIndexInstance();
try {
    // Primary-index path with a shared object present — presumably a
    // cross-operator coordination handle; TODO confirm from the full method.
    if (isPrimary && ctx.getSharedObject() != null) {
// NOTE(review): fragment — enclosing method signature and the remainder of the
// body are outside this chunk.
accessor = new FrameTupleAccessor(inputRecDesc);
indexHelper.open();
index = indexHelper.getIndexInstance();
// When missing values are retained, the field count is presumably used to build
// a missing/null tuple of the full width — TODO confirm from the full method.
if (retainMissing) {
    int fieldCount = getFieldCount();
/**
 * Test fixture: creates the primary index, opens it once to capture the
 * TestLsmBtree instance and the on-disk resource path, then begins an
 * entity-level transaction and builds the insert pipeline.
 */
@Before
public void createIndex() throws Exception {
    PrimaryIndexInfo primaryIndexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory helperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, PARTITION, false);
    indexDataflowHelper = helperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    // Open just long enough to grab the index instance for the test.
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
    txnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(ctx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    insertOp = StorageTestUtils.getInsertPipeline(nc, ctx);
    indexPath = indexDataflowHelper.getResource().getPath();
}
// Test fixture: creates the primary index, captures the TestLsmBtree instance,
// then starts two entity-level transactions — one driving the insert pipeline
// and a second reserved for the abort scenario each test sets up itself.
@Before
public void createIndex() throws Exception {
    PrimaryIndexInfo primaryIndexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory iHelperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, PARTITION, false);
    indexDataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    // Open just long enough to grab the index instance for the test.
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
    txnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(ctx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    insertOp = StorageTestUtils.getInsertPipeline(nc, ctx, null);
    // Separate job/context/transaction for the abort path.
    JobId abortJobId = nc.newJobId();
    abortCtx = nc.createTestContext(abortJobId, PARTITION, false);
    abortTxnCtx = nc.getTransactionManager().beginTransaction(nc.getTxnJobId(abortCtx),
            new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL));
    // abortOp is initialized by each test separately
    tupleGenerator = StorageTestUtils.getTupleGenerator();
}