public TreeIndexStatsOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, IStorageManager storageManager)
        throws HyracksDataException {
    this.ctx = ctx;
    this.treeIndexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.storageManager = storageManager;
}
public TreeIndexDiskOrderScanOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, ISearchOperationCallbackFactory searchCallbackFactory)
        throws HyracksDataException {
    this.ctx = ctx;
    this.treeIndexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.searchCallbackFactory = searchCallbackFactory;
}
public IndexDropOperatorNodePushable(IIndexDataflowHelperFactory indexHelperFactory, Set<DropOption> options,
        IHyracksTaskContext ctx, int partition) throws HyracksDataException {
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.options = options;
}
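// Hedged sketch: the drop itself happens elsewhere in this operator's lifecycle (not part
// of the excerpt). Assuming IIndexDataflowHelper.destroy() performs the physical drop, a
// minimal initialize() might look like this; any retry/wait behavior driven by the
// `options` set is omitted.
@Override
public void initialize() throws HyracksDataException {
    indexHelper.destroy(); // drop the on-disk index for this partition
}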
public LSMIndexCompactOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory) throws HyracksDataException {
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
}
@Override
public void initialize() throws HyracksDataException {
    try {
        // perform operation on btrees
        for (int i = 0; i < treeIndexesDataflowHelperFactories.size(); i++) {
            IIndexDataflowHelper indexHelper = treeIndexesDataflowHelperFactories.get(i)
                    .create(ctx.getJobletContext().getServiceContext(), partition);
            performOpOnIndex(indexHelper, ctx);
        }
    } catch (Exception e) {
        throw HyracksDataException.create(e);
    }
}
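// Hedged sketch: performOpOnIndex is not shown in this excerpt. A plausible shape,
// assuming it follows the open/getIndexInstance/close lifecycle used by readIndex at the
// end of this excerpt (the method name and body here are illustrative, not the source):
private void performOpOnIndexSketch(IIndexDataflowHelper indexHelper, IHyracksTaskContext ctx)
        throws HyracksDataException {
    indexHelper.open(); // activate this partition's index
    try {
        IIndex index = indexHelper.getIndexInstance();
        // ... perform the per-index operation here ...
    } finally {
        indexHelper.close(); // release even if the operation fails
    }
}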
public IndexInsertUpdateDeleteOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, int[] fieldPermutation, RecordDescriptor inputRecDesc,
        IndexOperation op, IModificationOperationCallbackFactory modOpCallbackFactory,
        ITupleFilterFactory tupleFilterFactory) throws HyracksDataException {
    this.ctx = ctx;
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.modOpCallbackFactory = modOpCallbackFactory;
    this.tupleFilterFactory = tupleFilterFactory;
    this.inputRecDesc = inputRecDesc;
    this.op = op;
    this.tuple.setFieldPermutation(fieldPermutation);
}
public IndexBulkLoadOperatorNodePushable(IIndexDataflowHelperFactory indexDataflowHelperFactory,
        IHyracksTaskContext ctx, int partition, int[] fieldPermutation, float fillFactor, boolean verifyInput,
        long numElementsHint, boolean checkIfEmptyIndex, RecordDescriptor recDesc) throws HyracksDataException {
    this.ctx = ctx;
    this.indexHelper = indexDataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.fillFactor = fillFactor;
    this.verifyInput = verifyInput;
    this.numElementsHint = numElementsHint;
    this.checkIfEmptyIndex = checkIfEmptyIndex;
    this.recDesc = recDesc;
    tuple.setFieldPermutation(fieldPermutation);
}
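// Hedged sketch: the open() half of this pushable is not part of the excerpt. Assuming it
// mirrors the external-file loader further down (same 5-argument createBulkLoader), the
// helper created above would typically be wired like this. The `bulkLoader` field and the
// ILSMIndex cast are assumptions for illustration; downstream-writer handling is omitted.
@Override
public void open() throws HyracksDataException {
    indexHelper.open();
    ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance();
    bulkLoader = index.createBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
            new HashMap<>());
}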
public LSMSecondaryIndexBulkLoadNodePushable(IHyracksTaskContext ctx, int partition,
        RecordDescriptor inputRecDesc, IIndexDataflowHelperFactory primaryIndexHelperFactory,
        IIndexDataflowHelperFactory secondaryIndexHelperFactory, int[] fieldPermutation, int numTagFields,
        int numSecondaryKeys, int numPrimaryKeys, boolean hasBuddyBTree) throws HyracksDataException {
    super(ctx, partition, inputRecDesc, numTagFields, numSecondaryKeys, numPrimaryKeys, hasBuddyBTree);
    this.primaryIndexHelper =
            primaryIndexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.secondaryIndexHelper =
            secondaryIndexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.tuple = new PermutingFrameTupleReference(fieldPermutation);
    // Source tuple: all input fields after the leading tag fields.
    int[] sourcePermutation = new int[fieldPermutation.length - numTagFields];
    for (int i = 0; i < sourcePermutation.length; i++) {
        sourcePermutation[i] = i + numTagFields;
    }
    sourceTuple = new PermutingTupleReference(sourcePermutation);
    // Deleted-key tuple: the fields after both the tag fields and the secondary keys.
    int[] deletedKeyPermutation = new int[inputRecDesc.getFieldCount() - numTagFields - numSecondaryKeys];
    for (int i = 0; i < deletedKeyPermutation.length; i++) {
        deletedKeyPermutation[i] = i + numTagFields + numSecondaryKeys;
    }
    deletedKeyTuple = new PermutingTupleReference(deletedKeyPermutation);
}
        dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition),
        searchOpCallbackFactory, version);
return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
public LSMIndexBulkLoadOperatorNodePushable(IIndexDataflowHelperFactory indexDataflowHelperFactory,
        IIndexDataflowHelperFactory primaryIndexDataflowHelperFactory, IHyracksTaskContext ctx, int partition,
        int[] fieldPermutation, float fillFactor, boolean verifyInput, long numElementsHint,
        boolean checkIfEmptyIndex, RecordDescriptor recDesc, BulkLoadUsage usage, int datasetId)
        throws HyracksDataException {
    super(indexDataflowHelperFactory, ctx, partition, fieldPermutation, fillFactor, verifyInput, numElementsHint,
            checkIfEmptyIndex, recDesc);
    if (primaryIndexDataflowHelperFactory != null) {
        this.primaryIndexHelper =
                primaryIndexDataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    } else {
        this.primaryIndexHelper = null;
    }
    this.usage = usage;
    this.datasetId = datasetId;
    this.partition = partition;
    INcApplicationContext ncCtx =
            (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
    datasetManager = ncCtx.getDatasetLifecycleManager();
}
@Override
public void initialize() throws HyracksDataException {
    IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
    IIndexDataflowHelper indexHelper =
            dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    FileIndexTupleTranslator filesTupleTranslator = new FileIndexTupleTranslator();
    // Build the index
    indexBuilder.build();
    // Open the index
    indexHelper.open();
    try {
        ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance();
        Map<String, Object> parameters = new HashMap<>();
        parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID);
        // Create bulk loader
        IIndexBulkLoader bulkLoader =
                index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false, parameters);
        // Load files
        for (ExternalFile file : files) {
            bulkLoader.add(filesTupleTranslator.getTupleFromFile(file));
        }
        bulkLoader.end();
    } finally {
        indexHelper.close();
    }
}
        throws HyracksDataException {
    this.ctx = ctx;
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.retainInput = retainInput;
    this.retainMissing = retainMissing;
@Override
public void initialize() throws HyracksDataException {
    final IIndexDataflowHelper indexHelper =
            dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    FileIndexTupleTranslator filesTupleTranslator = new FileIndexTupleTranslator();
private void readIndex() throws HyracksDataException {
    // Open each primary index partition long enough to grab its in-memory instance.
    primaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    primaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        IIndexDataflowHelperFactory factory =
                new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfos[i].getFileSplitProvider());
        primaryIndexDataflowHelpers[i] = factory.create(testCtxs[i].getJobletContext().getServiceContext(), i);
        primaryIndexDataflowHelpers[i].open();
        primaryIndexes[i] = (TestLsmBtree) primaryIndexDataflowHelpers[i].getIndexInstance();
        primaryIndexDataflowHelpers[i].close();
    }
    // Same pattern for the secondary index partitions.
    secondaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
    secondaryIndexes = new TestLsmBtree[NUM_PARTITIONS];
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        IIndexDataflowHelperFactory factory =
                new IndexDataflowHelperFactory(nc.getStorageManager(), secondaryIndexInfo[i].getFileSplitProvider());
        secondaryIndexDataflowHelpers[i] = factory.create(testCtxs[i].getJobletContext().getServiceContext(), i);
        secondaryIndexDataflowHelpers[i].open();
        secondaryIndexes[i] = (TestLsmBtree) secondaryIndexDataflowHelpers[i].getIndexInstance();
        secondaryIndexDataflowHelpers[i].close();
    }
}
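// Design note (hedged): open() activates the partition's index and getIndexInstance()
// returns the live object; the immediate close() releases the helper's hold while,
// presumably, the lifecycle manager keeps the activated instance cached, so the saved
// TestLsmBtree handles above remain usable. A minimal re-check against a saved handle,
// using a hypothetical helper variable `helper` and a saved `index`, would be:
helper.open();
boolean sameInstance = (helper.getIndexInstance() == index); // expected: same cached instance
helper.close();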