/**
 * Creates the R-tree search runtime for one partition. Delegates frame handling to the
 * shared index-search superclass and, when key fields are supplied, builds the permuting
 * search-key tuple used to probe the R-tree per input tuple.
 */
public RTreeSearchOperatorNodePushable(IHyracksTaskContext ctx, int partition, RecordDescriptor inputRecDesc,
        int[] keyFields, int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes,
        IIndexDataflowHelperFactory indexHelperFactory, boolean retainInput, boolean retainMissing,
        IMissingWriterFactory missingWriterFactory, ISearchOperationCallbackFactory searchCallbackFactory,
        boolean appendIndexFilter, boolean appendOpCallbackProceedResult,
        byte[] searchCallbackProceedResultFalseValue, byte[] searchCallbackProceedResultTrueValue)
        throws HyracksDataException {
    // TODO: predicate & limit pushdown not enabled for RTree yet, hence the null tuple-filter
    // factory and the -1 (no limit) output limit passed to the superclass.
    super(ctx, inputRecDesc, partition, minFilterFieldIndexes, maxFilterFieldIndexes, indexHelperFactory,
            retainInput, retainMissing, missingWriterFactory, searchCallbackFactory, appendIndexFilter, null, -1,
            appendOpCallbackProceedResult, searchCallbackProceedResultFalseValue,
            searchCallbackProceedResultTrueValue);
    // Only build a search key when key fields were actually supplied.
    if (keyFields != null && keyFields.length > 0) {
        searchKey = new PermutingFrameTupleReference();
        searchKey.setFieldPermutation(keyFields);
    }
}
/**
 * Creates the secondary-index upsert runtime for one partition. Chains to the generic
 * insert/update/delete pushable with the UPSERT operation, then wires up the
 * previous-value tuple, the upsert-indicator inspector, and the primary-key-index flag.
 */
public LSMSecondaryUpsertOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, IModificationOperationCallbackFactory modCallbackFactory,
        ITupleFilterFactory tupleFilterFactory, int[] fieldPermutation, RecordDescriptor inputRecDesc,
        int upsertIndicatorFieldIndex, IBinaryBooleanInspectorFactory upsertIndicatorInspectorFactory,
        int[] prevValuePermutation) throws HyracksDataException {
    super(ctx, partition, indexHelperFactory, fieldPermutation, inputRecDesc, IndexOperation.UPSERT,
            modCallbackFactory, tupleFilterFactory);
    this.upsertIndicatorFieldIndex = upsertIndicatorFieldIndex;
    this.upsertIndicatorInspector = upsertIndicatorInspectorFactory.createBinaryBooleanInspector(ctx);
    this.numberOfFields = prevValuePermutation.length;
    this.prevValueTuple.setFieldPermutation(prevValuePermutation);
    // A primary key index only has primary keys, and thus these two permutations are the same.
    this.isPrimaryKeyIndex = Arrays.equals(fieldPermutation, prevValuePermutation);
}
/**
 * Creates the secondary-index bulk-load runtime for one partition. Obtains dataflow helpers
 * for the primary and secondary indexes and precomputes three permuting tuple views over the
 * input record: the full permuted tuple, the tuple without the leading tag fields, and the
 * tuple for deleted keys (fields after the tag and secondary-key fields).
 */
public LSMSecondaryIndexBulkLoadNodePushable(IHyracksTaskContext ctx, int partition, RecordDescriptor inputRecDesc,
        IIndexDataflowHelperFactory primaryIndexHelperFactory,
        IIndexDataflowHelperFactory secondaryIndexHelperFactory, int[] fieldPermutation, int numTagFields,
        int numSecondaryKeys, int numPrimaryKeys, boolean hasBuddyBTree) throws HyracksDataException {
    super(ctx, partition, inputRecDesc, numTagFields, numSecondaryKeys, numPrimaryKeys, hasBuddyBTree);
    this.primaryIndexHelper =
            primaryIndexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.secondaryIndexHelper =
            secondaryIndexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.tuple = new PermutingFrameTupleReference(fieldPermutation);
    // Source view: every permuted field past the leading tag fields.
    int[] sourceFields = new int[fieldPermutation.length - numTagFields];
    for (int f = 0; f < sourceFields.length; f++) {
        sourceFields[f] = numTagFields + f;
    }
    sourceTuple = new PermutingTupleReference(sourceFields);
    // Deleted-key view: input fields after both the tag fields and the secondary keys.
    int[] deletedKeyFields = new int[inputRecDesc.getFieldCount() - numTagFields - numSecondaryKeys];
    for (int f = 0; f < deletedKeyFields.length; f++) {
        deletedKeyFields[f] = numTagFields + numSecondaryKeys + f;
    }
    deletedKeyTuple = new PermutingTupleReference(deletedKeyFields);
}
/**
 * Creates the generic index modification runtime for one partition. Stores the task context,
 * operation, callbacks, and filter factory, obtains the index dataflow helper for this
 * partition, and configures the permuting tuple used to project each input frame tuple.
 */
public IndexInsertUpdateDeleteOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, int[] fieldPermutation, RecordDescriptor inputRecDesc,
        IndexOperation op, IModificationOperationCallbackFactory modOpCallbackFactory,
        ITupleFilterFactory tupleFilterFactory) throws HyracksDataException {
    this.ctx = ctx;
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.inputRecDesc = inputRecDesc;
    this.op = op;
    this.modOpCallbackFactory = modOpCallbackFactory;
    this.tupleFilterFactory = tupleFilterFactory;
    this.tuple.setFieldPermutation(fieldPermutation);
}
/**
 * Re-points each optional search key and filter key at the given tuple of the current
 * frame accessor. Keys that were not configured (null) are skipped.
 */
@Override
protected void resetSearchPredicate(int tupleIndex) {
    if (lowKey != null) {
        lowKey.reset(accessor, tupleIndex);
    }
    if (highKey != null) {
        highKey.reset(accessor, tupleIndex);
    }
    if (minFilterKey != null) {
        minFilterKey.reset(accessor, tupleIndex);
    }
    if (maxFilterKey != null) {
        maxFilterKey.reset(accessor, tupleIndex);
    }
}
/**
 * Creates the B-tree range-search runtime for one partition. Delegates frame handling to
 * the shared index-search superclass, records the range inclusiveness, and builds permuting
 * low/high key tuples only when the corresponding key fields were supplied.
 */
public BTreeSearchOperatorNodePushable(IHyracksTaskContext ctx, int partition, RecordDescriptor inputRecDesc,
        int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
        int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes, IIndexDataflowHelperFactory indexHelperFactory,
        boolean retainInput, boolean retainMissing, IMissingWriterFactory missingWriterFactory,
        ISearchOperationCallbackFactory searchCallbackFactory, boolean appendIndexFilter,
        ITupleFilterFactory tupleFilterFactory, long outputLimit, boolean appendOpCallbackProceedResult,
        byte[] searchCallbackProceedResultFalseValue, byte[] searchCallbackProceedResultTrueValue)
        throws HyracksDataException {
    super(ctx, inputRecDesc, partition, minFilterFieldIndexes, maxFilterFieldIndexes, indexHelperFactory,
            retainInput, retainMissing, missingWriterFactory, searchCallbackFactory, appendIndexFilter,
            tupleFilterFactory, outputLimit, appendOpCallbackProceedResult, searchCallbackProceedResultFalseValue,
            searchCallbackProceedResultTrueValue);
    this.lowKeyInclusive = lowKeyInclusive;
    this.highKeyInclusive = highKeyInclusive;
    if (lowKeyFields != null && lowKeyFields.length > 0) {
        lowKey = new PermutingFrameTupleReference();
        lowKey.setFieldPermutation(lowKeyFields);
    }
    if (highKeyFields != null && highKeyFields.length > 0) {
        highKey = new PermutingFrameTupleReference();
        highKey.setFieldPermutation(highKeyFields);
    }
}
/**
 * Creates the index bulk-load runtime for one partition. Obtains the index dataflow helper,
 * records the bulk-load parameters (fill factor, verification, element-count hint,
 * empty-index check), and configures the permuting tuple over the input record.
 */
public IndexBulkLoadOperatorNodePushable(IIndexDataflowHelperFactory indexDataflowHelperFactory,
        IHyracksTaskContext ctx, int partition, int[] fieldPermutation, float fillFactor, boolean verifyInput,
        long numElementsHint, boolean checkIfEmptyIndex, RecordDescriptor recDesc) throws HyracksDataException {
    this.ctx = ctx;
    this.indexHelper = indexDataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.recDesc = recDesc;
    this.fillFactor = fillFactor;
    this.verifyInput = verifyInput;
    this.numElementsHint = numElementsHint;
    this.checkIfEmptyIndex = checkIfEmptyIndex;
    tuple.setFieldPermutation(fieldPermutation);
}
/**
 * Points the key at the given tuple of the current frame accessor and rebuilds the search
 * predicate as the inclusive range [key, key] — i.e. an exact-match lookup on that key.
 */
private void resetSearchPredicate(int tupleIndex) {
    key.reset(accessor, tupleIndex);
    searchPred.reset(key, key, true, true, keySearchCmp, keySearchCmp);
}
// NOTE(review): fragment — the enclosing constructor's start and the closing braces are outside
// this view, so it is left byte-identical. It records the max-filter field indexes and, when
// min-filter fields are present, builds permuting references for both min and max filter keys.
// The guard tests only minFilterFieldIndexes; presumably maxFilterFieldIndexes is non-empty
// whenever the min ones are — TODO confirm against the enclosing constructor's callers.
this.maxFilterFieldIndexes = maxFilterFieldIndexes; if (minFilterFieldIndexes != null && minFilterFieldIndexes.length > 0) { minFilterKey = new PermutingFrameTupleReference(); minFilterKey.setFieldPermutation(minFilterFieldIndexes); maxFilterKey = new PermutingFrameTupleReference(); maxFilterKey.setFieldPermutation(maxFilterFieldIndexes);
/**
 * Rebuilds the inverted-index search predicate for the given tuple: points the frame tuple
 * at it, installs it as the query tuple with the configured query field index and full-text
 * flag, and re-points the optional min/max filter keys.
 */
@Override
protected void resetSearchPredicate(int tupleIndex) {
    frameTuple.reset(accessor, tupleIndex);
    InvertedIndexSearchPredicate predicate = (InvertedIndexSearchPredicate) searchPred;
    predicate.setQueryTuple(frameTuple);
    predicate.setQueryFieldIndex(queryFieldIndex);
    predicate.setIsFullTextSearchQuery(isFullTextSearchQuery);
    if (minFilterKey != null) {
        minFilterKey.reset(accessor, tupleIndex);
    }
    if (maxFilterKey != null) {
        maxFilterKey.reset(accessor, tupleIndex);
    }
}
// NOTE(review): fragment — `searchKeyPermutations[i] = fieldPermutation[i];` appears without its
// enclosing loop, and the constructor's signature, field declarations, and closing brace are
// outside this view, so it is left byte-identical. It chains to the insert/update/delete base
// with UPSERT and a null tuple filter, builds the primary-key search tuple from the leading
// fieldPermutation entries, and sets hasMeta — presumably "record carries a meta field beyond
// the primary keys (and the optional filter field)" — TODO confirm in the enclosing class.
super(ctx, partition, indexHelperFactory, fieldPermutation, inputRecDesc, IndexOperation.UPSERT, modCallbackFactory, null); this.key = new PermutingFrameTupleReference(); this.searchCallbackFactory = searchCallbackFactory; this.numOfPrimaryKeys = numOfPrimaryKeys; searchKeyPermutations[i] = fieldPermutation[i]; key.setFieldPermutation(searchKeyPermutations); hasMeta = (fieldPermutation.length > numOfPrimaryKeys + 1) && (filterFieldIndex < 0 || (filterFieldIndex >= 0 && (fieldPermutation.length > numOfPrimaryKeys + 2)));
/**
 * Feeds every tuple of the incoming frame to the bulk loader, in frame order.
 */
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);
    int count = accessor.getTupleCount();
    for (int t = 0; t < count; t++) {
        tuple.reset(accessor, t);
        bulkLoader.add(tuple);
    }
}
/**
 * Feeds every tuple of the incoming frame to the bulk loader, then forwards the frame
 * unchanged to the downstream writer (this operator is a pass-through).
 */
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);
    int count = accessor.getTupleCount();
    for (int t = 0; t < count; t++) {
        tuple.reset(accessor, t);
        bulkLoader.add(tuple);
    }
    FrameUtils.flushFrame(buffer, writer);
}
@Override public void nextFrame(ByteBuffer buffer) throws HyracksDataException { accessor.reset(buffer); int tupleCount = accessor.getTupleCount(); for (int i = 0; i < tupleCount; i++) { try { // if both previous value and new value are null, then we skip tuple.reset(accessor, i); int componentPos = getComponentPos(tuple); if (componentPos != currentComponentPos) { loadNewComponent(componentPos); currentComponentPos = componentPos; } if (isAntiMatterTuple(tuple)) { addAntiMatterTuple(tuple); } else { addMatterTuple(tuple); } } catch (Exception e) { throw HyracksDataException.create(e); } } }
// NOTE(review): fragment — the enclosing loop header and the remaining switch cases/braces are
// outside this view, so it is left byte-identical. It points the permuting tuple at tuple i of
// the current frame accessor and begins dispatching on the configured index operation.
tuple.reset(accessor, i); switch (op) { case INSERT:
// NOTE(review): fragment — points the permuting tuple at tuple i of the current frame accessor;
// the enclosing loop and method are outside this view.
tuple.reset(accessor, i);
// NOTE(review): fragment — points the permuting tuple at tuple i of the current frame accessor;
// the enclosing loop and method are outside this view.
tuple.reset(accessor, i);