@Override
public void setInputRecordDescriptor(int index, RecordDescriptor recordDescriptor) {
    pushRuntime.setInputRecordDescriptor(index, recordDescriptor);
}

@Override
public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
    pushRuntime.setOutputFrameWriter(index, writer, recordDesc);
}
public static IFrameWriter assemblePipeline(AlgebricksPipeline subplan, IFrameWriter writer, IHyracksTaskContext ctx)
        throws HyracksDataException {
    // should enforce protocol
    boolean enforce = ctx.getJobFlags().contains(JobFlag.ENFORCE_CONTRACT);
    // plug the operators
    IFrameWriter start = writer;
    IPushRuntimeFactory[] runtimeFactories = subplan.getRuntimeFactories();
    RecordDescriptor[] recordDescriptors = subplan.getRecordDescriptors();
    for (int i = runtimeFactories.length - 1; i >= 0; i--) {
        IPushRuntime newRuntime = runtimeFactories[i].createPushRuntime(ctx)[0];
        newRuntime = enforce ? EnforcePushRuntime.enforce(newRuntime) : newRuntime;
        start = enforce ? EnforceFrameWriter.enforce(start) : start;
        newRuntime.setOutputFrameWriter(0, start, recordDescriptors[i]);
        if (i > 0) {
            newRuntime.setInputRecordDescriptor(0, recordDescriptors[i - 1]);
        } else {
            // the nts has the same input and output rec. desc.
            newRuntime.setInputRecordDescriptor(0, recordDescriptors[0]);
        }
        start = newRuntime;
    }
    return start;
}
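// Hedged usage sketch (not from the original source): how a caller might drive
// a pipeline returned by assemblePipeline. The subplan, sink, and frame values
// are assumed to come from the surrounding job; only the standard IFrameWriter
// open/nextFrame/fail/close contract is relied on here. Note that the loop
// above wires factories back to front, so the sink passed in ends up
// downstream of every runtime it creates.
public static void runPipelineSketch(AlgebricksPipeline subplan, IFrameWriter sink, IHyracksTaskContext ctx,
        java.nio.ByteBuffer frame) throws HyracksDataException {
    IFrameWriter head = assemblePipeline(subplan, sink, ctx);
    head.open();
    try {
        // each frame pushed here flows through every runtime down to the sink
        head.nextFrame(frame);
    } catch (HyracksDataException e) {
        head.fail();
        throw e;
    } finally {
        head.close();
    }
}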
private void searchAndAssertCount(TestNodeController nc, IHyracksTaskContext ctx, Dataset dataset,
        StorageComponentProvider storageManager, int numOfRecords)
        throws HyracksDataException, AlgebricksException {
    nc.newJobId();
    TestTupleCounterFrameWriter countOp =
            StorageTestUtils.create(nc.getSearchOutputDesc(KEY_TYPES, RECORD_TYPE, META_TYPE),
                    Collections.emptyList(), Collections.emptyList(), false);
    IPushRuntime emptyTupleOp = nc.getFullScanPipeline(countOp, ctx, dataset, KEY_TYPES, RECORD_TYPE, META_TYPE,
            new NoMergePolicyFactory(), null, null, KEY_INDEXES, KEY_INDICATORS_LIST, storageManager);
    emptyTupleOp.open();
    emptyTupleOp.close();
    Assert.assertEquals(numOfRecords, countOp.getCount());
}
// fragment: choosing record descriptors inside the pipeline loop; the last
// runtime writes the pipeline's external output descriptor, inner runtimes
// chain through the per-stage descriptors
if (i == runtimeFactories.length - 1) {
    newRuntimes[j].setOutputFrameWriter(0, start, pipelineOutputRecordDescriptor);
} else {
    newRuntimes[j].setOutputFrameWriter(0, start, recordDescriptors[i]);
}
if (i > 0) {
    newRuntimes[j].setInputRecordDescriptor(0, recordDescriptors[i - 1]);
} else if (inputArity > 0) {
    // the first runtime reads the pipeline's external input
    newRuntimes[j].setInputRecordDescriptor(0, pipelineInputRecordDescriptor);
}
public static void searchAndAssertCount(TestNodeController nc, int partition, Dataset dataset,
        StorageComponentProvider storageManager, int numOfRecords)
        throws HyracksDataException, AlgebricksException {
    JobId jobId = nc.newJobId();
    IHyracksTaskContext ctx = nc.createTestContext(jobId, partition, false);
    TestTupleCounterFrameWriter countOp = create(nc.getSearchOutputDesc(KEY_TYPES, RECORD_TYPE, META_TYPE),
            Collections.emptyList(), Collections.emptyList(), false);
    IPushRuntime emptyTupleOp = nc.getFullScanPipeline(countOp, ctx, dataset, KEY_TYPES, RECORD_TYPE, META_TYPE,
            new NoMergePolicyFactory(), null, null, KEY_INDEXES, KEY_INDICATORS_LIST, storageManager);
    emptyTupleOp.open();
    emptyTupleOp.close();
    Assert.assertEquals(numOfRecords, countOp.getCount());
}
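// Hedged sketch (an assumption, not the project's TestTupleCounterFrameWriter):
// a minimal frame writer that counts incoming tuples, to make explicit what the
// countOp assertion above measures. FrameTupleAccessor is the standard Hyracks
// accessor for decoding the tuple count out of a frame.
public static class TupleCountingWriter implements IFrameWriter {
    private final FrameTupleAccessor accessor;
    private long count;

    public TupleCountingWriter(RecordDescriptor recordDescriptor) {
        this.accessor = new FrameTupleAccessor(recordDescriptor);
    }

    @Override
    public void open() {
        count = 0; // reset on every (re)open
    }

    @Override
    public void nextFrame(java.nio.ByteBuffer buffer) {
        accessor.reset(buffer);
        count += accessor.getTupleCount();
    }

    @Override
    public void fail() {
        // nothing to release; the failure propagates through the pipeline
    }

    @Override
    public void close() {
        // the count is final once the producer closes this writer
    }

    public long getCount() {
        return count;
    }
}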
// fragment: wiring the insert pipeline; spans elided in the source are marked "..."
/* ... */ .createPushRuntime(ctx)[0];
insertOp.setOutputFrameWriter(0, assignOp, primaryIndexInfo.rDesc);
assignOp.setInputRecordDescriptor(0, primaryIndexInfo.rDesc);
SecondaryIndexInfo secondaryIndexInfo = new SecondaryIndexInfo(primaryIndexInfo, secondaryIndex);
IIndexDataflowHelperFactory secondaryIndexHelperFactory = new IndexDataflowHelperFactory(/* ... */);
/* ... secondary insert operator constructed from secondaryIndexInfo.insertFieldsPermutations,
   secondaryIndexInfo.rDesc, op, false, secondaryIndexHelperFactory,
   secondaryModCallbackFactory, null, null ... */
assignOp.setOutputFrameWriter(0, secondaryInsertOp, secondaryIndexInfo.rDesc);
commitOp.setInputRecordDescriptor(0, secondaryIndexInfo.rDesc);
return Pair.of(insertOp, commitOp);
} else {
    // no secondary index: the insert operator feeds the commit operator directly
    /* ... */ .createPushRuntime(ctx)[0];
    insertOp.setOutputFrameWriter(0, commitOp, primaryIndexInfo.rDesc);
    commitOp.setInputRecordDescriptor(0, primaryIndexInfo.rDesc);
    return Pair.of(insertOp, commitOp);
}
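// Hedged helper sketch (illustrative, not in the original source): the wiring
// above repeats the same producer/consumer handshake, so the intent can be
// captured in one place. Both calls are the IPushRuntime API shown earlier.
private static void chain(IPushRuntime producer, IPushRuntime consumer, RecordDescriptor desc) {
    producer.setOutputFrameWriter(0, consumer, desc); // producer pushes frames to consumer
    consumer.setInputRecordDescriptor(0, desc); // consumer decodes them with the same descriptor
}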
// fragment: guard against a malformed pipeline, then wire its output runtime
if (outputPushRuntime == null) {
    throw new IllegalStateException("Invalid pipeline");
}
outputPushRuntime.setInputRecordDescriptor(0, pipelineLastRecordDescriptor);
outputWriter = outputPushRuntime;
outputRecordDescriptor = pipelineLastRecordDescriptor;
public IPushRuntime getFullScanPipeline(IFrameWriter countOp, IHyracksTaskContext ctx, Dataset dataset,
        IAType[] primaryKeyTypes, ARecordType recordType, ARecordType metaType,
        NoMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyProperties, int[] filterFields,
        int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators,
        StorageComponentProvider storageComponentProvider) throws HyracksDataException, AlgebricksException {
    IPushRuntime emptyTupleOp = new EmptyTupleSourceRuntimeFactory().createPushRuntime(ctx)[0];
    JobSpecification spec = new JobSpecification();
    PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
            mergePolicyFactory, mergePolicyProperties, filterFields, primaryKeyIndexes, primaryKeyIndicators);
    IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(
            storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    BTreeSearchOperatorDescriptor searchOpDesc = new BTreeSearchOperatorDescriptor(spec, primaryIndexInfo.rDesc,
            null, null, true, true, indexDataflowHelperFactory, false, false, null,
            NoOpOperationCallbackFactory.INSTANCE, filterFields, filterFields, false);
    BTreeSearchOperatorNodePushable searchOp = searchOpDesc.createPushRuntime(ctx,
            primaryIndexInfo.getSearchRecordDescriptorProvider(),
            ctx.getTaskAttemptId().getTaskId().getPartition(), 1);
    emptyTupleOp.setOutputFrameWriter(0, searchOp,
            primaryIndexInfo.getSearchRecordDescriptorProvider().getInputRecordDescriptor(null, 0));
    searchOp.setOutputFrameWriter(0, countOp, primaryIndexInfo.rDesc);
    return emptyTupleOp;
}
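// Hedged sketch (illustrative; the class and its placement are assumptions):
// a minimal pass-through push runtime that could sit between searchOp and
// countOp above, e.g. to tap frames for debugging. It forwards every frame
// unchanged, using only the setInputRecordDescriptor/setOutputFrameWriter
// signatures shown at the top of this section.
public static class PassThroughRuntime implements IPushRuntime {
    private IFrameWriter downstream;

    @Override
    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
        this.downstream = writer;
    }

    @Override
    public void setInputRecordDescriptor(int index, RecordDescriptor recordDescriptor) {
        // input and output descriptors coincide for a pass-through
    }

    @Override
    public void open() throws HyracksDataException {
        downstream.open();
    }

    @Override
    public void nextFrame(java.nio.ByteBuffer buffer) throws HyracksDataException {
        downstream.nextFrame(buffer); // forward unchanged
    }

    @Override
    public void fail() throws HyracksDataException {
        downstream.fail();
    }

    @Override
    public void close() throws HyracksDataException {
        downstream.close();
    }
}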