/**
 * Appends the single output tuple of the current pipeline to the shared tuple
 * builder. Since each pipeline only produces one tuple, this method is only
 * called by the close method of the pipelines.
 */
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    final IFrameTupleAccessor frameAccessor = tAccess[inputIdx];
    frameAccessor.reset(buffer);
    final int fieldCount = subplans[inputIdx].getOutputWidth();
    // The pipeline contract (see class comment above) yields exactly one
    // tuple per frame, so only tuple index 0 is ever read here.
    final int singleTupleIndex = 0;
    for (int field = 0; field < fieldCount; field++) {
        tb.addField(frameAccessor, singleTupleIndex, field);
    }
}
/**
 * Builds the aggregator output state: records the output descriptor of each
 * nested pipeline, sizes the tuple builder for the propagated fields plus
 * every aggregate field, and prepares one frame accessor per pipeline.
 */
public AggregatorOutput(AlgebricksPipeline[] subplans, int numPropagatedFields) {
    this.subplans = subplans;
    this.inputRecDesc = new RecordDescriptor[subplans.length];
    int aggFieldTotal = 0;
    for (int p = 0; p < subplans.length; p++) {
        RecordDescriptor[] pipelineDescs = subplans[p].getRecordDescriptors();
        // The last descriptor in a pipeline describes the records it emits.
        this.inputRecDesc[p] = pipelineDescs[pipelineDescs.length - 1];
        aggFieldTotal += subplans[p].getOutputWidth();
    }
    tb = new ArrayTupleBuilder(numPropagatedFields + aggFieldTotal);
    this.tAccess = new FrameTupleAccessor[inputRecDesc.length];
    for (int d = 0; d < inputRecDesc.length; d++) {
        tAccess[d] = new FrameTupleAccessor(inputRecDesc[d]);
    }
}
/**
 * Compiles the single nested plan of the subplan operator into a micro
 * runtime and wires it into the Hyracks job.
 *
 * Only one nested plan with one root is supported; anything else raises
 * {@link NotImplementedException}.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    SubplanOperator subplanOp = (SubplanOperator) op;
    if (subplanOp.getNestedPlans().size() != 1) {
        throw new NotImplementedException("Subplan currently works only for one nested plan with one root.");
    }
    List<List<AlgebricksPipeline>> compiledPlans = compileSubplansImpl(inputSchemas[0], subplanOp, opSchema, context);
    assert compiledPlans.size() == 1;
    List<AlgebricksPipeline> pipelines = compiledPlans.get(0);

    ILogicalOperator inputOp = op.getInputs().get(0).getValue();
    RecordDescriptor inputRecordDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(inputOp), inputSchemas[0], context);

    // One missing-writer per output field of the first pipeline.
    IMissingWriterFactory[] missingWriterFactories = new IMissingWriterFactory[pipelines.get(0).getOutputWidth()];
    for (int f = 0; f < missingWriterFactories.length; f++) {
        missingWriterFactories[f] = context.getMissingWriterFactory();
    }

    RecordDescriptor outputRecordDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    SubplanRuntimeFactory runtimeFactory =
            new SubplanRuntimeFactory(pipelines, missingWriterFactories, inputRecordDesc, outputRecordDesc, null);
    runtimeFactory.setSourceLocation(subplanOp.getSourceLocation());
    builder.contributeMicroOperator(subplanOp, runtimeFactory, outputRecordDesc);
    builder.contributeGraphEdge(inputOp, 0, op, 0);
}
public RunningAggregatorOutput(IHyracksTaskContext ctx, AlgebricksPipeline[] subplans, int numPropagatedFields, IFrameWriter outputWriter) throws HyracksDataException { this.subplans = subplans; this.outputWriter = outputWriter; // this.keyFieldIndexes = keyFieldIndexes; int totalAggFields = 0; this.inputRecDesc = new RecordDescriptor[subplans.length]; for (int i = 0; i < subplans.length; i++) { RecordDescriptor[] rd = subplans[i].getRecordDescriptors(); this.inputRecDesc[i] = rd[rd.length - 1]; totalAggFields += subplans[i].getOutputWidth(); } tb = new ArrayTupleBuilder(numPropagatedFields + totalAggFields); gbyTb = new ArrayTupleBuilder(numPropagatedFields); this.tAccess = new FrameTupleAccessor[inputRecDesc.length]; for (int i = 0; i < inputRecDesc.length; i++) { tAccess[i] = new FrameTupleAccessor(inputRecDesc[i]); } this.outputAppender = new FrameTupleAppender(new VSizeFrame(ctx)); }
/**
 * For every tuple in the incoming frame, rebuilds an output tuple by first
 * copying the cached group-by fields from {@code gbyTb} and then appending
 * each aggregate field produced by the current pipeline, flushing the
 * result to the downstream writer.
 */
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    final IFrameTupleAccessor frameAccessor = tAccess[inputIdx];
    frameAccessor.reset(buffer);
    final int fieldCount = subplans[inputIdx].getOutputWidth();
    final int tupleCount = frameAccessor.getTupleCount();
    for (int t = 0; t < tupleCount; t++) {
        tb.reset();
        // Propagated (group-by) fields come first in the output tuple.
        TupleUtils.addFields(gbyTb, tb);
        for (int f = 0; f < fieldCount; f++) {
            tb.addField(frameAccessor, t, f);
        }
        FrameUtils.appendToWriter(outputWriter, outputAppender, tb.getFieldEndOffsets(), tb.getByteArray(), 0,
                tb.getSize());
    }
}