@Override public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException { SelectOperator select = (SelectOperator) op; IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider(); IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory( select.getCondition().getValue(), context.getTypeEnvironment(op), inputSchemas, context); StreamSelectRuntimeFactory runtime = new StreamSelectRuntimeFactory(cond, null, context.getBinaryBooleanInspectorFactory(), select.getRetainMissing(), inputSchemas[0].findVariable(select.getMissingPlaceholderVariable()), context.getMissingWriterFactory()); runtime.setSourceLocation(select.getSourceLocation()); // contribute one Asterix framewriter RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context); builder.contributeMicroOperator(select, runtime, recDesc); // and contribute one edge from its child ILogicalOperator src = select.getInputs().get(0).getValue(); builder.contributeGraphEdge(src, 0, select, 0); }
@Override public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException { MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider(); RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context); int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]); //get dataset splits IPushRuntimeFactory runtime = dataset.getCommitRuntimeFactory(metadataProvider, primaryKeyFields, isSink); runtime.setSourceLocation(op.getSourceLocation()); builder.contributeMicroOperator(op, runtime, recDesc); ILogicalOperator src = op.getInputs().get(0).getValue(); builder.contributeGraphEdge(src, 0, op, 0); }
/**
 * Builds a Hyracks {@link RecordDescriptor} for the given operator schema:
 * one serializer/deserializer and one set of type traits per schema variable,
 * in schema order.
 *
 * @param env      type environment used to resolve each variable's type
 * @param opSchema the operator schema whose variables define the record layout
 * @param context  supplies the serde and type-trait providers
 * @return a record descriptor covering all variables of {@code opSchema}
 * @throws AlgebricksException if a provider fails
 */
@SuppressWarnings("rawtypes")
public static RecordDescriptor mkRecordDescriptor(IVariableTypeEnvironment env, IOperatorSchema opSchema,
        JobGenContext context) throws AlgebricksException {
    int size = opSchema.getSize();
    ISerializerDeserializer[] serdes = new ISerializerDeserializer[size];
    ITypeTraits[] traits = new ITypeTraits[size];
    ISerializerDeserializerProvider serdeProvider = context.getSerializerDeserializerProvider();
    ITypeTraitProvider traitProvider = context.getTypeTraitProvider();
    int idx = 0;
    for (LogicalVariable variable : opSchema) {
        Object varType = env.getVarType(variable);
        if (varType == null) {
            // Warn but continue: the providers decide how to handle an unknown type.
            LOGGER.warn("No type for variable " + variable);
        }
        serdes[idx] = serdeProvider.getSerializerDeserializer(varType);
        traits[idx] = traitProvider.getTypeTrait(varType);
        idx++;
    }
    return new RecordDescriptor(serdes, traits);
}
@Override public AsterixTupleFilterFactory createTupleFilterFactory(IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, ILogicalExpression filterExpr, JobGenContext context) throws AlgebricksException { // No filtering condition. if (filterExpr == null) { return null; } IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider(); IScalarEvaluatorFactory filterEvalFactory = expressionRuntimeProvider.createEvaluatorFactory(filterExpr, typeEnv, inputSchemas, context); return new AsterixTupleFilterFactory(filterEvalFactory, context.getBinaryBooleanInspectorFactory()); }
/**
 * Creates an M-to-N hash-partitioning connector keyed on {@code hashFields}.
 * The target constraint is left unspecified ({@code null}).
 */
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int numKeys = hashFields.size();
    int[] keyIndexes = new int[numKeys];
    IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[numKeys];
    IBinaryHashFunctionFactoryProvider hashProvider = context.getBinaryHashFunctionFactoryProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    int pos = 0;
    for (LogicalVariable hashVar : hashFields) {
        // Resolve each hash variable to its field index and type-specific hash function.
        keyIndexes[pos] = opSchema.findVariable(hashVar);
        hashFactories[pos] = hashProvider.getBinaryHashFunctionFactory(typeEnv.getVarType(hashVar));
        pos++;
    }
    ITuplePartitionComputerFactory partitioner = new FieldHashPartitionComputerFactory(keyIndexes, hashFactories);
    IConnectorDescriptor conn = new MToNPartitioningConnectorDescriptor(spec, partitioner);
    return new Pair<>(conn, null);
}
IPartialAggregationTypeComputer partialAggregationTypeComputer = context.getPartialAggregationTypeComputer(); List<Object> intermediateTypes = new ArrayList<Object>(); int n = aggOp.getExpressions().size(); ISerializedAggregateEvaluatorFactory[] aff = new ISerializedAggregateEvaluatorFactory[n]; int i = 0; IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider(); ILogicalOperator aggOpInput = aggOp.getInputs().get(0).getValue(); IOperatorSchema aggOpInputSchema = context.getSchema(aggOpInput); IOperatorSchema[] aggOpInputSchemas = new IOperatorSchema[] { aggOpInputSchema }; IVariableTypeEnvironment aggOpInputEnv = context.getTypeEnvironment(aggOpInput); IVariableTypeEnvironment outputEnv = context.getTypeEnvironment(op); for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) { AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) exprRef.getValue(); aggOpInputSchemas, context); intermediateTypes .add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider())); JobGenHelper.variablesToAscBinaryComparatorFactories(gbyCols, aggOpInputEnv, context); RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context); IBinaryHashFunctionFamily[] hashFunctionFactories = JobGenHelper.variablesToBinaryHashFunctionFamilies(gbyCols, aggOpInputEnv, context); int memoryBudgetInBytes = context.getFrameSize() * frameLimit; int groupByColumnsCount = gby.getGroupByList().size() + numFds; int hashTableSize = ExternalGroupOperatorDescriptor.calculateGroupByTableCardinality(memoryBudgetInBytes, groupByColumnsCount, context.getFrameSize());
AggregateOperator aggOp = (AggregateOperator) r0.getValue(); IPartialAggregationTypeComputer partialAggregationTypeComputer = context.getPartialAggregationTypeComputer(); List<Object> intermediateTypes = new ArrayList<Object>(); int n = aggOp.getExpressions().size(); IAggregateEvaluatorFactory[] aff = new IAggregateEvaluatorFactory[n]; int i = 0; IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider(); IVariableTypeEnvironment aggOpInputEnv = context.getTypeEnvironment(aggOp.getInputs().get(0).getValue()); IVariableTypeEnvironment outputEnv = context.getTypeEnvironment(op); for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) { AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) exprRef.getValue(); context); intermediateTypes .add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider())); IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider(); i = 0; for (LogicalVariable v : gbyCols) { JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context); JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), localInputSchemas[0], context); INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider(); if (nkcfProvider == null) { normalizedKeyFactory = null;
int outCol = opSchema.findVariable(unnest.getVariable()); ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue(); IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider(); boolean exit = false; if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) { context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas, context); int[] projectionList = JobGenHelper.projectAllVariables(opSchema); UnnestRuntimeFactory unnestRuntime = new UnnestRuntimeFactory(outCol, unnestingFactory, projectionList, unnest.getPositionWriter(), leftOuter, context.getMissingWriterFactory()); unnestRuntime.setSourceLocation(unnest.getSourceLocation()); RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context); builder.contributeMicroOperator(unnest, unnestRuntime, recDesc); ILogicalOperator src = unnest.getInputs().get(0).getValue();
@Override public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException { AggregateOperator aggOp = (AggregateOperator) op; List<LogicalVariable> variables = aggOp.getVariables(); List<Mutable<ILogicalExpression>> expressions = aggOp.getExpressions(); int[] outColumns = new int[variables.size()]; for (int i = 0; i < outColumns.length; i++) { outColumns[i] = opSchema.findVariable(variables.get(i)); } IAggregateEvaluatorFactory[] aggFactories = new IAggregateEvaluatorFactory[expressions.size()]; IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider(); for (int i = 0; i < aggFactories.length; i++) { AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) expressions.get(i).getValue(); aggFactories[i] = expressionRuntimeProvider.createAggregateFunctionFactory(aggFun, context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas, context); } AggregateRuntimeFactory runtime = new AggregateRuntimeFactory(aggFactories); runtime.setSourceLocation(aggOp.getSourceLocation()); // contribute one Asterix framewriter RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context); builder.contributeMicroOperator(aggOp, runtime, recDesc); // and contribute one edge from its child ILogicalOperator src = aggOp.getInputs().get(0).getValue(); builder.contributeGraphEdge(src, 0, aggOp, 0); }
@Override public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException { LimitOperator limit = (LimitOperator) op; IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider(); IVariableTypeEnvironment env = context.getTypeEnvironment(op); IScalarEvaluatorFactory maxObjectsFact = expressionRuntimeProvider .createEvaluatorFactory(limit.getMaxObjects().getValue(), env, inputSchemas, context); ILogicalExpression offsetExpr = limit.getOffset().getValue(); IScalarEvaluatorFactory offsetFact = (offsetExpr == null) ? null : expressionRuntimeProvider.createEvaluatorFactory(offsetExpr, env, inputSchemas, context); RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context); StreamLimitRuntimeFactory runtime = new StreamLimitRuntimeFactory(maxObjectsFact, offsetFact, null, context.getBinaryIntegerInspectorFactory()); runtime.setSourceLocation(limit.getSourceLocation()); builder.contributeMicroOperator(limit, runtime, recDesc); // and contribute one edge from its child ILogicalOperator src = limit.getInputs().get(0).getValue(); builder.contributeGraphEdge(src, 0, limit, 0); }
int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]); int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]); IVariableTypeEnvironment env = context.getTypeEnvironment(op); IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper.variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context); IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length]; int i = 0; IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider(); for (LogicalVariable v : keysLeftBranch) { Object t = env.getVarType(v); context.getPredicateEvaluatorFactoryProvider(); IPredicateEvaluatorFactory predEvaluatorFactory = (predEvaluatorFactoryProvider == null ? null : predEvaluatorFactoryProvider.getPredicateEvaluatorFactory(keysLeft, keysRight)); JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context); IOperatorDescriptorRegistry spec = builder.getJobSpec(); IOperatorDescriptor opDesc; IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1].getSize()]; for (int j = 0; j < nonMatchWriterFactories.length; j++) { nonMatchWriterFactories[j] = context.getMissingWriterFactory();
IVariableTypeEnvironment opTypeEnv = context.getTypeEnvironment(op); IBinaryComparatorFactory[] partitionComparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(partitionColumns, opTypeEnv, context); JobGenHelper.variablesToBinaryComparatorFactories(orderColumns, opTypeEnv, context); IVariableTypeEnvironment inputTypeEnv = context.getTypeEnvironment(op.getInputs().get(0).getValue()); IExpressionRuntimeProvider exprRuntimeProvider = context.getExpressionRuntimeProvider(); IBinaryComparatorFactoryProvider binaryComparatorFactoryProvider = context.getBinaryComparatorFactoryProvider(); frameEndExprEvals, frameExcludeExprEvalsAndComparators.first, winOp.getFrameExcludeNegationStartIdx(), frameExcludeExprEvalsAndComparators.second, frameOffsetExprEval, context.getBinaryIntegerInspectorFactory(), winOp.getFrameMaxObjects(), projectionColumnsExcludingSubplans, runningAggOutColumns, runningAggFactories, aggregatorOutputSchemaSize, nestedAggFactory);
/**
 * Creates an M-to-N merging hash-partitioning connector over the sort
 * columns: tuples are hash-partitioned on the sort fields and merged in sort
 * order at the (single) target, hence {@link TargetConstraint#ONE}.
 *
 * Improvement over the previous version: the binary-comparator and
 * hash-function factory providers are loop-invariant and are now fetched once
 * before the loop instead of on every iteration.
 */
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int n = sortColumns.length;
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    IBinaryHashFunctionFactory[] hashFuns = new IBinaryHashFunctionFactory[n];
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    // Hoisted out of the loop: both providers are loop-invariant.
    IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
    IBinaryHashFunctionFactoryProvider bhffp = context.getBinaryHashFunctionFactoryProvider();
    for (int i = 0; i < n; i++) {
        sortFields[i] = opSchema.findVariable(sortColumns[i].getColumn());
        Object type = env.getVarType(sortColumns[i].getColumn());
        boolean ascending = sortColumns[i].getOrder() == OrderKind.ASC;
        comps[i] = bcfp.getBinaryComparatorFactory(type, ascending);
        hashFuns[i] = bhffp.getBinaryHashFunctionFactory(type);
        // Only the leading sort key can use a normalized-key accelerator.
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, ascending);
        }
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(sortFields, hashFuns);
    IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comps, nkcf);
    return new Pair<>(conn, TargetConstraint.ONE);
}
/**
 * Generates the micro-runtime for a SUBPLAN operator. Only a single nested
 * plan (with one root) is currently supported; anything else throws
 * {@link NotImplementedException}.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    SubplanOperator subplanOp = (SubplanOperator) op;
    if (subplanOp.getNestedPlans().size() != 1) {
        throw new NotImplementedException("Subplan currently works only for one nested plan with one root.");
    }
    List<List<AlgebricksPipeline>> compiled = compileSubplansImpl(inputSchemas[0], subplanOp, opSchema, context);
    assert compiled.size() == 1;
    List<AlgebricksPipeline> pipelines = compiled.get(0);
    RecordDescriptor inputRecordDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    // One missing-writer factory per output column of the first pipeline.
    IMissingWriterFactory[] missingWriters = new IMissingWriterFactory[pipelines.get(0).getOutputWidth()];
    for (int i = 0; i < missingWriters.length; i++) {
        missingWriters[i] = context.getMissingWriterFactory();
    }
    RecordDescriptor outputRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    SubplanRuntimeFactory runtime =
            new SubplanRuntimeFactory(pipelines, missingWriters, inputRecordDesc, outputRecDesc, null);
    runtime.setSourceLocation(subplanOp.getSourceLocation());
    builder.contributeMicroOperator(subplanOp, runtime, outputRecDesc);
    ILogicalOperator child = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(child, 0, op, 0);
}
/**
 * Contributes a micro union-all runtime sized to the operator's input count,
 * then delegates to the superclass to contribute the graph edges.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    RecordDescriptor outRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    MicroUnionAllRuntimeFactory unionRuntime = new MicroUnionAllRuntimeFactory(op.getInputs().size());
    unionRuntime.setSourceLocation(op.getSourceLocation());
    builder.contributeMicroOperator(op, unionRuntime, outRecDesc);
    // Edge contribution (one edge per input branch) is handled by the superclass.
    super.contributeRuntimeOperator(builder, context, op, opSchema, inputSchemas, outerPlanSchema);
}
}
/**
 * Creates an M-to-N merging range-partitioning connector: tuples are routed
 * by a static field-range partitioner built from {@code rangeMap} over the
 * partitioning columns and merged in order at the receivers. No target
 * constraint is imposed ({@code null}).
 *
 * Improvement over the previous version: the binary-comparator factory
 * provider is loop-invariant and is now fetched once before the loop instead
 * of on every iteration.
 */
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int n = partitioningFields.size();
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    // Hoisted out of the loop: the provider is loop-invariant.
    IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
    int i = 0;
    for (OrderColumn oc : partitioningFields) {
        LogicalVariable var = oc.getColumn();
        sortFields[i] = opSchema.findVariable(var);
        Object type = env.getVarType(var);
        OrderKind order = oc.getOrder();
        // Only the leading column can use a normalized-key accelerator.
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, order == OrderKind.ASC);
        }
        comps[i] = bcfp.getBinaryComparatorFactory(type, order == OrderKind.ASC);
        i++;
    }
    ITuplePartitionComputerFactory tpcf = new StaticFieldRangePartitionComputerFactory(sortFields, comps, rangeMap);
    IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comps, nkcf);
    return new Pair<>(conn, null);
}
context.getMissingWriterFactory());
IScalarEvaluatorFactory fact = jobGenCtx.getExpressionRuntimeProvider().createEvaluatorFactory(expr, _emptyTypeEnv, _emptySchemas, jobGenCtx); jobGenCtx.getSerializerDeserializerProvider().getSerializerDeserializer(returnType); bbis.setByteBuffer(ByteBuffer.wrap(p.getByteArray(), p.getStartOffset(), p.getLength()), 0); IAObject o = (IAObject) serde.deserialize(dis);
int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]); int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]); IVariableTypeEnvironment env = context.getTypeEnvironment(op); IBinaryHashFunctionFamily[] hashFunFamilies = JobGenHelper.variablesToBinaryHashFunctionFamilies(keysLeftBranch, env, context); IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length]; int i = 0; IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider(); for (LogicalVariable v : keysLeftBranch) { Object t = env.getVarType(v); context.getPredicateEvaluatorFactoryProvider(); IPredicateEvaluatorFactory predEvaluatorFactory = predEvaluatorFactoryProvider == null ? null : predEvaluatorFactoryProvider.getPredicateEvaluatorFactory(keysLeft, keysRight); JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context); IOperatorDescriptorRegistry spec = builder.getJobSpec(); IOperatorDescriptor opDesc;
/**
 * Builds a scalar evaluator factory producing the given constant value,
 * delegating to the metadata provider's data format.
 */
private IScalarEvaluatorFactory createConstantEvaluatorFactory(ConstantExpression expr, JobGenContext context)
        throws AlgebricksException {
    MetadataProvider mdProvider = (MetadataProvider) context.getMetadataProvider();
    return mdProvider.getDataFormat().getConstantEvalFactory(expr.getValue());
}