/**
 * Propagates the source location of {@code src} onto {@code dest} so that
 * errors reported against the destination operator point at the original query text.
 */
private void copySourceLocation(ILogicalOperator src, AbstractLogicalOperator dest) {
    dest.setSourceLocation(src.getSourceLocation());
}
/**
 * Resolves the Hyracks operator descriptor that was generated for the given logical operator.
 * <p>
 * The descriptor is looked up first in the direct {@code hyracksOps} mapping; if the operator
 * was instead folded into a meta operator, the lookup goes through
 * {@code algebraicOpBelongingToMetaAsterixOp} and then {@code metaAsterixOps}.
 *
 * @param op the logical operator whose descriptor is requested
 * @return the corresponding operator descriptor
 * @throws AlgebricksException if no descriptor was generated for {@code op}
 */
private IOperatorDescriptor findOpDescForAlgebraicOp(ILogicalOperator op) throws AlgebricksException {
    IOperatorDescriptor descriptor = hyracksOps.get(op);
    if (descriptor == null) {
        Integer metaOpKey = algebraicOpBelongingToMetaAsterixOp.get(op);
        if (metaOpKey == null) {
            // Neither mapping knows this operator: descriptor generation failed upstream.
            throw AlgebricksException.create(DESCRIPTOR_GENERATION_ERROR, op.getSourceLocation(),
                    op.getOperatorTag());
        }
        descriptor = metaAsterixOps.get(metaOpKey);
    }
    return descriptor;
}
/**
 * Validates the plan exactly once: rejects plans with an invalid RETURNING expression.
 * Never rewrites anything, so it always returns {@code false}.
 * <p>
 * Note: {@code checked} is set only after a successful validation, so a plan that
 * throws here will be re-validated if the rule is invoked again.
 *
 * @throws CompilationException if {@link InsertUpsertCheckUtil#check} flags the operator
 */
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    if (!checked) {
        ILogicalOperator op = opRef.getValue();
        if (InsertUpsertCheckUtil.check(op)) {
            throw new CompilationException(ErrorCode.COMPILATION_INVALID_RETURNING_EXPRESSION,
                    op.getSourceLocation());
        }
        checked = true;
    }
    return false;
}
}
/**
 * Returns the variables produced by the idx-th additional data source of an
 * index-nested-loop outer join, or {@code null} when there is no such data source.
 * <p>
 * Fixes vs. original: local {@code PKVars} renamed to {@code pkVars} to match the
 * lowerCamelCase convention used by the sibling {@code getDataSourceVariables()},
 * and the refs are obtained through the getter consistently (the original mixed
 * the getter with direct field access) and hoisted out of the repeated calls.
 *
 * @param idx index into the additional data-source lists
 * @return the data-source variables, an empty list for a collection scan,
 *         or {@code null} if {@code idx} is out of range
 * @throws AlgebricksException if the data source at {@code idx} has no usable type
 */
public List<LogicalVariable> getIxJoinOuterAdditionalDataSourceVariables(int idx) throws AlgebricksException {
    List<Mutable<ILogicalOperator>> additionalRefs = getIxJoinOuterAdditionalDataSourceRefs();
    if (additionalRefs != null && additionalRefs.size() > idx) {
        switch (getIxJoinOuterAdditionalDataSourceTypes().get(idx)) {
            case DATASOURCE_SCAN:
            case EXTERNAL_SCAN:
            case PRIMARY_INDEX_LOOKUP:
                AbstractScanOperator scanOp = (AbstractScanOperator) additionalRefs.get(idx).getValue();
                return scanOp.getVariables();
            case INDEXONLY_PLAN_SECONDARY_INDEX_LOOKUP:
                // This data-source doesn't have record variables; expose only the primary keys.
                List<LogicalVariable> pkVars = new ArrayList<>();
                getPrimaryKeyVars(additionalRefs.get(idx), pkVars);
                return pkVars;
            case COLLECTION_SCAN:
                return new ArrayList<>();
            case NO_DATASOURCE:
            default:
                throw new CompilationException(ErrorCode.SUBTREE_HAS_NO_ADDTIONAL_DATA_SOURCE,
                        root.getSourceLocation());
        }
    } else {
        return null;
    }
}
/**
 * Generates a Hyracks {@link SinkOperatorDescriptor} for this operator and wires one
 * graph edge per logical input (each input feeds a distinct target port of the sink).
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    int inputArity = op.getInputs().size();
    SinkOperatorDescriptor sinkDesc = new SinkOperatorDescriptor(builder.getJobSpec(), inputArity);
    sinkDesc.setSourceLocation(op.getSourceLocation());
    contributeOpDesc(builder, (AbstractLogicalOperator) op, sinkDesc);
    int targetPort = 0;
    for (Mutable<ILogicalOperator> inputRef : op.getInputs()) {
        builder.contributeGraphEdge(inputRef.getValue(), 0, op, targetPort++);
    }
}
/**
 * Contributes an empty-tuple-source micro runtime. The record descriptor is empty
 * because this source produces tuples with no fields.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    RecordDescriptor emptyRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
    EmptyTupleSourceRuntimeFactory runtimeFactory = new EmptyTupleSourceRuntimeFactory();
    runtimeFactory.setSourceLocation(op.getSourceLocation());
    builder.contributeMicroOperator(op, runtimeFactory, emptyRecDesc);
}
/**
 * Finalizes a SELECT plan. For a subquery the result variable is listified via an
 * aggregate; otherwise the plan is topped with a projection onto the result variable.
 *
 * @param isSubquery  whether this SELECT is nested inside another query
 * @param returnOpRef reference to the operator producing the result
 * @param resVar      the variable carrying the result
 * @return the plan root paired with its output variable
 */
private Pair<ILogicalOperator, LogicalVariable> produceSelectPlan(boolean isSubquery,
        Mutable<ILogicalOperator> returnOpRef, LogicalVariable resVar) {
    if (!isSubquery) {
        ProjectOperator project = new ProjectOperator(resVar);
        project.setSourceLocation(returnOpRef.getValue().getSourceLocation());
        project.getInputs().add(returnOpRef);
        return new Pair<>(project, resVar);
    }
    return aggListifyForSubquery(resVar, returnOpRef, false);
}
public List<LogicalVariable> getDataSourceVariables() throws AlgebricksException { switch (getDataSourceType()) { case DATASOURCE_SCAN: case EXTERNAL_SCAN: case PRIMARY_INDEX_LOOKUP: AbstractScanOperator scanOp = (AbstractScanOperator) getDataSourceRef().getValue(); return scanOp.getVariables(); case INDEXONLY_PLAN_SECONDARY_INDEX_LOOKUP: // This data-source doesn't have record variables. List<LogicalVariable> pkVars = new ArrayList<>(); getPrimaryKeyVars(dataSourceRef, pkVars); return pkVars; case COLLECTION_SCAN: return new ArrayList<>(); case NO_DATASOURCE: default: throw new CompilationException(ErrorCode.SUBTREE_HAS_NO_DATA_SOURCE, root.getSourceLocation()); } }
/**
 * Contributes a micro union-all runtime sized to the operator's input arity,
 * then delegates edge wiring to the superclass.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    MicroUnionAllRuntimeFactory unionRuntime = new MicroUnionAllRuntimeFactory(op.getInputs().size());
    unionRuntime.setSourceLocation(op.getSourceLocation());
    RecordDescriptor outputRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    builder.contributeMicroOperator(op, unionRuntime, outputRecDesc);
    super.contributeRuntimeOperator(builder, context, op, opSchema, inputSchemas, outerPlanSchema);
}
}
/**
 * Contributes a nested-tuple-source micro runtime. The outer plan's variables are
 * merged into the propagated schema first, since this source re-emits outer tuples
 * into the nested plan.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    propagatedSchema.addAllVariables(outerPlanSchema);
    RecordDescriptor outputRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
    NestedTupleSourceRuntimeFactory runtimeFactory = new NestedTupleSourceRuntimeFactory();
    runtimeFactory.setSourceLocation(op.getSourceLocation());
    builder.contributeMicroOperator(op, runtimeFactory, outputRecDesc);
}
/**
 * Contributes a Hyracks union-all operator descriptor sized to the operator's input
 * arity, then delegates edge wiring to the superclass.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    RecordDescriptor outputRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    UnionAllOperatorDescriptor unionDesc =
            new UnionAllOperatorDescriptor(builder.getJobSpec(), op.getInputs().size(), outputRecDesc);
    unionDesc.setSourceLocation(op.getSourceLocation());
    contributeOpDesc(builder, (AbstractLogicalOperator) op, unionDesc);
    super.contributeRuntimeOperator(builder, context, op, opSchema, inputSchemas, outerPlanSchema);
}
}
/**
 * Contributes a materializing operator descriptor (single- or multi-activity,
 * per {@code isSingleActivity}) and wires the single input edge.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    RecordDescriptor outputRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
    MaterializingOperatorDescriptor materializeDesc =
            new MaterializingOperatorDescriptor(builder.getJobSpec(), outputRecDesc, isSingleActivity);
    materializeDesc.setSourceLocation(op.getSourceLocation());
    contributeOpDesc(builder, (AbstractLogicalOperator) op, materializeDesc);
    builder.contributeGraphEdge(op.getInputs().get(0).getValue(), 0, op, 0);
}
@Override public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException { MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider(); RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context); int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]); //get dataset splits IPushRuntimeFactory runtime = dataset.getCommitRuntimeFactory(metadataProvider, primaryKeyFields, isSink); runtime.setSourceLocation(op.getSourceLocation()); builder.contributeMicroOperator(op, runtime, recDesc); ILogicalOperator src = op.getInputs().get(0).getValue(); builder.contributeGraphEdge(src, 0, op, 0); }
/**
 * Deep-copies a logical operator via {@link OperatorDeepCopyVisitor} and carries over
 * the metadata the visitor does not clone: source location, execution mode,
 * annotations, schema, and the assigned physical operator.
 *
 * @param op the operator to copy
 * @return the deep copy with metadata transferred
 * @throws AlgebricksException if the visitor fails
 */
public static ILogicalOperator deepCopy(ILogicalOperator op) throws AlgebricksException {
    AbstractLogicalOperator original = (AbstractLogicalOperator) op;
    AbstractLogicalOperator copy =
            (AbstractLogicalOperator) op.accept(new OperatorDeepCopyVisitor(), null);
    copy.setSourceLocation(op.getSourceLocation());
    copy.setExecutionMode(op.getExecutionMode());
    copy.getAnnotations().putAll(op.getAnnotations());
    copy.setSchema(op.getSchema());
    copy.setPhysicalOperator(original.getPhysicalOperator());
    return copy;
}
/**
 * Implements distinct via a pre-clustered group-by with no aggregate functions:
 * the key columns (plus any functionally-determined "decor" columns) form the group
 * key, while the comparators cover only the distinct key columns.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    int[] keysAndDecs = getKeysAndDecs(inputSchemas[0]);
    IBinaryComparatorFactory[] comparatorFactories = JobGenHelper
            .variablesToAscBinaryComparatorFactories(columnList, context.getTypeEnvironment(op), context);
    // No aggregates: the group-by is used purely for its de-duplication effect.
    AbstractAggregatorDescriptorFactory aggregatorFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(
            new IAggregateEvaluatorFactory[] {}, keysAndDecs);
    aggregatorFactory.setSourceLocation(op.getSourceLocation());
    RecordDescriptor outputRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    /* make fd columns part of the key but the comparator only compares the distinct key columns */
    PreclusteredGroupOperatorDescriptor groupDesc = new PreclusteredGroupOperatorDescriptor(
            builder.getJobSpec(), keysAndDecs, comparatorFactories, aggregatorFactory, outputRecDesc);
    groupDesc.setSourceLocation(op.getSourceLocation());
    contributeOpDesc(builder, (AbstractLogicalOperator) op, groupDesc);
    builder.contributeGraphEdge(op.getInputs().get(0).getValue(), 0, op, 0);
}
}
/**
 * Micro-operator variant of pre-clustered distinct: a group-by with no aggregate
 * functions whose key includes functionally-determined "decor" columns while the
 * comparators cover only the distinct key columns.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    int[] keysAndDecs = getKeysAndDecs(inputSchemas[0]);
    IBinaryComparatorFactory[] comparatorFactories = JobGenHelper
            .variablesToAscBinaryComparatorFactories(columnList, context.getTypeEnvironment(op), context);
    // No aggregates: the group-by is used purely for its de-duplication effect.
    AbstractAggregatorDescriptorFactory aggregatorFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(
            new IAggregateEvaluatorFactory[] {}, keysAndDecs);
    aggregatorFactory.setSourceLocation(op.getSourceLocation());
    RecordDescriptor outputRecDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    RecordDescriptor inputRecDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    /* make fd columns part of the key but the comparator only compares the distinct key columns */
    MicroPreClusteredGroupRuntimeFactory groupRuntime = new MicroPreClusteredGroupRuntimeFactory(keysAndDecs,
            comparatorFactories, aggregatorFactory, inputRecDesc, outputRecDesc, null);
    groupRuntime.setSourceLocation(op.getSourceLocation());
    builder.contributeMicroOperator(op, groupRuntime, outputRecDesc);
    builder.contributeGraphEdge(op.getInputs().get(0).getValue(), 0, op, 0);
}
}
/**
 * Wraps a subquery result variable in a LISTIFY aggregate so the subquery yields a
 * collection, optionally topping the plan with a projection onto the listified variable.
 *
 * @param var      the subquery's result variable
 * @param opRef    reference to the subquery's current plan root
 * @param bProject whether to add a projection onto the listified variable
 * @return the new plan root paired with the listified output variable
 */
protected Pair<ILogicalOperator, LogicalVariable> aggListifyForSubquery(LogicalVariable var,
        Mutable<ILogicalOperator> opRef, boolean bProject) {
    SourceLocation sourceLoc = opRef.getValue().getSourceLocation();
    AggregateFunctionCallExpression listifyExpr =
            BuiltinFunctions.makeAggregateFunctionExpression(BuiltinFunctions.LISTIFY, new ArrayList<>());
    listifyExpr.getArguments().add(new MutableObject<>(new VariableReferenceExpression(var)));
    listifyExpr.setSourceLocation(sourceLoc);
    LogicalVariable varListified = context.newSubplanOutputVar();
    AggregateOperator aggOp = new AggregateOperator(mkSingletonArrayList(varListified),
            mkSingletonArrayList(new MutableObject<>(listifyExpr)));
    aggOp.getInputs().add(opRef);
    aggOp.setSourceLocation(sourceLoc);
    if (!bProject) {
        return new Pair<>(aggOp, varListified);
    }
    ProjectOperator projectOp = new ProjectOperator(varListified);
    projectOp.getInputs().add(new MutableObject<>(aggOp));
    projectOp.setSourceLocation(sourceLoc);
    return new Pair<>(projectOp, varListified);
}
/**
 * Builds an M-to-N range-partitioning connector over the partitioning columns.
 * The range map is either looked up at runtime from the job context
 * ({@code rangeMapIsComputedAtRunTime}) or baked in statically.
 *
 * @return the connector paired with a {@code null} target constraint
 */
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int numFields = partitioningFields.size();
    int[] sortFields = new int[numFields];
    IBinaryComparatorFactory[] comparators = new IBinaryComparatorFactory[numFields];
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    IBinaryComparatorFactoryProvider comparatorProvider = context.getBinaryComparatorFactoryProvider();
    for (int i = 0; i < numFields; i++) {
        OrderColumn oc = partitioningFields.get(i);
        LogicalVariable column = oc.getColumn();
        sortFields[i] = opSchema.findVariable(column);
        Object columnType = typeEnv.getVarType(column);
        comparators[i] = comparatorProvider.getBinaryComparatorFactory(columnType,
                oc.getOrder() == OrderKind.ASC);
    }
    FieldRangePartitionComputerFactory partitionerFactory = rangeMapIsComputedAtRunTime
            ? new DynamicFieldRangePartitionComputerFactory(sortFields, comparators, rangeMapKeyInContext,
                    op.getSourceLocation())
            : new StaticFieldRangePartitionComputerFactory(sortFields, comparators, rangeMap);
    return new Pair<>(new MToNPartitioningConnectorDescriptor(spec, partitionerFactory), null);
}
/**
 * Inserts an ORDER operator above {@code topOp} enforcing the given local order
 * properties. The sort is LOCAL; a micro op gets an in-memory stable sort, otherwise
 * an external stable sort bounded by the configured frame limit is used.
 *
 * @param oList     the order properties to enforce, in significance order
 * @param topOp     the operator the enforcer is placed above
 * @param isMicroOp whether the enforcer must run as a micro operator
 * @return a mutable reference to the new ORDER operator
 * @throws AlgebricksException if type-environment computation fails
 */
private Mutable<ILogicalOperator> enforceOrderProperties(List<LocalOrderProperty> oList,
        Mutable<ILogicalOperator> topOp, boolean isMicroOp, IOptimizationContext context)
        throws AlgebricksException {
    SourceLocation sourceLoc = topOp.getValue().getSourceLocation();
    // Flatten every order column of every property into (direction, column-ref) pairs.
    List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprs = new LinkedList<>();
    for (LocalOrderProperty orderProperty : oList) {
        for (OrderColumn oc : orderProperty.getOrderColumns()) {
            VariableReferenceExpression columnRef = new VariableReferenceExpression(oc.getColumn());
            columnRef.setSourceLocation(sourceLoc);
            IOrder direction =
                    (oc.getOrder() == OrderKind.ASC) ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;
            orderExprs.add(new Pair<>(direction, new MutableObject<ILogicalExpression>(columnRef)));
        }
    }
    OrderOperator orderOp = new OrderOperator(orderExprs);
    orderOp.setSourceLocation(sourceLoc);
    orderOp.setExecutionMode(AbstractLogicalOperator.ExecutionMode.LOCAL);
    orderOp.setPhysicalOperator(isMicroOp ? new InMemoryStableSortPOperator()
            : new StableSortPOperator(physicalOptimizationConfig.getMaxFramesExternalSort()));
    orderOp.getInputs().add(topOp);
    context.computeAndSetTypeEnvironmentForOperator(orderOp);
    if (AlgebricksConfig.ALGEBRICKS_LOGGER.isTraceEnabled()) {
        AlgebricksConfig.ALGEBRICKS_LOGGER
                .trace(">>>> Added sort enforcer " + orderOp.getPhysicalOperator() + ".\n");
    }
    return new MutableObject<ILogicalOperator>(orderOp);
}
new DistinctOperator(mkSingletonArrayList(new MutableObject<>(returnVarRef))); distinctOperator.getInputs().add(new MutableObject<>(returnOperator)); distinctOperator.setSourceLocation(returnOperator.getSourceLocation()); return new Pair<>(distinctOperator, returnVar); } else {