/**
 * Builds a scalar evaluator factory for the given constant expression by
 * delegating to the data format configured on the metadata provider.
 *
 * @param expr    the constant expression whose value is to be evaluated
 * @param context job-generation context carrying the metadata provider
 * @return an evaluator factory that always produces the constant's value
 * @throws AlgebricksException if the data format cannot build the factory
 */
private IScalarEvaluatorFactory createConstantEvaluatorFactory(ConstantExpression expr, JobGenContext context)
        throws AlgebricksException {
    MetadataProvider mdProvider = (MetadataProvider) context.getMetadataProvider();
    return mdProvider.getDataFormat().getConstantEvalFactory(expr.getValue());
}
@Override public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException { MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider(); RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context); int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]); //get dataset splits IPushRuntimeFactory runtime = dataset.getCommitRuntimeFactory(metadataProvider, primaryKeyFields, isSink); runtime.setSourceLocation(op.getSourceLocation()); builder.contributeMicroOperator(op, runtime, recDesc); ILogicalOperator src = op.getInputs().get(0).getValue(); builder.contributeGraphEdge(src, 0, op, 0); }
/**
 * Translates a WRITE_RESULT logical operator into its Hyracks operator
 * descriptor, registers the descriptor and its partition constraint with the
 * job builder, and connects it to its upstream input.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    WriteResultOperator writeOp = (WriteResultOperator) op;
    IMetadataProvider metadataProvider = context.getMetadataProvider();
    JobSpecification jobSpec = builder.getJobSpec();
    // The metadata provider supplies both the operator descriptor and where it may run.
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> opAndConstraint =
            metadataProvider.getWriteResultRuntime(dataSource, propagatedSchema, keys, payload,
                    additionalFilteringKeys, context, jobSpec);
    IOperatorDescriptor descriptor = opAndConstraint.first;
    descriptor.setSourceLocation(writeOp.getSourceLocation());
    builder.contributeHyracksOperator(writeOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, opAndConstraint.second);
    ILogicalOperator inputOp = writeOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(inputOp, 0, writeOp, 0);
}
// NOTE(review): truncated fragment — the method signature above and the rest of the body are
// outside this view; leaving code untouched. Casts the operator to a DataSourceScanOperator and
// builds a tuple-filter factory from the scan's select condition. 'tupleFilterFactory' is
// presumably a field of the enclosing class — confirm in the full file.
throws AlgebricksException { DataSourceScanOperator scan = (DataSourceScanOperator) op; IMetadataProvider mp = context.getMetadataProvider(); IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op); List<LogicalVariable> vars = scan.getVariables(); tupleFilterFactory = context.getMetadataProvider().createTupleFilterFactory( new IOperatorSchema[] { opSchema }, typeEnv, scan.getSelectCondition().getValue(), context);
/**
 * Contributes the bulk-load secondary-index insert runtime for an
 * INDEX_INSERT_DELETE_UPSERT operator. Only the bulk-load INSERT case is
 * supported here (enforced by the assertions below).
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    IndexInsertDeleteUpsertOperator indexOp = (IndexInsertDeleteUpsertOperator) op;
    // This physical operator is only instantiated for bulk-load inserts.
    assert indexOp.getOperation() == Kind.INSERT;
    assert indexOp.isBulkload();
    IMetadataProvider metadataProvider = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification jobSpec = builder.getJobSpec();
    // Describe the records arriving from the single upstream input.
    RecordDescriptor inputRecDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> opAndConstraint =
            metadataProvider.getIndexInsertRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv,
                    primaryKeys, secondaryKeys, additionalFilteringKeys, filterExpr, inputRecDesc, context, jobSpec,
                    true);
    IOperatorDescriptor descriptor = opAndConstraint.first;
    descriptor.setSourceLocation(indexOp.getSourceLocation());
    builder.contributeHyracksOperator(indexOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, opAndConstraint.second);
    ILogicalOperator inputOp = indexOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(inputOp, 0, indexOp, 0);
}
/**
 * Contributes the bulk-load primary insert runtime for an
 * INSERT_DELETE_UPSERT operator. Only the bulk-load INSERT case is supported
 * here (enforced by the assertions below).
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) op;
    // This physical operator is only instantiated for bulk-load inserts.
    assert insertOp.getOperation() == Kind.INSERT;
    assert insertOp.isBulkload();
    IMetadataProvider metadataProvider = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification jobSpec = builder.getJobSpec();
    // Describe the records arriving from the single upstream input.
    RecordDescriptor inputRecDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> opAndConstraint =
            metadataProvider.getInsertRuntime(dataSource, propagatedSchema, typeEnv, primaryKeys, payload,
                    additionalFilteringKeys, additionalNonFilterVars, inputRecDesc, context, jobSpec, true);
    IOperatorDescriptor descriptor = opAndConstraint.first;
    descriptor.setSourceLocation(insertOp.getSourceLocation());
    builder.contributeHyracksOperator(insertOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, opAndConstraint.second);
    ILogicalOperator inputOp = insertOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(inputOp, 0, insertOp, 0);
}
/**
 * Contributes the external-data lookup runtime for an UNNEST_MAP operator
 * whose function is {@code BuiltinFunctions.EXTERNAL_LOOKUP}. Any other
 * function call is ignored (the method returns without contributing).
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    UnnestMapOperator unnestMap = (UnnestMapOperator) op;
    ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
    // The unnest expression must be a function call; anything else is a planner bug.
    if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
        throw new IllegalStateException();
    }
    AbstractFunctionCallExpression callExpr = (AbstractFunctionCallExpression) unnestExpr;
    FunctionIdentifier fid = callExpr.getFunctionIdentifier();
    // Only EXTERNAL_LOOKUP is handled by this physical operator.
    if (!fid.equals(BuiltinFunctions.EXTERNAL_LOOKUP)) {
        return;
    }
    int[] ridFieldIndexes = getKeyIndexes(ridVarList, inputSchemas);
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    MetadataProvider mdProvider = (MetadataProvider) context.getMetadataProvider();
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> externalLookup =
            mdProvider.buildExternalDataLookupRuntime(builder.getJobSpec(), dataset, ridFieldIndexes, retainInput,
                    typeEnv, opSchema, context, mdProvider, retainMissing);
    IOperatorDescriptor descriptor = externalLookup.first;
    descriptor.setSourceLocation(unnestMap.getSourceLocation());
    builder.contributeHyracksOperator(unnestMap, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, externalLookup.second);
    ILogicalOperator inputExchange = unnestMap.getInputs().get(0).getValue();
    builder.contributeGraphEdge(inputExchange, 0, unnestMap, 0);
}
/**
 * Contributes the tokenizer runtime for a TOKENIZE operator. Tokenize is only
 * valid during bulk-load inserts; any other operation is rejected.
 *
 * @throws AlgebricksException if the operator is not a bulk-load insert
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    TokenizeOperator tokenizeOp = (TokenizeOperator) op;
    // Tokenization is only meaningful while bulk-loading an inverted index.
    if (tokenizeOp.getOperation() != Kind.INSERT || !tokenizeOp.isBulkload()) {
        throw new AlgebricksException("Tokenize Operator only works when bulk-loading data.");
    }
    IMetadataProvider metadataProvider = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification jobSpec = builder.getJobSpec();
    // Describe the records arriving from the single upstream input.
    RecordDescriptor inputRecDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> opAndConstraint =
            metadataProvider.getTokenizerRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv,
                    primaryKeys, secondaryKeys, null, inputRecDesc, context, jobSpec, true);
    IOperatorDescriptor descriptor = opAndConstraint.first;
    descriptor.setSourceLocation(tokenizeOp.getSourceLocation());
    builder.contributeHyracksOperator(tokenizeOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, opAndConstraint.second);
    ILogicalOperator inputOp = tokenizeOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(inputOp, 0, tokenizeOp, 0);
}
// NOTE(review): truncated fragment — signature and remainder of the body are outside this view;
// leaving code untouched. Casts the operator to DistributeResultOperator and fetches the
// metadata provider; presumably the rest builds the result-distribution runtime — confirm in full file.
throws AlgebricksException { DistributeResultOperator resultOp = (DistributeResultOperator) op; IMetadataProvider mp = context.getMetadataProvider();
// NOTE(review): truncated fragment — signature and remainder of the body are outside this view;
// leaving code untouched. Prologue of an INSERT_DELETE_UPSERT jobgen method: obtains the
// metadata provider, the operator's type environment, and the job spec.
throws AlgebricksException { InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op; IMetadataProvider mp = context.getMetadataProvider(); IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op); JobSpecification spec = builder.getJobSpec();
// NOTE(review): truncated fragment — the enclosing statement/method is outside this view;
// leaving code untouched. Builds printer factories for the given columns of the input schema,
// then fetches the (wildcard-typed) metadata provider.
JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op), context, columns); IMetadataProvider<?, ?> mp = context.getMetadataProvider();
// NOTE(review): truncated fragment — signature and remainder of the body are outside this view;
// leaving code untouched. Prologue of an INDEX_INSERT_DELETE_UPSERT jobgen method: casts the
// operator and obtains the metadata provider.
throws AlgebricksException { IndexInsertDeleteUpsertOperator insertDeleteUpsertOp = (IndexInsertDeleteUpsertOperator) op; IMetadataProvider mp = context.getMetadataProvider();
// NOTE(review): truncated fragment — the surrounding call and loop are outside this view;
// leaving code untouched. Records the intermediate (partial-aggregation) type computed for an
// aggregate function; 'intermediateTypes' and 'partialAggregationTypeComputer' are presumably
// locals/fields of the enclosing method — confirm in full file.
aggOpInputSchemas, context); intermediateTypes .add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider()));
// NOTE(review): truncated fragment, near-duplicate of the preceding one — surrounding call is
// outside this view; leaving code untouched. Adds the partial-aggregation intermediate type for
// an aggregate function to 'intermediateTypes'.
context); intermediateTypes .add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider()));
// NOTE(review): truncated fragment — the enclosing method is outside this view; leaving code
// untouched. Computes max-filter field indexes for an unnest-map, then resolves the target
// Dataset from jobgen parameters via the metadata provider and fetches the type environment.
int[] maxFilterFieldIndexes = getKeyIndexes(unnestMap.getMaxFilterVars(), inputSchemas); MetadataProvider mp = (MetadataProvider) context.getMetadataProvider(); Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName()); IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
// NOTE(review): truncated fragment — the enclosing method is outside this view; leaving code
// untouched. Populates jobgen parameters from the unnest function's arguments, resolves the
// target Dataset, and maps the key variables to schema field indexes.
jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments()); MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider(); Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName()); int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
// NOTE(review): truncated fragment — the enclosing method is outside this view; leaving code
// untouched. Reads whether the unnest-map propagates the index filter, resolves the target
// Dataset from jobgen parameters, and fetches the operator's type environment.
boolean propagateFilter = unnestMap.propagateIndexFilter(); MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider(); Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName()); IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);