/**
 * Populates {@code pkVars} with the primary-key variables produced by the given data scan.
 * <p>
 * The list is cleared first; it remains empty when the scanned dataset is not an internal
 * dataset (only {@link InternalDatasetDetails} exposes primary keys here).
 *
 * @param dataScan the data-source scan whose output variables are inspected
 * @param pkVars   out-parameter; receives the primary-key variables, in key order
 */
private void fillPKVars(DataSourceScanOperator dataScan, List<LogicalVariable> pkVars) {
    // Fix: the original called pkVars.clear() twice; once is sufficient.
    pkVars.clear();
    DatasetDataSource datasetDataSource = (DatasetDataSource) dataScan.getDataSource();
    if (datasetDataSource.getDataset().getDatasetDetails() instanceof InternalDatasetDetails) {
        // By convention the first N scan output variables correspond to the N primary-key fields.
        int numPKs = datasetDataSource.getDataset().getPrimaryKeys().size();
        for (int i = 0; i < numPKs; i++) {
            pkVars.add(dataScan.getVariables().get(i));
        }
    }
}
DatasetDataSource prevAqlDataSource = (DatasetDataSource) dataScans.get(i - 1).getDataSource(); DatasetDataSource currAqlDataSource = (DatasetDataSource) dataScans.get(i).getDataSource(); if (!prevAqlDataSource.getDataset().equals(currAqlDataSource.getDataset())) { return -1;
/**
 * Builds the logical operator pipeline for a DELETE statement: the delete operator feeding
 * into a commit delegate, rooted on top of the supplied {@code assign} operator.
 *
 * @param targetDatasource               the dataset being deleted from
 * @param varRef                         reference to the record variable being deleted
 * @param varRefsForLoading              key expressions used by the delete operator
 * @param additionalFilteringExpressions optional filter-field expressions (may be null)
 * @param assign                         the upstream operator producing the input tuples
 * @param stmt                           the compiled DML statement (source of location info)
 * @return the root (commit delegate) operator of the delete pipeline
 * @throws AlgebricksException if the target dataset has meta records (unsupported)
 */
private ILogicalOperator translateDelete(DatasetDataSource targetDatasource, Mutable<ILogicalExpression> varRef,
        List<Mutable<ILogicalExpression>> varRefsForLoading,
        List<Mutable<ILogicalExpression>> additionalFilteringExpressions, ILogicalOperator assign,
        ICompiledDmlStatement stmt) throws AlgebricksException {
    SourceLocation sourceLoc = stmt.getSourceLocation();
    // Datasets with meta records cannot be deleted from; reject early.
    if (targetDatasource.getDataset().hasMetaPart()) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                targetDatasource.getDataset().getDatasetName()
                        + ": delete from dataset is not supported on Datasets with Meta records");
    }
    // Delete operator consuming the assign's output.
    InsertDeleteUpsertOperator deleteOperator = new InsertDeleteUpsertOperator(targetDatasource, varRef,
            varRefsForLoading, InsertDeleteUpsertOperator.Kind.DELETE, false);
    deleteOperator.setAdditionalFilteringExpressions(additionalFilteringExpressions);
    deleteOperator.getInputs().add(new MutableObject<>(assign));
    deleteOperator.setSourceLocation(sourceLoc);
    // Commit delegate caps the pipeline; 'true' marks it as a sink commit.
    DelegateOperator commitDelegate = new DelegateOperator(new CommitOperator(true));
    commitDelegate.getInputs().add(new MutableObject<>(deleteOperator));
    commitDelegate.setSourceLocation(sourceLoc);
    return commitDelegate;
}
/**
 * Looks up a secondary index by name on the dataset backing the given data source.
 *
 * @param indexId      the name of the index to resolve
 * @param dataSourceId identifier of the data source whose dataset is searched
 * @return a {@link DataSourceIndex} wrapper for the index, or {@code null} if no such
 *         index exists on the dataset
 * @throws AlgebricksException if the data source or index metadata cannot be resolved
 */
@Override
public IDataSourceIndex<String, DataSourceId> findDataSourceIndex(String indexId, DataSourceId dataSourceId)
        throws AlgebricksException {
    DataSource source = findDataSource(dataSourceId);
    Dataset dataset = ((DatasetDataSource) source).getDataset();
    Index secondaryIndex = getIndex(dataset.getDataverseName(), dataset.getDatasetName(), indexId);
    if (secondaryIndex == null) {
        return null;
    }
    return new DataSourceIndex(secondaryIndex, dataset.getDataverseName(), dataset.getDatasetName(), this);
}
private ILogicalOperator translateInsert(DatasetDataSource targetDatasource, Mutable<ILogicalExpression> varRef, List<Mutable<ILogicalExpression>> varRefsForLoading, List<Mutable<ILogicalExpression>> additionalFilteringExpressions, ILogicalOperator assign, ICompiledDmlStatement stmt) throws AlgebricksException { SourceLocation sourceLoc = stmt.getSourceLocation(); if (targetDatasource.getDataset().hasMetaPart()) { throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, targetDatasource.getDataset().getDatasetName() + ": insert into dataset is not supported on Datasets with Meta records"); } // Adds the insert operator. InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false); insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions); insertOp.getInputs().add(new MutableObject<>(assign)); insertOp.setSourceLocation(sourceLoc); // Adds the commit operator. CompiledInsertStatement compiledInsert = (CompiledInsertStatement) stmt; Expression returnExpression = compiledInsert.getReturnExpression(); DelegateOperator rootOperator = new DelegateOperator(new CommitOperator(returnExpression == null)); rootOperator.getInputs().add(new MutableObject<>(insertOp)); rootOperator.setSourceLocation(sourceLoc); // Compiles the return expression. return processReturningExpression(rootOperator, insertOp, compiledInsert); }
DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp; DataSource ds = (DataSource) dataSourceScanOp.getDataSource(); if (dataset.getDatasetName().compareTo(((DatasetDataSource) ds).getDataset().getDatasetName()) == 0) { List<LogicalVariable> minFilterVars = new ArrayList<>(); List<LogicalVariable> maxFilterVars = new ArrayList<>();
/**
 * Rewrite rule: when an INSERT/DELETE/UPSERT operator reads from the same dataset it writes
 * to, interpose a blocking {@link MaterializeOperator} between the operator and its input so
 * the scan completes before any modification is applied (avoids reading our own writes).
 *
 * @param opRef   reference to the operator under consideration
 * @param context optimization context used to recompute type environments
 * @return {@code true} if the plan was changed (materialize inserted), {@code false} otherwise
 * @throws AlgebricksException on type-environment computation failure
 */
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
        return false;
    }
    InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) op;
    String targetDatasetName = ((DatasetDataSource) insertOp.getDataSource()).getDataset().getDatasetName();
    // Guard clause: nothing to do unless the operator scans the dataset it modifies.
    if (!checkIfInsertAndScanDatasetsSame(op, targetDatasetName)) {
        return false;
    }
    // Build a blocking materialize and splice it between the modify operator and its input.
    MaterializeOperator materializeOperator = new MaterializeOperator();
    materializeOperator.setSourceLocation(op.getSourceLocation());
    materializeOperator.setPhysicalOperator(new MaterializePOperator(true));
    ILogicalOperator originalInput = insertOp.getInputs().get(0).getValue();
    materializeOperator.getInputs().add(new MutableObject<ILogicalOperator>(originalInput));
    context.computeAndSetTypeEnvironmentForOperator(materializeOperator);
    insertOp.getInputs().clear();
    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(materializeOperator));
    context.computeAndSetTypeEnvironmentForOperator(insertOp);
    return true;
}
DataSource ds = (DataSource) dataSourceScanOp.getDataSource(); if ((ds.getDatasourceType() == Type.INTERNAL_DATASET || ds.getDatasourceType() == Type.EXTERNAL_DATASET) && ((DatasetDataSource) ds).getDataset().getDatasetName().compareTo(insertDatasetName) == 0) { return true;
ICompiledDmlStatement stmt) throws AlgebricksException { SourceLocation sourceLoc = stmt.getSourceLocation(); if (!targetDatasource.getDataset().allow(topOp, DatasetUtil.OP_UPSERT)) { throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, targetDatasource.getDataset().getDatasetName() + ": upsert into dataset is not supported on Datasets with Meta records"); InsertDeleteUpsertOperator upsertOp; ILogicalOperator rootOperator; if (targetDatasource.getDataset().hasMetaPart()) { if (returnExpression != null) { throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, upsertOp.setPrevRecordType(targetDatasource.getItemType()); upsertOp.setSourceLocation(sourceLoc); if (targetDatasource.getDataset().hasMetaPart()) { List<LogicalVariable> metaVars = new ArrayList<>(); metaVars.add(context.newVar());
DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName(), sourceLoc); List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys(); if (dataset.hasMetaPart()) { throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset()); List<LogicalVariable> additionalFilteringVars; List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName(), sourceLoc); List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset().getDatasetDetails()) .getKeySourceIndicator(); ArrayList<LogicalVariable> vars = new ArrayList<>(); ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<>(); List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>(); List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys(); int numOfPrimaryKeys = partitionKeys.size(); for (int i = 0; i < numOfPrimaryKeys; i++) { List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset()); List<LogicalVariable> additionalFilteringVars; List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
if (!operator.isBulkload() && operator.getPrevSecondaryKeyExprs() == null) { primaryKeyExprs = operator.getPrimaryKeyExpressions(); dataset = ((DatasetDataSource) operator.getDataSourceIndex().getDataSource()).getDataset(); break; if (!insertDeleteUpsertOperator.isBulkload()) { primaryKeyExprs = insertDeleteUpsertOperator.getPrimaryKeyExpressions(); dataset = ((DatasetDataSource) insertDeleteUpsertOperator.getDataSource()).getDataset(); break;
return null; return ((DatasetDataSource) ds).getDataset(); } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) { UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
return null; dataset = ((DatasetDataSource) ds).getDataset(); } else {
boolean autogenerated = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).isAutogenerated(); if (!autogenerated) { return false; ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).getPrimaryKey().get(0); VariableReferenceExpression rec0 = new VariableReferenceExpression(inputRecord); rec0.setSourceLocation(inputRecordSourceLoc);
"Tuple filter and limit are not supported by ExternalDataSource"); Dataset externalDataset = ((DatasetDataSource) dataSource).getDataset(); String itemTypeName = externalDataset.getItemTypeName(); IAType itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), int[] maxFilterFieldIndexes = createFilterIndexes(maxFilterVars, opSchema); return metadataProvider.buildBtreeRuntime(jobSpec, opSchema, typeEnv, context, true, false, ((DatasetDataSource) dataSource).getDataset(), primaryIndex.getIndexName(), null, null, true, true, false, minFilterFieldIndexes, maxFilterFieldIndexes, tupleFilterFactory, outputLimit, false);