@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context)
        throws AlgebricksException {
    DataSource ds = new DatasetDataSource(datasetId, dataset, recordType,
            null /* external dataset doesn't have meta records */, Type.EXTERNAL_DATASET,
            dataset.getDatasetDetails(), context.getComputationNodeDomain());
    IDataSourcePropertiesProvider dspp = ds.getPropertiesProvider();
    AbstractScanOperator as = (AbstractScanOperator) op;
    deliveredProperties = dspp.computePropertiesVector(as.getVariables());
}
private void fillPKVars(DataSourceScanOperator dataScan, List<LogicalVariable> pkVars) {
    pkVars.clear();
    DatasetDataSource datasetDataSource = (DatasetDataSource) dataScan.getDataSource();
    if (datasetDataSource.getDataset().getDatasetDetails() instanceof InternalDatasetDetails) {
        int numPKs = datasetDataSource.getDataset().getPrimaryKeys().size();
        for (int i = 0; i < numPKs; i++) {
            pkVars.add(dataScan.getVariables().get(i));
        }
    }
}
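// A compact equivalent of the loop above, offered only as a hedged sketch: it relies on the same
// assumption the loop does, namely that for an internal dataset the primary-key variables occupy
// the leading positions of the primary scan's variable list.
int numPKs = datasetDataSource.getDataset().getPrimaryKeys().size();
pkVars.addAll(dataScan.getVariables().subList(0, numPKs));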
public DatasetDataSource(DataSourceId id, Dataset dataset, IAType itemType, IAType metaItemType,
        byte datasourceType, IDatasetDetails datasetDetails, INodeDomain datasetDomain)
        throws AlgebricksException {
    super(id, itemType, metaItemType, datasourceType, datasetDomain);
    this.dataset = dataset;
    switch (dataset.getDatasetType()) {
        case INTERNAL:
            initInternalDataset(itemType, metaItemType, datasetDetails);
            break;
        case EXTERNAL:
            initExternalDataset(itemType);
            break;
    }
}
"Tuple filter and limit are not supported by ExternalDataSource"); Dataset externalDataset = ((DatasetDataSource) dataSource).getDataset(); String itemTypeName = externalDataset.getItemTypeName(); IAType itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), return metadataProvider.buildExternalDatasetDataScannerRuntime(jobSpec, itemType, adapterFactory); case INTERNAL: DataSourceId id = getId(); String dataverseName = id.getDataverseName(); String datasetName = id.getDatasourceName(); dataverseName, datasetName, datasetName); int[] minFilterFieldIndexes = createFilterIndexes(minFilterVars, opSchema); int[] maxFilterFieldIndexes = createFilterIndexes(maxFilterVars, opSchema); return metadataProvider.buildBtreeRuntime(jobSpec, opSchema, typeEnv, context, true, false, ((DatasetDataSource) dataSource).getDataset(), primaryIndex.getIndexName(), null, null, true, true, false, minFilterFieldIndexes, maxFilterFieldIndexes, tupleFilterFactory, outputLimit, false);
        ICompiledDmlStatement stmt) throws AlgebricksException {
    SourceLocation sourceLoc = stmt.getSourceLocation();
    if (!targetDatasource.getDataset().allow(topOp, DatasetUtil.OP_UPSERT)) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                targetDatasource.getDataset().getDatasetName()
                        + ": upsert into dataset is not supported on Datasets with Meta records");
    }
    InsertDeleteUpsertOperator upsertOp;
    ILogicalOperator rootOperator;
    if (targetDatasource.getDataset().hasMetaPart()) {
        if (returnExpression != null) {
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,

    upsertOp.setPrevRecordType(targetDatasource.getItemType());
    upsertOp.setSourceLocation(sourceLoc);
    if (targetDatasource.getDataset().hasMetaPart()) {
        List<LogicalVariable> metaVars = new ArrayList<>();
        metaVars.add(context.newVar());
        upsertOp.setPrevAdditionalNonFilteringVars(metaVars);
        List<Object> metaTypes = new ArrayList<>();
        metaTypes.add(targetDatasource.getMetaItemType());
        upsertOp.setPrevAdditionalNonFilteringTypes(metaTypes);
    }
    upsertOp.setPrevFilterVar(context.newVar());
    upsertOp.setPrevFilterType(
            ((ARecordType) targetDatasource.getItemType()).getFieldType(additionalFilteringField.get(0)));
    additionalFilteringAssign.getInputs().clear();
    additionalFilteringAssign.getInputs().add(assign.getInputs().get(0));
DatasetDataSource prevAqlDataSource = (DatasetDataSource) dataScans.get(i - 1).getDataSource();
DatasetDataSource currAqlDataSource = (DatasetDataSource) dataScans.get(i).getDataSource();
if (!prevAqlDataSource.getDataset().equals(currAqlDataSource.getDataset())) {
    return -1;
public static DataSource lookupSourceInMetadata(IClusterStateManager clusterStateManager,
        MetadataTransactionContext mdTxnCtx, DataSourceId aqlId) throws AlgebricksException {
    Dataset dataset = findDataset(mdTxnCtx, aqlId.getDataverseName(), aqlId.getDatasourceName());
    if (dataset == null) {
        throw new AlgebricksException("Datasource with id " + aqlId + " was not found.");
    }
    IAType itemType = findType(mdTxnCtx, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    IAType metaItemType = findType(mdTxnCtx, dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    INodeDomain domain = findNodeDomain(clusterStateManager, mdTxnCtx, dataset.getNodeGroupName());
    byte datasourceType = dataset.getDatasetType().equals(DatasetType.EXTERNAL) ? DataSource.Type.EXTERNAL_DATASET
            : DataSource.Type.INTERNAL_DATASET;
    return new DatasetDataSource(aqlId, dataset, itemType, metaItemType, datasourceType,
            dataset.getDatasetDetails(), domain);
}
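// Minimal caller-side sketch for the lookup above. Assumptions: "csm" is an available
// IClusterStateManager, "mdTxnCtx" is an open MetadataTransactionContext, and the
// dataverse/dataset names are hypothetical, not taken from the original code.
DataSource src = lookupSourceInMetadata(csm, mdTxnCtx, new DataSourceId("Default", "Customers"));
Dataset resolved = ((DatasetDataSource) src).getDataset();
boolean isExternal = src.getDatasourceType() == DataSource.Type.EXTERNAL_DATASET;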
private ILogicalOperator translateDelete(DatasetDataSource targetDatasource, Mutable<ILogicalExpression> varRef,
        List<Mutable<ILogicalExpression>> varRefsForLoading,
        List<Mutable<ILogicalExpression>> additionalFilteringExpressions, ILogicalOperator assign,
        ICompiledDmlStatement stmt) throws AlgebricksException {
    SourceLocation sourceLoc = stmt.getSourceLocation();
    if (targetDatasource.getDataset().hasMetaPart()) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                targetDatasource.getDataset().getDatasetName()
                        + ": delete from dataset is not supported on Datasets with Meta records");
    }
    InsertDeleteUpsertOperator deleteOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
            varRefsForLoading, InsertDeleteUpsertOperator.Kind.DELETE, false);
    deleteOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
    deleteOp.getInputs().add(new MutableObject<>(assign));
    deleteOp.setSourceLocation(sourceLoc);
    DelegateOperator leafOperator = new DelegateOperator(new CommitOperator(true));
    leafOperator.getInputs().add(new MutableObject<>(deleteOp));
    leafOperator.setSourceLocation(sourceLoc);
    return leafOperator;
}
private DatasetDataSource validateDatasetInfo(MetadataProvider metadataProvider, String dataverseName,
        String datasetName, SourceLocation sourceLoc) throws AlgebricksException {
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
                dataverseName);
    }
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                "Cannot write output to an external dataset.");
    }
    DataSourceId sourceId = new DataSourceId(dataverseName, datasetName);
    IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    IAType metaItemType = metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
            dataset.getMetaItemTypeName());
    INodeDomain domain = metadataProvider.findNodeDomain(dataset.getNodeGroupName());
    return new DatasetDataSource(sourceId, dataset, itemType, metaItemType, DataSource.Type.INTERNAL_DATASET,
            dataset.getDatasetDetails(), domain);
}
@Override
public IDataSourceIndex<String, DataSourceId> findDataSourceIndex(String indexId, DataSourceId dataSourceId)
        throws AlgebricksException {
    DataSource source = findDataSource(dataSourceId);
    Dataset dataset = ((DatasetDataSource) source).getDataset();
    Index secondaryIndex = getIndex(dataset.getDataverseName(), dataset.getDatasetName(), indexId);
    return (secondaryIndex != null)
            ? new DataSourceIndex(secondaryIndex, dataset.getDataverseName(), dataset.getDatasetName(), this)
            : null;
}
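// Hedged caller-side sketch for findDataSourceIndex above. The index name "idx_age" and the
// variable "sourceId" are illustrative assumptions; the method returns null when no secondary
// index with that name exists on the dataset.
IDataSourceIndex<String, DataSourceId> idx = metadataProvider.findDataSourceIndex("idx_age", sourceId);
if (idx == null) {
    // hypothetical handling: no such secondary index, keep the primary-index access plan
}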
private ILogicalOperator translateInsert(DatasetDataSource targetDatasource, Mutable<ILogicalExpression> varRef,
        List<Mutable<ILogicalExpression>> varRefsForLoading,
        List<Mutable<ILogicalExpression>> additionalFilteringExpressions, ILogicalOperator assign,
        ICompiledDmlStatement stmt) throws AlgebricksException {
    SourceLocation sourceLoc = stmt.getSourceLocation();
    if (targetDatasource.getDataset().hasMetaPart()) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                targetDatasource.getDataset().getDatasetName()
                        + ": insert into dataset is not supported on Datasets with Meta records");
    }
    // Adds the insert operator.
    InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
            varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
    insertOp.getInputs().add(new MutableObject<>(assign));
    insertOp.setSourceLocation(sourceLoc);
    // Adds the commit operator.
    CompiledInsertStatement compiledInsert = (CompiledInsertStatement) stmt;
    Expression returnExpression = compiledInsert.getReturnExpression();
    DelegateOperator rootOperator = new DelegateOperator(new CommitOperator(returnExpression == null));
    rootOperator.getInputs().add(new MutableObject<>(insertOp));
    rootOperator.setSourceLocation(sourceLoc);
    // Compiles the return expression.
    return processReturningExpression(rootOperator, insertOp, compiledInsert);
}
DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
DataSource ds = (DataSource) dataSourceScanOp.getDataSource();
if (dataset.getDatasetName().compareTo(((DatasetDataSource) ds).getDataset().getDatasetName()) == 0) {
    List<LogicalVariable> minFilterVars = new ArrayList<>();
    List<LogicalVariable> maxFilterVars = new ArrayList<>();
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
        return false;
    }
    InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) op;
    boolean sameDataset = checkIfInsertAndScanDatasetsSame(op,
            ((DatasetDataSource) insertOp.getDataSource()).getDataset().getDatasetName());
    if (sameDataset) {
        MaterializeOperator materializeOperator = new MaterializeOperator();
        materializeOperator.setSourceLocation(op.getSourceLocation());
        MaterializePOperator materializePOperator = new MaterializePOperator(true);
        materializeOperator.setPhysicalOperator(materializePOperator);
        materializeOperator.getInputs()
                .add(new MutableObject<ILogicalOperator>(insertOp.getInputs().get(0).getValue()));
        context.computeAndSetTypeEnvironmentForOperator(materializeOperator);
        insertOp.getInputs().clear();
        insertOp.getInputs().add(new MutableObject<ILogicalOperator>(materializeOperator));
        context.computeAndSetTypeEnvironmentForOperator(insertOp);
        return true;
    } else {
        return false;
    }
}
DataSource ds = (DataSource) dataSourceScanOp.getDataSource();
if ((ds.getDatasourceType() == Type.INTERNAL_DATASET || ds.getDatasourceType() == Type.EXTERNAL_DATASET)
        && ((DatasetDataSource) ds).getDataset().getDatasetName().compareTo(insertDatasetName) == 0) {
    return true;
DatasetDataSource targetDatasource =
        validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName(), sourceLoc);
List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
if (dataset.hasMetaPart()) {
    throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,

List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
List<LogicalVariable> additionalFilteringVars;
List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
DatasetDataSource targetDatasource =
        validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName(), sourceLoc);
List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset().getDatasetDetails())
        .getKeySourceIndicator();
ArrayList<LogicalVariable> vars = new ArrayList<>();
ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<>();
List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
int numOfPrimaryKeys = partitionKeys.size();
for (int i = 0; i < numOfPrimaryKeys; i++) {

List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
List<LogicalVariable> additionalFilteringVars;
List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
if (!operator.isBulkload() && operator.getPrevSecondaryKeyExprs() == null) {
    primaryKeyExprs = operator.getPrimaryKeyExpressions();
    dataset = ((DatasetDataSource) operator.getDataSourceIndex().getDataSource()).getDataset();
    break;
}

if (!insertDeleteUpsertOperator.isBulkload()) {
    primaryKeyExprs = insertDeleteUpsertOperator.getPrimaryKeyExpressions();
    dataset = ((DatasetDataSource) insertDeleteUpsertOperator.getDataSource()).getDataset();
    break;
}
        return null;
    }
    return ((DatasetDataSource) ds).getDataset();
} else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
    UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
        return null;
    }
    dataset = ((DatasetDataSource) ds).getDataset();
} else {
boolean autogenerated = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).isAutogenerated();
if (!autogenerated) {
    return false;
}

        ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).getPrimaryKey().get(0);
VariableReferenceExpression rec0 = new VariableReferenceExpression(inputRecord);
rec0.setSourceLocation(inputRecordSourceLoc);