/**
 * Retrieves the output record type, as configured through the
 * "set output-record-type" compiler property, resolved against the default dataverse.
 *
 * @return the resolved {@link ARecordType}
 * @throws AlgebricksException if the type cannot be resolved from metadata
 */
public ARecordType findOutputRecordType() throws AlgebricksException {
    final String defaultDataverse = getDefaultDataverseName();
    final String outputRecordType = getProperty("output-record-type");
    return MetadataManagerUtil.findOutputRecordType(mdTxnCtx, defaultDataverse, outputRecordType);
}
/**
 * Resolves this data source's id (dataverse + dataset) against the metadata
 * via the enclosing {@code metadataProvider}.
 *
 * NOTE(review): any failure during the lookup is swallowed and surfaced only as a
 * {@code null} return, so the original cause is lost and callers must null-check.
 * Presumably deliberate best-effort semantics — confirm that dropping the
 * exception (no logging) is acceptable here.
 */
@Override public IDataSource<DataSourceId> getDataSource() { try { DataSourceId sourceId = new DataSourceId(dataverse, dataset); return metadataProvider.lookupSourceInMetadata(sourceId); } catch (Exception me) { return null; } }
/**
 * Builds the secondary-index UPSERT runtime for the given index.
 *
 * Thin forwarder: routes to the shared insert/delete/upsert builder with
 * {@code IndexOperation.UPSERT} and {@code bulkload=false}; the upsert-specific
 * arguments (upsert indicator variable, previous secondary keys, previous
 * filtering key) are appended at the end of the call.
 */
@Override public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexUpsertRuntime( IDataSourceIndex<String, DataSourceId> dataSourceIndex, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalFilteringKeys, ILogicalExpression filterExpr, LogicalVariable upsertIndicatorVar, List<LogicalVariable> prevSecondaryKeys, LogicalVariable prevAdditionalFilteringKey, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec) throws AlgebricksException { return getIndexInsertOrDeleteOrUpsertRuntime(IndexOperation.UPSERT, dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys, secondaryKeys, additionalFilteringKeys, filterExpr, recordDesc, context, spec, false, upsertIndicatorVar, prevSecondaryKeys, prevAdditionalFilteringKey); }
/**
 * Looks up a secondary index of the dataset backing the given data source.
 *
 * @param indexId the index name
 * @param dataSourceId identifies the dataset-backed data source
 * @return a {@link DataSourceIndex} wrapper, or {@code null} when no such index exists
 * @throws AlgebricksException on metadata access failure
 */
@Override
public IDataSourceIndex<String, DataSourceId> findDataSourceIndex(String indexId, DataSourceId dataSourceId)
        throws AlgebricksException {
    // Resolve the backing dataset first; the source is expected to be dataset-backed.
    DataSource source = findDataSource(dataSourceId);
    Dataset dataset = ((DatasetDataSource) source).getDataset();
    Index secondaryIndex = getIndex(dataset.getDataverseName(), dataset.getDatasetName(), indexId);
    if (secondaryIndex == null) {
        return null;
    }
    return new DataSourceIndex(secondaryIndex, dataset.getDataverseName(), dataset.getDatasetName(), this);
}
/**
 * Returns the similarity function name configured on the given metadata provider,
 * normalized to lower case. Falls back to {@code DEFAULT_SIM_FUNCTION} when the
 * property is unset.
 *
 * @param metadata the provider holding compiler properties
 * @return the lower-cased similarity function name, never {@code null}
 */
public static String getSimFunction(MetadataProvider metadata) {
    String simFunction = metadata.getProperty(SIM_FUNCTION_PROP_NAME);
    if (simFunction == null) {
        simFunction = DEFAULT_SIM_FUNCTION;
    }
    // Locale-independent lowering: the default-locale toLowerCase() mangles
    // identifiers under e.g. the Turkish locale ("I" -> dotless i).
    return simFunction.toLowerCase(java.util.Locale.ROOT);
}
/**
 * Builds the primary INSERT runtime for the given data source by delegating to
 * the shared insert/delete builder with the INSERT operation.
 */
@Override
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(
        IDataSource<DataSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
        List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
        List<LogicalVariable> additionalNonFilteringFields, RecordDescriptor inputRecordDesc, JobGenContext context,
        JobSpecification spec, boolean bulkload) throws AlgebricksException {
    final IndexOperation op = IndexOperation.INSERT;
    return getInsertOrDeleteRuntime(op, dataSource, propagatedSchema, keys, payload, additionalNonKeyFields,
            inputRecordDesc, context, spec, bulkload, additionalNonFilteringFields);
}
/**
 * Looks up the feed connection linking {@code feedName} to {@code datasetName}
 * within the given dataverse.
 *
 * @throws AlgebricksException on metadata access failure
 */
public FeedConnection findFeedConnection(String dataverseName, String feedName, String datasetName)
        throws AlgebricksException {
    return MetadataManagerUtil.findFeedConnection(mdTxnCtx, dataverseName, feedName, datasetName);
}
/**
 * Builds the scan runtime for the given data source. Construction is delegated
 * to the concrete {@link DataSource} implementation, which knows its own
 * physical scan strategy.
 */
@Override
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(
        IDataSource<DataSourceId> dataSource, List<LogicalVariable> scanVariables,
        List<LogicalVariable> projectVariables, boolean projectPushed, List<LogicalVariable> minFilterVars,
        List<LogicalVariable> maxFilterVars, ITupleFilterFactory tupleFilterFactory, long outputLimit,
        IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec,
        Object implConfig) throws AlgebricksException {
    DataSource concreteSource = (DataSource) dataSource;
    return concreteSource.buildDatasourceScanRuntime(this, dataSource, scanVariables, projectVariables,
            projectPushed, minFilterVars, maxFilterVars, tupleFilterFactory, outputLimit, opSchema, typeEnv, context,
            jobSpec, implConfig);
}
/**
 * Resolves a {@link DataSource} for the given id. Simple forwarder to
 * {@code lookupSourceInMetadata}.
 *
 * @throws AlgebricksException on metadata access failure
 */
public static DataSource findDataSource(IClusterStateManager clusterStateManager,
        MetadataTransactionContext mdTxnCtx, DataSourceId id) throws AlgebricksException {
    return lookupSourceInMetadata(clusterStateManager, mdTxnCtx, id);
}
/** Returns the textual form of this data source's id (delegates to the id's own toString). */
@Override public String toString() { return id.toString(); }
/**
 * Supplies partitioning-related properties for this data source, bound to its
 * node domain.
 */
@Override
public IDataSourcePropertiesProvider getPropertiesProvider() {
    final DataSourcePartitioningProvider partitioningProvider = new DataSourcePartitioningProvider(this, domain);
    return partitioningProvider;
}
/**
 * Resolves the datatype {@code typeName} declared in the given dataverse.
 *
 * @throws AlgebricksException on metadata access failure
 */
public IAType findType(String dataverse, String typeName) throws AlgebricksException {
    return MetadataManagerUtil.findType(mdTxnCtx, dataverse, typeName);
}
/**
 * Resolves the feed {@code feedName} declared in the given dataverse.
 *
 * @throws AlgebricksException on metadata access failure
 */
public Feed findFeed(String dataverse, String feedName) throws AlgebricksException {
    return MetadataManagerUtil.findFeed(mdTxnCtx, dataverse, feedName);
}
/**
 * Resolves the feed ingestion policy {@code policyName} declared in the given dataverse.
 *
 * @throws AlgebricksException on metadata access failure
 */
public FeedPolicyEntity findFeedPolicy(String dataverse, String policyName) throws AlgebricksException {
    return MetadataManagerUtil.findFeedPolicy(mdTxnCtx, dataverse, policyName);
}
/**
 * Returns the node names belonging to the given node group.
 *
 * @throws AlgebricksException on metadata access failure
 */
public List<String> findNodes(String nodeGroupName) throws AlgebricksException {
    return MetadataManagerUtil.findNodes(mdTxnCtx, nodeGroupName);
}
/**
 * Returns all indexes declared on the given dataset.
 *
 * @throws AlgebricksException on metadata access failure
 */
public List<Index> getDatasetIndexes(String dataverseName, String datasetName) throws AlgebricksException {
    return MetadataManagerUtil.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
}
/**
 * Creates a feed-backed data source and eagerly runs its feed-specific
 * initialization.
 *
 * NOTE(review): the single {@code domain} argument serves both as the data
 * source's domain (via the super constructor) and as the computation node
 * domain — confirm this dual use is intended.
 *
 * @throws AlgebricksException if feed-specific initialization fails
 */
public FeedDataSource(Feed feed, DataSourceId id, String targetDataset, IAType itemType, IAType metaType,
        List<IAType> pkTypes, List<ScalarFunctionCallExpression> keyAccessExpression, EntityId sourceFeedId,
        FeedRuntimeType location, String[] locations, INodeDomain domain, FeedConnection feedConnection)
        throws AlgebricksException {
    super(id, itemType, metaType, Type.FEED, domain);
    // Feed identity and target dataset.
    this.feed = feed;
    this.feedConnection = feedConnection;
    this.sourceFeedId = sourceFeedId;
    this.targetDataset = targetDataset;
    // Primary-key metadata.
    this.pkTypes = pkTypes;
    this.keyAccessExpression = keyAccessExpression;
    // Placement information.
    this.location = location;
    this.locations = locations;
    this.computationNodeDomain = domain;
    // All fields are assigned before feed-specific initialization runs.
    initFeedDataSource();
}
/**
 * Builds the secondary-index DELETE runtime for the given index.
 *
 * Thin forwarder: routes to the shared insert/delete/upsert builder with
 * {@code IndexOperation.DELETE} and {@code bulkload=false}; the trailing three
 * {@code null}s are the upsert-only arguments (indicator variable, previous
 * secondary keys, previous filtering key), unused for deletes.
 */
@Override public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime( IDataSourceIndex<String, DataSourceId> dataSourceIndex, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields, ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec) throws AlgebricksException { return getIndexInsertOrDeleteOrUpsertRuntime(IndexOperation.DELETE, dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys, secondaryKeys, additionalNonKeyFields, filterExpr, recordDesc, context, spec, false, null, null, null); }
/**
 * Builds the primary DELETE runtime for the given data source by delegating to
 * the shared insert/delete builder with the DELETE operation (no bulkload, no
 * additional non-filtering fields).
 */
@Override
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(
        IDataSource<DataSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
        List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
        RecordDescriptor inputRecordDesc, JobGenContext context, JobSpecification spec) throws AlgebricksException {
    final IndexOperation op = IndexOperation.DELETE;
    return getInsertOrDeleteRuntime(op, dataSource, propagatedSchema, keys, payload, additionalNonKeyFields,
            inputRecordDesc, context, spec, false, null);
}
/**
 * Builds the secondary-index INSERT runtime for the given index.
 *
 * Thin forwarder: routes to the shared insert/delete/upsert builder with
 * {@code IndexOperation.INSERT}, passing the caller's {@code bulkload} flag
 * through; the trailing three {@code null}s are the upsert-only arguments
 * (indicator variable, previous secondary keys, previous filtering key),
 * unused for inserts.
 */
@Override public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime( IDataSourceIndex<String, DataSourceId> dataSourceIndex, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields, ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, boolean bulkload) throws AlgebricksException { return getIndexInsertOrDeleteOrUpsertRuntime(IndexOperation.INSERT, dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys, secondaryKeys, additionalNonKeyFields, filterExpr, recordDesc, context, spec, bulkload, null, null, null); }