/**
 * Builds the intake-side runtime for the given feed: resolves the feed's adapter factory
 * from metadata, constructs the matching {@link FeedIntakeOperatorDescriptor}, and returns
 * the operator together with its partition constraint and the adapter factory itself.
 *
 * @param jobSpec        the job specification the intake operator is created for
 * @param feed           the feed whose adapter configuration drives the intake runtime
 * @param policyAccessor accessor for the feed's ingestion policy
 * @return a triple of (intake operator, partition constraint, adapter factory)
 * @throws Exception if the adapter factory or output type cannot be resolved from metadata
 */
public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
        JobSpecification jobSpec, Feed feed, FeedPolicyAccessor policyAccessor) throws Exception {
    // Resolve (adapter factory, output record descriptor, adapter type) from the metadata catalog.
    Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput;
    factoryOutput =
            FeedMetadataUtil.getFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx, getApplicationContext());
    ARecordType recordType = FeedMetadataUtil.getOutputType(feed,
            feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME));
    IAdapterFactory adapterFactory = factoryOutput.first;
    FeedIntakeOperatorDescriptor feedIngestor = null;
    switch (factoryOutput.third) {
        case INTERNAL:
            // Built-in adapter: the factory can be handed to the operator directly.
            feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, feed, adapterFactory, recordType,
                    policyAccessor, factoryOutput.second);
            break;
        case EXTERNAL:
            // External (UDF library) adapter: the adapter name is "<library><sep><adapter>",
            // so the library name is the segment before the separator.
            String libraryName = feed.getConfiguration().get(ExternalDataConstants.KEY_ADAPTER_NAME).trim()
                    .split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
            feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, feed, libraryName,
                    adapterFactory.getClass().getName(), recordType, policyAccessor, factoryOutput.second);
            break;
        default:
            // NOTE(review): any other adapter type falls through and the returned triple carries a
            // null operator — presumably unreachable since the enum has only these two members; confirm.
            break;
    }
    AlgebricksPartitionConstraint partitionConstraint = adapterFactory.getPartitionConstraint();
    return new Triple<>(feedIngestor, partitionConstraint, adapterFactory);
}
public static Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> buildStartFeedJob( MetadataProvider metadataProvider, Feed feed, List<FeedConnection> feedConnections, IStatementExecutor statementExecutor, IHyracksClientConnection hcc) throws Exception { FeedPolicyAccessor fpa = new FeedPolicyAccessor(new HashMap<>()); Pair<JobSpecification, IAdapterFactory> intakeInfo = buildFeedIntakeJobSpec(feed, metadataProvider, fpa); List<JobSpecification> jobsList = new ArrayList<>(); // TODO: Figure out a better way to handle insert/upsert per conn instead of per feed Boolean insertFeed = ExternalDataUtils.isInsertFeed(feed.getConfiguration()); // Construct the ingestion Job JobSpecification intakeJob = intakeInfo.getLeft(); IAdapterFactory ingestionAdaptorFactory = intakeInfo.getRight(); String[] ingestionLocations = ingestionAdaptorFactory.getPartitionConstraint().getLocations(); // Add metadata configs metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, Boolean.TRUE.toString()); metadataProvider.getConfig().put(FeedActivityDetails.COLLECT_LOCATIONS, StringUtils.join(ingestionLocations, ',')); // TODO: Once we deprecated AQL, this extra queryTranslator can be removed. IStatementExecutor translator = getSQLPPTranslator(metadataProvider, ((QueryTranslator) statementExecutor).getSessionOutput()); // Add connection job for (FeedConnection feedConnection : feedConnections) { JobSpecification connectionJob = getConnectionJob(metadataProvider, feedConnection, translator, hcc, insertFeed); jobsList.add(connectionJob); } return Pair.of(combineIntakeCollectJobs(metadataProvider, feed, intakeJob, jobsList, feedConnections, ingestionLocations), intakeInfo.getRight().getPartitionConstraint()); } }
/**
 * Writes the feed's adapter configuration into the metadata record being built, as an
 * unordered list of (name, value) property records stored at the feed record's
 * adapter-config field index.
 *
 * @param recordBuilder    builder for the enclosing Feed metadata record
 * @param feed             the feed whose configuration map is serialized
 * @param fieldValueBuffer scratch storage that receives the serialized list value
 * @throws HyracksDataException if serializing a property or the list fails
 */
private void writeFeedAdaptorField(IARecordBuilder recordBuilder, Feed feed,
        ArrayBackedValueStorage fieldValueBuffer) throws HyracksDataException {
    UnorderedListBuilder configListBuilder = new UnorderedListBuilder();
    ArrayBackedValueStorage itemBuffer = new ArrayBackedValueStorage();
    // The list's element type comes from the Feed record type's adapter-config field.
    configListBuilder.reset((AUnorderedListType) MetadataRecordTypes.FEED_RECORDTYPE
            .getFieldTypes()[MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX]);
    for (Map.Entry<String, String> entry : feed.getConfiguration().entrySet()) {
        itemBuffer.reset();
        // Each configuration entry becomes one serialized property record in the list.
        writePropertyTypeRecord(entry.getKey(), entry.getValue(), itemBuffer.getDataOutput());
        configListBuilder.addItem(itemBuffer);
    }
    configListBuilder.write(fieldValueBuffer.getDataOutput(), true);
    recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX, fieldValueBuffer);
}
String metaTypeName = FeedUtils.getFeedMetaTypeName(sourceFeed.getConfiguration()); if (metaTypeName == null) { throw new AlgebricksException("Feed to a dataset with metadata doesn't have meta type specified"); if (ExternalDataUtils.isChangeFeed(sourceFeed.getConfiguration())) { List<Mutable<ILogicalExpression>> keyAccessExpression = new ArrayList<>(); keyAccessScalarFunctionCallExpression = new ArrayList<>();
public static void validateFeed(Feed feed, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx) throws AlgebricksException { try { Map<String, String> configuration = feed.getConfiguration(); ARecordType adapterOutputType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_TYPE_NAME)); ARecordType metaType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME));
IDataSourceAdapter.AdapterType adapterType = null; try { Map<String, String> configuration = feed.getConfiguration(); adapterName = configuration.get(ExternalDataConstants.KEY_ADAPTER_NAME); configuration.putAll(policyAccessor.getFeedPolicy());
feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME)); List<FunctionSignature> appliedFunctions = cfs.getAppliedFunctions(); for (FunctionSignature func : appliedFunctions) {