/**
 * Removes the cached entry for the given feed's (dataverse, feed-name) pair, if present.
 *
 * @param feed the feed whose cached entry should be dropped
 * @return the previously cached {@link Feed}, or {@code null} if nothing was cached
 */
public Feed dropFeedIfExists(Feed feed) {
    synchronized (feeds) {
        Map<String, Feed> dataverseFeeds = feeds.get(feed.getDataverseName());
        if (dataverseFeeds == null) {
            // No feeds cached for this dataverse at all.
            return null;
        }
        return dataverseFeeds.remove(feed.getFeedName());
    }
}
/**
 * Caches {@code feed} under its dataverse, creating the per-dataverse map on demand.
 *
 * NOTE(review): despite the name, an existing entry with the same feed name is
 * overwritten ({@code Map.put} semantics) — confirm callers rely on that.
 *
 * @param feed the feed to cache
 * @return the previously cached {@link Feed} with the same name, or {@code null}
 */
public Feed addFeedIfNotExists(Feed feed) {
    synchronized (feeds) {
        // computeIfAbsent replaces the original put-then-re-get sequence,
        // saving two redundant map lookups while keeping identical behavior.
        Map<String, Feed> feedsInDataverse =
                feeds.computeIfAbsent(feed.getDataverseName(), k -> new HashMap<>());
        return feedsInDataverse.put(feed.getFeedName(), feed);
    }
}
/**
 * Builds a Hyracks job that removes the on-disk log/adapter storage of {@code feed}
 * on every node currently known to the cluster.
 *
 * @param metadataProvider provider used to reach the application context
 * @param feed             the feed whose storage splits should be deleted
 * @return a job specification rooted at a file-remove operator
 * @throws AsterixException if computing the adapter splits fails
 */
public static JobSpecification buildRemoveFeedStorageJob(MetadataProvider metadataProvider, Feed feed)
        throws AsterixException {
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    JobSpecification spec = RuntimeUtils.createJobSpecification(appCtx);
    // Gather every node location; TreeSet removes duplicates and gives a stable order.
    Set<String> nodeNames = new TreeSet<>();
    for (String location : appCtx.getClusterStateManager().getClusterLocations().getLocations()) {
        nodeNames.add(location);
    }
    AlgebricksAbsolutePartitionConstraint locations =
            new AlgebricksAbsolutePartitionConstraint(nodeNames.toArray(new String[nodeNames.size()]));
    FileSplit[] feedLogFileSplits =
            FeedUtils.splitsForAdapter(appCtx, feed.getDataverseName(), feed.getFeedName(), locations);
    org.apache.hyracks.algebricks.common.utils.Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            StoragePathUtil.splitProviderAndPartitionConstraints(feedLogFileSplits);
    // true => delete the files rather than just truncating them — TODO confirm flag meaning
    // against FileRemoveOperatorDescriptor.
    FileRemoveOperatorDescriptor removeOp = new FileRemoveOperatorDescriptor(spec, splitsAndConstraint.first, true);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, removeOp, splitsAndConstraint.second);
    spec.addRoot(removeOp);
    return spec;
}
/**
 * Reports whether {@code signature} is referenced by any feed connection in a
 * dataverse other than {@code currentDataverse}.
 *
 * @param ctx               active metadata transaction context
 * @param signature         the function being checked for external use
 * @param currentDataverse  dataverse exempted from the scan (the caller's own)
 * @return {@code true} if some feed connection outside {@code currentDataverse}
 *         contains the function, {@code false} otherwise
 */
protected boolean isFunctionUsed(MetadataTransactionContext ctx, FunctionSignature signature,
        String currentDataverse) throws AlgebricksException {
    for (Dataverse dv : MetadataManager.INSTANCE.getDataverses(ctx)) {
        String dvName = dv.getDataverseName();
        if (dvName.equals(currentDataverse)) {
            continue; // the caller's own dataverse is not scanned
        }
        for (Feed feed : MetadataManager.INSTANCE.getFeeds(ctx, dvName)) {
            // (method name "getFeedConections" is the existing API spelling)
            for (FeedConnection connection : MetadataManager.INSTANCE.getFeedConections(ctx, dvName,
                    feed.getFeedName())) {
                if (connection.containsFunction(signature)) {
                    return true;
                }
            }
        }
    }
    return false;
}
@Override public void addFeed(TxnId txnId, Feed feed) throws AlgebricksException, RemoteException { try { // Insert into the 'Feed' dataset. FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(true); ITupleReference feedTuple = tupleReaderWriter.getTupleFromMetadataEntity(feed); insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FEED_DATASET, feedTuple); } catch (HyracksDataException e) { if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) { throw new AlgebricksException("A feed with this name " + feed.getFeedName() + " already exists in dataverse '" + feed.getDataverseName() + "'.", e); } else { throw new AlgebricksException(e); } } }
// NOTE(review): fragment of a larger method — the enclosing declaration lies outside this chunk.
// Resolve the adapter's output record type, and the optional meta record type, from the
// feed configuration (presumably type names registered as datatypes — confirm in getOutputType).
ARecordType adapterOutputType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_TYPE_NAME));
ARecordType metaType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME));
// prepareFeed mutates the configuration for this dataverse/feed pair — TODO confirm which
// keys it sets before relying on them downstream.
ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
// NOTE(review): fragment — the closing braces of this loop fall outside the visible chunk.
// Tear down every connection of this feed, then drop the feed itself.
feedConnections = getFeedConnections(txnId, dataverseName, feed.getFeedName());
for (FeedConnection feedConnection : feedConnections) {
    dropFeedConnection(txnId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
    // NOTE(review): as chunked here, dropFeed appears inside the connection loop; verify the
    // real brace placement drops the feed once per feed rather than once per connection.
    dropFeed(txnId, dataverseName, feed.getFeedName());
// NOTE(review): fragment — assigns to variables declared outside this chunk.
// Re-resolve the adapter output and meta record types from the (possibly updated) configuration.
adapterOutputType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_TYPE_NAME));
metaType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME));
// prepareFeed mutates the configuration for this dataverse/feed pair — TODO confirm exact effect.
ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
/**
 * Drops {@code feed}: refuses while the feed is still active, unregisters any stopped
 * listener, removes the feed's on-disk storage via a Hyracks job, and finally deletes
 * the metadata entry.
 *
 * @param hcc              client connection used to run the storage-removal job
 * @param metadataProvider supplies the metadata transaction context
 * @param feed             the feed to drop
 * @param sourceLoc        source location for error reporting
 * @throws CompilationException if the feed is active and connected to datasets
 * @throws Exception            if job execution or metadata deletion fails
 */
protected void doDropFeed(IHyracksClientConnection hcc, MetadataProvider metadataProvider, Feed feed,
        SourceLocation sourceLoc) throws Exception {
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    EntityId feedId = feed.getFeedId();
    ActiveNotificationHandler notificationHandler =
            (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
    ActiveEntityEventsListener listener =
            (ActiveEntityEventsListener) notificationHandler.getListener(feedId);
    if (listener != null) {
        if (listener.getState() != ActivityState.STOPPED) {
            // An active feed must be stopped/disconnected before it can be dropped.
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                    "Feed " + feedId + " is currently active and connected to the following dataset(s) \n"
                            + listener.toString());
        }
        listener.unregister();
    }
    // Remove the feed's on-disk storage before deleting its metadata entry.
    JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
            MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getEntityName()));
    runJob(hcc, spec);
    MetadataManager.INSTANCE.dropFeed(mdTxnCtx, feed.getDataverseName(), feed.getFeedName());
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Removed feed " + feedId);
    }
}
// NOTE(review): fragment of a tuple-translator write path — the start of the method
// (and any earlier key fields, e.g. the dataverse name) lies outside this chunk.
tupleBuilder.addFieldEndOffset();
// Key field of the tuple: the feed name (presumably the second key after the
// dataverse name — confirm against the index key order).
aString.setValue(feed.getFeedName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
// The same feed name is also written into the record payload at the
// FEED_ARECORD_FEED_NAME_FIELD_INDEX position.
aString.setValue(feed.getFeedName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX, fieldValue);