/**
 * Checks whether a mapping exists for the given key.
 *
 * <p>Relies on the map rejecting {@code null} values, so a {@code null}
 * lookup result always means the key is absent.
 */
public boolean containsKey(K key) {
    return null != get(key);
}
/**
 * Returns the highest sequence id recorded as pushed by the given producer,
 * or {@code -1} when no sequence id has been recorded for it.
 */
public long getLastPublishedSequenceId(String producerName) {
    Long lastSequenceId = highestSequencedPushed.get(producerName);
    if (lastSequenceId == null) {
        return -1;
    }
    return lastSequenceId;
}
/**
 * Tells whether the map holds an entry for {@code key}.
 *
 * <p>Correct only because this map never stores {@code null} values.
 */
public boolean containsKey(K key) {
    return null != get(key);
}
/** Returns the replicator registered for {@code remoteCluster}, or {@code null} if none exists. */
public Replicator getPersistentReplicator(String remoteCluster) { return replicators.get(remoteCluster); }
/**
 * Returns the cached {@link ReplicationMetrics} for the given
 * namespace/cluster key, or {@code null} if none is tracked.
 */
public ReplicationMetrics get(String namespaceCluster) { return metricsMap.get(namespaceCluster); }
/** Returns the subscription with the given name, or {@code null} if it does not exist on this topic. */
public PersistentSubscription getSubscription(String subscriptionName) { return subscriptions.get(subscriptionName); }
/**
 * Returns the cached offline stats for {@code topicName}, or {@code null}
 * when no entry is present in the cache.
 */
public PersistentOfflineTopicStats getOfflineTopicStat(TopicName topicName) { return offlineTopicStatCache.get(topicName); }
/** Returns the named subscription, or {@code null} if this topic has no such subscription. */
@Override public Subscription getSubscription(String subscription) { return subscriptions.get(subscription); }
/** Returns the replicator registered for {@code remoteCluster}, or {@code null} if none exists. */
public Replicator getPersistentReplicator(String remoteCluster) { return replicators.get(remoteCluster); }
/**
 * Returns every topic currently tracked under the given namespace bundle.
 *
 * <p>Looks the bundle up in the two-level {@code multiLayerTopicsMap}
 * (namespace → bundle → topics) and returns an empty list when either
 * level has no entry.
 */
public List<Topic> getAllTopicsFromNamespaceBundle(String namespace, String bundle) {
    ConcurrentOpenHashMap<String, ConcurrentOpenHashMap<String, Topic>> bundlesOfNamespace =
            multiLayerTopicsMap.get(namespace);
    if (bundlesOfNamespace != null) {
        ConcurrentOpenHashMap<String, Topic> topicsOfBundle = bundlesOfNamespace.get(bundle);
        if (topicsOfBundle != null) {
            return topicsOfBundle.values();
        }
    }
    return Collections.emptyList();
}
/**
 * Get a reference to a topic that is currently loaded in the broker.
 *
 * <p>This method will not make the broker attempt to load the topic if
 * it's not already loaded: a missing, still-pending, or exceptionally
 * completed load yields {@link Optional#empty()}.
 */
public Optional<Topic> getTopicReference(String topic) {
    CompletableFuture<Optional<Topic>> pendingTopic = topics.get(topic);
    // Guard clauses: only a successfully completed future holds a usable reference.
    if (pendingTopic == null || !pendingTopic.isDone() || pendingTopic.isCompletedExceptionally()) {
        return Optional.empty();
    }
    return pendingTopic.join();
}
/**
 * Disconnects the replicator for {@code remoteCluster} and removes it
 * from the {@code replicators} map.
 *
 * @return a future that completes once the replicator has been
 *         disconnected and removed, or completes exceptionally if the
 *         disconnect fails.
 */
CompletableFuture<Void> removeReplicator(String remoteCluster) {
    log.info("[{}] Removing replicator to {}", topic, remoteCluster);
    final CompletableFuture<Void> future = new CompletableFuture<>();
    String name = NonPersistentReplicator.getReplicatorName(replicatorPrefix, remoteCluster);
    replicators.get(remoteCluster).disconnect().thenRun(() -> {
        log.info("[{}] Successfully removed replicator {}", name, remoteCluster);
        // BUG FIX: the success path previously neither removed the entry
        // from `replicators` nor completed `future`, so callers awaiting
        // the returned future would hang forever (compare the persistent
        // topic's removeReplicator, which does both).
        replicators.remove(remoteCluster);
        future.complete(null);
    }).exceptionally(e -> {
        log.error("[{}] Failed to close replication producer {} {}", topic, name, e.getMessage(), e);
        future.completeExceptionally(e);
        return null;
    });
    return future;
}
/**
 * Removes and recycles the metrics entry for the given namespace/cluster
 * key; a no-op when no entry exists.
 */
public void remove(String namespaceCluster) {
    // Remove first, then recycle the returned instance. The previous
    // get → recycle → remove sequence was a check-then-act race: two
    // threads could both fetch the same pooled ReplicationMetrics and
    // recycle it twice.
    ReplicationMetrics replicationMetrics = metricsMap.remove(namespaceCluster);
    if (replicationMetrics != null) {
        replicationMetrics.recycle();
    }
}
/**
 * Attaches a validator to an already-registered dynamic configuration
 * entry; a no-op when {@code key} is not registered.
 */
// NOTE(review): containsKey-then-get is a check-then-act pair; if
// dynamicConfigurationMap can be mutated concurrently, the get() could
// return null and NPE — confirm all registration happens at startup.
private void addDynamicConfigValidator(String key, Predicate<String> validator) { validateConfigKey(key); if (dynamicConfigurationMap.containsKey(key)) { dynamicConfigurationMap.get(key).validator = validator; } }
/**
 * Disconnects the replicator for {@code remoteCluster}, deletes its
 * replication cursor from the managed ledger, and removes it from the
 * {@code replicators} map.
 *
 * <p>Sequencing matters: the map entry is removed only after the cursor
 * delete succeeds, so a failed delete leaves the replicator registered.
 * On disconnect failure the cursor is never deleted and the future
 * completes exceptionally with the disconnect error; on cursor-delete
 * failure it completes exceptionally with a {@code PersistenceException}
 * wrapping the ledger error.
 *
 * @return a future completed when disconnect + cursor deletion + map
 *         removal have all finished
 */
CompletableFuture<Void> removeReplicator(String remoteCluster) { log.info("[{}] Removing replicator to {}", topic, remoteCluster); final CompletableFuture<Void> future = new CompletableFuture<>(); String name = PersistentReplicator.getReplicatorName(replicatorPrefix, remoteCluster); replicators.get(remoteCluster).disconnect().thenRun(() -> { ledger.asyncDeleteCursor(name, new DeleteCursorCallback() { @Override public void deleteCursorComplete(Object ctx) { replicators.remove(remoteCluster); future.complete(null); } @Override public void deleteCursorFailed(ManagedLedgerException exception, Object ctx) { log.error("[{}] Failed to delete cursor {} {}", topic, name, exception.getMessage(), exception); future.completeExceptionally(new PersistenceException(exception)); } }, null); }).exceptionally(e -> { log.error("[{}] Failed to close replication producer {} {}", topic, name, e.getMessage(), e); future.completeExceptionally(e); return null; }); return future; }
/**
 * Detaches a producer handler from its topic's producer set.
 *
 * @return {@code true} if the handler was present and removed,
 *         {@code false} when the topic is untracked or the handler absent
 */
public boolean removeProducer(ProducerHandler producer) {
    final String topicName = producer.getProducer().getTopic();
    // Guard clause: unknown topic means there is nothing to detach.
    if (!topicProducerMap.containsKey(topicName)) {
        return false;
    }
    return topicProducerMap.get(topicName).remove(producer);
}
/**
 * Detaches a consumer handler from its topic's consumer set.
 *
 * @return {@code true} if the handler was present and removed,
 *         {@code false} when the topic is untracked or the handler absent
 */
public boolean removeConsumer(ConsumerHandler consumer) {
    final String topicName = consumer.getConsumer().getTopic();
    // Guard clause: unknown topic means there is nothing to detach.
    if (!topicConsumerMap.containsKey(topicName)) {
        return false;
    }
    return topicConsumerMap.get(topicName).remove(consumer);
}
/**
 * Detaches a reader handler from its topic's reader set.
 *
 * @return {@code true} if the handler was present and removed,
 *         {@code false} when the topic is untracked or the handler absent
 */
public boolean removeReader(ReaderHandler reader) {
    final String topicName = reader.getConsumer().getTopic();
    // Guard clause: unknown topic means there is nothing to detach.
    if (!topicReaderMap.containsKey(topicName)) {
        return false;
    }
    return topicReaderMap.get(topicName).remove(reader);
}
public void refreshTopicToStatsMaps(NamespaceBundle oldBundle) { checkNotNull(oldBundle); try { // retrieve all topics under existing old bundle List<Topic> topics = getAllTopicsFromNamespaceBundle(oldBundle.getNamespaceObject().toString(), oldBundle.toString()); if (!isEmpty(topics)) { // add topic under new split bundles which already updated into NamespaceBundleFactory.bundleCache topics.stream().forEach(t -> { addTopicToStatsMaps(TopicName.get(t.getName()), t); }); // remove old bundle from the map synchronized (multiLayerTopicsMap) { multiLayerTopicsMap.get(oldBundle.getNamespaceObject().toString()).remove(oldBundle.toString()); pulsarStats.invalidBundleStats(oldBundle.toString()); } } } catch (Exception e) { log.warn("Got exception while refreshing topicStats map", e); } }