/**
 * Lists every topic/partition mapping known to the wrapped consumer.
 *
 * <p>Pure pass-through: delegates directly to {@code delegate.listTopics()}.
 */
@Override
public Map<String, List<PartitionInfo>> listTopics() {
  return delegate.listTopics();
}
/**
 * Lists every topic/partition mapping known to the wrapped consumer, bounded by a timeout.
 *
 * <p>Pure pass-through: delegates directly to {@code delegate.listTopics(timeout)}.
 *
 * @param timeout maximum time to block waiting for topic metadata
 */
@Override
public Map<String, List<PartitionInfo>> listTopics(Duration timeout) {
  return delegate.listTopics(timeout);
}
/**
 * Fetches topic metadata from the consumer and adapts each entry into a {@link KafkaTopic}.
 *
 * @return one {@code KafkaTopic} per topic known to the consumer, with its partitions
 *     converted through {@code PARTITION_INFO_TO_KAFKA_PARTITION}
 */
@Override
public List<KafkaTopic> getTopics() {
  return FluentIterable.from(this.consumer.listTopics().entrySet())
      .transform(new Function<Entry<String, List<PartitionInfo>>, KafkaTopic>() {
        @Override
        public KafkaTopic apply(Entry<String, List<PartitionInfo>> topicEntry) {
          // Lists.transform is a lazy view; conversion happens as partitions are read.
          return new KafkaTopic(
              topicEntry.getKey(),
              Lists.transform(topicEntry.getValue(), PARTITION_INFO_TO_KAFKA_PARTITION));
        }
      })
      .toList();
}
/**
 * Resolves every partition of every topic whose name matches {@code pattern}.
 *
 * <p>Side effect: the {@code topics} collection is cleared and repopulated with the names
 * of all matched topics on every invocation.
 *
 * @param consumer consumer used to look up cluster topic metadata
 * @return all partitions belonging to pattern-matched topics
 */
@Override
public Set<TopicPartition> getAllSubscribedPartitions(Consumer<?, ?> consumer) {
  topics.clear();
  final Set<TopicPartition> matched = new HashSet<>();
  for (Map.Entry<String, List<PartitionInfo>> topicEntry : consumer.listTopics().entrySet()) {
    // Guard clause: skip topics that do not match the subscription pattern.
    if (!pattern.matcher(topicEntry.getKey()).matches()) {
      continue;
    }
    for (PartitionInfo info : topicEntry.getValue()) {
      matched.add(new TopicPartition(info.topic(), info.partition()));
      topics.add(info.topic());
    }
  }
  return matched;
}
for (String topic : _metricConsumer.listTopics().keySet()) { if (topicPattern.matcher(topic).matches()) { return;
/**
 * Lists all topic names visible to a freshly created consumer, excluding the internal
 * consumer-offsets topic.
 *
 * @return the set of user-visible topic names; never {@code null}
 */
@Override
public Set<String> listTopics() {
  // try-with-resources guarantees the short-lived consumer is closed even on failure.
  try (Consumer<String, String> consumer = kafkaConsumerFactory.createConsumer()) {
    final Map<String, List<PartitionInfo>> topicsInfo = consumer.listTopics();
    // Copy the keys into a fresh set: keySet() is a live view backed by the returned map,
    // so remove() would mutate the map through the view and throws
    // UnsupportedOperationException if the implementation returns an unmodifiable map.
    final Set<String> topics =
        topicsInfo == null ? new HashSet<>() : new HashSet<>(topicsInfo.keySet());
    topics.remove(CONSUMER_OFFSETS_TOPIC);
    return topics;
  }
}
/**
 * Fetches the topic/partition metadata from the underlying consumer, bounded by a timeout.
 *
 * @param duration maximum time to block waiting for topic metadata
 */
@Override
public Map<String, List<PartitionInfo>> listTopics(Duration duration) {
  return consumer.listTopics(duration);
}
/**
 * Fetches the topic/partition metadata from the underlying Kafka consumer.
 *
 * <p>Straight delegation with no added behavior.
 */
@Override
public Map<String, List<PartitionInfo>> listTopics() {
  return _kafkaConsumer.listTopics();
}
/**
 * Fetches the topic/partition metadata from the underlying consumer.
 *
 * <p>Straight delegation with no added behavior.
 */
@Override
public Map<String, List<PartitionInfo>> listTopics() {
  return consumer.listTopics();
}
/**
 * Fetches the topic/partition metadata from the underlying Kafka consumer, bounded by a timeout.
 *
 * @param timeout maximum time to block waiting for topic metadata
 */
@Override
public Map<String, List<PartitionInfo>> listTopics(Duration timeout) {
  return kafkaConsumer.listTopics(timeout);
}
/**
 * Returns the topic/partition metadata reported by the wrapped consumer.
 *
 * <p>Pure pass-through; no caching or filtering is applied here.
 */
@Override
public Map<String, List<PartitionInfo>> listTopics() {
  return consumer.listTopics();
}
/**
 * Returns the topic/partition metadata reported by the wrapped Kafka consumer,
 * bounded by a timeout.
 *
 * @param timeout maximum time to block waiting for topic metadata
 */
@Override
public Map<String, List<PartitionInfo>> listTopics(Duration timeout) {
  return _kafkaConsumer.listTopics(timeout);
}
/**
 * Returns the topic/partition metadata reported by the wrapped Kafka consumer.
 *
 * <p>Pure pass-through; no caching or filtering is applied here.
 */
@Override
public Map<String, List<PartitionInfo>> listTopics() {
  return kafkaConsumer.listTopics();
}
/**
 * Returns the topic/partition metadata reported by the wrapped consumer,
 * bounded by a timeout.
 *
 * @param duration maximum time to block waiting for topic metadata
 */
@Override
public Map<String, List<PartitionInfo>> listTopics(Duration duration) {
  return consumer.listTopics(duration);
}
/**
 * Returns the topic/partition metadata reported by the delegate consumer.
 *
 * <p>Pure pass-through; no caching or filtering is applied here.
 */
@Override
public Map<String, List<PartitionInfo>> listTopics() {
  return delegate.listTopics();
}
/**
 * Refreshes the cached changelog partition metadata from the restore consumer.
 *
 * <p>A {@code TimeoutException} is deliberately swallowed (logged at debug level):
 * the refresh is best-effort and is retried on the next run loop.
 */
private void refreshChangelogInfo() {
  try {
    partitionInfo.putAll(restoreConsumer.listTopics());
  } catch (final TimeoutException timedOut) {
    log.debug("Could not fetch topic metadata within the timeout, will retry in the next run loop");
  }
}
/**
 * Enumerates every topic visible to the metadata consumer as a {@link SystemStream}
 * under this system's name.
 *
 * @return one {@code SystemStream} per topic
 */
@Override
public Set<SystemStream> getAllSystemStreams() {
  // NOTE(review): the Set<String> cast suggests metadataConsumer is declared with a raw
  // type; kept as-is — confirm before removing.
  Set<String> topicNames = (Set<String>) this.metadataConsumer.listTopics().keySet();
  return topicNames.stream()
      .map(topicName -> new SystemStream(systemName, topicName))
      .collect(Collectors.toSet());
}
/**
 * Asynchronously retrieves the topic/partition metadata known to the underlying consumer.
 *
 * @param handler invoked with the metadata once the consumer task completes
 * @return this stream, for fluent chaining
 */
@Override
public KafkaReadStream<K, V> listTopics(Handler<AsyncResult<Map<String, List<PartitionInfo>>>> handler) {
  this.submitTask((consumer, future) -> {
    Map<String, List<PartitionInfo>> metadata = consumer.listTopics();
    // future may be null when no completion is expected for this task.
    if (future != null) {
      future.complete(metadata);
    }
  }, handler);
  return this;
}
/**
 * Finds every partition of topics selected by the whitelist or the configured regex.
 *
 * @param consumer consumer used to list cluster topics
 * @return the matching partitions as {@code TopicPartition}s
 */
private List<TopicPartition> fetchMatchingPartitions(Consumer<byte[], byte[]> consumer) {
  return consumer.listTopics().entrySet().stream()
      .filter(topicEntry -> isSelectedTopic(topicEntry.getKey()))
      .flatMap(topicEntry -> topicEntry.getValue().stream())
      .map(info -> new TopicPartition(info.topic(), info.partition()))
      .collect(Collectors.toList());
}

/** A topic is selected when it is whitelisted or matches the configured regex pattern. */
private boolean isSelectedTopic(String topic) {
  return topicsWhitelist.contains(topic) || topicsRegexPattern.matcher(topic).matches();
}
/**
 * Computes the key for every partition currently present on the destination cluster.
 *
 * <p>Access to {@code destinationConsumer} is serialized via the synchronized block —
 * presumably because the consumer is shared across threads; confirm against callers.
 *
 * @return the set of destination partition keys derived via {@code keyStrategy}
 */
private Set<TopicPartition> destinationPartitionIds() {
  synchronized (destinationConsumer) {
    Set<TopicPartition> partitionKeys = destinationConsumer.listTopics().values().stream()
        .flatMap(Collection::stream)
        .map(info -> keyStrategy.topicPartitionKey(info.topic(), info.partition()))
        .collect(Collectors.toSet());
    return partitionKeys;
  }
}