@Override
public void assign(Collection<TopicPartition> partitions) {
  delegate.assign(partitions);
}
/**
 * Assign partitions to the KafkaConsumer.
 *
 * @param <K> The consumer key type
 * @param <V> The consumer value type
 * @param consumer The Kafka consumer to assign partitions to
 * @param newAssignment The partitions to assign.
 * @param listener The rebalance listener to call back on when the assignment changes
 */
public <K, V> void assignPartitions(Consumer<K, V> consumer, Set<TopicPartition> newAssignment,
    ConsumerRebalanceListener listener) {
  Set<TopicPartition> currentAssignment = consumer.assignment();
  if (!newAssignment.equals(currentAssignment)) {
    listener.onPartitionsRevoked(currentAssignment);
    consumer.assign(newAssignment);
    listener.onPartitionsAssigned(newAssignment);
  }
}
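A minimal usage sketch of the helper above, assuming a consumer created elsewhere; the topic name and the logging listener are illustrative and not part of the original code.

Set<TopicPartition> newAssignment = new HashSet<>(Arrays.asList(
    new TopicPartition("example-topic", 0),   // hypothetical topic name
    new TopicPartition("example-topic", 1)));

ConsumerRebalanceListener listener = new ConsumerRebalanceListener() {
  @Override
  public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
    // illustrative: react to partitions being taken away
    System.out.println("Revoked: " + partitions);
  }

  @Override
  public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
    // illustrative: react to the new manual assignment
    System.out.println("Assigned: " + partitions);
  }
};

assignPartitions(consumer, newAssignment, listener);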
CommandTopic(
    final String commandTopicName,
    final Consumer<CommandId, Command> commandConsumer,
    final Producer<CommandId, Command> commandProducer
) {
  this.commandTopicPartition = new TopicPartition(commandTopicName, 0);
  this.commandConsumer = Objects.requireNonNull(commandConsumer, "commandConsumer");
  this.commandProducer = Objects.requireNonNull(commandProducer, "commandProducer");
  this.commandTopicName = Objects.requireNonNull(commandTopicName, "commandTopicName");
  commandConsumer.assign(Collections.singleton(commandTopicPartition));
}
@Override
public String getSampleMessage(final String topic) {
  String message = null;
  if (listTopics().contains(topic)) {
    try (Consumer<String, String> kafkaConsumer = kafkaConsumerFactory.createConsumer()) {
      kafkaConsumer.assign(kafkaConsumer.partitionsFor(topic).stream()
          .map(partitionInfo -> new TopicPartition(topic, partitionInfo.partition()))
          .collect(Collectors.toList()));
      kafkaConsumer.assignment().stream()
          .filter(p -> (kafkaConsumer.position(p) - 1) >= 0)
          .forEach(p -> kafkaConsumer.seek(p, kafkaConsumer.position(p) - 1));
      final ConsumerRecords<String, String> records = kafkaConsumer.poll(KAFKA_CONSUMER_TIMEOUT);
      message = records.isEmpty() ? null : records.iterator().next().value();
      kafkaConsumer.unsubscribe();
    }
  }
  return message;
}
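A hedged variant of the same idea, not taken from the method above: seeking explicitly to endOffset - 1 via the Collection-based endOffsets API. The method name, the static form, and the Duration timeout parameter are assumptions.

public static String readLastMessage(Consumer<String, String> consumer, String topic, Duration timeout) {
  List<TopicPartition> partitions = consumer.partitionsFor(topic).stream()
      .map(info -> new TopicPartition(topic, info.partition()))
      .collect(Collectors.toList());
  consumer.assign(partitions);

  // endOffsets returns, per partition, the offset of the next record to be written,
  // so the last existing record (if any) sits at endOffset - 1.
  consumer.endOffsets(partitions).forEach((tp, end) -> {
    if (end > 0) {
      consumer.seek(tp, end - 1);
    }
  });

  ConsumerRecords<String, String> records = consumer.poll(timeout);
  return records.isEmpty() ? null : records.iterator().next().value();
}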
@Override
public long getEarliestOffset(KafkaPartition partition) throws KafkaOffsetRetrievalFailureException {
  TopicPartition topicPartition = new TopicPartition(partition.getTopicName(), partition.getId());
  this.consumer.assign(Collections.singletonList(topicPartition));
  this.consumer.seekToBeginning(topicPartition);
  return this.consumer.position(topicPartition);
}
@Override
public long getLatestOffset(KafkaPartition partition) throws KafkaOffsetRetrievalFailureException {
  TopicPartition topicPartition = new TopicPartition(partition.getTopicName(), partition.getId());
  this.consumer.assign(Collections.singletonList(topicPartition));
  this.consumer.seekToEnd(topicPartition);
  return this.consumer.position(topicPartition);
}
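The two methods above rely on the varargs seekToBeginning/seekToEnd of an older client API. A minimal sketch of the same lookups with the newer Collection-based offset APIs, which avoid moving the consumer's position; the topic and partition values are illustrative.

TopicPartition topicPartition = new TopicPartition("example-topic", 0);  // hypothetical coordinates
List<TopicPartition> single = Collections.singletonList(topicPartition);

// beginningOffsets/endOffsets query the broker directly; no assign/seek is required.
long earliestOffset = consumer.beginningOffsets(single).get(topicPartition);
long latestOffset = consumer.endOffsets(single).get(topicPartition);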
@Test
public void shouldAssignCorrectPartitionToConsumer() {
  verify(commandConsumer)
      .assign(eq(Collections.singleton(new TopicPartition(COMMAND_TOPIC_NAME, 0))));
}
@Override
public Iterator<KafkaConsumerRecord> consume(KafkaPartition partition, long nextOffset, long maxOffset) {
  if (nextOffset > maxOffset) {
    return null;
  }

  this.consumer.assign(Lists.newArrayList(new TopicPartition(partition.getTopicName(), partition.getId())));
  this.consumer.seek(new TopicPartition(partition.getTopicName(), partition.getId()), nextOffset);
  ConsumerRecords<K, V> consumerRecords = consumer.poll(super.fetchTimeoutMillis);

  return Iterators.transform(consumerRecords.iterator(),
      new Function<ConsumerRecord<K, V>, KafkaConsumerRecord>() {
        @Override
        public KafkaConsumerRecord apply(ConsumerRecord<K, V> input) {
          return new Kafka09ConsumerRecord<>(input);
        }
      });
}
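A hedged sketch of draining a single partition between two offsets with repeated polls, rather than the single poll above. It presumes the surrounding class's K/V type parameters and consumer field; the helper's name and the Duration-based poll are assumptions.

private List<ConsumerRecord<K, V>> consumeRange(TopicPartition topicPartition, long nextOffset, long maxOffset) {
  consumer.assign(Collections.singletonList(topicPartition));
  consumer.seek(topicPartition, nextOffset);

  // Collect records from nextOffset up to, but not including, maxOffset.
  List<ConsumerRecord<K, V>> results = new ArrayList<>();
  while (consumer.position(topicPartition) < maxOffset) {
    ConsumerRecords<K, V> batch = consumer.poll(Duration.ofSeconds(1));
    if (batch.isEmpty()) {
      break;  // nothing more arrived within the timeout
    }
    for (ConsumerRecord<K, V> record : batch.records(topicPartition)) {
      if (record.offset() >= maxOffset) {
        return results;
      }
      results.add(record);
    }
  }
  return results;
}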
public KafkaSimpleStreamingExtractor(WorkUnitState state) {
  super(state);
  _consumer = KafkaSimpleStreamingSource.getKafkaConsumer(ConfigUtils.propertiesToConfig(state.getProperties()));
  closer.register(_consumer);
  _partition = new TopicPartition(KafkaSimpleStreamingSource.getTopicNameFromState(state),
      KafkaSimpleStreamingSource.getPartitionIdFromState(state));
  _consumer.assign(Collections.singletonList(_partition));
  this._schemaRegistry = state.contains(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_CLASS)
      ? Optional.of(KafkaSchemaRegistry.<String, S>get(state.getProperties()))
      : Optional.<KafkaSchemaRegistry<String, S>>absent();
  this.fetchTimeOut = state.getPropAsLong(AbstractBaseKafkaConsumerClient.CONFIG_KAFKA_FETCH_TIMEOUT_VALUE,
      AbstractBaseKafkaConsumerClient.CONFIG_KAFKA_FETCH_TIMEOUT_VALUE_DEFAULT);
}
public void initialize(InputSplit split, Configuration conf) throws IOException, InterruptedException {
  this.conf = conf;
  this.split = (KafkaInputSplit) split;
  brokers = this.split.getBrokers();
  topic = this.split.getTopic();
  partition = this.split.getPartition();
  watermark = this.split.getOffsetStart();

  if (conf.get(KafkaFlatTableJob.CONFIG_KAFKA_TIMEOUT) != null) {
    timeOut = Long.parseLong(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_TIMEOUT));
  }

  String consumerGroup = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_CONSUMER_GROUP);
  Properties kafkaProperties = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);
  consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup,
      kafkaProperties);

  earliestOffset = this.split.getOffsetStart();
  latestOffset = this.split.getOffsetEnd();
  TopicPartition topicPartition = new TopicPartition(topic, partition);
  consumer.assign(Arrays.asList(topicPartition));
  log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}",
      new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
}
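A minimal sketch of how a record reader like this might position the assigned consumer at the split's start before reading. The read loop, the break on an empty batch, and the String key/value types are assumptions, not part of the method above.

// Position the assigned consumer at the split's first offset; polls then advance toward latestOffset.
consumer.seek(topicPartition, earliestOffset);

while (watermark < latestOffset) {
  ConsumerRecords<String, String> records = consumer.poll(timeOut);
  if (records.isEmpty()) {
    break;  // nothing more available within the timeout
  }
  for (ConsumerRecord<String, String> record : records) {
    watermark = record.offset() + 1;
    // hand record.key()/record.value() to the caller here
  }
}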
final List<TopicPartition> topicPartitionList = Collections.singletonList(topicPartition);
consumer.assign(topicPartitionList);
assertThat(smallOffsetCommitted.get()).isFalse();
Consumer<Integer, String> consumer = cf.createConsumer();
consumer.assign(Arrays.asList(new TopicPartition(topic9, 0), new TopicPartition(topic9, 1)));
assertThat(consumer.position(new TopicPartition(topic9, 0))).isEqualTo(2);
assertThat(consumer.position(new TopicPartition(topic9, 1))).isEqualTo(2);
consumer.assign(Arrays.asList(new TopicPartition(topic8, 0), new TopicPartition(topic8, 1)));
assertThat(consumer.position(new TopicPartition(topic8, 0))).isEqualTo(2);
assertThat(consumer.position(new TopicPartition(topic8, 1))).isEqualTo(2);
consumer.assign(Arrays.asList(new TopicPartition(topic7, 0), new TopicPartition(topic7, 1)));
assertThat(consumer.position(new TopicPartition(topic7, 0))).isEqualTo(2);
assertThat(consumer.position(new TopicPartition(topic7, 1))).isEqualTo(2);
consumer.assign(Arrays.asList(new TopicPartition(topic, 0), new TopicPartition(topic, 1)));
container.stop();
Consumer<Integer, String> consumer = cf.createConsumer();
consumer.assign(Arrays.asList(new TopicPartition(topic9, 0), new TopicPartition(topic9, 1)));
assertThat(commitLatch.await(60, TimeUnit.SECONDS)).isTrue();
Consumer<Integer, String> consumer = cf.createConsumer();
consumer.assign(Arrays.asList(new TopicPartition(topic10, 0), new TopicPartition(topic10, 1)));
assertThat(consumer.position(new TopicPartition(topic10, 0))).isEqualTo(2);
assertThat(consumer.position(new TopicPartition(topic10, 1))).isEqualTo(2);
container2.stop();
Consumer<Integer, String> consumer = cf.createConsumer();
consumer.assign(Arrays.asList(new TopicPartition(topic18, 0), new TopicPartition(topic18, 1)));
assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
Consumer<Integer, String> consumer = cf.createConsumer();
consumer.assign(Arrays.asList(new TopicPartition(topic6, 0), new TopicPartition(topic6, 1)));
assertThat(consumer.position(new TopicPartition(topic6, 0))).isEqualTo(2);
assertThat(consumer.position(new TopicPartition(topic6, 1))).isEqualTo(2);
container2.stop();
Consumer<Integer, String> consumer = cf.createConsumer();
consumer.assign(Arrays.asList(new TopicPartition(topic14, 0), new TopicPartition(topic14, 1)));
assertThat(consumer.position(new TopicPartition(topic14, 0))).isEqualTo(2);
assertThat(consumer.position(new TopicPartition(topic14, 1))).isEqualTo(2);
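The test fragments above repeat the same assertion pattern. A hedged helper that captures it, assuming the same ConsumerFactory and AssertJ setup as the surrounding tests; the method name and the expected-offset parameter are illustrative.

private void assertPositions(ConsumerFactory<Integer, String> cf, String topic, long expected) {
  try (Consumer<Integer, String> consumer = cf.createConsumer()) {
    TopicPartition p0 = new TopicPartition(topic, 0);
    TopicPartition p1 = new TopicPartition(topic, 1);
    consumer.assign(Arrays.asList(p0, p1));
    // position() on a freshly assigned consumer resolves to the group's committed offset
    assertThat(consumer.position(p0)).isEqualTo(expected);
    assertThat(consumer.position(p1)).isEqualTo(expected);
  }
}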