@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema) {
    return new FlinkKafkaConsumer08<>(topic, deserializationSchema, properties);
}
}
private FlinkKafkaConsumer08(
        List<String> topics,
        Pattern subscriptionPattern,
        KeyedDeserializationSchema<T> deserializer,
        Properties props) {
    super(
        topics,
        subscriptionPattern,
        deserializer,
        getLong(
            checkNotNull(props, "props"),
            KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS,
            PARTITION_DISCOVERY_DISABLED),
        !getBoolean(props, KEY_DISABLE_METRICS, false));

    this.kafkaProperties = props;

    // validate the zookeeper properties
    validateZooKeeperConfig(props);

    // eagerly check for invalid "auto.offset.reset" values before launching the job
    validateAutoOffsetResetValue(props);
}
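This private constructor is the common endpoint of the public ones; the topic-pattern variants route through it, with partition discovery configured from the properties. A minimal usage sketch, assuming the Pattern-based public constructor of FlinkKafkaConsumer08 (available in Flink releases that support topic discovery); the addresses, group id, regex, and interval are illustrative, not taken from the snippet above:

import java.util.Properties;
import java.util.regex.Pattern;

import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

// Sketch: subscribe to every topic matching a pattern (illustrative values throughout).
Properties props = new Properties();
props.setProperty("zookeeper.connect", "localhost:2181");   // required by the 0.8 consumer
props.setProperty("group.id", "pattern-group");
props.setProperty("bootstrap.servers", "localhost:9092");
// enable periodic discovery of newly created matching topics/partitions (disabled by default)
props.setProperty(FlinkKafkaConsumerBase.KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS, "10000");

FlinkKafkaConsumer08<String> consumer = new FlinkKafkaConsumer08<>(
    Pattern.compile("events-.*"),
    new SimpleStringSchema(),
    props);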
@Override
protected List<KafkaTopicPartition> getKafkaPartitions(List<String> topics) {
    // Connect to a broker to get the partitions for all topics
    List<KafkaTopicPartition> partitionInfos =
        KafkaTopicPartition.dropLeaderData(getPartitionsForTopic(topics, kafkaProperties));

    if (partitionInfos.isEmpty()) {
        throw new RuntimeException(
            "Unable to retrieve any partitions for the requested topics " + topics +
                ". Please check previous log entries");
    }

    if (LOG.isInfoEnabled()) {
        logPartitionInfo(LOG, partitionInfos);
    }

    return partitionInfos;
}
// Excerpt from getPartitionsForTopic(...): inside the metadata loop, leader info is
// built for each partition of each requested topic...
Node leader = brokerToNode(part.leader());
KafkaTopicPartition ktp = new KafkaTopicPartition(item.topic(), part.partitionId());
KafkaTopicPartitionLeader pInfo = new KafkaTopicPartitionLeader(ktp, leader);

// ...and in the catch block, the seed broker list is validated and the failure logged
// before the next broker (or retry) is attempted:
validateSeedBrokers(seedBrokers, e);
LOG.warn("Error communicating with broker {} to find partitions for {}. {} Message: {}",
    seedBroker, topics, e.getClass().getName(), e.getMessage());
@Override
public void run() {
    try {
        result = FlinkKafkaConsumer08.getPartitionsForTopic(topics, properties);
    } catch (Throwable t) {
        this.error = t;
    }
}
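A minimal sketch of how a fetch thread like this is typically driven, so the caller can bound how long it waits on the broker; the wrapper class name PartitionInfoFetcher and its result/error fields are assumptions for illustration:

// Sketch (assumed driver code): run the lookup on its own thread and bound the wait.
PartitionInfoFetcher fetcher = new PartitionInfoFetcher(topics, properties); // hypothetical wrapper
fetcher.start();
fetcher.join(60_000); // give the broker lookup at most one minute

if (fetcher.error != null) {
    throw new RuntimeException("Failed to fetch partitions for " + topics, fetcher.error);
}
List<KafkaTopicPartitionLeader> partitions = fetcher.result;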
public void createDataStream(FlinkBenchConfig config) throws Exception {
    Properties properties = new Properties();
    properties.setProperty("zookeeper.connect", config.zkHost);
    properties.setProperty("group.id", config.consumerGroup);
    properties.setProperty("bootstrap.servers", config.brokerList);
    properties.setProperty("auto.offset.reset", config.offsetReset);

    this.dataStream = new FlinkKafkaConsumer08<Tuple2<String, String>>(
        config.topic,
        new KeyedTupleSchema(),
        properties);
}
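Note that despite its name, the dataStream field here holds the source function itself, not an attached stream. A sketch of the assumed call site that turns it into an actual DataStream; the environment setup is illustrative:

// Sketch (assumed call site): attach the stored source function to a job.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple2<String, String>> records = env.addSource(this.dataStream);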
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.8.x.
 *
 * <p>This constructor allows passing multiple topics and a key/value deserialization schema.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The keyed de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer08(List<String> topics, KeyedDeserializationSchema<T> deserializer, Properties props) {
    super(topics, deserializer);

    checkNotNull(topics, "topics");
    this.kafkaProperties = checkNotNull(props, "props");

    // validate the zookeeper properties
    validateZooKeeperConfig(props);

    // eagerly check for invalid "auto.offset.reset" values before launching the job
    validateAutoOffsetResetValue(props);
}
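A minimal usage sketch for this constructor; the topic names, group id, broker addresses, the MyEvent type, and the MyEventSchema implementation are illustrative, not part of the connector API:

import java.util.Arrays;
import java.util.Properties;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

// Sketch: consuming two topics with a custom keyed schema (all names illustrative).
Properties props = new Properties();
props.setProperty("zookeeper.connect", "localhost:2181");
props.setProperty("group.id", "example-group");
props.setProperty("bootstrap.servers", "localhost:9092");

FlinkKafkaConsumer08<MyEvent> consumer = new FlinkKafkaConsumer08<>(
    Arrays.asList("topic-a", "topic-b"),
    new MyEventSchema(), // hypothetical KeyedDeserializationSchema<MyEvent>
    props);

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<MyEvent> stream = env.addSource(consumer);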
@Override
FlinkKafkaConsumerBase<Row> getKafkaConsumer(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema) {
    return new FlinkKafkaConsumer08<>(topic, deserializationSchema, properties);
}
}
/**
 * Sets up the Kafka source.
 */
private static FlinkKafkaConsumer08<String> kafkaSource(BenchmarkConfig config) {
    return new FlinkKafkaConsumer08<>(
        config.kafkaTopic,
        new SimpleStringSchema(),
        config.getParameters().getProperties());
}
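And a sketch of wiring this helper into a job; the environment setup and operator name are illustrative:

// Sketch: attach the source produced by kafkaSource(...) to a streaming job.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<String> lines = env.addSource(kafkaSource(config)).name("Kafka08Source");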
.addSource(new FlinkKafkaConsumer08<String>(
        appArgs.getProperty(DiPConfiguration.KAFKA_TOPIC),
        new SimpleStringSchema(),
        properties))
.name("KafkaSource");