/**
 * Creates a new Kafka streaming source consumer.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics The Kafka topics to read from.
 * @param deserializer The de-/serializer used to convert between Kafka's byte messages and
 *     Flink's objects.
 * @param props The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload; this() must come first in Java.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.9.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer09(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer. Use this constructor to
 * subscribe to multiple topics based on a regular expression pattern.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and
 *     Flink's objects.
 * @param props The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// null fixed-topic list selects pattern-based subscription in the delegated constructor.
	this(null, subscriptionPattern, new KeyedDeserializationSchemaWrapper<>(valueDeserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.11.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer011(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.8.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer08(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.10.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer010(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.11.x. Use this constructor to
 * subscribe to multiple topics based on a regular expression pattern.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer011#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern
 *            The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties used to configure the Kafka consumer client, and the ZooKeeper client.
 */
@PublicEvolving
public FlinkKafkaConsumer011(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema pattern overload of this constructor.
	this(subscriptionPattern, new KeyedDeserializationSchemaWrapper<>(valueDeserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.9.x. Use this constructor to
 * subscribe to multiple topics based on a regular expression pattern.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer09#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern
 *            The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties used to configure the Kafka consumer client, and the ZooKeeper client.
 */
@PublicEvolving
public FlinkKafkaConsumer09(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema pattern overload of this constructor.
	this(subscriptionPattern, new KeyedDeserializationSchemaWrapper<>(valueDeserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.10.x. Use this constructor to
 * subscribe to multiple topics based on a regular expression pattern.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer010#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern
 *            The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties used to configure the Kafka consumer client, and the ZooKeeper client.
 */
@PublicEvolving
public FlinkKafkaConsumer010(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema pattern overload of this constructor.
	this(subscriptionPattern, new KeyedDeserializationSchemaWrapper<>(valueDeserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.8.x. Use this constructor to
 * subscribe to multiple topics based on a regular expression pattern.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer08#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern
 *            The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties used to configure the Kafka consumer client, and the ZooKeeper client.
 */
@PublicEvolving
public FlinkKafkaConsumer08(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema pattern overload of this constructor.
	this(subscriptionPattern, new KeyedDeserializationSchemaWrapper<>(valueDeserializer), props);
}
/**
 * Creates a consumer for the given topics using a value-only deserialization schema.
 *
 * <p>The schema is wrapped into the keyed-deserialization interface and the keyed
 * {@code getConsumer} overload is invoked.
 *
 * @param topics the Kafka topics to read from
 * @param deserializationSchema the de-/serializer for Kafka's byte messages
 * @param props the properties used to configure the consumer
 * @return the consumer produced by the keyed overload
 */
public <T> FlinkKafkaConsumerBase<T> getConsumer(List<String> topics, DeserializationSchema<T> deserializationSchema, Properties props) {
	KeyedDeserializationSchemaWrapper<T> keyedSchema =
			new KeyedDeserializationSchemaWrapper<>(deserializationSchema);
	return getConsumer(topics, keyedSchema, props);
}
// NOTE(review): expression fragments — the assignment targets / enclosing statements are not
// visible in this chunk. Each builds a TypeInformationSerializationSchema for resultType with a
// fresh ExecutionConfig and wraps it into a KeyedDeserializationSchemaWrapper (presumably to
// satisfy a keyed-schema parameter elsewhere — confirm against the enclosing code).
new KeyedDeserializationSchemaWrapper<>( new TypeInformationSerializationSchema<>(resultType, new ExecutionConfig()));
new KeyedDeserializationSchemaWrapper<>( new TypeInformationSerializationSchema<>(resultType, new ExecutionConfig()));
new KeyedDeserializationSchemaWrapper<>( new TypeInformationSerializationSchema<>(resultType, new ExecutionConfig()));
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.11.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer011(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.11.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer011(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.9.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer09(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.10.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer010(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.8.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer08(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Creates a new Kafka streaming source consumer for Kafka 0.10.x.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics
 *            The Kafka topics to read from.
 * @param deserializer
 *            The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props
 *            The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer010(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the value-only schema to the keyed-schema overload of this constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}