/**
 * Creates a new Kafka streaming source consumer.
 *
 * <p>This constructor allows passing multiple topics to the consumer.
 *
 * @param topics The Kafka topics to read from.
 * @param deserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties used to configure the Kafka consumer client (e.g. bootstrap servers, group id).
 */
public FlinkKafkaConsumer(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Wrap the key-less schema so the keyed-schema-based constructor can be reused.
	this(topics, new KeyedDeserializationSchemaWrapper<>(deserializer), props);
}
/**
 * Returns the type information of the elements produced by this source,
 * delegating to the wrapped deserialization schema.
 *
 * @return the {@link TypeInformation} reported by the deserializer
 */
@Override
public TypeInformation<T> getProducedType() {
	final TypeInformation<T> producedType = deserializer.getProducedType();
	return producedType;
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic. It accepts a key-less {@link SerializationSchema} and possibly a custom {@link FlinkKafkaPartitioner}.
 *
 * <p>Since a key-less {@link SerializationSchema} is used, all records sent to Kafka will not have an
 * attached key. Therefore, if a partitioner is also not provided, records will be distributed to Kafka
 * partitions in a round-robin fashion.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serializable serialization schema for turning user objects into a kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer. 'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *                          If a partitioner is not provided, records will be distributed to Kafka partitions
 *                          in a round-robin fashion.
 */
public FlinkKafkaProducer011(
		String topicId,
		SerializationSchema<IN> serializationSchema,
		Properties producerConfig,
		Optional<FlinkKafkaPartitioner<IN>> customPartitioner) {
	// Wrap the key-less schema so the keyed-schema-based constructor can be reused.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic. It accepts a key-less {@link SerializationSchema} and possibly a custom {@link FlinkKafkaPartitioner}.
 *
 * <p>Since a key-less {@link SerializationSchema} is used, all records sent to Kafka will not have an
 * attached key. Therefore, if a partitioner is also not provided, records will be distributed to Kafka
 * partitions in a round-robin fashion.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serializable serialization schema for turning user objects into a kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer. 'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *                          If a partitioner is not provided, records will be distributed to Kafka partitions
 *                          in a round-robin fashion.
 */
public FlinkKafkaProducer(
		String topicId,
		SerializationSchema<IN> serializationSchema,
		Properties producerConfig,
		Optional<FlinkKafkaPartitioner<IN>> customPartitioner) {
	// Wrap the key-less schema so the keyed-schema-based constructor can be reused.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a new Kafka 0.9.x streaming source consumer that subscribes to
 * several topics at once.
 *
 * @param topics The Kafka topics to read from.
 * @param deserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer09(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the key-less schema to the keyed variant expected by the main constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<T>(deserializer), props);
}
/**
 * Creates a producer for the given topic that attaches no keys to records
 * and uses no custom partitioner.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serialization schema for turning user objects into a kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer
 *
 * @deprecated Use {@link FlinkKafkaProducer08#FlinkKafkaProducer08(String, SerializationSchema, Properties)}
 */
@Deprecated
public FlinkKafkaProducer(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig) {
	super(
		topicId,
		new KeyedSerializationSchemaWrapper<IN>(serializationSchema),
		producerConfig,
		(FlinkKafkaPartitioner<IN>) null);
}
/**
 * Creates a new Kafka streaming source consumer. Use this constructor to
 * subscribe to multiple topics based on a regular expression pattern.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties used to configure the Kafka consumer client (e.g. bootstrap servers, group id).
 */
public FlinkKafkaConsumer(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// No fixed topic list: null topics plus the pattern selects pattern-based subscription.
	this(null, subscriptionPattern, new KeyedDeserializationSchemaWrapper<>(valueDeserializer), props);
}
/**
 * Creates a producer for the given topic with a key-less serialization schema and a
 * custom partitioner.
 *
 * <p>NOTE(review): {@code customPartitioner} is declared as a raw {@code KafkaPartitioner};
 * presumably kept raw for source compatibility of this deprecated constructor — confirm
 * before parameterizing.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serialization schema for turning user objects into a kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer
 * @param customPartitioner A partitioner for assigning messages to Kafka partitions
 *
 * @deprecated Use {@link FlinkKafkaProducer08#FlinkKafkaProducer08(String, SerializationSchema, Properties, KafkaPartitioner)}
 */
@Deprecated
public FlinkKafkaProducer(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner customPartitioner) {
	super(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a new Kafka 0.11.x streaming source consumer that subscribes to
 * several topics at once.
 *
 * @param topics The Kafka topics to read from.
 * @param deserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer011(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the key-less schema to the keyed variant expected by the main constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<T>(deserializer), props);
}
/**
 * Builds the version-specific Kafka producer sink used by this table sink.
 *
 * @param topic the topic records are written to
 * @param properties configuration for the Kafka producer client
 * @param serializationSchema key-less schema turning rows into bytes
 * @param partitioner optional custom partitioner for assigning rows to partitions
 * @return a {@link FlinkKafkaProducer} sink for {@link Row} records
 */
@Override
protected SinkFunction<Row> createKafkaProducer(
		String topic,
		Properties properties,
		SerializationSchema<Row> serializationSchema,
		Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// Wrap the key-less schema; records are produced without keys.
	final KeyedSerializationSchemaWrapper<Row> keyedSchema =
		new KeyedSerializationSchemaWrapper<>(serializationSchema);
	return new FlinkKafkaProducer<>(topic, keyedSchema, properties, partitioner);
}
}
/**
 * Creates a new Kafka 0.8.x streaming source consumer that subscribes to
 * several topics at once.
 *
 * @param topics The Kafka topics to read from.
 * @param deserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer08(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the key-less schema to the keyed variant expected by the main constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<T>(deserializer), props);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A (keyless) serializable serialization schema for turning user objects into a kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer. 'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions (when passing null, we'll use Kafka's partitioner)
 *
 * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
 *             producing to multiple topics. Use
 *             {@link #FlinkKafkaProducer09(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 */
@Deprecated
public FlinkKafkaProducer09(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
	// Wrap the key-less schema so the keyed-schema-based constructor can be reused.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a new Kafka 0.10.x streaming source consumer that subscribes to
 * several topics at once.
 *
 * @param topics The Kafka topics to read from.
 * @param deserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties that are used to configure both the fetcher and the offset handler.
 */
public FlinkKafkaConsumer010(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
	// Adapt the key-less schema to the keyed variant expected by the main constructor.
	this(topics, new KeyedDeserializationSchemaWrapper<T>(deserializer), props);
}
/**
 * The main constructor for creating a FlinkKafkaProducer.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A (keyless) serializable serialization schema for turning user objects into a kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer. 'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *
 * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
 *             producing to multiple topics. Use
 *             {@link #FlinkKafkaProducer08(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 */
@Deprecated
public FlinkKafkaProducer08(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
	// Wrap the key-less schema so the keyed-schema-based constructor can be reused.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a new Kafka 0.9.x streaming source consumer whose subscription is
 * driven by a regular expression over topic names.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer09#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties used to configure the Kafka consumer client, and the ZooKeeper client.
 */
@PublicEvolving
public FlinkKafkaConsumer09(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// Adapt the key-less schema to the keyed variant expected by the main constructor.
	this(subscriptionPattern, new KeyedDeserializationSchemaWrapper<T>(valueDeserializer), props);
}
/**
 * Builds the version-specific Kafka producer sink used by this table sink.
 *
 * @param topic the topic records are written to
 * @param properties configuration for the Kafka producer client
 * @param serializationSchema key-less schema turning rows into bytes
 * @param partitioner optional custom partitioner for assigning rows to partitions
 * @return a {@link FlinkKafkaProducer011} sink for {@link Row} records
 */
@Override
protected SinkFunction<Row> createKafkaProducer(
		String topic,
		Properties properties,
		SerializationSchema<Row> serializationSchema,
		Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// Wrap the key-less schema; records are produced without keys.
	final KeyedSerializationSchemaWrapper<Row> keyedSchema =
		new KeyedSerializationSchemaWrapper<>(serializationSchema);
	return new FlinkKafkaProducer011<>(topic, keyedSchema, properties, partitioner);
}
}
/**
 * Creates a new Kafka 0.10.x streaming source consumer whose subscription is
 * driven by a regular expression over topic names.
 *
 * <p>If partition discovery is enabled (by setting a non-negative value for
 * {@link FlinkKafkaConsumer010#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
 * with names matching the pattern will also be subscribed to as they are created on the fly.
 *
 * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe to.
 * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
 * @param props The properties used to configure the Kafka consumer client, and the ZooKeeper client.
 */
@PublicEvolving
public FlinkKafkaConsumer010(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
	// Adapt the key-less schema to the keyed variant expected by the main constructor.
	this(subscriptionPattern, new KeyedDeserializationSchemaWrapper<T>(valueDeserializer), props);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A (keyless) serializable serialization schema for turning user objects into a kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer. 'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions (when passing null, we'll use Kafka's partitioner)
 *
 * @deprecated This constructor is deprecated since it does not correctly handle partitioning when
 *             producing to multiple topics. Use
 *             {@link FlinkKafkaProducer010#FlinkKafkaProducer010(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 */
@Deprecated
public FlinkKafkaProducer010(String topicId, SerializationSchema<T> serializationSchema, Properties producerConfig, KafkaPartitioner<T> customPartitioner) {
	// Wrap the key-less schema so the keyed-schema-based constructor can be reused.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a consumer for the given topics using a key-less deserialization schema,
 * adapting it to the keyed-schema overload.
 *
 * @param topics the Kafka topics to read from
 * @param deserializationSchema key-less schema converting Kafka bytes into objects
 * @param props configuration for the Kafka consumer client
 * @return the version-specific consumer instance
 */
public <T> FlinkKafkaConsumerBase<T> getConsumer(List<String> topics, DeserializationSchema<T> deserializationSchema, Properties props) {
	final KeyedDeserializationSchemaWrapper<T> keyedSchema =
		new KeyedDeserializationSchemaWrapper<>(deserializationSchema);
	return getConsumer(topics, keyedSchema, props);
}
/**
 * Creates a producer from a broker list and topic, attaching no keys to records
 * and using no custom partitioner.
 *
 * @param brokerList Comma-separated list of Kafka brokers, converted to producer properties
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serialization schema for turning user objects into a kafka-consumable byte[]
 *
 * @deprecated Use {@link FlinkKafkaProducer08#FlinkKafkaProducer08(String, String, SerializationSchema)}
 */
@Deprecated
public FlinkKafkaProducer(String brokerList, String topicId, SerializationSchema<IN> serializationSchema) {
	super(
		topicId,
		new KeyedSerializationSchemaWrapper<IN>(serializationSchema),
		getPropertiesFromBrokerList(brokerList),
		(FlinkKafkaPartitioner<IN>) null);
}