/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic. It accepts a key-less {@link SerializationSchema} and possibly a custom
 * {@link FlinkKafkaPartitioner}.
 *
 * <p>Since a key-less {@link SerializationSchema} is used, all records sent to Kafka will not have an
 * attached key. Therefore, if a partitioner is also not provided, records will be distributed to Kafka
 * partitions in a round-robin fashion.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *     If a partitioner is not provided, records will be distributed to Kafka partitions
 *     in a round-robin fashion.
 */
public FlinkKafkaProducer011(
		String topicId,
		SerializationSchema<IN> serializationSchema,
		Properties producerConfig,
		Optional<FlinkKafkaPartitioner<IN>> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the main constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a producer for the given topic from a key-less serialization schema.
 *
 * @deprecated Use {@link FlinkKafkaProducer08#FlinkKafkaProducer08(String, SerializationSchema, Properties)}
 */
@Deprecated
public FlinkKafkaProducer(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig) {
	// The cast on null selects the FlinkKafkaPartitioner overload of the super
	// constructor; the resulting partitioning behavior is defined by the super class.
	super(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, (FlinkKafkaPartitioner<IN>) null);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic. It accepts a key-less {@link SerializationSchema} and possibly a custom
 * {@link FlinkKafkaPartitioner}.
 *
 * <p>Since a key-less {@link SerializationSchema} is used, all records sent to Kafka will not have an
 * attached key. Therefore, if a partitioner is also not provided, records will be distributed to Kafka
 * partitions in a round-robin fashion.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *     If a partitioner is not provided, records will be distributed to Kafka partitions
 *     in a round-robin fashion.
 */
public FlinkKafkaProducer(
		String topicId,
		SerializationSchema<IN> serializationSchema,
		Properties producerConfig,
		Optional<FlinkKafkaPartitioner<IN>> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the main constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a producer with a custom partitioner from a key-less serialization schema.
 *
 * @deprecated Use {@link FlinkKafkaProducer08#FlinkKafkaProducer08(String, SerializationSchema, Properties, KafkaPartitioner)}
 */
@Deprecated
// NOTE(review): KafkaPartitioner is used as a raw type here; the signature is kept
// as-is because this constructor is deprecated and callers may rely on it.
public FlinkKafkaProducer(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the super constructor expects.
	super(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates the version-specific Kafka sink used by this table sink.
 *
 * @param topic Kafka topic to write data to
 * @param properties configuration properties for the Kafka producer
 * @param serializationSchema key-less schema serializing each {@link Row} into bytes
 * @param partitioner optional custom partitioner; the behavior when absent is
 *     determined by {@link FlinkKafkaProducer} — confirm against its documentation
 * @return a {@link FlinkKafkaProducer} sink for {@code topic}
 */
@Override
protected SinkFunction<Row> createKafkaProducer(
		String topic,
		Properties properties,
		SerializationSchema<Row> serializationSchema,
		Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// Wrap the key-less schema in the keyed-schema adapter the producer constructor expects.
	final KeyedSerializationSchemaWrapper<Row> keyedSchema =
			new KeyedSerializationSchemaWrapper<>(serializationSchema);
	return new FlinkKafkaProducer<>(topic, keyedSchema, properties, partitioner);
}
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A (keyless) serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions
 *     (when passing null, we'll use Kafka's partitioner)
 *
 * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
 *             producing to multiple topics. Use
 *             {@link #FlinkKafkaProducer09(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 */
@Deprecated
public FlinkKafkaProducer09(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the delegated constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * The main constructor for creating a FlinkKafkaProducer.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A (keyless) serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *
 * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
 *             producing to multiple topics. Use
 *             {@link #FlinkKafkaProducer08(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 */
@Deprecated
public FlinkKafkaProducer08(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the delegated constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates the version-specific Kafka sink used by this table sink.
 *
 * @param topic Kafka topic to write data to
 * @param properties configuration properties for the Kafka producer
 * @param serializationSchema key-less schema serializing each {@link Row} into bytes
 * @param partitioner optional custom partitioner; the behavior when absent is
 *     determined by {@link FlinkKafkaProducer011} — confirm against its documentation
 * @return a {@link FlinkKafkaProducer011} sink for {@code topic}
 */
@Override
protected SinkFunction<Row> createKafkaProducer(
		String topic,
		Properties properties,
		SerializationSchema<Row> serializationSchema,
		Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// Wrap the key-less schema in the keyed-schema adapter the producer constructor expects.
	final KeyedSerializationSchemaWrapper<Row> keyedSchema =
			new KeyedSerializationSchemaWrapper<>(serializationSchema);
	return new FlinkKafkaProducer011<>(topic, keyedSchema, properties, partitioner);
}
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A (keyless) serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions
 *     (when passing null, we'll use Kafka's partitioner)
 *
 * @deprecated This constructor is deprecated since it does not correctly handle partitioning when
 *             producing to multiple topics. Use
 *             {@link FlinkKafkaProducer010#FlinkKafkaProducer010(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 */
@Deprecated
public FlinkKafkaProducer010(String topicId, SerializationSchema<T> serializationSchema, Properties producerConfig, KafkaPartitioner<T> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the delegated constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic. It accepts a key-less {@link SerializationSchema} and possibly a custom
 * {@link FlinkKafkaPartitioner}.
 *
 * <p>Since a key-less {@link SerializationSchema} is used, all records sent to Kafka will not have an
 * attached key. Therefore, if a partitioner is also not provided, records will be distributed to Kafka
 * partitions in a round-robin fashion.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *     If set to {@code null}, records will be distributed to Kafka partitions
 *     in a round-robin fashion.
 */
public FlinkKafkaProducer010(
		String topicId,
		SerializationSchema<T> serializationSchema,
		Properties producerConfig,
		@Nullable FlinkKafkaPartitioner<T> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the delegated constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic. It accepts a key-less {@link SerializationSchema} and possibly a custom
 * {@link FlinkKafkaPartitioner}.
 *
 * <p>Since a key-less {@link SerializationSchema} is used, all records sent to Kafka will not have an
 * attached key. Therefore, if a partitioner is also not provided, records will be distributed to Kafka
 * partitions in a round-robin fashion.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *     If set to {@code null}, records will be distributed to Kafka partitions
 *     in a round-robin fashion.
 */
public FlinkKafkaProducer09(
		String topicId,
		SerializationSchema<IN> serializationSchema,
		Properties producerConfig,
		@Nullable FlinkKafkaPartitioner<IN> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the delegated constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic. It accepts a key-less {@link SerializationSchema} and possibly a custom
 * {@link FlinkKafkaPartitioner}.
 *
 * <p>Since a key-less {@link SerializationSchema} is used, all records sent to Kafka will not have an
 * attached key. Therefore, if a partitioner is also not provided, records will be distributed to Kafka
 * partitions in a round-robin fashion.
 *
 * @param topicId The topic to write data to
 * @param serializationSchema A key-less serializable serialization schema for turning user objects
 *     into a Kafka-consumable byte[]
 * @param producerConfig Configuration properties for the KafkaProducer.
 *     'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions.
 *     If set to {@code null}, records will be distributed to Kafka partitions
 *     in a round-robin fashion.
 */
public FlinkKafkaProducer08(
		String topicId,
		SerializationSchema<IN> serializationSchema,
		Properties producerConfig,
		@Nullable FlinkKafkaPartitioner<IN> customPartitioner) {
	// Adapt the key-less schema to the keyed-schema API the delegated constructor expects.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, customPartitioner);
}
/**
 * Creates a producer for the given topic from a broker list and a key-less serialization schema.
 *
 * @deprecated Use {@link FlinkKafkaProducer08#FlinkKafkaProducer08(String, String, SerializationSchema)}
 */
@Deprecated
public FlinkKafkaProducer(String brokerList, String topicId, SerializationSchema<IN> serializationSchema) {
	// The cast on null selects the FlinkKafkaPartitioner overload of the super
	// constructor; the resulting partitioning behavior is defined by the super class.
	super(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), getPropertiesFromBrokerList(brokerList), (FlinkKafkaPartitioner<IN>) null);
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * <p>Using this constructor, the default {@link FlinkFixedPartitioner} will be used as
 * the partitioner. This default partitioner maps each sink subtask to a single Kafka
 * partition (i.e. all records received by a sink subtask will end up in the same
 * Kafka partition).
 *
 * <p>To use a custom partitioner, please use
 * {@link #FlinkKafkaProducer09(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 *
 * @param topicId
 * 			ID of the Kafka topic.
 * @param serializationSchema
 * 			User defined key-less serialization schema.
 * @param producerConfig
 * 			Properties with the producer configuration.
 */
public FlinkKafkaProducer09(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig) {
	// Delegates with the fixed subtask-to-partition default partitioner.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, new FlinkFixedPartitioner<IN>());
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * <p>Using this constructor, the default {@link FlinkFixedPartitioner} will be used as
 * the partitioner. This default partitioner maps each sink subtask to a single Kafka
 * partition (i.e. all records received by a sink subtask will end up in the same
 * Kafka partition).
 *
 * <p>To use a custom partitioner, please use
 * {@link #FlinkKafkaProducer010(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 *
 * @param topicId
 * 			ID of the Kafka topic.
 * @param serializationSchema
 * 			User defined key-less serialization schema.
 * @param producerConfig
 * 			Properties with the producer configuration.
 */
public FlinkKafkaProducer010(String topicId, SerializationSchema<T> serializationSchema, Properties producerConfig) {
	// Delegates with the fixed subtask-to-partition default partitioner.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, new FlinkFixedPartitioner<T>());
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces its input to
 * the topic.
 *
 * <p>Using this constructor, the default {@link FlinkFixedPartitioner} will be used as
 * the partitioner. This default partitioner maps each sink subtask to a single Kafka
 * partition (i.e. all records received by a sink subtask will end up in the same
 * Kafka partition).
 *
 * <p>To use a custom partitioner, please use
 * {@link #FlinkKafkaProducer08(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
 *
 * @param topicId
 * 			ID of the Kafka topic.
 * @param serializationSchema
 * 			User defined key-less serialization schema.
 * @param producerConfig
 * 			Properties with the producer configuration.
 */
public FlinkKafkaProducer08(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig) {
	// Delegates with the fixed subtask-to-partition default partitioner.
	this(topicId, new KeyedSerializationSchemaWrapper<>(serializationSchema), producerConfig, new FlinkFixedPartitioner<IN>());
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * <p>Using this constructor, the default {@link FlinkFixedPartitioner} will be used as
 * the partitioner. This default partitioner maps each sink subtask to a single Kafka
 * partition (i.e. all records received by a sink subtask will end up in the same
 * Kafka partition).
 *
 * <p>To use a custom partitioner, please use
 * {@link #FlinkKafkaProducer(String, SerializationSchema, Properties, Optional)} instead.
 *
 * @param topicId
 * 			ID of the Kafka topic.
 * @param serializationSchema
 * 			User defined key-less serialization schema.
 * @param producerConfig
 * 			Properties with the producer configuration.
 */
public FlinkKafkaProducer(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig) {
	// Delegates with the fixed subtask-to-partition default partitioner.
	this(
		topicId,
		new KeyedSerializationSchemaWrapper<>(serializationSchema),
		producerConfig,
		Optional.of(new FlinkFixedPartitioner<IN>()));
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * <p>Using this constructor, the default {@link FlinkFixedPartitioner} will be used as
 * the partitioner. This default partitioner maps each sink subtask to a single Kafka
 * partition (i.e. all records received by a sink subtask will end up in the same
 * Kafka partition).
 *
 * <p>To use a custom partitioner, please use
 * {@link #FlinkKafkaProducer011(String, SerializationSchema, Properties, Optional)} instead.
 *
 * @param topicId
 * 			ID of the Kafka topic.
 * @param serializationSchema
 * 			User defined key-less serialization schema.
 * @param producerConfig
 * 			Properties with the producer configuration.
 */
public FlinkKafkaProducer011(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig) {
	// Delegates with the fixed subtask-to-partition default partitioner.
	this(
		topicId,
		new KeyedSerializationSchemaWrapper<>(serializationSchema),
		producerConfig,
		Optional.of(new FlinkFixedPartitioner<IN>()));
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * @param brokerList
 * 			Comma separated addresses of the brokers
 * @param topicId
 * 			ID of the Kafka topic.
 * @param serializationSchema
 * 			User defined (keyless) serialization schema.
 */
public FlinkKafkaProducer(String brokerList, String topicId, SerializationSchema<IN> serializationSchema) {
	// Builds producer Properties from the broker list and delegates with the
	// fixed subtask-to-partition default partitioner.
	this(
		topicId,
		new KeyedSerializationSchemaWrapper<>(serializationSchema),
		getPropertiesFromBrokerList(brokerList),
		Optional.of(new FlinkFixedPartitioner<IN>()));
}
/**
 * Creates a FlinkKafkaProducer for a given topic. The sink produces a DataStream to
 * the topic.
 *
 * @param brokerList
 * 			Comma separated addresses of the brokers
 * @param topicId
 * 			ID of the Kafka topic.
 * @param serializationSchema
 * 			User defined (keyless) serialization schema.
 */
public FlinkKafkaProducer011(String brokerList, String topicId, SerializationSchema<IN> serializationSchema) {
	// Builds producer Properties from the broker list and delegates with the
	// fixed subtask-to-partition default partitioner.
	this(
		topicId,
		new KeyedSerializationSchemaWrapper<>(serializationSchema),
		getPropertiesFromBrokerList(brokerList),
		Optional.of(new FlinkFixedPartitioner<IN>()));
}