/**
 * Asynchronously sends a record without a completion callback.
 * Equivalent to {@code send(record, null)}; see
 * {@link #send(ProducerRecord, Callback)} for the full contract.
 *
 * @param record the record to publish
 * @return a future that resolves to the acknowledged record's metadata
 */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
    // Funnel everything through the two-argument overload with no callback.
    return this.send(record, null);
}
/**
 * Asynchronously sends {@code record}, invoking {@code callback} when the
 * broker acknowledges (or the send fails). Pure delegation to the wrapped
 * {@code kafkaProducer}; no additional behavior is added here.
 */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback) {
    final Future<RecordMetadata> pending = kafkaProducer.send(record, callback);
    return pending;
}
/**
 * Forwards the record and completion callback straight to the underlying
 * Kafka producer. This wrapper adds no behavior of its own.
 */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback) {
    // Delegate without touching record or callback.
    final Future<RecordMetadata> ack = kafkaProducer.send(record, callback);
    return ack;
}
/**
 * Asynchronously sends {@code record} via the wrapped Kafka producer,
 * with no completion callback.
 *
 * @param record the record to publish
 * @return a future resolving to the record's metadata
 */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
    final Future<RecordMetadata> pending = kafkaProducer.send(record);
    return pending;
}
/**
 * Delegates the single-argument send to the underlying producer.
 * Callers that need per-record completion handling should use the
 * callback-accepting overload instead.
 */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
    // Thin pass-through; the wrapped producer owns batching and retries.
    final Future<RecordMetadata> result = kafkaProducer.send(record);
    return result;
}
/**
 * Sends {@code record} asynchronously; {@code callback} fires on completion.
 * Straight delegation to the wrapped {@code kafkaProducer}.
 */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback) {
    final Future<RecordMetadata> outcome = kafkaProducer.send(record, callback);
    return outcome;
}
/**
 * Asynchronously publishes {@code record} through the wrapped producer.
 *
 * @param record the record to send
 * @return a future that completes with the broker-assigned metadata
 */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
    // No callback variant: the caller inspects the returned future instead.
    final Future<RecordMetadata> future = kafkaProducer.send(record);
    return future;
}
/**
 * Serializes the event and hands it to the producer on the configured
 * topic and partition.
 */
@Override
public void collect(Event evt) {
    // Key is null: the partition is set explicitly, so key-based
    // partitioning is not needed.
    producer.send(new ProducerRecord<>(topic, partition, null, serializer.serialize(evt)));
}
/**
 * Pushes all byte array messages to the Kafka topic.
 * Sends are asynchronous; a failed send is logged but not rethrown,
 * so callers are not notified of individual failures.
 *
 * @param messages List of byte array messages to push to Kafka.
 */
public void pushMessages(List<byte[]> messages) { for (byte[] message: messages) { producer.send(new ProducerRecord<>(topic, message), (recordMetadata, e) -> { if (e != null) { log.error("Failed to send message to topic {} due to exception: ", topic, e); } }); } }
/**
 * Pushes all keyed messages to the Kafka topic.
 * Sends are asynchronous; a failed send is logged but not rethrown,
 * so callers are not notified of individual failures.
 *
 * @param messages List of keyed messages to push to Kafka.
 */
public void pushMessages(List<Pair<K, V>> messages) { for (Pair<K, V> message: messages) { this.producer.send(new ProducerRecord<>(topic, message.getKey(), message.getValue()), (recordMetadata, e) -> { if (e != null) { log.error("Failed to send message to topic {} due to exception: ", topic, e); } }); } }
/**
 * Sends the record and blocks until the broker acknowledges it,
 * waiting at most 10 seconds.
 *
 * @param producerRecord the record to publish
 * @throws InterruptedException if the waiting thread is interrupted
 * @throws ExecutionException  if the send itself fails
 * @throws TimeoutException    if no acknowledgement arrives within 10 seconds
 */
public void sendMessage(ProducerRecord<String, String> producerRecord)
        throws InterruptedException, ExecutionException, TimeoutException {
    final Future<RecordMetadata> ack = producer.send(producerRecord);
    // Block with a bounded wait; the metadata itself is discarded.
    ack.get(10, TimeUnit.SECONDS);
}
/**
 * Publishes the payload of {@code kafkaRecord} to its topic,
 * logging the outcome through a {@link LoggingCallback}.
 */
private void send(final KafkaRecord kafkaRecord) {
    kafkaProducer.send(
            new ProducerRecord<>(kafkaRecord.getKafkaTopic(), kafkaRecord.getKafkaData()),
            new LoggingCallback());
}
/**
 * Test double: wires a mocked {@link KafkaProducer} whose {@code send} never
 * completes on its own — each invocation merely parks its {@link Callback} in
 * {@code pendingCallbacks} so the test can complete them at a chosen moment.
 * Note the stubbed Answer reads the {@code pendingCallbacks} field at send
 * time, so assigning the list after stubbing is safe.
 */
@SuppressWarnings("unchecked") DummyFlinkKafkaProducer(Properties producerConfig, KeyedSerializationSchema<T> schema, FlinkKafkaPartitioner partitioner) { super(DUMMY_TOPIC, schema, producerConfig, partitioner); this.mockProducer = mock(KafkaProducer.class); when(mockProducer.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(new Answer<Object>() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { pendingCallbacks.add(invocationOnMock.getArgument(1)); return null; } }); this.pendingCallbacks = new ArrayList<>(); this.flushLatch = new MultiShotLatch(); }
/**
 * Converts the framework-neutral record into a Kafka {@link ProducerRecord}
 * and sends it.
 *
 * @param baseRecord the record to translate and publish
 * @param sync       when {@code true}, block until the broker acknowledges
 * @return the acknowledged metadata in sync mode; {@code null} in async mode
 * @throws Exception if the synchronous wait fails
 */
@Override
public RecordMetadata send(BaseProducerRecord baseRecord, boolean sync) throws Exception {
    final ProducerRecord<String, String> record = new ProducerRecord<>(
            baseRecord.topic(), baseRecord.partition(), baseRecord.key(), baseRecord.value());
    final Future<RecordMetadata> future = _producer.send(record);
    if (sync) {
        return future.get();
    }
    // Fire-and-forget: the caller gets no handle on the outcome.
    return null;
}
/**
 * Sends the record and flushes immediately so it reaches the broker before
 * this call returns; returns {@code this} for fluent chaining.
 */
@Override
public InteractiveProducer<K, V> write(ProducerRecord<K, V> record) {
    producer.send(record);
    // Flush per record: interactive use trades throughput for promptness.
    producer.flush();
    return this;
}
/**
 * Publishes the record and forces an immediate flush, then returns
 * {@code this} so calls can be chained fluently.
 */
@Override
public InteractiveProducer<K, V> write(ProducerRecord<K, V> record) {
    kafkaProducer.send(record);
    // Eager flush keeps interactive sessions responsive at some throughput cost.
    kafkaProducer.flush();
    return this;
}
/**
 * Verifies that a producer configured with invalid SASL credentials fails:
 * the blocking send must throw, and the thrown exception's cause must be a
 * {@link SaslAuthenticationException}.
 */
@Test
public void testProducerWithInvalidCredentials() {
    Map<String, Object> props = new HashMap<>(saslClientConfigs);
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + server.port());
    StringSerializer serializer = new StringSerializer();
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props, serializer, serializer)) {
        // .get() forces the async send to surface the authentication failure.
        producer.send(new ProducerRecord<>(topic, "message")).get();
        fail("Expected an authentication error!");
    } catch (Exception e) {
        // fail() above throws AssertionError (not Exception), so it is not caught here.
        assertTrue("Expected SaslAuthenticationException, got " + e.getCause().getClass(),
                e.getCause() instanceof SaslAuthenticationException);
    }
}
/**
 * Sends one record to Kafka. Any asynchronous failure recorded by the shared
 * callback is surfaced first via {@code checkExceptions()}; a synchronous
 * {@link KafkaException} from {@code send} is routed through
 * {@code handleKafkaException} and then rechecked. Note {@code sentRecords}
 * is incremented before the send, so it counts attempts, not confirmed sends.
 */
@Override public void write(Writable w) throws IOException { checkExceptions(); try { sentRecords++; producer.send(KafkaUtils.toProducerRecord(topic, (KafkaWritable) w), callback); } catch (KafkaException kafkaException) { handleKafkaException(kafkaException); checkExceptions(); } }
/**
 * Regression test: with MAX_REQUEST_SIZE_CONFIG set to 1 byte, every record is
 * "too large" and send() must fail synchronously. The test verifies the
 * interceptor chain still sees both hooks — onSend with the original record
 * and onSendError with a non-null exception — even though the record never
 * reaches the accumulator. Metadata is pre-populated so the failure comes from
 * the size check, not from a missing-metadata wait.
 */
@Test public void testInterceptorPartitionSetOnTooLargeRecord() { Map<String, Object> configs = new HashMap<>(); configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); configs.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "1"); String topic = "topic"; ProducerRecord<String, String> record = new ProducerRecord<>(topic, "value"); Metadata metadata = new Metadata(0, 90000, true); MetadataResponse initialUpdateResponse = TestUtils.metadataUpdateWith(1, singletonMap(topic, 1)); metadata.update(initialUpdateResponse, Time.SYSTEM.milliseconds()); @SuppressWarnings("unchecked") // it is safe to suppress, since this is a mock class ProducerInterceptors<String, String> interceptors = mock(ProducerInterceptors.class); KafkaProducer<String, String> producer = new KafkaProducer<>(configs, new StringSerializer(), new StringSerializer(), metadata, null, interceptors, Time.SYSTEM); when(interceptors.onSend(any())).then(invocation -> invocation.getArgument(0)); producer.send(record); verify(interceptors).onSend(record); verify(interceptors).onSendError(eq(record), notNull(), notNull()); producer.close(Duration.ofMillis(0)); }
/**
 * This test is meant to assure that testAtLeastOnceProducer is valid by testing that if flushing is disabled,
 * the snapshot method does indeed finish without waiting for pending records;
 * we set a timeout because the test will not finish if the logic is broken.
 */
@SuppressWarnings("unchecked") @Test(timeout = 5000) public void testDoesNotWaitForPendingRecordsIfFlushingDisabled() throws Throwable { final DummyFlinkKafkaProducer<String> producer = new DummyFlinkKafkaProducer<>( FakeStandardProducerConfig.get(), new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()), null); producer.setFlushOnCheckpoint(false); final KafkaProducer<?, ?> mockProducer = producer.getMockKafkaProducer(); final OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(producer)); testHarness.open(); testHarness.processElement(new StreamRecord<>("msg")); // make sure that all callbacks have not been completed verify(mockProducer, times(1)).send(any(ProducerRecord.class), any(Callback.class)); // should return even if there are pending records testHarness.snapshot(123L, 123L); testHarness.close(); }