/**
 * Creates a record with a specified timestamp to be sent to a specified topic and partition.
 *
 * @param topic The topic the record will be appended to
 * @param partition The partition to which the record should be sent
 * @param timestamp The timestamp of the record, in milliseconds since epoch. If null, the producer will assign
 *                  the timestamp using System.currentTimeMillis().
 * @param key The key that will be included in the record
 * @param value The record contents
 * @param headers the headers that will be included in the record
 */
public ProducerRecord(String topic, Integer partition, Long timestamp, K key, V value, Iterable<Header> headers) {
    // Validate arguments up front so an invalid record never reaches the accumulator.
    if (topic == null) {
        throw new IllegalArgumentException("Topic cannot be null.");
    }
    if (timestamp != null && timestamp < 0) {
        throw new IllegalArgumentException(
                String.format("Invalid timestamp: %d. Timestamp should always be non-negative or null.", timestamp));
    }
    if (partition != null && partition < 0) {
        throw new IllegalArgumentException(
                String.format("Invalid partition: %d. Partition number should always be non-negative or null.", partition));
    }
    this.topic = topic;
    this.partition = partition;
    this.key = key;
    this.value = value;
    this.timestamp = timestamp;
    // Defensive copy: the caller's Iterable is snapshotted into our own header container.
    this.headers = new RecordHeaders(headers);
}
V value) { this(topic, partition, offset, timestamp, timestampType, checksum, serializedKeySize, serializedValueSize, key, value, new RecordHeaders());
@Test
@SuppressWarnings("deprecation")
public void testNullChecksumInConstructor() {
    long ts = 242341324L;
    String k = "key";
    String v = "value";
    ConsumerRecord<String, String> rec = new ConsumerRecord<>("topic", 0, 23L, ts,
            TimestampType.CREATE_TIME, null, k.length(), v.length(), k, v, new RecordHeaders());
    // When the checksum passed to the constructor is null, the record falls back to
    // the partial checksum computed from timestamp and serialized sizes.
    assertEquals(DefaultRecord.computePartialChecksum(ts, k.length(), v.length()), rec.checksum());
}
@Test public void testHasRoomForMethodWithHeaders() { if (magic >= RecordBatch.MAGIC_VALUE_V2) { MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(100), magic, compression, TimestampType.CREATE_TIME, 0L); RecordHeaders headers = new RecordHeaders(); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); builder.append(logAppendTime, "key".getBytes(), "value".getBytes()); // Make sure that hasRoomFor accounts for header sizes by letting a record without headers pass, but stopping // a record with a large number of headers. assertTrue(builder.hasRoomFor(logAppendTime, "key".getBytes(), "value".getBytes(), Record.EMPTY_HEADERS)); assertFalse(builder.hasRoomFor(logAppendTime, "key".getBytes(), "value".getBytes(), headers.toArray())); } }
@Test
public void testRemove() {
    Headers hs = new RecordHeaders();
    hs.add(new RecordHeader("key", "value".getBytes()));
    // The header is visible before removal...
    assertTrue(hs.iterator().hasNext());
    hs.remove("key");
    // ...and the collection is empty afterwards.
    assertFalse(hs.iterator().hasNext());
}
// Verifies that headers(key) returns only the headers with the matching key,
// preserving insertion order, and never leaks entries for other keys.
// NOTE: dropped the dead `throws IOException` — nothing in this body performs I/O
// or throws a checked exception.
@Test
public void testHeaders() {
    RecordHeaders headers = new RecordHeaders();
    headers.add(new RecordHeader("key", "value".getBytes()));
    headers.add(new RecordHeader("key1", "key1value".getBytes()));
    headers.add(new RecordHeader("key", "value2".getBytes()));
    headers.add(new RecordHeader("key2", "key2value".getBytes()));
    // Duplicate keys are allowed and must come back in insertion order.
    Iterator<Header> keyHeaders = headers.headers("key").iterator();
    assertHeader("key", "value", keyHeaders.next());
    assertHeader("key", "value2", keyHeaders.next());
    assertFalse(keyHeaders.hasNext());
    keyHeaders = headers.headers("key1").iterator();
    assertHeader("key1", "key1value", keyHeaders.next());
    assertFalse(keyHeaders.hasNext());
    keyHeaders = headers.headers("key2").iterator();
    assertHeader("key2", "key2value", keyHeaders.next());
    assertFalse(keyHeaders.hasNext());
}
@Test
public void testLastHeader() {
    Headers hs = new RecordHeaders();
    hs.add(new RecordHeader("key", "value".getBytes()));
    hs.add(new RecordHeader("key", "value2".getBytes()));
    hs.add(new RecordHeader("key", "value3".getBytes()));
    // lastHeader returns the most recently added header for the key,
    // while all three duplicates remain stored.
    assertHeader("key", "value3", hs.lastHeader("key"));
    assertEquals(3, getCount(hs));
}
@Test
public void testAdd() {
    Headers hs = new RecordHeaders();
    hs.add(new RecordHeader("key", "value".getBytes()));
    // First header is reachable through iteration.
    Header first = hs.iterator().next();
    assertHeader("key", "value", first);
    hs.add(new RecordHeader("key2", "value2".getBytes()));
    // Second header is reachable by key, and both are counted.
    assertHeader("key2", "value2", hs.lastHeader("key2"));
    assertEquals(2, getCount(hs));
}
@Test public void testNew() throws IOException { RecordHeaders headers = new RecordHeaders(); headers.add(new RecordHeader("key", "value".getBytes())); headers.setReadOnly(); RecordHeaders newHeaders = new RecordHeaders(headers); newHeaders.add(new RecordHeader("key", "value2".getBytes())); //Ensure existing headers are not modified assertHeader("key", "value", headers.lastHeader("key")); assertEquals(1, getCount(headers)); //Ensure new headers are modified assertHeader("key", "value2", newHeaders.lastHeader("key")); assertEquals(2, getCount(newHeaders)); }
@Test public void testReadOnly() throws IOException { RecordHeaders headers = new RecordHeaders(); headers.add(new RecordHeader("key", "value".getBytes())); Iterator<Header> headerIteratorBeforeClose = headers.iterator();
/**
 * Subclasses can populate additional headers before they are mapped.
 * @param message the message.
 * @return a fresh, empty header collection for the outgoing record.
 * @since 2.1
 */
protected Headers initialRecordHeaders(Message<?> message) {
    RecordHeaders headers = new RecordHeaders();
    return headers;
}
/**
 * Parse the record entry, deserializing the key / value fields if necessary.
 * Any RuntimeException thrown by a deserializer is wrapped in a
 * SerializationException carrying the partition and offset for diagnostics.
 */
private ConsumerRecord<K, V> parseRecord(TopicPartition partition, RecordBatch batch, Record record) {
    try {
        long offset = record.offset();
        long timestamp = record.timestamp();
        Optional<Integer> leaderEpoch = maybeLeaderEpoch(batch.partitionLeaderEpoch());
        TimestampType timestampType = batch.timestampType();
        // Headers are materialized first so both deserializers can read them.
        Headers headers = new RecordHeaders(record.headers());

        ByteBuffer keyBuffer = record.key();
        byte[] keyArray;
        K key;
        if (keyBuffer == null) {
            keyArray = null;
            key = null;
        } else {
            keyArray = Utils.toArray(keyBuffer);
            key = this.keyDeserializer.deserialize(partition.topic(), headers, keyArray);
        }

        ByteBuffer valueBuffer = record.value();
        byte[] valueArray;
        V value;
        if (valueBuffer == null) {
            valueArray = null;
            value = null;
        } else {
            valueArray = Utils.toArray(valueBuffer);
            value = this.valueDeserializer.deserialize(partition.topic(), headers, valueArray);
        }

        int keySize = keyArray == null ? ConsumerRecord.NULL_SIZE : keyArray.length;
        int valueSize = valueArray == null ? ConsumerRecord.NULL_SIZE : valueArray.length;
        return new ConsumerRecord<>(partition.topic(), partition.partition(), offset,
                                    timestamp, timestampType, record.checksumOrNull(),
                                    keySize, valueSize, key, value, headers, leaderEpoch);
    } catch (RuntimeException e) {
        throw new SerializationException("Error deserializing key/value for partition " + partition +
                " at offset " + record.offset() + ". If needed, please seek past the record to continue consumption.", e);
    }
}
@Override
protected Headers initialRecordHeaders(Message<?> message) {
    // Seed the headers with the type information of the payload class so the
    // consumer side can map the record back to the right Java type.
    RecordHeaders initial = new RecordHeaders();
    this.typeMapper.fromClass(message.getPayload().getClass(), initial);
    return initial;
}
@Test
@SuppressWarnings("deprecation")
public void testOldConstructor() {
    String expectedTopic = "topic";
    int expectedPartition = 0;
    long expectedOffset = 23;
    String expectedKey = "key";
    String expectedValue = "value";
    ConsumerRecord<String, String> rec =
            new ConsumerRecord<>(expectedTopic, expectedPartition, expectedOffset, expectedKey, expectedValue);
    // The explicitly supplied coordinates survive unchanged.
    assertEquals(expectedTopic, rec.topic());
    assertEquals(expectedPartition, rec.partition());
    assertEquals(expectedOffset, rec.offset());
    assertEquals(expectedKey, rec.key());
    assertEquals(expectedValue, rec.value());
    // Everything the old constructor does not accept defaults to its sentinel.
    assertEquals(TimestampType.NO_TIMESTAMP_TYPE, rec.timestampType());
    assertEquals(ConsumerRecord.NO_TIMESTAMP, rec.timestamp());
    assertEquals(ConsumerRecord.NULL_CHECKSUM, rec.checksum());
    assertEquals(ConsumerRecord.NULL_SIZE, rec.serializedKeySize());
    assertEquals(ConsumerRecord.NULL_SIZE, rec.serializedValueSize());
    assertEquals(Optional.empty(), rec.leaderEpoch());
    assertEquals(new RecordHeaders(), rec.headers());
}
@Test public void testAddRemoveInterleaved() { Headers headers = new RecordHeaders(); headers.add(new RecordHeader("key", "value".getBytes())); headers.add(new RecordHeader("key2", "value2".getBytes()));
@Test
public void testDeserializeSerializedEntityEquals() {
    // Plain round-trip without headers.
    assertThat(jsonReader.deserialize(topic, jsonWriter.serialize(topic, entity))).isEqualTo(entity);
    // Round-trip again, this time advertising the target class via the classId header.
    Headers classIdHeaders = new RecordHeaders();
    classIdHeaders.add(AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME, DummyEntity.class.getName().getBytes());
    assertThat(dummyEntityJsonDeserializer.deserialize(topic, classIdHeaders, jsonWriter.serialize(topic, entity)))
            .isEqualTo(entity);
}
@Override
public void accept(ConsumerRecord<?, ?> record, Exception exception) {
    // Resolve where the failed record should be republished.
    TopicPartition destination = this.destinationResolver.apply(record, exception);
    // Copy the incoming headers and enrich them with failure metadata.
    RecordHeaders headers = new RecordHeaders(record.headers().toArray());
    enhanceHeaders(headers, record, exception);
    ProducerRecord<Object, Object> outRecord = createProducerRecord(record, destination, headers);
    // Publish directly unless we are transactional and not already inside a transaction.
    if (!this.transactional || this.template.inTransaction()) {
        publish(outRecord, this.template);
    }
    else {
        this.template.executeInTransaction(t -> {
            publish(outRecord, t);
            return null;
        });
    }
}
@Test
public void testMimeBackwardsCompat() {
    DefaultKafkaHeaderMapper mapper = new DefaultKafkaHeaderMapper();
    MessageHeaders messageHeaders = new MessageHeaders(
            Collections.singletonMap("foo", MimeType.valueOf("application/json")));
    // By default MimeType values are mapped via toString, so they come back as Strings.
    RecordHeaders recordHeaders = new RecordHeaders();
    mapper.fromHeaders(messageHeaders, recordHeaders);
    Map<String, Object> received = new HashMap<>();
    mapper.toHeaders(recordHeaders, received);
    Object fooHeader = received.get("foo");
    assertThat(fooHeader).isInstanceOf(String.class);
    assertThat(fooHeader).isEqualTo("application/json");
    // With the toString mapping cleared, the original MimeType is restored instead.
    KafkaTestUtils.getPropertyValue(mapper, "toStringClasses", Set.class).clear();
    recordHeaders = new RecordHeaders();
    mapper.fromHeaders(messageHeaders, recordHeaders);
    received = new HashMap<>();
    mapper.toHeaders(recordHeaders, received);
    fooHeader = received.get("foo");
    assertThat(fooHeader).isInstanceOf(MimeType.class);
    assertThat(fooHeader).isEqualTo(MimeType.valueOf("application/json"));
}
@Test
public void testDeserializeSerializedEntityArrayEquals() {
    // Plain round-trip of the array without headers.
    assertThat(jsonArrayReader.deserialize(topic, jsonWriter.serialize(topic, entityArray))).isEqualTo(entityArray);
    // Round-trip again with the classId header naming the array type.
    Headers classIdHeaders = new RecordHeaders();
    classIdHeaders.add(AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME, DummyEntity[].class.getName().getBytes());
    assertThat(dummyEntityArrayJsonDeserializer.deserialize(topic, classIdHeaders, jsonWriter.serialize(topic, entityArray)))
            .isEqualTo(entityArray);
}
@Test
public void unitTests() throws Exception {
    // Happy path: the delegate succeeds and its result is passed through.
    ErrorHandlingDeserializer2<String> ehd = new ErrorHandlingDeserializer2<>(new StringDeserializer());
    assertThat(ehd.deserialize("topic", "foo".getBytes())).isEqualTo("foo");
    ehd.close();
    // Failure path: the delegate always throws.
    ehd = new ErrorHandlingDeserializer2<>(new Deserializer<String>() {

        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
        }

        @Override
        public String deserialize(String topic, byte[] data) {
            throw new RuntimeException("fail");
        }

        @Override
        public void close() {
        }

    });
    Headers failureHeaders = new RecordHeaders();
    Object result = ehd.deserialize("topic", failureHeaders, "foo".getBytes());
    // On failure, the deserializer returns null and records the serialized
    // DeserializationException in a well-known header instead of throwing.
    assertThat(result).isNull();
    Header deser = failureHeaders.lastHeader(ErrorHandlingDeserializer2.VALUE_DESERIALIZER_EXCEPTION_HEADER);
    assertThat(new ObjectInputStream(new ByteArrayInputStream(deser.value())).readObject())
            .isInstanceOf(DeserializationException.class);
    ehd.close();
}