@Test
@SuppressWarnings("deprecation") // exercises the deprecated checksum-bearing RecordMetadata constructor
public void testConstructionWithMissingRelativeOffset() {
    // A relative offset of -1 means the offset is unknown: hasOffset() must be
    // false and offset() must return the -1 sentinel, while all other fields
    // passed to the constructor are reported back unchanged.
    TopicPartition topicPartition = new TopicPartition("foo", 0);
    long recordTimestamp = 2340234L;
    int serializedKeySize = 3;
    int serializedValueSize = 5;
    Long expectedChecksum = 908923L;

    RecordMetadata recordMetadata = new RecordMetadata(
            topicPartition, -1L, -1L, recordTimestamp, expectedChecksum,
            serializedKeySize, serializedValueSize);

    assertEquals(topicPartition.topic(), recordMetadata.topic());
    assertEquals(topicPartition.partition(), recordMetadata.partition());
    assertEquals(recordTimestamp, recordMetadata.timestamp());
    assertFalse(recordMetadata.hasOffset());
    assertEquals(-1L, recordMetadata.offset());
    assertEquals(expectedChecksum.longValue(), recordMetadata.checksum());
    assertEquals(serializedKeySize, recordMetadata.serializedKeySize());
    assertEquals(serializedValueSize, recordMetadata.serializedValueSize());
}
// NOTE(review): fragment — unknownMetadata is declared outside this view; presumably
// metadata built without a broker-assigned offset, so it must report hasOffset() == false
// and the -1 sentinel from offset(). Confirm against the enclosing test method.
assertFalse(unknownMetadata.hasOffset()); assertEquals(-1L, unknownMetadata.offset());
/**
 * Serializes the given GELF message as JSON and publishes it to the configured
 * Kafka topic, blocking up to 30 seconds for the broker's acknowledgement.
 *
 * @param message the GELF message to serialize and send
 * @return {@code true} if the broker assigned an offset to the record,
 *         {@code false} if sending failed, timed out, or was interrupted
 */
@Override
public boolean sendMessage(GelfMessage message) {
    // Encode explicitly as UTF-8: the no-arg getBytes() uses the platform-default
    // charset and can corrupt non-ASCII log content on non-UTF-8 hosts.
    byte[] payload = message.toJson().getBytes(java.nio.charset.StandardCharsets.UTF_8);
    ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(topicName, payload);
    try {
        Future<RecordMetadata> metadata = kafkaProducer.send(record);
        return metadata.get(30, TimeUnit.SECONDS).hasOffset();
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe the interruption.
        Thread.currentThread().interrupt();
        errorReporter.reportError("Error sending log to kafka", e);
        return false;
    } catch (Exception e) {
        // Boundary method: report any send/timeout failure and signal it via the return value.
        errorReporter.reportError("Error sending log to kafka", e);
        return false;
    }
}
// NOTE(review): fragment — the braces opened here are closed outside this view.
// Persists the actor only when Kafka actually assigned an offset; for concurrent
// stores the offset is passed along with the put (presumably for
// optimistic-concurrency/versioning — confirm against the ActorStore contract).
if(metadata.hasOffset()) { if(this.actorStore.isConcurrent()) { this.actorStore.put(persistentActor.getSelf().getActorId(), serializedActor, metadata.offset());