/**
 * Determines the partition for the given record.
 *
 * <p>An explicit partition on the record wins; otherwise the configured
 * partitioner computes one from the (serialized) key and value.
 */
private int partition(ProducerRecord<K, V> record, byte[] serializedKey, byte[] serializedValue, Cluster cluster) {
    final Integer explicitPartition = record.partition();
    if (explicitPartition != null) {
        return explicitPartition;
    }
    return partitioner.partition(
            record.topic(), record.key(), serializedKey, record.value(), serializedValue, cluster);
}
/**
 * Counts the interception and returns a copy of the record whose value has
 * {@code appendStr} concatenated; topic, partition and key are untouched.
 */
@Override
public ProducerRecord<String, String> onSend(ProducerRecord<String, String> record) {
    ONSEND_COUNT.incrementAndGet();
    final String appendedValue = record.value().concat(appendStr);
    return new ProducerRecord<>(record.topic(), record.partition(), record.key(), appendedValue);
}
/** * computes partition for given record. */ private int partition(ProducerRecord<K, V> record, Cluster cluster) { Integer partition = record.partition(); String topic = record.topic(); if (partition != null) { List<PartitionInfo> partitions = cluster.partitionsForTopic(topic); int numPartitions = partitions.size(); // they have given us a partition, use it if (partition < 0 || partition >= numPartitions) throw new IllegalArgumentException("Invalid partition given with record: " + partition + " is not in the range [0..." + numPartitions + "]."); return partition; } byte[] keyBytes = keySerializer.serialize(topic, record.headers(), record.key()); byte[] valueBytes = valueSerializer.serialize(topic, record.headers(), record.value()); return this.partitioner.partition(topic, record.key(), keyBytes, record.value(), valueBytes, cluster); }
/**
 * Builds the Kafka record for the row and hands it to the async producer
 * together with a completion callback.
 *
 * <p>The record value is only retained for the callback when debug logging is
 * enabled, which eases memory pressure otherwise.
 */
@Override
public void sendAsync(RowMap r, AbstractAsyncProducer.CallbackCompleter cc) throws Exception {
    ProducerRecord<String, String> record = makeProducerRecord(r);

    // Only keep a reference to the value if it will actually be logged.
    String valueForLogging = null;
    if (KafkaCallback.LOGGER.isDebugEnabled()) {
        valueForLogging = record.value();
    }

    KafkaCallback callback = new KafkaCallback(cc, r.getNextPosition(), record.key(), valueForLogging,
            this.succeededMessageCount, this.failedMessageCount,
            this.succeededMessageMeter, this.failedMessageMeter, this.context);
    sendAsync(record, callback);
}
/**
 * Counts the interception, optionally fails with an injected exception, and
 * otherwise returns a copy of the record with {@code appendStr} concatenated
 * onto its value.
 */
@Override
public ProducerRecord<Integer, String> onSend(ProducerRecord<Integer, String> record) {
    onSendCount++;
    if (throwExceptionOnSend) {
        throw new KafkaException("Injected exception in AppendProducerInterceptor.onSend");
    }
    final String appendedValue = record.value().concat(appendStr);
    return new ProducerRecord<>(record.topic(), record.partition(), record.key(), appendedValue);
}
/** A configured key selector must supply the bytes used as the record key. */
@Test
public void testWithKeySelector() {
    final KafkaStructuredLoggingServiceExposed service =
            new KafkaStructuredLoggingServiceExposed(producer, (res, log) -> log.name.getBytes(), false);
    final SimpleStructuredLog log = new SimpleStructuredLog("kawamuray");

    service.writeLog(null, log);

    verify(producer, times(1)).send(captor.capture(), any(Callback.class));
    final ProducerRecord<byte[], SimpleStructuredLog> sent = captor.getValue();
    assertThat(sent.key()).isNotNull();
    assertThat(new String(sent.key())).isEqualTo(log.name);
    assertThat(sent.value()).isEqualTo(log);
}
@Override public Future<RecordMetadata> send(ProducerRecord<K, V> producerRecord, Callback callback) { com.twitter.util.Future<DLSN> dlsnFuture; if (null == producerRecord.key()) { dlsnFuture = getUnpartitionedMultiWriter(producerRecord.topic()).write(producerRecord.value()); } else { // TODO: be able to publish to a specific partition dlsnFuture = getPartitionedMultiWriter(producerRecord.topic()).write(producerRecord.key(), producerRecord.value()); } return new DLFutureRecordMetadata(producerRecord.topic(), dlsnFuture, callback); }
/** With a key extractor, the record key and value come from different log fields. */
@Test
public void withKeyExtractor() {
    final RequestLog log = mock(RequestLog.class);
    when(log.authority()).thenReturn("kawamuray");
    when(log.decodedPath()).thenReturn("kyuto");
    final KafkaAccessLogWriter<String, String> service =
            new KafkaAccessLogWriter<>(producer, TOPIC_NAME, RequestLog::decodedPath, RequestLog::authority);

    service.log(log);

    verify(producer, times(1)).send(captor.capture(), any(Callback.class));
    final ProducerRecord<String, String> sent = captor.getValue();
    assertThat(sent.key()).isEqualTo("kyuto");
    assertThat(sent.value()).isEqualTo("kawamuray");
}
/** Without a key extractor, the produced record carries a null key. */
@Test
public void withoutKeyExtractor() {
    final RequestLog log = mock(RequestLog.class);
    when(log.authority()).thenReturn("kawamuray");
    final KafkaAccessLogWriter<String, String> service =
            new KafkaAccessLogWriter<>(producer, TOPIC_NAME, RequestLog::authority);

    service.log(log);

    verify(producer, times(1)).send(captor.capture(), any(Callback.class));
    final ProducerRecord<String, String> sent = captor.getValue();
    assertThat(sent.key()).isNull();
    assertThat(sent.value()).isEqualTo("kawamuray");
}
@Test public void testSimple() { MockProducer<String, String> producer = new MockProducer<>(Cluster.empty(), false, null, null, null); KafkaBolt<String, String> bolt = makeBolt(producer); OutputCollector collector = mock(OutputCollector.class); TopologyContext context = mock(TopologyContext.class); Map<String, Object> conf = new HashMap<>(); bolt.prepare(conf, context, collector); String key = "KEY"; String value = "VALUE"; Tuple testTuple = createTestTuple(key, value); bolt.execute(testTuple); assertThat(producer.history().size(), is(1)); ProducerRecord<String, String> arg = producer.history().get(0); LOG.info("GOT {} ->", arg); LOG.info("{}, {}, {}", arg.topic(), arg.key(), arg.value()); assertThat(arg.topic(), is("MY_TOPIC")); assertThat(arg.key(), is(key)); assertThat(arg.value(), is(value)); // Complete the send producer.completeNext(); verify(collector).ack(testTuple); }
/** Without a key selector, the structured-log record is sent with a null key. */
@Test
public void testServiceWithoutKeySelector() {
    final KafkaStructuredLoggingServiceExposed service =
            new KafkaStructuredLoggingServiceExposed(producer, null, false);
    final SimpleStructuredLog log = new SimpleStructuredLog("kawamuray");

    service.writeLog(null, log);

    verify(producer, times(1)).send(captor.capture(), any(Callback.class));
    final ProducerRecord<byte[], SimpleStructuredLog> sent = captor.getValue();
    assertThat(sent.key()).isNull();
    assertThat(sent.value()).isEqualTo(log);
}
@Test public void testSimpleWithError() { MockProducer<String, String> producer = new MockProducer<>(Cluster.empty(), false, null, null, null); KafkaBolt<String, String> bolt = makeBolt(producer); OutputCollector collector = mock(OutputCollector.class); TopologyContext context = mock(TopologyContext.class); Map<String, Object> conf = new HashMap<>(); bolt.prepare(conf, context, collector); String key = "KEY"; String value = "VALUE"; Tuple testTuple = createTestTuple(key, value); bolt.execute(testTuple); assertThat(producer.history().size(), is(1)); ProducerRecord<String, String> arg = producer.history().get(0); LOG.info("GOT {} ->", arg); LOG.info("{}, {}, {}", arg.topic(), arg.key(), arg.value()); assertThat(arg.topic(), is("MY_TOPIC")); assertThat(arg.key(), is(key)); assertThat(arg.value(), is(value)); // Force a send error KafkaException ex = new KafkaException(); producer.errorNext(ex); verify(collector).reportError(ex); verify(collector).fail(testTuple); }
/**
 * Appending through {@code KafkaAppenderWithKey} must produce exactly one
 * record carrying the configured key bytes and the log message as its value.
 */
@Test
public void testAppendWithKey() throws Exception {
    final Appender appender = ctx.getRequiredAppender("KafkaAppenderWithKey");
    final LogEvent logEvent = createLogEvent();
    appender.append(logEvent);
    final List<ProducerRecord<byte[], byte[]>> history = kafka.history();
    assertEquals(1, history.size());
    final ProducerRecord<byte[], byte[]> item = history.get(0);
    assertNotNull(item);
    assertEquals(TOPIC_NAME, item.topic());
    // Removed a dead local that called toString() on the key byte[] (which
    // only yields an identity string) and was never used afterwards.
    byte[] keyValue = "key".getBytes(StandardCharsets.UTF_8);
    // JUnit convention: expected value first, actual second.
    assertArrayEquals(keyValue, item.key());
    assertEquals(LOG_MESSAGE, new String(item.value(), StandardCharsets.UTF_8));
}
/**
 * Appending through {@code KafkaAppenderWithKeyLookup} must produce a record
 * whose key is the current date formatted as {@code dd-MM-yyyy}.
 *
 * <p>NOTE(review): the reference date is captured before {@code append}, so
 * this test could flake if the day rolls over in between — acceptable here.
 */
@Test
public void testAppendWithKeyLookup() throws Exception {
    final Appender appender = ctx.getRequiredAppender("KafkaAppenderWithKeyLookup");
    final LogEvent logEvent = createLogEvent();
    Date date = new Date();
    SimpleDateFormat format = new SimpleDateFormat("dd-MM-yyyy");
    appender.append(logEvent);
    final List<ProducerRecord<byte[], byte[]>> history = kafka.history();
    assertEquals(1, history.size());
    final ProducerRecord<byte[], byte[]> item = history.get(0);
    assertNotNull(item);
    assertEquals(TOPIC_NAME, item.topic());
    byte[] keyValue = format.format(date).getBytes(StandardCharsets.UTF_8);
    // JUnit convention: expected value first, actual second.
    assertArrayEquals(keyValue, item.key());
    assertEquals(LOG_MESSAGE, new String(item.value(), StandardCharsets.UTF_8));
}
/** The async appender must produce one key-less record with the raw log message. */
@Test
public void testAsyncAppend() throws Exception {
    final Appender appender = ctx.getRequiredAppender("AsyncKafkaAppender");
    appender.append(createLogEvent());

    final List<ProducerRecord<byte[], byte[]>> history = kafka.history();
    assertEquals(1, history.size());

    final ProducerRecord<byte[], byte[]> item = history.get(0);
    assertNotNull(item);
    assertEquals(TOPIC_NAME, item.topic());
    assertNull(item.key());
    assertEquals(LOG_MESSAGE, new String(item.value(), StandardCharsets.UTF_8));
}
byte[] serializedKey; try { serializedKey = keySerializer.serialize(record.topic(), record.headers(), record.key()); } catch (ClassCastException cce) { throw new SerializationException("Can't convert key of class " + record.key().getClass().getName() + " to class " + producerConfig.getClass(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG).getName() + " specified in key.serializer", cce);
/** With a layout configured, the record value is the layout-formatted message. */
@Test
public void testAppendWithLayout() throws Exception {
    final Appender appender = ctx.getRequiredAppender("KafkaAppenderWithLayout");
    appender.append(createLogEvent());

    final List<ProducerRecord<byte[], byte[]>> history = kafka.history();
    assertEquals(1, history.size());

    final ProducerRecord<byte[], byte[]> item = history.get(0);
    assertNotNull(item);
    assertEquals(TOPIC_NAME, item.topic());
    assertNull(item.key());
    assertEquals("[" + LOG_MESSAGE + "]", new String(item.value(), StandardCharsets.UTF_8));
}
/** With a serialized layout, the value must deserialize back to the original message. */
@Test
public void testAppendWithSerializedLayout() throws Exception {
    final Appender appender = ctx.getRequiredAppender("KafkaAppenderWithSerializedLayout");
    final LogEvent logEvent = createLogEvent();
    appender.append(logEvent);

    final List<ProducerRecord<byte[], byte[]>> history = kafka.history();
    assertEquals(1, history.size());

    final ProducerRecord<byte[], byte[]> item = history.get(0);
    assertNotNull(item);
    assertEquals(TOPIC_NAME, item.topic());
    assertNull(item.key());
    assertEquals(LOG_MESSAGE, deserializeLogEvent(item.value()).getMessage().getFormattedMessage());
}