/**
 * Returns the next parsed row from the Kafka consumer stream.
 *
 * Rows are pulled from {@code nextIterator} (the rows parsed out of the most
 * recently fetched Kafka message). When that iterator is exhausted, the next
 * raw message is fetched from {@code iter} and re-parsed into a fresh batch.
 *
 * @return the next {@link InputRow}, or {@code null} when the fetched message
 *         has a null payload or fails its CRC check (the message is skipped).
 */
@Nullable
@Override
public InputRow nextRow()
{
  try {
    if (!nextIterator.hasNext()) {
      // Current parsed batch is drained; fetch the next raw Kafka message.
      final byte[] message = iter.next().message();
      if (message == null) {
        // Null payload (e.g. a tombstone) carries no row data — skip it.
        return null;
      }
      nextIterator = theParser.parseBatch(ByteBuffer.wrap(message)).iterator();
    }
    // NOTE(review): if parseBatch() ever yields an empty batch this throws
    // NoSuchElementException — presumably the parser guarantees at least one
    // row per message; confirm against theParser's contract.
    return nextIterator.next();
  }
  catch (InvalidMessageException e) {
    /*
     * If the CRC failure was introduced during wire transfer, skipping is not
     * the ideal recovery; it would probably be better to shut down the
     * firehose without committing and start it again.
     */
    log.error(e, "Message failed its checksum and it is corrupt, will skip it");
    return null;
  }
}
/**
 * Verifies that {@code totalSuccessful} records can be consumed back from the
 * given topic, logging each message as it is read.
 *
 * Blocks on {@code iterator.next()} — relies on the Kafka consumer's own
 * timeout behavior if fewer than {@code totalSuccessful} records were written.
 *
 * @param totalSuccessful number of records expected on the topic
 * @param topic           topic to consume from
 * @throws UnsupportedEncodingException never thrown any more (kept for
 *         signature compatibility); decoding now uses {@code StandardCharsets.UTF_8},
 *         which cannot be unsupported.
 */
private void testRecordsWritten(int totalSuccessful, String topic)
    throws UnsupportedEncodingException {
  final ConsumerIterator<byte[], byte[]> iterator = kafkaTestHelper.getIteratorForTopic(topic);
  for (int i = 0; i < totalSuccessful; ++i) {
    // Use the charset constant instead of the "UTF-8" string literal: no
    // checked-exception path and no chance of a typo'd charset name.
    String message = new String(iterator.next().message(), java.nio.charset.StandardCharsets.UTF_8);
    log.debug(String.format("%d of %d: Message consumed: %s", (i + 1), totalSuccessful, message));
  }
}
return; messagePlusMeta = iterator.next(); if (messagePlusMeta!=null) { byte[] payload = messagePlusMeta.message();
final MessageAndMetadata<byte[], byte[]> message = consumerIterator.next();
/**
 * Pushes two messages through a {@link KafkaPusher} and verifies they come
 * back off the topic in order.
 */
@Test
public void test() throws IOException {
  KafkaPusher pusher = new KafkaPusher("localhost:" + kafkaPort, TOPIC);
  String msg1 = "msg1";
  String msg2 = "msg2";
  // Encode with an explicit charset rather than the platform default.
  pusher.pushMessages(Lists.newArrayList(
      msg1.getBytes(java.nio.charset.StandardCharsets.UTF_8),
      msg2.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
  // Give the broker a moment to make the messages visible to the consumer.
  try {
    Thread.sleep(1000);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }
  // Bare `assert` is a no-op unless the JVM runs with -ea, so a missing
  // message would previously slip through silently. Use JUnit assertions.
  Assert.assertTrue(iterator.hasNext());
  Assert.assertEquals(
      new String(iterator.next().message(), java.nio.charset.StandardCharsets.UTF_8), msg1);
  Assert.assertTrue(iterator.hasNext());
  Assert.assertEquals(
      new String(iterator.next().message(), java.nio.charset.StandardCharsets.UTF_8), msg2);
  pusher.close();
}
if (kafkaIterator.hasNext()) { try { destination = _messageDecoder.decode(kafkaIterator.next().message(), destination); tableAndStreamRowsConsumed = _serverMetrics .addMeteredTableValue(_tableAndStreamName, ServerMeter.REALTIME_ROWS_CONSUMED, 1L,
@Test public void test() throws IOException { // Test that the scoped config overrides the generic config Pusher pusher = new KafkaProducerPusher("localhost:dummy", TOPIC, Optional.of(ConfigFactory.parseMap(ImmutableMap.of( ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + this.kafkaTestHelper.getKafkaServerPort())))); String msg1 = "msg1"; String msg2 = "msg2"; pusher.pushMessages(Lists.newArrayList(msg1.getBytes(), msg2.getBytes())); try { Thread.sleep(1000); } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } ConsumerIterator<byte[], byte[]> iterator = this.kafkaTestHelper.getIteratorForTopic(TOPIC); assert(iterator.hasNext()); Assert.assertEquals(new String(iterator.next().message()), msg1); assert(iterator.hasNext()); Assert.assertEquals(new String(iterator.next().message()), msg2); pusher.close(); }
final MessageAndMetadata<byte[], byte[]> mam = iterator.next();
@Test public void test() throws IOException { // Test that the scoped config overrides the generic config Pusher pusher = new KafkaKeyValueProducerPusher<byte[], byte[]>("localhost:dummy", TOPIC, Optional.of(ConfigFactory.parseMap(ImmutableMap.of( ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + this.kafkaTestHelper.getKafkaServerPort())))); String msg1 = "msg1"; String msg2 = "msg2"; pusher.pushMessages(Lists.newArrayList(Pair.of("key1", msg1.getBytes()), Pair.of("key2", msg2.getBytes()))); try { Thread.sleep(1000); } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } ConsumerIterator<byte[], byte[]> iterator = this.kafkaTestHelper.getIteratorForTopic(TOPIC); assert(iterator.hasNext()); MessageAndMetadata<byte[], byte[]> messageAndMetadata = iterator.next(); Assert.assertEquals(new String(messageAndMetadata.key()), "key1"); Assert.assertEquals(new String(messageAndMetadata.message()), msg1); assert(iterator.hasNext()); messageAndMetadata = iterator.next(); Assert.assertEquals(new String(messageAndMetadata.key()), "key2"); Assert.assertEquals(new String(messageAndMetadata.message()), msg2); pusher.close(); }
/**
 * Round-trips a single string through {@code Kafka08DataWriter} configured
 * with the Kafka StringSerializer, verifying the success callback fired and
 * the consumed bytes decode back to the original string.
 */
@Test
public void testStringSerialization() throws IOException, InterruptedException {
  String topic = "testStringSerialization08";
  _kafkaTestHelper.provisionTopic(topic);
  Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      "org.apache.kafka.common.serialization.StringSerializer");
  Kafka08DataWriter<String> kafka08DataWriter = new Kafka08DataWriter<String>(props);
  String messageString = "foobar";
  WriteCallback callback = mock(WriteCallback.class);
  try {
    kafka08DataWriter.write(messageString, callback);
  } finally {
    // close() flushes the producer, so the callbacks have fired by the time
    // the verifications below run.
    kafka08DataWriter.close();
  }
  verify(callback, times(1)).onSuccess(isA(WriteResponse.class));
  verify(callback, never()).onFailure(isA(Exception.class));
  byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
  // Kafka's StringSerializer encodes UTF-8 by default; decode with the same
  // explicit charset instead of the platform default.
  String messageReceived = new String(message, java.nio.charset.StandardCharsets.UTF_8);
  Assert.assertEquals(messageReceived, messageString);
}
/**
 * Round-trips a single string through {@code Kafka09DataWriter} configured
 * with the Kafka StringSerializer, verifying the write future completes, the
 * success callback fired, and the consumed bytes decode back to the original.
 */
@Test
public void testStringSerialization()
    throws IOException, InterruptedException, ExecutionException {
  String topic = "testStringSerialization08";
  _kafkaTestHelper.provisionTopic(topic);
  Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      "org.apache.kafka.common.serialization.StringSerializer");
  Kafka09DataWriter<String> kafka09DataWriter = new Kafka09DataWriter<String>(props);
  String messageString = "foobar";
  WriteCallback callback = mock(WriteCallback.class);
  Future<WriteResponse> future;
  try {
    future = kafka09DataWriter.write(messageString, callback);
    // flush() blocks until the record is acknowledged, so the callback and
    // future state can be asserted immediately afterwards.
    kafka09DataWriter.flush();
    verify(callback, times(1)).onSuccess(isA(WriteResponse.class));
    verify(callback, never()).onFailure(isA(Exception.class));
    Assert.assertTrue(future.isDone(), "Future should be done");
    System.out.println(future.get().getStringResponse());
    byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
    // Kafka's StringSerializer encodes UTF-8 by default; decode with the same
    // explicit charset instead of the platform default.
    String messageReceived = new String(message, java.nio.charset.StandardCharsets.UTF_8);
    Assert.assertEquals(messageReceived, messageString);
  } finally {
    kafka09DataWriter.close();
  }
}
/**
 * Round-trips a random byte payload through {@code Kafka09DataWriter}
 * configured with the ByteArraySerializer, checking the success callback and
 * the exact bytes consumed from the topic.
 */
@Test
public void testBinarySerialization() throws IOException, InterruptedException {
  final String topic = "testBinarySerialization08";
  _kafkaTestHelper.provisionTopic(topic);

  final Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      "org.apache.kafka.common.serialization.ByteArraySerializer");

  final byte[] payload = TestUtils.generateRandomBytes();
  final WriteCallback ack = mock(WriteCallback.class);
  final Kafka09DataWriter<byte[]> writer = new Kafka09DataWriter<byte[]>(props);
  try {
    writer.write(payload, ack);
  } finally {
    // Closing flushes the producer so the callback has fired before verify().
    writer.close();
  }

  verify(ack, times(1)).onSuccess(isA(WriteResponse.class));
  verify(ack, never()).onFailure(isA(Exception.class));

  final byte[] consumed = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
  Assert.assertEquals(consumed, payload);
}
/**
 * Round-trips a random byte payload through {@code Kafka08DataWriter}
 * configured with the ByteArraySerializer, checking the success callback and
 * the exact bytes consumed from the topic.
 */
@Test
public void testBinarySerialization() throws IOException, InterruptedException {
  final String topic = "testBinarySerialization08";
  _kafkaTestHelper.provisionTopic(topic);

  final Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      "org.apache.kafka.common.serialization.ByteArraySerializer");

  final byte[] payload = TestUtils.generateRandomBytes();
  final WriteCallback ack = mock(WriteCallback.class);
  final Kafka08DataWriter<byte[]> writer = new Kafka08DataWriter<byte[]>(props);
  try {
    writer.write(payload, ack);
  } finally {
    // Closing flushes the producer so the callback has fired before verify().
    writer.close();
  }

  verify(ack, times(1)).onSuccess(isA(WriteResponse.class));
  verify(ack, never()).onFailure(isA(Exception.class));

  final byte[] consumed = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
  Assert.assertEquals(consumed, payload);
}
verify(callback, never()).onFailure(isA(Exception.class)); byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message(); ConfigDrivenMd5SchemaRegistry schemaReg = new ConfigDrivenMd5SchemaRegistry(topic, record.getSchema()); LiAvroDeserializer deser = new LiAvroDeserializer(schemaReg);
verify(callback, never()).onFailure(isA(Exception.class)); byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message(); ConfigDrivenMd5SchemaRegistry schemaReg = new ConfigDrivenMd5SchemaRegistry(topic, record.getSchema()); LiAvroDeserializer deser = new LiAvroDeserializer(schemaReg);
/**
 * Fetches the next raw record from the legacy consumer iterator and wraps it
 * in a {@link Message}.
 *
 * The message timestamp is taken from Kafka only when the config enables it;
 * otherwise it defaults to 0.
 *
 * @throws LegacyConsumerTimeoutException if the underlying iterator times out
 */
@Override
public Message next() {
  final MessageAndMetadata<byte[], byte[]> raw;
  try {
    raw = mIterator.next();
  } catch (ConsumerTimeoutException e) {
    // Surface the legacy consumer's timeout as our own exception type.
    throw new LegacyConsumerTimeoutException(e);
  }

  final long timestamp = mConfig.useKafkaTimestamp()
      ? mKafkaMessageTimestampFactory.getKafkaMessageTimestamp().getTimestamp(raw)
      : 0L;

  return new Message(raw.topic(), raw.partition(), raw.offset(), raw.key(),
      raw.message(), timestamp);
}
/** * Runs a continuous loop to consume messages over a Kafka Consumer stream. * It breaks out when the consumer has been shutdown and there are no more messages. */ @Override public void run() { try { // Set up the consumer stream and the iterators. // Not doing in the constructor so that it can happen in parallel init(); while (iterator.hasNext()) { fetchedMsgCounter.mark(); MessageAndMetadata mm = null; try { mm = iterator.next(); processAuditMsg(mm); } catch (Exception e) { failedToIngestCounter.mark(); logger.error("Got exception to iterate/process msg", e); } } logger.info("KafkaIngesterConsumer {} exiting", getName()); } finally { shutdownComplete.countDown(); } }
/**
 * Drains the assigned Kafka stream, decoding each payload as UTF-8 and
 * handing it to the executor pool via the task factory. Exits when the
 * stream's iterator is exhausted (consumer shutdown).
 */
@Override
public void run() {
  ConsumerIterator<byte[], byte[]> it = kafkaStream.iterator();
  while (it.hasNext()) {
    byte[] messageData = it.next().message();
    // Decode with an explicit charset; the no-arg String(byte[]) constructor
    // uses the platform default and can corrupt non-ASCII payloads.
    String reply = new String(messageData, java.nio.charset.StandardCharsets.UTF_8);
    executorPool.submit(executorTaskFactory.apply(reply));
    System.out.println("Consumed Thread:" + threadNumber + ".Consuming User: " + reply);
  }
  System.out.println("Shutting down Thread: " + kafkaStream);
}
}
/**
 * Blocks for the next message on the stream.
 *
 * @return the message payload, or {@code null} if the consumer timed out
 *         before anything arrived
 */
private String getNextMessage() {
  try {
    return stream.iterator().next().message();
  } catch (ConsumerTimeoutException timeout) {
    System.out.println("waited " + waitTime + " and no messages arrived.");
    return null;
  }
}
/**
 * Consumes the configured topic on a single stream and prints every message
 * payload (decoded as UTF-8) until the consumer is shut down.
 */
public void run() {
  Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
  // Autobox instead of the deprecated `new Integer(1)` constructor.
  topicCountMap.put(topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
  ConsumerIterator<byte[], byte[]> it = stream.iterator();
  while (it.hasNext()) {
    // Explicit charset: the no-arg String(byte[]) constructor uses the
    // platform default and can corrupt non-ASCII payloads.
    System.out.println(new String(it.next().message(), java.nio.charset.StandardCharsets.UTF_8));
  }
}