/** Reports whether another record is available from either the buffered or the backing iterator. */
@Override
public boolean hasMore() {
  if (nextIterator.hasNext()) {
    return true;
  }
  return iter.hasNext();
}
/**
 * Delegates to the wrapped Kafka iterator, translating its
 * {@code ConsumerTimeoutException} into the legacy exception type callers expect.
 */
@Override
public boolean hasNext() {
  try {
    return mIterator.hasNext();
  } catch (ConsumerTimeoutException timeout) {
    throw new LegacyConsumerTimeoutException(timeout);
  }
}
if (!iterator.hasNext()) { return;
while (consumerIterator.hasNext()) { if (paused) {
/**
 * Pushes two messages through {@link KafkaPusher} and verifies they are
 * consumed back in order from the test topic.
 */
@Test
public void test() throws IOException {
  KafkaPusher pusher = new KafkaPusher("localhost:" + kafkaPort, TOPIC);
  String msg1 = "msg1";
  String msg2 = "msg2";
  pusher.pushMessages(Lists.newArrayList(msg1.getBytes(), msg2.getBytes()));
  try {
    // Give the broker a moment to make the messages visible to the consumer.
    Thread.sleep(1000);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }
  // Bug fix: the `assert` keyword is disabled by default (requires -ea), so the
  // original checks could silently pass. Use Assert.assertTrue instead.
  Assert.assertTrue(iterator.hasNext());
  Assert.assertEquals(new String(iterator.next().message()), msg1);
  Assert.assertTrue(iterator.hasNext());
  Assert.assertEquals(new String(iterator.next().message()), msg2);
  pusher.close();
}
@Override public GenericRow next(GenericRow destination) { if (kafkaIterator.hasNext()) { try { destination = _messageDecoder.decode(kafkaIterator.next().message(), destination);
@Test public void test() throws IOException { // Test that the scoped config overrides the generic config Pusher pusher = new KafkaProducerPusher("localhost:dummy", TOPIC, Optional.of(ConfigFactory.parseMap(ImmutableMap.of( ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + this.kafkaTestHelper.getKafkaServerPort())))); String msg1 = "msg1"; String msg2 = "msg2"; pusher.pushMessages(Lists.newArrayList(msg1.getBytes(), msg2.getBytes())); try { Thread.sleep(1000); } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } ConsumerIterator<byte[], byte[]> iterator = this.kafkaTestHelper.getIteratorForTopic(TOPIC); assert(iterator.hasNext()); Assert.assertEquals(new String(iterator.next().message()), msg1); assert(iterator.hasNext()); Assert.assertEquals(new String(iterator.next().message()), msg2); pusher.close(); }
for (; msgCount < batchSize && iterator.hasNext(); msgCount++) { final MessageAndMetadata<byte[], byte[]> mam = iterator.next();
@Test public void test() throws IOException { // Test that the scoped config overrides the generic config Pusher pusher = new KafkaKeyValueProducerPusher<byte[], byte[]>("localhost:dummy", TOPIC, Optional.of(ConfigFactory.parseMap(ImmutableMap.of( ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + this.kafkaTestHelper.getKafkaServerPort())))); String msg1 = "msg1"; String msg2 = "msg2"; pusher.pushMessages(Lists.newArrayList(Pair.of("key1", msg1.getBytes()), Pair.of("key2", msg2.getBytes()))); try { Thread.sleep(1000); } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } ConsumerIterator<byte[], byte[]> iterator = this.kafkaTestHelper.getIteratorForTopic(TOPIC); assert(iterator.hasNext()); MessageAndMetadata<byte[], byte[]> messageAndMetadata = iterator.next(); Assert.assertEquals(new String(messageAndMetadata.key()), "key1"); Assert.assertEquals(new String(messageAndMetadata.message()), msg1); assert(iterator.hasNext()); messageAndMetadata = iterator.next(); Assert.assertEquals(new String(messageAndMetadata.key()), "key2"); Assert.assertEquals(new String(messageAndMetadata.message()), msg2); pusher.close(); }
/** * Runs a continuous loop to consume messages over a Kafka Consumer stream. * It breaks out when the consumer has been shutdown and there are no more messages. */ @Override public void run() { try { // Set up the consumer stream and the iterators. // Not doing in the constructor so that it can happen in parallel init(); while (iterator.hasNext()) { fetchedMsgCounter.mark(); MessageAndMetadata mm = null; try { mm = iterator.next(); processAuditMsg(mm); } catch (Exception e) { failedToIngestCounter.mark(); logger.error("Got exception to iterate/process msg", e); } } logger.info("KafkaIngesterConsumer {} exiting", getName()); } finally { shutdownComplete.countDown(); } }
// Probes the underlying Kafka iterator for another message.
// The boolean result of it.hasNext() is deliberately ignored: with a consumer
// timeout configured, the call either returns (a message is buffered — report
// true) or throws ConsumerTimeoutException (nothing arrived in time — report
// false). NOTE(review): assumes consumer.timeout.ms is set on this consumer;
// without it, it.hasNext() blocks indefinitely — confirm against the config.
boolean hasNext() { try { this.it.hasNext(); return true; } catch (ConsumerTimeoutException var2) { return false; } } }
/**
 * Checks whether a message is waiting in Kafka.
 * Blocks for up to the configured consumer timeout (10ms by default); a
 * ConsumerTimeoutException means nothing arrived, which is converted into
 * a plain {@code false} rather than propagated.
 */
boolean hasNext() {
  try {
    it.hasNext();
  } catch (ConsumerTimeoutException timedOut) {
    return false;
  }
  return true;
}
/** True when either the pending iterator or the backing iterator still has elements. */
@Override
public boolean hasMore() {
  // `||` short-circuits, so iter is only consulted when the buffer is empty —
  // same evaluation order as the original expression.
  boolean buffered = nextIterator.hasNext();
  return buffered || iter.hasNext();
}
/** Reports whether more elements remain, checking the pending iterator first. */
@Override
public boolean hasMore() {
  // Guard-clause form: fall through to the backing iterator only when the
  // pending iterator is exhausted (preserves the original short-circuit).
  if (!nextIterator.hasNext()) {
    return iter.hasNext();
  }
  return true;
}
// Consumer loop for the test: echoes each received payload to stdout and,
// upon seeing the sentinel message "test", interrupts the main thread (to
// wake it from its wait) and exits. Any other message is logged and skipped.
// NOTE(review): new String(byte[]) uses the platform default charset — confirm
// the producer side encodes the same way.
@Override public void run() { while (streamIterator.hasNext()) { MessageAndMetadata<byte[], byte[]> data = streamIterator.next(); String msg = new String(data.message()); System.out.println("Received " + msg); if (msg.equals("test")) { mainThread.interrupt(); return; } } } });
// Worker loop: drains this thread's Kafka stream, decodes each payload with
// the platform default charset, and hands it to a task built by
// executorTaskFactory, submitted to the shared executor pool.
// NOTE(review): the shutdown message concatenates `kafkaStream` (the stream
// object) rather than `threadNumber` — possibly unintended; confirm before
// changing the log text.
@Override public void run() { ConsumerIterator<byte[], byte[]> it = kafkaStream.iterator(); while (it.hasNext()) { byte[] messageData = it.next().message(); String reply = new String(messageData); executorPool.submit(executorTaskFactory.apply(reply)); System.out.println("Consumed Thread:" + threadNumber + ".Consuming User: " + reply); } System.out.println("Shutting down Thread: " + kafkaStream); } }
/**
 * Consumes a single stream for {@code topic} and prints each message payload.
 * Blocks inside the iterator; the loop ends only when the consumer shuts down.
 */
public void run() {
  Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
  // Autoboxing replaces the deprecated `new Integer(1)` constructor.
  topicCountMap.put(topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
  ConsumerIterator<byte[], byte[]> it = stream.iterator();
  while (it.hasNext()) {
    // NOTE(review): decodes with the platform default charset — specify one if encoding matters.
    System.out.println(new String(it.next().message()));
  }
}
/**
 * Thread body: prints every message from this thread's stream, tagged with the
 * thread number, then logs shutdown once the iterator is exhausted.
 */
public void run() {
  ConsumerIterator<byte[], byte[]> it = m_stream.iterator();
  // Braced loop body (the original unbraced while invites maintenance bugs).
  while (it.hasNext()) {
    System.out.println("Thread " + m_threadNumber + ": " + new String(it.next().message()));
  }
  System.out.println("Shutting down Thread: " + m_threadNumber);
}
}
/**
 * Starts consuming {@code topics} with a single stream, printing each message
 * payload until the consumer is shut down.
 */
public void start() {
  Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
  topicCountMap.put(topics, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> stream = consumerMap.get(topics).get(0);
  ConsumerIterator<byte[], byte[]> it = stream.iterator();
  while (it.hasNext()) {
    // Bug fix: message() returns byte[]; printing it directly emits the array's
    // identity hash (e.g. "[B@1f2a..."), not the payload. Decode to String first.
    System.out.println(new String(it.next().message()));
  }
}
/**
 * Emits at most one Kafka message per call as a single-field tuple.
 * Returns without emitting when no message is currently available.
 */
public void nextTuple() {
  if (!kafkaIterator.hasNext()) {
    return;
  }
  List<Object> tuple = new ArrayList<Object>();
  tuple.add(kafkaIterator.next().message());
  _collector.emit(tuple);
}