/**
 * Replaces {@code this.producer} with a fresh {@link MockProducer} that uses
 * mock serializers for both key and value.
 *
 * @param autoComplete whether sends complete immediately (true) or must be
 *                     completed manually via completeNext()/errorNext()
 */
private void buildMockProducer(boolean autoComplete) {
    MockSerializer keySerializer = new MockSerializer();
    MockSerializer valueSerializer = new MockSerializer();
    this.producer = new MockProducer<>(autoComplete, keySerializer, valueSerializer);
}
@Test public void testSimple() { MockProducer<String, String> producer = new MockProducer<>(Cluster.empty(), false, null, null, null); KafkaBolt<String, String> bolt = makeBolt(producer); OutputCollector collector = mock(OutputCollector.class); TopologyContext context = mock(TopologyContext.class); Map<String, Object> conf = new HashMap<>(); bolt.prepare(conf, context, collector); String key = "KEY"; String value = "VALUE"; Tuple testTuple = createTestTuple(key, value); bolt.execute(testTuple); assertThat(producer.history().size(), is(1)); ProducerRecord<String, String> arg = producer.history().get(0); LOG.info("GOT {} ->", arg); LOG.info("{}, {}, {}", arg.topic(), arg.key(), arg.value()); assertThat(arg.topic(), is("MY_TOPIC")); assertThat(arg.key(), is(key)); assertThat(arg.value(), is(value)); // Complete the send producer.completeNext(); verify(collector).ack(testTuple); }
/**
 * Verifies that MockProducer routes a record through the configured
 * DefaultPartitioner (key "key" hashes to partition 1 of 2) and that
 * {@code clear()} wipes the send history.
 */
@Test
public void testPartitioner() throws Exception {
    PartitionInfo p0 = new PartitionInfo(topic, 0, null, null, null);
    PartitionInfo p1 = new PartitionInfo(topic, 1, null, null, null);
    Cluster cluster = new Cluster(null, new ArrayList<Node>(0), asList(p0, p1),
            Collections.<String>emptySet(), Collections.<String>emptySet());
    MockProducer<String, String> producer = new MockProducer<>(cluster, true,
            new DefaultPartitioner(), new StringSerializer(), new StringSerializer());

    Future<RecordMetadata> metadata =
            producer.send(new ProducerRecord<>(topic, "key", "value"));
    assertEquals("Partition should be correct", 1, metadata.get().partition());

    producer.clear();
    assertEquals("Clear should erase our history", 0, producer.history().size());
    producer.close();
}
@Test public void testSimpleWithError() { MockProducer<String, String> producer = new MockProducer<>(Cluster.empty(), false, null, null, null); KafkaBolt<String, String> bolt = makeBolt(producer); OutputCollector collector = mock(OutputCollector.class); TopologyContext context = mock(TopologyContext.class); Map<String, Object> conf = new HashMap<>(); bolt.prepare(conf, context, collector); String key = "KEY"; String value = "VALUE"; Tuple testTuple = createTestTuple(key, value); bolt.execute(testTuple); assertThat(producer.history().size(), is(1)); ProducerRecord<String, String> arg = producer.history().get(0); LOG.info("GOT {} ->", arg); LOG.info("{}, {}, {}", arg.topic(), arg.key(), arg.value()); assertThat(arg.topic(), is("MY_TOPIC")); assertThat(arg.key(), is(key)); assertThat(arg.value(), is(value)); // Force a send error KafkaException ex = new KafkaException(); producer.errorNext(ex); verify(collector).reportError(ex); verify(collector).fail(testTuple); }
// NOTE(review): this test method is truncated in this chunk of the file — only
// the producer/bolt setup is visible and the method body (and its closing
// brace) continues beyond the visible source. Code left byte-identical.
@Test public void testCustomCallbackIsWrappedByDefaultCallbackBehavior() { MockProducer<String, String> producer = new MockProducer<>(Cluster.empty(), false, null, null, null); KafkaBolt<String, String> bolt = makeBolt(producer);
/**
 * Sends inside a Spring-managed (resourceless) transaction and verifies the
 * record is published, the Kafka transaction is committed on synchronization,
 * and the producer is closed afterwards.
 */
@Test
public void testTransactionSynchronization() {
    MockProducer<String, String> producer = new MockProducer<>();
    producer.initTransactions();

    @SuppressWarnings("unchecked")
    ProducerFactory<String, String> pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);

    KafkaTemplate<String, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(STRING_KEY_TOPIC);

    ResourcelessTransactionManager txManager = new ResourcelessTransactionManager();
    new TransactionTemplate(txManager).execute(status -> {
        template.sendDefault("foo", "bar");
        return null;
    });

    assertThat(producer.history())
            .containsExactly(new ProducerRecord<>(STRING_KEY_TOPIC, "foo", "bar"));
    assertThat(producer.transactionCommitted()).isTrue();
    assertThat(producer.closed()).isTrue();
}
/**
 * When the producer is already fenced before the transaction begins,
 * executeInTransaction must propagate ProducerFencedException, commit/abort
 * must never run, and the producer must still be closed.
 */
@Test
public void testFencedOnBegin() {
    MockProducer<String, String> producer = spy(new MockProducer<>());
    producer.initTransactions();
    producer.fenceProducer();

    @SuppressWarnings("unchecked")
    ProducerFactory<String, String> pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);

    KafkaTemplate<String, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(STRING_KEY_TOPIC);

    assertThatThrownBy(() -> template.executeInTransaction(t -> {
        return null;
    })).isInstanceOf(ProducerFencedException.class);

    assertThat(producer.transactionCommitted()).isFalse();
    assertThat(producer.transactionAborted()).isFalse();
    assertThat(producer.closed()).isTrue();
    verify(producer, never()).commitTransaction();
}
/**
 * When the transactional callback throws, the template must abort (not
 * commit) the Kafka transaction, rethrow the exception, and close the
 * producer.
 */
@Test
public void testAbort() {
    MockProducer<String, String> producer = spy(new MockProducer<>());
    producer.initTransactions();

    @SuppressWarnings("unchecked")
    ProducerFactory<String, String> pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);

    KafkaTemplate<String, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(STRING_KEY_TOPIC);

    // BUG FIX: the original chained .withFailMessage("foo"), which only
    // overrides the assertion-failure message and asserts nothing about the
    // thrown exception. Use hasMessage to actually verify the message.
    assertThatThrownBy(() -> template.executeInTransaction(t -> {
        throw new RuntimeException("foo");
    })).isExactlyInstanceOf(RuntimeException.class).hasMessage("foo");

    assertThat(producer.transactionCommitted()).isFalse();
    assertThat(producer.transactionAborted()).isTrue();
    assertThat(producer.closed()).isTrue();
    verify(producer, never()).commitTransaction();
}
/**
 * If the producer becomes fenced during the callback, the subsequent commit
 * fails with ProducerFencedException; the template must NOT attempt an abort
 * after the failed commit, and must still close the producer.
 */
@Test
public void testNoAbortAfterCommitFailure() {
    MockProducer<String, String> producer = spy(new MockProducer<>());
    producer.initTransactions();

    @SuppressWarnings("unchecked")
    ProducerFactory<String, String> pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);

    KafkaTemplate<String, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(STRING_KEY_TOPIC);

    assertThatThrownBy(() -> template.executeInTransaction(t -> {
        // Fence inside the callback so the commit that follows it fails.
        producer.fenceProducer();
        return null;
    })).isInstanceOf(ProducerFencedException.class);

    assertThat(producer.transactionCommitted()).isFalse();
    assertThat(producer.transactionAborted()).isFalse();
    assertThat(producer.closed()).isTrue();
    verify(producer, never()).abortTransaction();
}
@Test public void testTransactionSynchronizationExceptionOnCommit() { MockProducer<String, String> producer = new MockProducer<>(); producer.initTransactions(); @SuppressWarnings("unchecked") ProducerFactory<String, String> pf = mock(ProducerFactory.class); given(pf.transactionCapable()).willReturn(true); given(pf.createProducer()).willReturn(producer); KafkaTemplate<String, String> template = new KafkaTemplate<>(pf); template.setDefaultTopic(STRING_KEY_TOPIC); ResourcelessTransactionManager tm = new ResourcelessTransactionManager(); new TransactionTemplate(tm).execute(s -> { template.sendDefault("foo", "bar"); // Mark the mock producer as fenced so it throws when committing the transaction producer.fenceProducer(); return null; }); assertThat(producer.transactionCommitted()).isFalse(); assertThat(producer.closed()).isTrue(); }
@Test public void shouldSendDataToKafkaUsingElasticsearchJsonFormat() throws IOException { // given Page target = new Page(new URL(url), html, responseHeaders); target.setCrawlerId("mycrawler"); target.setTargetRelevance(TargetRelevance.RELEVANT); String topicName = "ache-data-topic"; StringSerializer ss = new StringSerializer(); MockProducer<String, String> producer = new MockProducer<>(true, ss, ss); KafkaConfig.Format format = KafkaConfig.Format.ELASTIC; KafkaTargetRepository repository = new KafkaTargetRepository(producer, topicName, format); // when repository.insert(target); repository.close(); // then List<ProducerRecord<String, String>> history = producer.history(); TargetModelElasticSearch page = mapper.readValue(history.get(0).value(), TargetModelElasticSearch.class); assertThat(page.getHtml(), is(html)); assertThat(page.getUrl(), is(url)); assertThat(page.getResponseHeaders().get("content-type").get(0), is("text/html")); assertThat(page.getCrawlerId(), is("mycrawler")); }
@Test public void shouldSendDataToKafkaUsingCDR31() throws IOException { // given Page target = new Page(new URL(url), html, responseHeaders); target.setCrawlerId("mycrawler"); target.setTargetRelevance(TargetRelevance.RELEVANT); String topicName = "ache-data-topic"; StringSerializer ss = new StringSerializer(); MockProducer<String, String> producer = new MockProducer<>(true, ss, ss); KafkaConfig.Format format = KafkaConfig.Format.CDR31; KafkaTargetRepository repository = new KafkaTargetRepository(producer, topicName, format); // when repository.insert(target); repository.close(); // then List<ProducerRecord<String, String>> history = producer.history(); CDR31Document page = mapper.readValue(history.get(0).value(), CDR31Document.class); assertThat(page.getRawContent(), is(html)); assertThat(page.getUrl(), is(url)); assertThat(page.getResponseHeaders().get("content-type"), is("text/html")); assertThat(page.getCrawler(), is("mycrawler")); }
@Test public void shouldSendDataToKafka() throws IOException { // given Page target = new Page(new URL(url), html, responseHeaders); target.setCrawlerId("mycrawler"); target.setTargetRelevance(TargetRelevance.RELEVANT); String topicName = "ache-data-topic"; StringSerializer ss = new StringSerializer(); MockProducer<String, String> producer = new MockProducer<>(true, ss, ss); KafkaConfig.Format format = KafkaConfig.Format.JSON; KafkaTargetRepository repository = new KafkaTargetRepository(producer, topicName, format); // when repository.insert(target); repository.close(); // then List<ProducerRecord<String, String>> history = producer.history(); TargetModelJson page = mapper.readValue(history.get(0).value(), TargetModelJson.class); assertThat(page.getContentAsString(), is(html)); assertThat(page.getUrl(), is(url)); assertThat(page.getResponseHeaders().get("content-type").get(0), is("text/html")); assertThat(page.getRelevance().isRelevant(), is(TargetRelevance.RELEVANT.isRelevant())); assertThat(page.getRelevance().getRelevance(), is(TargetRelevance.RELEVANT.getRelevance())); assertThat(page.getCrawlerId(), is("mycrawler")); }
/**
 * Executes a work item carrying Topic/Key/Value parameters through the
 * KafkaWorkItemHandler backed by a MockProducer, and verifies the manager
 * received a (non-null) completion result.
 */
@Test
public void testSendMessage() throws Exception {
    TestWorkItemManager manager = new TestWorkItemManager();
    WorkItemImpl workItem = new WorkItemImpl();
    workItem.setParameter("Topic", "myTopic");
    workItem.setParameter("Key", "1");
    workItem.setParameter("Value", "Sample");
    // FIX: use the diamond operator instead of the raw MockProducer type,
    // which triggered an unchecked-conversion warning; the producer is now
    // properly typed as Producer<Long, String>.
    Producer<Long, String> mockProducer = new MockProducer<>();
    KafkaWorkItemHandler handler = new KafkaWorkItemHandler(mockProducer);
    handler.executeWorkItem(workItem, manager);
    assertNotNull(manager.getResults());
}
}