@Test
public void removeKeysOver6ReturnsSame() {
    // Growing the six-entry context by one more pair must yield a new, larger instance.
    Context expanded = c.put(100, 200);
    assertThat(expanded)
            .isNotSameAs(c)
            .has(size(7));
    // Removing key 6 from the seven-entry context drops exactly that entry and
    // produces a fresh instance distinct from both predecessors.
    Context afterDelete = expanded.delete(6);
    assertThat(afterDelete)
            .has(size(6))
            .isNotSameAs(c)
            .isNotSameAs(expanded)
            .has(key(1))
            .has(key(2))
            .has(key(3))
            .has(key(4))
            .has(key(5))
            .has(key(100))
            .doesNotHave(key(6));
}
.as("delete(1)") .isInstanceOf(Context4.class) .has(keyValue(2, "B")) .has(keyValue(3, "C")) .has(keyValue(4, "D")) .has(keyValue(5, "E")) .doesNotHave(key(1)); .as("delete(2)") .isInstanceOf(Context4.class) .has(keyValue(1, "A")) .has(keyValue(3, "C")) .has(keyValue(4, "D")) .has(keyValue(5, "E")) .doesNotHave(key(2)); .as("delete(3)") .isInstanceOf(Context4.class) .has(keyValue(1, "A")) .has(keyValue(2, "B")) .has(keyValue(4, "D")) .has(keyValue(5, "E")) .doesNotHave(key(3)); .as("delete(4)") .isInstanceOf(Context4.class) .has(keyValue(1, "A")) .has(keyValue(2, "B")) .has(keyValue(3, "C"))
.as("delete(1)") .isInstanceOf(Context3.class) .has(keyValue(2, "B")) .has(keyValue(3, "C")) .has(keyValue(4, "D")) .doesNotHave(key(1)); .as("delete(2)") .isInstanceOf(Context3.class) .has(keyValue(1, "A")) .has(keyValue(3, "C")) .has(keyValue(4, "D")) .doesNotHave(key(2)); .as("delete(3)") .isInstanceOf(Context3.class) .has(keyValue(1, "A")) .has(keyValue(2, "B")) .has(keyValue(4, "D")) .doesNotHave(key(3)); .as("delete(4)") .isInstanceOf(Context3.class) .has(keyValue(1, "A")) .has(keyValue(2, "B")) .has(keyValue(3, "C")) .doesNotHave(key(4));
.as("delete(1)") .isInstanceOf(Context5.class) .has(keyValue(2, "B")) .has(keyValue(3, "C")) .has(keyValue(4, "D")) .has(keyValue(5, "E")) .has(keyValue(6, "F")) .doesNotHave(key(1)); .as("delete(2)") .isInstanceOf(Context5.class) .has(keyValue(1, "A")) .has(keyValue(3, "C")) .has(keyValue(4, "D")) .has(keyValue(5, "E")) .has(keyValue(6, "F")) .doesNotHave(key(2)); .as("delete(3)") .isInstanceOf(Context5.class) .has(keyValue(1, "A")) .has(keyValue(2, "B")) .has(keyValue(4, "D")) .has(keyValue(5, "E")) .has(keyValue(6, "F")) .doesNotHave(key(3)); .as("delete(4)") .isInstanceOf(Context5.class)
@Test
public void removeKeys() {
    // Removing any of the three present keys yields a Context2 holding the other two pairs.
    Context without1 = c.delete(1);
    assertThat(without1)
            .as("delete(1)")
            .isInstanceOf(Context2.class)
            .has(keyValue(2, "B"))
            .has(keyValue(3, "C"))
            .doesNotHave(key(1));
    Context without2 = c.delete(2);
    assertThat(without2)
            .as("delete(2)")
            .isInstanceOf(Context2.class)
            .has(keyValue(1, "A"))
            .has(keyValue(3, "C"))
            .doesNotHave(key(2));
    Context without3 = c.delete(3);
    assertThat(without3)
            .as("delete(3)")
            .isInstanceOf(Context2.class)
            .has(keyValue(1, "A"))
            .has(keyValue(2, "B"))
            .doesNotHave(key(3));
    // Deleting a key that is not present is a no-op returning the identical instance.
    assertThat(c.delete(4)).isSameAs(c);
}
@Test
public void removeKeys() {
    // Either present key can be removed, leaving a single-entry Context1 with the other pair.
    Context without1 = c.delete(1);
    assertThat(without1)
            .as("delete(1)")
            .isInstanceOf(Context1.class)
            .has(keyValue(2, "B"))
            .doesNotHave(key(1));
    Context without2 = c.delete(2);
    assertThat(without2)
            .as("delete(2)")
            .isInstanceOf(Context1.class)
            .has(keyValue(1, "A"))
            .doesNotHave(key(2));
    // Deleting an absent key is a no-op returning the identical instance.
    assertThat(c.delete(3)).isSameAs(c);
}
@Test
public void testTemplateWithTimestamps() throws Exception {
    // Build an auto-flushing template so records are visible to the consumer immediately.
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> producerFactory = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(producerFactory, true);
    template.setDefaultTopic(INT_KEY_TOPIC);

    // An explicit timestamp passed to sendDefault must survive the round trip.
    template.sendDefault(0, 1487694048607L, null, "foo-ts1");
    ConsumerRecord<Integer, String> firstRecord = KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC);
    assertThat(firstRecord).has(value("foo-ts1"));
    assertThat(firstRecord).has(timestamp(1487694048607L));

    // Same guarantee for the explicit-topic send variant.
    template.send(INT_KEY_TOPIC, 0, 1487694048610L, null, "foo-ts2");
    ConsumerRecord<Integer, String> secondRecord = KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC);
    assertThat(secondRecord).has(value("foo-ts2"));
    assertThat(secondRecord).has(timestamp(1487694048610L));

    // Producer metrics are reachable both through execute(...) and the convenience accessor.
    Map<MetricName, ? extends Metric> metrics = template.execute(Producer::metrics);
    assertThat(metrics).isNotNull();
    metrics = template.metrics();
    assertThat(metrics).isNotNull();

    // The embedded test topic is expected to have two partitions.
    List<PartitionInfo> partitions = template.partitionsFor(INT_KEY_TOPIC);
    assertThat(partitions).isNotNull();
    assertThat(partitions).hasSize(2);

    producerFactory.destroy();
}
assertThat(KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC)).has(value("foo")); assertThat(received).has(key(2)); assertThat(received).has(partition(0)); assertThat(received).has(value("bar")); assertThat(received).has(key(2)); assertThat(received).has(partition(0)); assertThat(received).has(value("baz")); assertThat(received).has(key((Integer) null)); assertThat(received).has(partition(0)); assertThat(received).has(value("qux")); .build()); received = KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC); assertThat(received).has(key(2)); assertThat(received).has(partition(0)); assertThat(received).has(value("fiz")); .build()); received = KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC); assertThat(received).has(key(2)); assertThat(received).has(partition(0)); assertThat(received).has(value("buz"));
@Test
public void testWithCallback() throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic(INT_KEY_TOPIC);
    ListenableFuture<SendResult<Integer, String>> future = template.sendDefault("foo");
    template.flush();
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<SendResult<Integer, String>> theResult = new AtomicReference<>();
    future.addCallback(new ListenableFutureCallback<SendResult<Integer, String>>() {

        @Override
        public void onSuccess(SendResult<Integer, String> result) {
            theResult.set(result);
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable ex) {
            // Intentionally empty: on failure the latch never counts down,
            // so the await assertion below fails the test.
        }

    });
    assertThat(KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC)).has(value("foo"));
    assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
    // Fix: the captured result was stored but never verified — assert the success
    // callback actually delivered a non-null SendResult.
    assertThat(theResult.get()).isNotNull();
    pf.createProducer().close();
}
Iterator<ConsumerRecord<String, String>> iterator = records.iterator(); ConsumerRecord<String, String> record = iterator.next(); assertThat(record).has(Assertions.<ConsumerRecord<String, String>>allOf(key("foo"), value("bar"))); if (!iterator.hasNext()) { records = KafkaTestUtils.getRecords(consumer); assertThat(record).has(Assertions.<ConsumerRecord<String, String>>allOf(key("baz"), value("qux"))); consumer.close(); assertThat(KafkaTestUtils.getPropertyValue(pf, "cache", BlockingQueue.class).size()).isEqualTo(1);
Iterator<ConsumerRecord<String, String>> iterator = records.iterator(); ConsumerRecord<String, String> record = iterator.next(); assertThat(record).has(Assertions.<ConsumerRecord<String, String>>allOf(key("foo"), value("bar"))); if (!iterator.hasNext()) { records = KafkaTestUtils.getRecords(consumer); assertThat(record).has(Assertions.<ConsumerRecord<String, String>>allOf(key("baz"), value("qux"))); consumer.close(); assertThat(KafkaTestUtils.getPropertyValue(pf, "cache", BlockingQueue.class).size()).isEqualTo(1);
@Test
public void testTemplateDisambiguation() throws Exception {
    // Producer side: String keys via an explicitly configured key serializer.
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>(senderProps);
    producerFactory.setKeySerializer(new StringSerializer());
    KafkaTemplate<String, String> template = new KafkaTemplate<>(producerFactory, true);
    template.setDefaultTopic(STRING_KEY_TOPIC);

    // Consumer side: matching String key deserializer, reading from the earliest offset.
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("testTString", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumerFactory.setKeyDeserializer(new StringDeserializer());
    Consumer<String, String> stringConsumer = consumerFactory.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(stringConsumer, STRING_KEY_TOPIC);

    // Round-trip one record and verify both key and value arrive intact.
    template.sendDefault("foo", "bar");
    template.flush();
    ConsumerRecord<String, String> received = KafkaTestUtils.getSingleRecord(stringConsumer, STRING_KEY_TOPIC);
    assertThat(received).has(Assertions.<ConsumerRecord<String, String>>allOf(key("foo"), value("bar")));

    stringConsumer.close();
    producerFactory.createProducer().close();
    producerFactory.destroy();
}
@Test
void correctLoading() {
    // First service yields a supplier that resolves to the known listener l.
    final ListenerService withListener = mock(ListenerService.class);
    final EventListenerSupplier<RoboZonkyStartingEvent> supplier = () -> Optional.of(l);
    doAnswer(i -> Stream.of(supplier)).when(withListener).findListeners(eq(RoboZonkyStartingEvent.class));
    // Second service yields a supplier that resolves to nothing.
    final ListenerService withoutListener = mock(ListenerService.class);
    doAnswer(i -> Stream.of((EventListenerSupplier<RoboZonkyStartingEvent>) Optional::empty))
            .when(withoutListener).findListeners(eq(RoboZonkyStartingEvent.class));
    final Iterable<ListenerService> services = () -> Arrays.asList(withListener, withoutListener).iterator();
    final List<EventListenerSupplier<RoboZonkyStartingEvent>> loaded =
            ListenerServiceLoader.load(RoboZonkyStartingEvent.class, services);
    // One supplier per service, preserving registration order:
    // the present listener comes first, the absent one last.
    assertThat(loaded).hasSize(2);
    assertThat(loaded)
            .first()
            .has(new Condition<>(result -> result.get().isPresent() && Objects.equals(result.get().get(), l),
                    "Exists"));
    assertThat(loaded)
            .last()
            .has(new Condition<>(result -> !result.get().isPresent(), "Does not exist"));
}
@Test
public void testOutboundWithTimestampExpression() throws Exception {
    DefaultKafkaProducerFactory<Integer, String> producerFactory = new DefaultKafkaProducerFactory<>(
            KafkaTestUtils.producerProps(embeddedKafka));
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(producerFactory);
    KafkaProducerMessageHandler<Integer, String> handler = new KafkaProducerMessageHandler<>(template);
    handler.setBeanFactory(mock(BeanFactory.class));
    handler.afterPropertiesSet();
    Message<?> message = MessageBuilder.withPayload("foo")
            .setHeader(KafkaHeaders.TOPIC, topic3)
            .setHeader(KafkaHeaders.MESSAGE_KEY, 2)
            .setHeader(KafkaHeaders.PARTITION_ID, 1)
            .build();

    // A constant timestamp expression must be stamped verbatim onto the outgoing record.
    handler.setTimestampExpression(new ValueExpression<>(1487694048633L));
    handler.handleMessage(message);
    ConsumerRecord<Integer, String> firstRecord = KafkaTestUtils.getSingleRecord(consumer, topic3);
    assertThat(firstRecord).has(key(2));
    assertThat(firstRecord).has(partition(1));
    assertThat(firstRecord).has(value("foo"));
    assertThat(firstRecord).has(timestamp(1487694048633L));

    // A dynamic expression is evaluated per message: the resulting record timestamp
    // cannot precede the marker captured just before sending.
    Long beforeSend = System.currentTimeMillis();
    handler.setTimestampExpression(new FunctionExpression<Message<?>>(m -> System.currentTimeMillis()));
    handler.handleMessage(message);
    ConsumerRecord<Integer, String> secondRecord = KafkaTestUtils.getSingleRecord(consumer, topic3);
    assertThat(secondRecord).has(key(2));
    assertThat(secondRecord).has(partition(1));
    assertThat(secondRecord).has(value("foo"));
    assertThat(secondRecord.timestamp()).isGreaterThanOrEqualTo(beforeSend);

    producerFactory.destroy();
}
assertThat(receive1.getPayload()).isEqualTo("1".getBytes()); assertThat(receive2.getPayload()).isEqualTo("2".getBytes()); assertThat(receive2).has(correlationHeadersForPayload2);
@Test
public void testOutboundWithTimestamp() throws Exception {
    DefaultKafkaProducerFactory<Integer, String> producerFactory = new DefaultKafkaProducerFactory<>(
            KafkaTestUtils.producerProps(embeddedKafka));
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(producerFactory);
    KafkaProducerMessageHandler<Integer, String> handler = new KafkaProducerMessageHandler<>(template);
    handler.setBeanFactory(mock(BeanFactory.class));
    handler.afterPropertiesSet();

    // Topic, key, partition, timestamp and a custom header all travel as message headers.
    Message<?> message = MessageBuilder.withPayload("foo")
            .setHeader(KafkaHeaders.TOPIC, topic2)
            .setHeader(KafkaHeaders.MESSAGE_KEY, 2)
            .setHeader(KafkaHeaders.PARTITION_ID, 1)
            .setHeader(KafkaHeaders.TIMESTAMP, 1487694048607L)
            .setHeader("baz", "qux")
            .build();
    handler.handleMessage(message);

    ConsumerRecord<Integer, String> received = KafkaTestUtils.getSingleRecord(consumer, topic2);
    assertThat(received).has(key(2));
    assertThat(received).has(partition(1));
    assertThat(received).has(value("foo"));
    assertThat(received).has(timestamp(1487694048607L));

    // Exactly one header maps back out of the record: the custom "baz" header.
    Map<String, Object> mappedHeaders = new HashMap<>();
    new DefaultKafkaHeaderMapper().toHeaders(received.headers(), mappedHeaders);
    assertThat(mappedHeaders.size()).isEqualTo(1);
    assertThat(mappedHeaders.get("baz")).isEqualTo("qux");

    producerFactory.destroy();
}
assertThat(record).has(key(2)); assertThat(record).has(partition(1)); assertThat(record).has(value("foo")); handler.handleMessage(message); record = KafkaTestUtils.getSingleRecord(consumer, topic1); assertThat(record).has(key((Integer) null)); assertThat(record).has(partition(0)); assertThat(record).has(value("bar")); handler.handleMessage(message); record = KafkaTestUtils.getSingleRecord(consumer, topic1); assertThat(record).has(key((Integer) null)); assertThat(record).has(value("baz")); assertThat(record).has(key(2)); assertThat(record).has(partition(1)); assertThat(record.value()).isNull();
@Test
public void fuzzyLevel() {
    final String searchWord = "abcdfgh";   // one character short of the product name below
    final String productName = "abcdefgh";
    withProduct(client(), builder -> builder.name(ofEnglish(productName)), product -> {
        final ProductProjectionSearch search = ProductProjectionSearch.ofStaged()
                .withText(ENGLISH, searchWord)
                .withFuzzy(true);
        // The two strings differ by a single character, so only fuzzy levels >= 1 can match.
        assertEventually(() -> {
            softAssert(s -> {
                s.assertThat(client().executeBlocking(search.withFuzzyLevel(0)))
                        .as("level 0 matches not")
                        .doesNotHave(product());
                s.assertThat(client().executeBlocking(search.withFuzzyLevel(1)))
                        .as("level 1 matches")
                        .has(product());
                s.assertThat(client().executeBlocking(search.withFuzzyLevel(2)))
                        .as("level 2 matches")
                        .has(product());
            });
        });
    });
}