consumer.subscribe(Collections.singleton("the_topic"));
@Override
public KafkaConsumer<K, V> subscribe(Set<String> topics, Handler<AsyncResult<Void>> completionHandler) {
  this.stream.subscribe(topics, completionHandler);
  return this;
}
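// A minimal usage sketch (an illustration, not taken from the test sources): subscribing through
// the wrapper above with a completion handler and a per-record handler. The "my-topic" name and
// the exampleConfig map are assumptions made for this example only.
KafkaConsumer<String, String> exampleConsumer = KafkaConsumer.create(vertx, exampleConfig);
exampleConsumer.handler(record ->
  System.out.println("key=" + record.key() + ", value=" + record.value()));
exampleConsumer.subscribe(Collections.singleton("my-topic"), ar -> {
  if (ar.succeeded()) {
    System.out.println("subscribed");
  } else {
    ar.cause().printStackTrace();
  }
});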
consumerProps.put("auto.offset.reset", "earliest"); KafkaReadStream<String, String> consumer = KafkaReadStream.create(vertx, consumerProps); consumer.subscribe(Collections.singleton(topic)); AtomicInteger received = new AtomicInteger();
@Test
public void testPollExceptionHandler(TestContext ctx) throws Exception {
  Properties config = kafkaCluster.useTo().getConsumerProperties("someRandomGroup", "someRandomClientID", OffsetResetStrategy.EARLIEST);
  config.remove("group.id");
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumer = createConsumer(vertx, config);
  Async done = ctx.async();
  // Subscribing without a group.id makes the underlying poll fail; the failure must surface through the exception handler
  consumer.exceptionHandler(ex -> {
    ctx.assertTrue(ex instanceof InvalidGroupIdException);
    done.complete();
  });
  consumer.subscribe(Collections.singleton("someTopic")).handler(System.out::println);
}
@Test
public void testConsume(TestContext ctx) throws Exception {
  final String topicName = "testConsume";
  String consumerId = topicName;
  Async batch = ctx.async();
  AtomicInteger index = new AtomicInteger();
  int numMessages = 1000;
  // Produce the messages the consumer is expected to read back
  kafkaCluster.useTo().produceStrings(numMessages, batch::complete,
    () -> new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
  batch.awaitSuccess(20000);
  Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumer = createConsumer(vertx, config);
  Async done = ctx.async();
  AtomicInteger count = new AtomicInteger(numMessages);
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(rec -> {
    if (count.decrementAndGet() == 0) {
      done.complete();
    }
  });
  consumer.subscribe(Collections.singleton(topicName));
}
@Test
public void testStreamWithHeader(TestContext ctx) {
  int numMessages = 1000;
  String topicName = "testStreamWithHeader";
  Properties config = setupConsumeWithHeaders(ctx, numMessages, topicName);
  consumer = createConsumer(vertx, config);
  Async done = ctx.async();
  AtomicInteger count = new AtomicInteger(numMessages);
  AtomicInteger headerIndex = new AtomicInteger();
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(rec -> {
    Header[] headers = rec.headers().toArray();
    ctx.assertEquals(1, headers.length);
    Header header = headers[0];
    ctx.assertEquals("header_key" + headerIndex.get(), header.key());
    ctx.assertEquals("header_value" + headerIndex.getAndIncrement(), new String(header.value()));
    if (count.decrementAndGet() == 0) {
      done.complete();
    }
  });
  consumer.subscribe(Collections.singleton(topicName));
}
@Test
public void testListTopics(TestContext ctx) throws Exception {
  String topicName = "testListTopics";
  String consumerId = topicName;
  kafkaCluster.createTopic(topicName, 1, 1);
  Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  Context context = vertx.getOrCreateContext();
  consumer = createConsumer(context, config);
  Async done = ctx.async();
  consumer.handler(record -> {
    // no need for handling incoming records in this test
  });
  // listTopics should report the subscribed topic once the subscription has completed
  consumer.subscribe(Collections.singleton(topicName), asyncResult -> {
    if (asyncResult.succeeded()) {
      consumer.listTopics(asyncResult1 -> {
        if (asyncResult1.succeeded()) {
          ctx.assertTrue(asyncResult1.result().containsKey(topicName));
          done.complete();
        } else {
          ctx.fail();
        }
      });
    } else {
      ctx.fail();
    }
  });
}
consumer.subscribe(Collections.singleton(topic));
@Test
public void testSubscription(TestContext ctx) throws Exception {
  String topicName = "testSubscription";
  String consumerId = topicName;
  kafkaCluster.createTopic(topicName, 1, 1);
  Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  Context context = vertx.getOrCreateContext();
  consumer = createConsumer(context, config);
  Async done = ctx.async();
  consumer.handler(record -> {
    // no need for handling incoming records in this test
  });
  consumer.subscribe(Collections.singleton(topicName), asyncResult -> {
    if (asyncResult.succeeded()) {
      consumer.subscription(asyncResult1 -> {
        if (asyncResult1.succeeded()) {
          ctx.assertTrue(asyncResult1.result().contains(topicName));
          done.complete();
        } else {
          ctx.fail();
        }
      });
    } else {
      ctx.fail();
    }
  });
}
@Test
public void testBatchHandler(TestContext ctx) throws Exception {
  String topicName = "testBatchHandler";
  String consumerId = topicName;
  Async batch1 = ctx.async();
  AtomicInteger index = new AtomicInteger();
  int numMessages = 500;
  kafkaCluster.useTo().produceStrings(numMessages, batch1::complete,
    () -> new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
  batch1.awaitSuccess(10000);
  Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  Context context = vertx.getOrCreateContext();
  consumer = createConsumer(context, config);
  Async batchHandler = ctx.async();
  // The batch handler receives the whole set of records returned by a single poll
  consumer.batchHandler(records -> {
    ctx.assertEquals(numMessages, records.count());
    batchHandler.complete();
  });
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(rec -> {});
  consumer.subscribe(Collections.singleton(topicName));
}
@Test
public void testBatch(TestContext ctx) throws Exception {
  int num = 50;
  MockConsumer<String, String> mock = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
  KafkaReadStream<String, String> consumer = createConsumer(vertx, mock);
  Async doneLatch = ctx.async();
  AtomicInteger count = new AtomicInteger();
  consumer.handler(record -> {
    int val = count.getAndIncrement();
    if (val < num) {
      ctx.assertEquals("the_topic", record.topic());
      ctx.assertEquals(0, record.partition());
      ctx.assertEquals("key-" + val, record.key());
      ctx.assertEquals("value-" + val, record.value());
      if (val == num - 1) {
        consumer.close(v -> doneLatch.complete());
      }
    }
  });
  consumer.subscribe(Collections.singleton("the_topic"), v -> {
    mock.schedulePollTask(() -> {
      mock.rebalance(Collections.singletonList(new TopicPartition("the_topic", 0)));
      mock.seek(new TopicPartition("the_topic", 0), 0);
      for (int i = 0; i < num; i++) {
        mock.addRecord(new ConsumerRecord<>("the_topic", 0, i, "key-" + i, "value-" + i));
      }
    });
  });
}
@Test
public void testConsume(TestContext ctx) throws Exception {
  MockConsumer<String, String> mock = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
  KafkaReadStream<String, String> consumer = createConsumer(vertx, mock);
  Async doneLatch = ctx.async();
  consumer.handler(record -> {
    ctx.assertEquals("the_topic", record.topic());
    ctx.assertEquals(0, record.partition());
    ctx.assertEquals("abc", record.key());
    ctx.assertEquals("def", record.value());
    consumer.close(v -> doneLatch.complete());
  });
  consumer.subscribe(Collections.singleton("the_topic"), v -> {
    mock.schedulePollTask(() -> {
      mock.rebalance(Collections.singletonList(new TopicPartition("the_topic", 0)));
      mock.addRecord(new ConsumerRecord<>("the_topic", 0, 0L, "abc", "def"));
      mock.seek(new TopicPartition("the_topic", 0), 0L);
    });
  });
}
@Test
public void testConsumeWithHeader(TestContext ctx) {
  MockConsumer<String, String> mock = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
  KafkaReadStream<String, String> consumer = createConsumer(vertx, mock);
  Async doneLatch = ctx.async();
  consumer.handler(record -> {
    ctx.assertEquals("the_topic", record.topic());
    ctx.assertEquals(0, record.partition());
    ctx.assertEquals("abc", record.key());
    ctx.assertEquals("def", record.value());
    Header[] headers = record.headers().toArray();
    ctx.assertEquals(1, headers.length);
    Header header = headers[0];
    ctx.assertEquals("header_key", header.key());
    ctx.assertEquals("header_value", new String(header.value()));
    consumer.close(v -> doneLatch.complete());
  });
  consumer.subscribe(Collections.singleton("the_topic"), v -> {
    mock.schedulePollTask(() -> {
      mock.rebalance(Collections.singletonList(new TopicPartition("the_topic", 0)));
      mock.addRecord(new ConsumerRecord<>("the_topic", 0, 0L, 0L, TimestampType.NO_TIMESTAMP_TYPE, 0L, 0, 0, "abc", "def",
        new RecordHeaders(Collections.singletonList(new RecordHeader("header_key", "header_value".getBytes())))));
      mock.seek(new TopicPartition("the_topic", 0), 0L);
    });
  });
}