@Override public KafkaConsumer<K, V> partitionsFor(String topic, Handler<AsyncResult<List<PartitionInfo>>> handler) { this.stream.partitionsFor(topic, done -> { if (done.succeeded()) { // TODO: use Helper class and stream approach List<PartitionInfo> partitions = new ArrayList<>(); for (org.apache.kafka.common.PartitionInfo kafkaPartitionInfo: done.result()) { PartitionInfo partitionInfo = new PartitionInfo(); partitionInfo .setInSyncReplicas( Stream.of(kafkaPartitionInfo.inSyncReplicas()).map(Helper::from).collect(Collectors.toList())) .setLeader(Helper.from(kafkaPartitionInfo.leader())) .setPartition(kafkaPartitionInfo.partition()) .setReplicas( Stream.of(kafkaPartitionInfo.replicas()).map(Helper::from).collect(Collectors.toList())) .setTopic(kafkaPartitionInfo.topic()); partitions.add(partitionInfo); } handler.handle(Future.succeededFuture(partitions)); } else { handler.handle(Future.failedFuture(done.cause())); } }); return this; }
/**
 * Verifies that {@code partitionsFor} reports the two partitions created for the topic.
 * The async completes inside the handler so vertx-unit waits for the callback.
 */
@Test
public void testPartitionsFor(TestContext ctx) throws Exception {
  String topicName = "testPartitionsFor";
  String consumerId = topicName;
  kafkaCluster.createTopic(topicName, 2, 1);

  Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

  Context context = vertx.getOrCreateContext();
  consumer = createConsumer(context, config);

  Async done = ctx.async();
  consumer.partitionsFor(topicName, ar -> {
    if (ar.succeeded()) {
      List<PartitionInfo> partitionInfo = ar.result();
      // Topic was created with 2 partitions above.
      ctx.assertEquals(2, partitionInfo.size());
    } else {
      // Propagate the cause so a failure is diagnosable (previously a bare ctx.fail()).
      ctx.fail(ar.cause());
    }
    done.complete();
  });
}
/**
 * Checks that partition metadata for a freshly created two-partition topic
 * comes back with exactly two entries.
 */
@Test
public void testPartitionsFor(TestContext ctx) throws Exception {
  final String topicName = "testPartitionsFor";
  final String groupId = topicName;
  kafkaCluster.createTopic(topicName, 2, 1);

  // Consumer wired for String keys/values, reading from the earliest offset.
  Properties consumerProps =
    kafkaCluster.useTo().getConsumerProperties(groupId, groupId, OffsetResetStrategy.EARLIEST);
  consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

  consumer = createConsumer(vertx.getOrCreateContext(), consumerProps);

  Async metadataReceived = ctx.async();
  consumer.partitionsFor(topicName, ar -> {
    if (!ar.succeeded()) {
      ctx.fail();
    } else {
      List<PartitionInfo> partitions = ar.result();
      ctx.assertEquals(2, partitions.size());
    }
    metadataReceived.complete();
  });
}
/* Fragment (lambda continues beyond this view): after the expected number of records has been consumed (count reaches zero), query partition metadata for the topic and iterate the raw Kafka PartitionInfo entries. */ consumer.handler(rec -> { if (count.decrementAndGet() == 0) { consumer.partitionsFor(topicName, asyncResult -> { if (asyncResult.succeeded()) { for (org.apache.kafka.common.PartitionInfo pi : asyncResult.result()) {
/* Fragment (lambda continues beyond this view): once count decrements to zero — i.e. all expected records were received — fetch and iterate the topic's partition metadata. */ consumer.handler(rec -> { if (count.decrementAndGet() == 0) { consumer.partitionsFor(topicName, asyncResult -> { if (asyncResult.succeeded()) { for (org.apache.kafka.common.PartitionInfo pi : asyncResult.result()) {
/* Fragment. NOTE(review): the guard tests asyncResult.succeeded() (an outer capture) but the loop iterates asyncResult1.result() — the handler's own parameter. This looks like a copy-paste slip; asyncResult1.succeeded() was presumably intended. Confirm against the enclosing callback before changing. */ consumer.partitionsFor(topicName, asyncResult1 -> { if (asyncResult.succeeded()) { for (org.apache.kafka.common.PartitionInfo pi : asyncResult1.result()) {
/* Fragment. NOTE(review): success is checked on the outer asyncResult while results are read from asyncResult1 (this handler's parameter) — likely the check should be asyncResult1.succeeded(); verify with the surrounding code. */ consumer.partitionsFor(topicName, asyncResult1 -> { if (asyncResult.succeeded()) { for (org.apache.kafka.common.PartitionInfo pi : asyncResult1.result()) {