@Test
public void shouldStartWithEmptyTopicAndStoreDataAndRecoverAllState() throws Exception {
    // Create the empty topic ...
    kafka.createTopic(topicName, 1, 1);
    testHistoryTopicContent(false);
}
@Test
public void shouldIgnoreUnparseableMessages() throws Exception {
    kafka.createTopic(topicName, 1, 1);
@Test(expected = ParsingException.class)
public void shouldStopOnUnparseableSQL() throws Exception {
    // Create the empty topic ...
    kafka.createTopic(topicName, 1, 1);

    // Create invalid records
    final ProducerRecord<String, String> invalidSQL = new ProducerRecord<>(topicName, PARTITION_NO, null,
            "{\"source\":{\"server\":\"my-server\"},\"position\":{\"filename\":\"my-txn-file.log\",\"position\":39},\"databaseName\":\"db1\",\"ddl\":\"xxxDROP TABLE foo;\"}");

    final Configuration intruderConfig = Configuration.create()
            .withDefault(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.brokerList())
            .withDefault(ProducerConfig.CLIENT_ID_CONFIG, "intruder")
            .withDefault(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
            .withDefault(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
            .build();

    try (final KafkaProducer<String, String> producer = new KafkaProducer<>(intruderConfig.asProperties())) {
        producer.send(invalidSQL).get();
    }

    testHistoryTopicContent(false);
}
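// The unparseable-message tests above share the same "intruder" producer setup.
// A minimal helper sketch (hypothetical, not part of the original test class) that
// factors out writing a raw value straight to the history topic; it uses only
// identifiers already present above (topicName, PARTITION_NO, kafka.brokerList()).
private void sendRawHistoryMessage(String value) throws Exception {
    final Configuration intruderConfig = Configuration.create()
            .withDefault(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.brokerList())
            .withDefault(ProducerConfig.CLIENT_ID_CONFIG, "intruder")
            .withDefault(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
            .withDefault(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
            .build();
    try (final KafkaProducer<String, String> producer = new KafkaProducer<>(intruderConfig.asProperties())) {
        // Block until the broker acknowledges the write, so the test sees the record.
        producer.send(new ProducerRecord<>(topicName, PARTITION_NO, null, value)).get();
    }
}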
@Test
public void testPartitionsFor(TestContext ctx) throws Exception {
    String topicName = "testPartitionsFor";
    String consumerId = topicName;
    kafkaCluster.createTopic(topicName, 2, 1);
    Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    Context context = vertx.getOrCreateContext();
    consumer = createConsumer(context, config);

    Async done = ctx.async();

    consumer.partitionsFor(topicName, ar -> {
        if (ar.succeeded()) {
            List<PartitionInfo> partitionInfo = ar.result();
            ctx.assertEquals(2, partitionInfo.size());
        } else {
            ctx.fail();
        }
        done.complete();
    });
}
@Test
public void testAssign(TestContext ctx) throws Exception {
    String topicName = "testAssign";
    String consumerId = topicName;
    kafkaCluster.createTopic(topicName, 1, 1);
    Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    Context context = vertx.getOrCreateContext();
    consumer = createConsumer(context, config);

    Async done = ctx.async();

    consumer.handler(record -> {
        // no need for handling incoming records in this test
    });

    TopicPartition partition = new TopicPartition(topicName, 0);

    consumer.assign(Collections.singleton(partition), asyncResult -> {
        if (asyncResult.succeeded()) {
            consumer.assignment(asyncResult1 -> {
                if (asyncResult1.succeeded()) {
                    ctx.assertTrue(asyncResult1.result().contains(partition));
                    done.complete();
                } else {
                    ctx.fail();
                }
            });
        } else {
            ctx.fail();
        }
    });
}
@Test
public void testNotCommitted(TestContext ctx) throws Exception {
    String topicName = "testNotCommitted";
    String consumerId = topicName;
    kafkaCluster.createTopic(topicName, 1, 1);
    Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    Async done = ctx.async();

    KafkaConsumer<Object, Object> consumer = KafkaConsumer.create(vertx, config);
    consumer.handler(rec -> {});
    consumer.partitionsAssignedHandler(partitions -> {
        for (io.vertx.kafka.client.common.TopicPartition partition : partitions) {
            consumer.committed(partition, ar -> {
                if (ar.succeeded()) {
                    ctx.assertNull(ar.result());
                } else {
                    ctx.fail(ar.cause());
                }
            });
        }
        done.complete();
    });
    consumer.subscribe(Collections.singleton(topicName));
}
private void testSeek(String topic, int numMessages, TestContext ctx, Runnable seeker) throws Exception {
    kafkaCluster.createTopic(topic, 1, 1);

    String consumerId = topic;
    Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
@Test
public void testSubscription(TestContext ctx) throws Exception {
    String topicName = "testSubscription";
    String consumerId = topicName;
    kafkaCluster.createTopic(topicName, 1, 1);
    Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    Context context = vertx.getOrCreateContext();
    consumer = createConsumer(context, config);

    Async done = ctx.async();

    consumer.handler(record -> {
        // no need for handling incoming records in this test
    });

    consumer.subscribe(Collections.singleton(topicName), asyncResult -> {
        if (asyncResult.succeeded()) {
            consumer.subscription(asyncResult1 -> {
                if (asyncResult1.succeeded()) {
                    ctx.assertTrue(asyncResult1.result().contains(topicName));
                    done.complete();
                } else {
                    ctx.fail();
                }
            });
        } else {
            ctx.fail();
        }
    });
}
@Test
public void testListTopics(TestContext ctx) throws Exception {
    String topicName = "testListTopics";
    String consumerId = topicName;
    kafkaCluster.createTopic(topicName, 1, 1);
    Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    Context context = vertx.getOrCreateContext();
    consumer = createConsumer(context, config);

    Async done = ctx.async();

    consumer.handler(record -> {
        // no need for handling incoming records in this test
    });

    consumer.subscribe(Collections.singleton(topicName), asyncResult -> {
        if (asyncResult.succeeded()) {
            consumer.listTopics(asyncResult1 -> {
                if (asyncResult1.succeeded()) {
                    ctx.assertTrue(asyncResult1.result().containsKey(topicName));
                    done.complete();
                } else {
                    ctx.fail();
                }
            });
        } else {
            ctx.fail();
        }
    });
}
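// Every Vert.x consumer test above repeats the same four configuration lines.
// A hypothetical helper (not in the original suite) that consolidates them,
// using only calls already shown (getConsumerProperties, String deserializers):
private Properties earliestStringConsumerConfig(String consumerId) {
    Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    return config;
}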
String topicName = "testPositionEmptyTopic"; String consumerId = topicName; kafkaCluster.createTopic(topicName, 1, 1); Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST); config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
String topicName = "testAssignThenSetHandler"; String consumerId = topicName; kafkaCluster.createTopic(topicName, 1, 1); Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
String topicName = "testSetHandlerThenAssign"; String consumerId = topicName; kafkaCluster.createTopic(topicName, 1, 1); Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST); config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
String topicName = "testAssignThenSetHandler"; String consumerId = topicName; kafkaCluster.createTopic(topicName, 1, 1); Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
String topicName = "testSetHandlerThenAssign"; String consumerId = topicName; kafkaCluster.createTopic(topicName, 1, 1); Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST); config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);