/**
 * Builds receiver options for consuming {@code Person} records keyed by integer.
 * Broker addresses and consumer group come from the enclosing instance's
 * {@code bootstrapServers} and {@code groupId} fields.
 *
 * @return fully configured receiver options for an {@code Integer -> Person} stream
 */
public ReceiverOptions<Integer, Person> receiverOptions() {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    config.put(ConsumerConfig.CLIENT_ID_CONFIG, "sample-consumer");
    config.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, PersonSerDes.class);
    return ReceiverOptions.<Integer, Person>create(config);
}
/**
 * Creates a sample consumer bound to the given Kafka cluster, reading string
 * values keyed by integer from the earliest available offset.
 *
 * @param bootstrapServers comma-separated broker addresses
 */
public SampleConsumer(String bootstrapServers) {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    config.put(ConsumerConfig.CLIENT_ID_CONFIG, "sample-consumer");
    config.put(ConsumerConfig.GROUP_ID_CONFIG, "sample-group");
    config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    receiverOptions = ReceiverOptions.create(config);
    // NOTE(review): SimpleDateFormat is not thread-safe; fine if timestamps are
    // only formatted from a single subscriber thread — confirm usage.
    dateFormat = new SimpleDateFormat("HH:mm:ss:SSS z dd MMM yyyy");
}
/**
 * Sets up the reactive end-to-end latency run: creates a sender from the
 * producer properties and a byte[]/byte[] receive flux subscribed to
 * {@code topic}. Property overrides are merged by the superclass constructor.
 */
ReactiveEndToEndLatency(Map<String, Object> consumerPropsOverride, Map<String, Object> producerPropsOverride, String bootstrapServers, String topic) {
    super(consumerPropsOverride, producerPropsOverride, bootstrapServers, topic);
    sender = KafkaSender.create(SenderOptions.create(producerProps));
    ReceiverOptions<byte[], byte[]> receiverOptions = ReceiverOptions.<byte[], byte[]>create(consumerProps)
        .addAssignListener(partitions -> {
            // Only on the first assignment (no permit released yet): skip any
            // pre-existing records so latency is measured on fresh sends, then
            // release the semaphore — presumably to unblock the waiting test.
            if (assignSemaphore.availablePermits() == 0) {
                partitions.forEach(p -> p.seekToEnd());
                assignSemaphore.release();
            }
        })
        .subscription(Collections.singleton(topic));
    flux = KafkaReceiver.create(receiverOptions)
        .receive();
    receiveQueue = new LinkedBlockingQueue<>();
    System.out.println("Running latency test using Reactive API, class=" + this.getClass().getName());
}
public void initialize() {
System.out.println("Running consumer performance test using reactive API, class=" + this.getClass().getSimpleName()); ReceiverOptions<byte[], byte[]> receiverOptions = ReceiverOptions.<byte[], byte[]>create(consumerProps) .addAssignListener(partitions -> { for (ReceiverPartition p : partitions) {
/**
 * Builds receiver options for a test consumer in the given group: earliest
 * offset reset, at most two records per poll, a 50 ms commit interval, and a
 * single commit attempt (so commit failures surface immediately in tests).
 *
 * Fix: the original called {@code commitInterval} and {@code maxCommitAttempts}
 * on the options and discarded the return values. On reactor-kafka versions
 * where {@code ReceiverOptions} is immutable, those settings were silently
 * lost; chaining the calls into the assignment is correct on both the mutable
 * (returns {@code this}) and immutable APIs.
 *
 * @param propsOverride extra consumer properties applied last, or {@code null}
 * @param groupId consumer group for this receiver
 * @return the configured receiver options (also stored in the field)
 */
public ReceiverOptions<Integer, String> createReceiverOptions(Map<String, Object> propsOverride, String groupId) {
    Map<String, Object> props = consumerProps(groupId);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Small batches keep per-poll record counts deterministic in tests.
    props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "2");
    if (propsOverride != null)
        props.putAll(propsOverride);
    receiverOptions = ReceiverOptions.<Integer, String>create(props)
        .commitInterval(Duration.ofMillis(50))
        .maxCommitAttempts(1);
    return receiverOptions;
}
/**
 * Prepares receiver options that read from the earliest offset and decode
 * both keys and values with {@link TestDeserializer} instances.
 */
@Before
public void setUp() {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    receiverOptions = ReceiverOptions.<String, String>create(config)
            .withKeyDeserializer(new TestDeserializer())
            .withValueDeserializer(new TestDeserializer());
}
/**
 * Creates a consumer subscribed to the test topic, using the current test
 * method name as the consumer group, and issues an initial poll so group
 * assignment completes before the consumer is handed to the test.
 *
 * @return a subscribed, polled consumer
 * @throws Exception if consumer creation fails
 */
private Consumer<Integer, String> createConsumer() throws Exception {
    String group = testName.getMethodName();
    ReceiverOptions<Integer, String> options = ReceiverOptions.<Integer, String>create(consumerProps(group));
    Consumer<Integer, String> kafkaConsumer = ConsumerFactory.INSTANCE.createConsumer(options);
    kafkaConsumer.subscribe(Collections.singletonList(topic));
    // NOTE(review): poll(long) is deprecated in newer kafka-clients; consider
    // poll(Duration.ofMillis(requestTimeoutMillis)) when imports permit.
    kafkaConsumer.poll(requestTimeoutMillis);
    return kafkaConsumer;
}
/**
 * Builds a two-broker mock cluster with a set of numbered mock topics,
 * receiver options that mirror rebalance callbacks into the local
 * {@code assignedPartitions} set, and a single pooled mock consumer.
 * Receive start offsets are zeroed for every partition in the cluster.
 */
@Before
public void setUp() {
    topics = new ConcurrentHashMap<>();
    int[] topicNumbers = {1, 2, 20, 200};
    for (int n : topicNumbers) {
        topics.put(n, "topic" + n);
    }
    topic = topics.get(2);
    cluster = new MockCluster(2, topics);
    receiverOptions = ReceiverOptions.<Integer, String>create()
            .consumerProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId)
            .consumerProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
            // Track what Kafka assigns/revokes so tests can assert on it.
            .addAssignListener(parts -> parts.forEach(p -> assignedPartitions.add(p.topicPartition())))
            .addRevokeListener(parts -> parts.forEach(p -> assignedPartitions.remove(p.topicPartition())));
    consumer = new MockConsumer(cluster);
    consumerFactory = new MockConsumer.Pool(Arrays.asList(consumer));
    for (TopicPartition tp : cluster.partitions()) {
        receiveStartOffsets.put(tp, 0L);
    }
}
/**
 * Wires an exactly-once test pipeline against a mock cluster: a
 * read-committed receiver subscribed to the source topic, two pooled mock
 * consumers, and a transactional sender backed by a mock producer.
 */
@Before
public void setUp() {
    cluster = new MockCluster(2, Collections.emptyMap());
    cluster.addTopic(srcTopic, partitions);
    cluster.addTopic(destTopic, partitions);
    // read_committed hides records from aborted transactions, which is the
    // property the exactly-once tests assert on.
    receiverOptions = ReceiverOptions.<Integer, String>create()
        .consumerProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId)
        .consumerProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
        .consumerProperty(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed")
        .consumerProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, String.valueOf(maxPollRecords))
        // Mirror rebalance callbacks into the local assignment set for assertions.
        // (Lambda parameter shadows the `partitions` field used by addTopic above.)
        .addAssignListener(partitions -> {
            for (ReceiverPartition p : partitions)
                assignedPartitions.add(p.topicPartition());
        })
        .addRevokeListener(partitions -> {
            for (ReceiverPartition p : partitions)
                assignedPartitions.remove(p.topicPartition());
        })
        .subscription(Collections.singleton(srcTopic));
    consumerFactory = new MockConsumer.Pool(Arrays.asList(new MockConsumer(cluster), new MockConsumer(cluster)));
    receiver = new DefaultKafkaReceiver<Integer, String>(consumerFactory, receiverOptions);
    for (TopicPartition partition : cluster.partitions())
        receiveStartOffsets.put(partition, 0L);
    // Transactional id enables the transactional/exactly-once send path.
    SenderOptions<Integer, String> senderOptions = SenderOptions.<Integer, String>create()
        .producerProperty(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "exactlyOnce");
    producer = new MockProducer(cluster);
    Pool producerFactory = new Pool(Arrays.asList(producer));
    sender = new DefaultKafkaSender<>(producerFactory, senderOptions);
}