/**
 * Builds a single-partition {@code ConsumerRecords} fixture holding exactly one
 * String key/value record on partition 1 of the given topic.
 *
 * @param kafkaTopicName the topic name to attribute the record to
 * @return consumer records containing one record ("key" -> "1234567890")
 */
private ConsumerRecords buildRecords(final String kafkaTopicName) {
  // Uppercase 'L' long suffixes: the lowercase 'l' in the original reads as the digit 1.
  return new ConsumerRecords<>(
      ImmutableMap.of(
          new TopicPartition(kafkaTopicName, 1),
          Arrays.asList(
              new ConsumerRecord<>(
                  kafkaTopicName, 1, 1, 1L, TimestampType.CREATE_TIME, 1L,
                  10, 10, "key", "1234567890"))));
}
/**
 * Generates {@code RECORD_NUMBER} sequential records on partition 0 of the given
 * topic. Each record carries the shared {@code KEY_BYTES} key and a
 * {@code "VALUE-<n>"} UTF-8 payload, with the record offset equal to its index.
 *
 * @param topic the topic name stamped on every record
 * @return the generated records, in offset order
 */
private static List<ConsumerRecord<byte[], byte[]>> getRecords(String topic) {
  return IntStream.range(0, RECORD_NUMBER)
      .mapToObj(number -> {
        // StandardCharsets.UTF_8 avoids the checked-lookup path of Charset.forName("UTF-8");
        // string concatenation already converts the int, no Integer.toString needed.
        final byte[] value =
            ("VALUE-" + number).getBytes(java.nio.charset.StandardCharsets.UTF_8);
        return new ConsumerRecord<>(topic, 0, (long) number, 0L, null, 0L, 0, 0, KEY_BYTES, value);
      })
      .collect(Collectors.toList());
}
/**
 * Builds {@code ConsumerRecords} on the command topic from alternating
 * (CommandId, Command) argument pairs; the Command of a pair may be null.
 *
 * @param args an even-length sequence of CommandId/Command pairs
 * @return consumer records for {@code COMMAND_TOPIC_PARTITION} holding one record per pair
 */
private static ConsumerRecords<CommandId, Command> buildRecords(final Object... args) {
  // Arguments must come in (id, command) pairs.
  assertThat(args.length % 2, equalTo(0));

  final List<ConsumerRecord<CommandId, Command>> recordList = new ArrayList<>();
  int idx = 0;
  while (idx < args.length) {
    final Object id = args[idx];
    final Object command = args[idx + 1];
    assertThat(id, instanceOf(CommandId.class));
    assertThat(command, anyOf(is(nullValue()), instanceOf(Command.class)));
    recordList.add(
        new ConsumerRecord<>(COMMAND_TOPIC, 0, 0, (CommandId) id, (Command) command));
    idx += 2;
  }
  return new ConsumerRecords<>(Collections.singletonMap(COMMAND_TOPIC_PARTITION, recordList));
}
}
/**
 * Creates sequential dummy records for a single topic partition.
 *
 * @param <K> the Kafka key type
 * @param <V> the Kafka value type
 * @param topic the topic partition to create records for
 * @param startingOffset the offset of the first record
 * @param numRecords how many records to create
 * @return records with consecutive offsets and null keys/values
 */
public static <K, V> List<ConsumerRecord<K, V>> createRecords(TopicPartition topic, long startingOffset, int numRecords) {
    List<ConsumerRecord<K, V>> result = new ArrayList<>(numRecords);
    for (long offset = startingOffset; offset < startingOffset + numRecords; offset++) {
        result.add(new ConsumerRecord<>(topic.topic(), topic.partition(), offset, null, null));
    }
    return result;
}
@Test
public void shouldFilterNullValues() {
  replay(schemaRegistryClient);

  // A record whose value is null should be dropped entirely by the formatter.
  final ConsumerRecord<String, Bytes> nullValueRecord =
      new ConsumerRecord<>("some-topic", 1, 1, "key", null);
  final ConsumerRecords<String, Bytes> records = new ConsumerRecords<>(
      ImmutableMap.of(new TopicPartition("some-topic", 1), ImmutableList.of(nullValueRecord)));

  final RecordFormatter formatter = new RecordFormatter(schemaRegistryClient, "some-topic");

  assertThat(formatter.format(records), empty());
}
@Test
@SuppressWarnings("deprecation")
public void testNullChecksumInConstructor() {
    final String key = "key";
    final String value = "value";
    final long timestamp = 242341324L;

    // A null checksum argument means the record must compute a partial checksum itself.
    ConsumerRecord<String, String> record = new ConsumerRecord<>("topic", 0, 23L, timestamp,
            TimestampType.CREATE_TIME, null, key.length(), value.length(), key, value,
            new RecordHeaders());

    long expected = DefaultRecord.computePartialChecksum(timestamp, key.length(), value.length());
    assertEquals(expected, record.checksum());
}
@Test
public void iterator() throws Exception {
    String topic = "topic";

    Map<TopicPartition, List<ConsumerRecord<Integer, String>>> partitionRecords = new LinkedHashMap<>();
    partitionRecords.put(new TopicPartition(topic, 0), new ArrayList<ConsumerRecord<Integer, String>>());
    ConsumerRecord<Integer, String> first = new ConsumerRecord<>(topic, 1, 0, 0L,
            TimestampType.CREATE_TIME, 0L, 0, 0, 1, "value1");
    ConsumerRecord<Integer, String> second = new ConsumerRecord<>(topic, 1, 1, 0L,
            TimestampType.CREATE_TIME, 0L, 0, 0, 2, "value2");
    partitionRecords.put(new TopicPartition(topic, 1), Arrays.asList(first, second));
    partitionRecords.put(new TopicPartition(topic, 2), new ArrayList<ConsumerRecord<Integer, String>>());

    ConsumerRecords<Integer, String> consumerRecords = new ConsumerRecords<>(partitionRecords);

    // Only partition 1 holds records; iteration must skip the empty partitions
    // and yield offsets 0 and 1 in order.
    Iterator<ConsumerRecord<Integer, String>> iter = consumerRecords.iterator();
    int seen = 0;
    while (iter.hasNext()) {
        ConsumerRecord<Integer, String> record = iter.next();
        assertEquals(1, record.partition());
        assertEquals(topic, record.topic());
        assertEquals(seen, record.offset());
        seen++;
    }
    assertEquals(2, seen);
}
}
@Test
public void testConsumerRecordsIsEmptyWhenReturningNoRecords() {
    TopicPartition partition = new TopicPartition("test", 0);
    consumer.assign(Collections.singleton(partition));
    consumer.addRecord(new ConsumerRecord<String, String>("test", 0, 0, null, null));
    consumer.updateEndOffsets(Collections.singletonMap(partition, 1L));

    // Seeking to the end positions the consumer past the only record,
    // so the poll below must come back empty.
    consumer.seekToEnd(Collections.singleton(partition));

    ConsumerRecords<String, String> polled = consumer.poll(Duration.ofMillis(1));

    assertThat(polled.count(), is(0));
    assertThat(polled.isEmpty(), is(true));
}
@SuppressWarnings("unchecked")
@Test
public void shouldExtractTimestampFromStringWithFormat() throws ParseException {
    final String stringTime = "2010-Jan-11";
    final long expectedTime = new SimpleDateFormat(format).parse(stringTime).getTime();

    final StringTimestampExtractor timestampExtractor = new StringTimestampExtractor(format, 0);
    // Raw ConsumerRecord on purpose (hence the @SuppressWarnings): the extractor
    // only inspects the record value.
    final long actualTime = timestampExtractor.extract(
        new ConsumerRecord("topic", 1, 1, null,
            new GenericRow(Collections.singletonList(stringTime))),
        1);

    assertThat(actualTime, equalTo(expectedTime));
}
/**
 * Formats a single record carrying {@code data} as its value and returns the
 * detected format together with the one formatted line.
 */
private Result getFormatter(final byte[] data) {
  final TopicPartition partition = new TopicPartition("some-topic", 1);
  final ConsumerRecord<String, Bytes> record =
      new ConsumerRecord<>("some-topic", 1, 1, "key", new Bytes(data));

  final RecordFormatter formatter = new RecordFormatter(schemaRegistryClient, "some-topic");
  final List<String> formatted = formatter.format(
      new ConsumerRecords<>(ImmutableMap.of(partition, ImmutableList.of(record))));

  assertThat("Only expect one line", formatted, hasSize(1));
  return new Result(formatter.getFormat(), formatted.get(0));
}
@Test
public void shouldHandleNullValuesFromSTRINGPrint() throws IOException {
  final DateFormat dateFormat =
      SimpleDateFormat.getDateTimeInstance(3, 1, Locale.getDefault());
  final ConsumerRecord<String, Bytes> nullValueRecord =
      new ConsumerRecord<>("some-topic", 1, 1, "key", null);

  // The STRING formatter should render a null record value as the literal NULL.
  final String output = Format.STRING
      .maybeGetFormatter("some-topic", nullValueRecord, null, dateFormat)
      .get()
      .print(nullValueRecord);

  assertThat(output, endsWith(", key , NULL\n"));
}
@Test public void testSimpleMock() { consumer.subscribe(Collections.singleton("test")); assertEquals(0, consumer.poll(Duration.ZERO).count()); consumer.rebalance(Arrays.asList(new TopicPartition("test", 0), new TopicPartition("test", 1))); // Mock consumers need to seek manually since they cannot automatically reset offsets HashMap<TopicPartition, Long> beginningOffsets = new HashMap<>(); beginningOffsets.put(new TopicPartition("test", 0), 0L); beginningOffsets.put(new TopicPartition("test", 1), 0L); consumer.updateBeginningOffsets(beginningOffsets); consumer.seek(new TopicPartition("test", 0), 0); ConsumerRecord<String, String> rec1 = new ConsumerRecord<>("test", 0, 0, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, "key1", "value1"); ConsumerRecord<String, String> rec2 = new ConsumerRecord<>("test", 0, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, "key2", "value2"); consumer.addRecord(rec1); consumer.addRecord(rec2); ConsumerRecords<String, String> recs = consumer.poll(Duration.ofMillis(1)); Iterator<ConsumerRecord<String, String>> iter = recs.iterator(); assertEquals(rec1, iter.next()); assertEquals(rec2, iter.next()); assertFalse(iter.hasNext()); assertEquals(2L, consumer.position(new TopicPartition("test", 0))); consumer.commitSync(); assertEquals(2L, consumer.committed(new TopicPartition("test", 0)).offset()); }
@Override @SuppressWarnings("deprecation") public ConsumerRecords<String, String> onConsume(ConsumerRecords<String, String> records) { // This will ensure that we get the cluster metadata when onConsume is called for the first time // as subsequent compareAndSet operations will fail. CLUSTER_ID_BEFORE_ON_CONSUME.compareAndSet(NO_CLUSTER_ID, CLUSTER_META.get()); Map<TopicPartition, List<ConsumerRecord<String, String>>> recordMap = new HashMap<>(); for (TopicPartition tp : records.partitions()) { List<ConsumerRecord<String, String>> lst = new ArrayList<>(); for (ConsumerRecord<String, String> record: records.records(tp)) { lst.add(new ConsumerRecord<>(record.topic(), record.partition(), record.offset(), record.timestamp(), record.timestampType(), record.checksum(), record.serializedKeySize(), record.serializedValueSize(), record.key(), record.value().toUpperCase(Locale.ROOT))); } recordMap.put(tp, lst); } return new ConsumerRecords<String, String>(recordMap); }
@SuppressWarnings("deprecation") @Test public void testSimpleMockDeprecated() { consumer.subscribe(Collections.singleton("test")); assertEquals(0, consumer.poll(1000).count()); consumer.rebalance(Arrays.asList(new TopicPartition("test", 0), new TopicPartition("test", 1))); // Mock consumers need to seek manually since they cannot automatically reset offsets HashMap<TopicPartition, Long> beginningOffsets = new HashMap<>(); beginningOffsets.put(new TopicPartition("test", 0), 0L); beginningOffsets.put(new TopicPartition("test", 1), 0L); consumer.updateBeginningOffsets(beginningOffsets); consumer.seek(new TopicPartition("test", 0), 0); ConsumerRecord<String, String> rec1 = new ConsumerRecord<>("test", 0, 0, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, "key1", "value1"); ConsumerRecord<String, String> rec2 = new ConsumerRecord<>("test", 0, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, "key2", "value2"); consumer.addRecord(rec1); consumer.addRecord(rec2); ConsumerRecords<String, String> recs = consumer.poll(1); Iterator<ConsumerRecord<String, String>> iter = recs.iterator(); assertEquals(rec1, iter.next()); assertEquals(rec2, iter.next()); assertFalse(iter.hasNext()); assertEquals(2L, consumer.position(new TopicPartition("test", 0))); consumer.commitSync(); assertEquals(2L, consumer.committed(new TopicPartition("test", 0)).offset()); }
@Test
public void testBasic() {
    SimpleRecordTranslator<String, String> translator =
            new SimpleRecordTranslator<>((r) -> new Values(r.value()), new Fields("value"));
    // Only the default stream should be declared.
    assertEquals(Arrays.asList("default"), translator.streams());

    ConsumerRecord<String, String> record =
            new ConsumerRecord<>("TOPIC", 100, 100, "THE KEY", "THE VALUE");
    assertEquals(Arrays.asList("THE VALUE"), translator.apply(record));
}
/**
 * Verifies the default translator declares only the "default" stream, exposes the
 * standard five fields, and emits (topic, partition, offset, key, value) tuples.
 */
@Test
public void testBasic() {
    DefaultRecordTranslator<String, String> trans = new DefaultRecordTranslator<>();
    assertEquals(Arrays.asList("default"), trans.streams());
    assertEquals(new Fields("topic", "partition", "offset", "key", "value"),
            trans.getFieldsFor("default"));
    ConsumerRecord<String, String> cr = new ConsumerRecord<>("TOPIC", 100, 100, "THE KEY", "THE VALUE");
    // Offsets are longs, hence 100L; uppercase suffix replaces the error-prone lowercase 'l'.
    assertEquals(Arrays.asList("TOPIC", 100, 100L, "THE KEY", "THE VALUE"), trans.apply(cr));
}
}
@Test
public void testNullTranslation() {
    ByTopicRecordTranslator<String, String> translator =
            new ByTopicRecordTranslator<>((r) -> null, new Fields("key"));
    ConsumerRecord<String, String> record =
            new ConsumerRecord<>("TOPIC 1", 100, 100, "THE KEY", "THE VALUE");
    // A translation function returning null must propagate as null (record emitted nowhere).
    assertEquals(null, translator.apply(record));
}
@Test
public void testBasic() {
    ByTopicRecordTranslator<String, String> trans =
            new ByTopicRecordTranslator<>((r) -> new Values(r.key()), new Fields("key"));
    trans.forTopic("TOPIC 1", (r) -> new Values(r.value()), new Fields("value"), "value-stream");
    trans.forTopic("TOPIC 2", (r) -> new Values(r.key(), r.value()),
            new Fields("key", "value"), "key-value-stream");

    // All three streams (default plus the two per-topic ones) must be declared.
    HashSet<String> expectedStreams = new HashSet<>();
    expectedStreams.add("default");
    expectedStreams.add("value-stream");
    expectedStreams.add("key-value-stream");
    assertEquals(expectedStreams, new HashSet<>(trans.streams()));

    // An unmapped topic falls back to the default (key-only) translation.
    ConsumerRecord<String, String> otherTopic =
            new ConsumerRecord<>("TOPIC OTHER", 100, 100, "THE KEY", "THE VALUE");
    assertEquals(new Fields("key"), trans.getFieldsFor("default"));
    assertEquals(Arrays.asList("THE KEY"), trans.apply(otherTopic));

    ConsumerRecord<String, String> topic1 =
            new ConsumerRecord<>("TOPIC 1", 100, 100, "THE KEY", "THE VALUE");
    assertEquals(new Fields("value"), trans.getFieldsFor("value-stream"));
    assertEquals(Arrays.asList("THE VALUE"), trans.apply(topic1));

    ConsumerRecord<String, String> topic2 =
            new ConsumerRecord<>("TOPIC 2", 100, 100, "THE KEY", "THE VALUE");
    assertEquals(new Fields("key", "value"), trans.getFieldsFor("key-value-stream"));
    assertEquals(Arrays.asList("THE KEY", "THE VALUE"), trans.apply(topic2));
}
/**
 * Parse the record entry, deserializing the key / value fields if necessary
 *
 * @param partition the partition the record was fetched from (also supplies the
 *                  topic name passed to the deserializers)
 * @param batch the batch containing the record; supplies leader epoch and timestamp type
 * @param record the raw record to convert
 * @return the deserialized consumer record
 * @throws SerializationException if either deserializer throws; the message includes
 *         the partition and offset so the caller can seek past the bad record
 */
private ConsumerRecord<K, V> parseRecord(TopicPartition partition,
                                         RecordBatch batch,
                                         Record record) {
    try {
        long offset = record.offset();
        long timestamp = record.timestamp();
        Optional<Integer> leaderEpoch = maybeLeaderEpoch(batch.partitionLeaderEpoch());
        TimestampType timestampType = batch.timestampType();
        // Headers are materialized first because the deserializers receive them.
        Headers headers = new RecordHeaders(record.headers());
        // A null key/value buffer is passed through as null without invoking the deserializer.
        ByteBuffer keyBytes = record.key();
        byte[] keyByteArray = keyBytes == null ? null : Utils.toArray(keyBytes);
        K key = keyBytes == null ? null : this.keyDeserializer.deserialize(partition.topic(), headers, keyByteArray);
        ByteBuffer valueBytes = record.value();
        byte[] valueByteArray = valueBytes == null ? null : Utils.toArray(valueBytes);
        V value = valueBytes == null ? null : this.valueDeserializer.deserialize(partition.topic(), headers, valueByteArray);
        // NULL_SIZE marks an absent key/value so callers can tell "missing" from "empty".
        return new ConsumerRecord<>(partition.topic(), partition.partition(), offset,
                                    timestamp, timestampType, record.checksumOrNull(),
                                    keyByteArray == null ? ConsumerRecord.NULL_SIZE : keyByteArray.length,
                                    valueByteArray == null ? ConsumerRecord.NULL_SIZE : valueByteArray.length,
                                    key, value, headers, leaderEpoch);
    } catch (RuntimeException e) {
        // Preserve the cause and include partition/offset context for recovery via seek.
        throw new SerializationException("Error deserializing key/value for partition " + partition +
                " at offset " + record.offset() + ". If needed, please seek past the record to continue consumption.", e);
    }
}
@Test
@SuppressWarnings("deprecation")
public void testOldConstructor() {
    final String topic = "topic";
    final int partition = 0;
    final long offset = 23;
    final String key = "key";
    final String value = "value";

    ConsumerRecord<String, String> record =
            new ConsumerRecord<>(topic, partition, offset, key, value);

    // The explicitly supplied fields come back unchanged...
    assertEquals(topic, record.topic());
    assertEquals(partition, record.partition());
    assertEquals(offset, record.offset());
    assertEquals(key, record.key());
    assertEquals(value, record.value());

    // ...while everything not supplied defaults to the "absent" sentinels.
    assertEquals(TimestampType.NO_TIMESTAMP_TYPE, record.timestampType());
    assertEquals(ConsumerRecord.NO_TIMESTAMP, record.timestamp());
    assertEquals(ConsumerRecord.NULL_CHECKSUM, record.checksum());
    assertEquals(ConsumerRecord.NULL_SIZE, record.serializedKeySize());
    assertEquals(ConsumerRecord.NULL_SIZE, record.serializedValueSize());
    assertEquals(Optional.empty(), record.leaderEpoch());
    assertEquals(new RecordHeaders(), record.headers());
}