@Override
public K getKey() {
    return this.consumerRecord.key();
}
/** When an upstream context is not present, lookup keys are unlikely to be added. */
static void addTags(ConsumerRecord<?, ?> record, SpanCustomizer result) {
    if (record.key() instanceof String && !"".equals(record.key())) {
        result.tag(KafkaTags.KAFKA_KEY_TAG, record.key().toString());
    }
    result.tag(KafkaTags.KAFKA_TOPIC_TAG, record.topic());
}
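A hedged sketch of a call site for this helper: brave.Span implements SpanCustomizer, so an in-flight span can be passed directly. The tracer wiring, service name, and record contents below are assumptions for illustration, not taken from the snippet above.

import brave.Span;
import brave.Tracer;
import brave.Tracing;
import org.apache.kafka.clients.consumer.ConsumerRecord;

// Assumed wiring, not part of the original snippet.
Tracing tracing = Tracing.newBuilder().localServiceName("example-consumer").build();
Tracer tracer = tracing.tracer();
ConsumerRecord<String, String> record = new ConsumerRecord<>("orders", 0, 0L, "order-1", "payload");

Span span = tracer.nextSpan().name("poll").start();
try {
    addTags(record, span); // brave.Span is a SpanCustomizer
} finally {
    span.finish();
    tracing.close();
}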
void set(ConsumerRecord<byte[], byte[]> consumerRecord) {
    this.partition = consumerRecord.partition();
    this.timestamp = consumerRecord.timestamp();
    this.offset = consumerRecord.offset();
    this.value = consumerRecord.value();
    this.recordKey = consumerRecord.key();
}
@Override
public List<Object> apply(ConsumerRecord<K, V> record) {
    return new Values(record.topic(), record.partition(), record.offset(), record.key(), record.value());
}
@Override
public BaseConsumerRecord receive() {
    if (_recordIter == null || !_recordIter.hasNext()) {
        _recordIter = _consumer.poll(Long.MAX_VALUE).iterator();
    }
    ConsumerRecord<String, String> record = _recordIter.next();
    return new BaseConsumerRecord(record.topic(), record.partition(), record.offset(), record.key(), record.value());
}
private BundleTracker(final ConsumerRecord<byte[], byte[]> initialRecord, final TopicPartition topicPartition, final String keyEncoding) {
    this.initialOffset = initialRecord.offset();
    this.partition = topicPartition.partition();
    this.topic = topicPartition.topic();
    this.key = encodeKafkaKey(initialRecord.key(), keyEncoding);
}
private BundleTracker(final ConsumerRecord<byte[], byte[]> initialRecord, final TopicPartition topicPartition, final String keyEncoding, final RecordSetWriter recordWriter) {
    this.initialOffset = initialRecord.offset();
    this.partition = topicPartition.partition();
    this.topic = topicPartition.topic();
    this.recordWriter = recordWriter;
    this.key = encodeKafkaKey(initialRecord.key(), keyEncoding);
}
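The encodeKafkaKey helper called by these constructors is not shown in these snippets. A plausible sketch, assuming the encoding choice is between hex and UTF-8 (the encoding names here are assumptions, not the real constants):

import java.nio.charset.StandardCharsets;

// Hypothetical sketch of the encodeKafkaKey helper referenced above; the real
// implementation and its encoding constants are not shown in these snippets.
static String encodeKafkaKey(final byte[] key, final String keyEncoding) {
    if (key == null) {
        return null;
    }
    if ("hex".equalsIgnoreCase(keyEncoding)) {
        // Render each byte as two lowercase hex digits.
        StringBuilder sb = new StringBuilder(key.length * 2);
        for (byte b : key) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }
    // Otherwise treat the key bytes as UTF-8 text.
    return new String(key, StandardCharsets.UTF_8);
}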
@Override
@SuppressWarnings("deprecation")
public ConsumerRecords<String, String> onConsume(ConsumerRecords<String, String> records) {
    // This will ensure that we get the cluster metadata when onConsume is called for the first time,
    // as subsequent compareAndSet operations will fail.
    CLUSTER_ID_BEFORE_ON_CONSUME.compareAndSet(NO_CLUSTER_ID, CLUSTER_META.get());
    Map<TopicPartition, List<ConsumerRecord<String, String>>> recordMap = new HashMap<>();
    for (TopicPartition tp : records.partitions()) {
        List<ConsumerRecord<String, String>> lst = new ArrayList<>();
        for (ConsumerRecord<String, String> record : records.records(tp)) {
            lst.add(new ConsumerRecord<>(record.topic(), record.partition(), record.offset(), record.timestamp(),
                record.timestampType(), record.checksum(), record.serializedKeySize(), record.serializedValueSize(),
                record.key(), record.value().toUpperCase(Locale.ROOT)));
        }
        recordMap.put(tp, lst);
    }
    return new ConsumerRecords<>(recordMap);
}
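An interceptor like this only runs if it is registered on the consumer via the standard interceptor.classes setting. A minimal registration sketch, assuming the interceptor above is packaged as com.example.UpperCaseInterceptor (a hypothetical name):

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
// Register the interceptor so onConsume runs on every batch returned by poll().
props.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, "com.example.UpperCaseInterceptor");
KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);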
@Test(expected = IllegalStateException.class)
public void testTopicCollision() {
    ByTopicRecordTranslator<String, String> trans =
        new ByTopicRecordTranslator<>((r) -> new Values(r.key()), new Fields("key"));
    trans.forTopic("foo", (r) -> new Values(r.value()), new Fields("value"), "foo1");
    trans.forTopic("foo", (r) -> new Values(r.key(), r.value()), new Fields("key", "value"), "foo2");
}
public static KafkaTridentSpoutConfig.Builder<String, String> setCommonSpoutConfig(KafkaTridentSpoutConfig.Builder<String, String> config) {
    return config.setRecordTranslator((r) -> new Values(r.topic(), r.key(), r.value()), new Fields("topic", "key", "value"))
        .setProp(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 5)
        .setFirstPollOffsetStrategy(EARLIEST)
        .setPollTimeoutMs(1000);
}
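For context, a hedged sketch of applying this helper when building a Trident spout; the builder factory, broker address, topic name, and spout class below are assumptions based on storm-kafka-client and are not part of the snippet above.

// Hypothetical wiring: apply the common settings and build an opaque Trident spout.
KafkaTridentSpoutConfig<String, String> spoutConfig =
    setCommonSpoutConfig(KafkaTridentSpoutConfig.builder("localhost:9092", "test-topic")).build();
KafkaTridentSpoutOpaque<String, String> spout = new KafkaTridentSpoutOpaque<>(spoutConfig);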
private void compareIterator(List<ConsumerRecord<byte[], byte[]>> expected,
                             Iterator<ConsumerRecord<byte[], byte[]>> kafkaRecordIterator) {
    expected.forEach((expectedRecord) -> {
        Assert.assertTrue("Record with offset " + expectedRecord.offset() + " is missing",
            kafkaRecordIterator.hasNext());
        ConsumerRecord<byte[], byte[]> record = kafkaRecordIterator.next();
        Assert.assertEquals(expectedRecord.topic(), record.topic());
        Assert.assertEquals(expectedRecord.partition(), record.partition());
        Assert.assertEquals("Offsets not matching", expectedRecord.offset(), record.offset());
        byte[] binaryExpectedValue = expectedRecord.value();
        byte[] binaryExpectedKey = expectedRecord.key();
        byte[] binaryValue = record.value();
        byte[] binaryKey = record.key();
        Assert.assertArrayEquals("Values not matching", binaryExpectedValue, binaryValue);
        Assert.assertArrayEquals("Keys not matching", binaryExpectedKey, binaryKey);
    });
    Assert.assertFalse(kafkaRecordIterator.hasNext());
}
assertEquals(Integer.toString(i), new String(fetchedRecords.get(i).key()));
Set<String> actuallyCommittedKeys = new HashSet<>();
for (ConsumerRecord<byte[], byte[]> consumerRecord : fetchedConsumerRecords) {
    actuallyCommittedKeys.add(new String(consumerRecord.key(), StandardCharsets.UTF_8));
}
@Test(expected = IllegalArgumentException.class)
public void testFieldCollision() {
    ByTopicRecordTranslator<String, String> trans =
        new ByTopicRecordTranslator<>((r) -> new Values(r.key()), new Fields("key"));
    trans.forTopic("foo", (r) -> new Values(r.value()), new Fields("value"));
}
@Test
public void testFetcherIgnoresControlRecords() {
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);

    // normal fetch
    assertEquals(1, fetcher.sendFetches());
    assertFalse(fetcher.hasCompletedFetches());

    long producerId = 1;
    short producerEpoch = 0;
    int baseSequence = 0;
    int partitionLeaderEpoch = 0;

    ByteBuffer buffer = ByteBuffer.allocate(1024);
    MemoryRecordsBuilder builder = MemoryRecords.idempotentBuilder(buffer, CompressionType.NONE, 0L,
        producerId, producerEpoch, baseSequence);
    builder.append(0L, "key".getBytes(), null);
    builder.close();

    MemoryRecords.writeEndTransactionalMarker(buffer, 1L, time.milliseconds(), partitionLeaderEpoch,
        producerId, producerEpoch, new EndTransactionMarker(ControlRecordType.ABORT, 0));

    buffer.flip();

    client.prepareResponse(fullFetchResponse(tp0, MemoryRecords.readableRecords(buffer), Errors.NONE, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());

    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords = fetcher.fetchedRecords();
    assertTrue(partitionRecords.containsKey(tp0));

    List<ConsumerRecord<byte[], byte[]>> records = partitionRecords.get(tp0);
    assertEquals(1, records.size());
    assertEquals(2L, subscriptions.position(tp0).longValue());

    ConsumerRecord<byte[], byte[]> record = records.get(0);
    assertArrayEquals("key".getBytes(), record.key());
}
@Test
public void testBasic() {
    ByTopicRecordTranslator<String, String> trans =
        new ByTopicRecordTranslator<>((r) -> new Values(r.key()), new Fields("key"));
    trans.forTopic("TOPIC 1", (r) -> new Values(r.value()), new Fields("value"), "value-stream");
    trans.forTopic("TOPIC 2", (r) -> new Values(r.key(), r.value()), new Fields("key", "value"), "key-value-stream");

    HashSet<String> expectedStreams = new HashSet<>();
    expectedStreams.add("default");
    expectedStreams.add("value-stream");
    expectedStreams.add("key-value-stream");
    assertEquals(expectedStreams, new HashSet<>(trans.streams()));

    ConsumerRecord<String, String> cr1 = new ConsumerRecord<>("TOPIC OTHER", 100, 100, "THE KEY", "THE VALUE");
    assertEquals(new Fields("key"), trans.getFieldsFor("default"));
    assertEquals(Arrays.asList("THE KEY"), trans.apply(cr1));

    ConsumerRecord<String, String> cr2 = new ConsumerRecord<>("TOPIC 1", 100, 100, "THE KEY", "THE VALUE");
    assertEquals(new Fields("value"), trans.getFieldsFor("value-stream"));
    assertEquals(Arrays.asList("THE VALUE"), trans.apply(cr2));

    ConsumerRecord<String, String> cr3 = new ConsumerRecord<>("TOPIC 2", 100, 100, "THE KEY", "THE VALUE");
    assertEquals(new Fields("key", "value"), trans.getFieldsFor("key-value-stream"));
    assertEquals(Arrays.asList("THE KEY", "THE VALUE"), trans.apply(cr3));
}
@Test
@SuppressWarnings("deprecation")
public void testOldConstructor() {
    String topic = "topic";
    int partition = 0;
    long offset = 23;
    String key = "key";
    String value = "value";

    ConsumerRecord<String, String> record = new ConsumerRecord<>(topic, partition, offset, key, value);
    assertEquals(topic, record.topic());
    assertEquals(partition, record.partition());
    assertEquals(offset, record.offset());
    assertEquals(key, record.key());
    assertEquals(value, record.value());
    assertEquals(TimestampType.NO_TIMESTAMP_TYPE, record.timestampType());
    assertEquals(ConsumerRecord.NO_TIMESTAMP, record.timestamp());
    assertEquals(ConsumerRecord.NULL_CHECKSUM, record.checksum());
    assertEquals(ConsumerRecord.NULL_SIZE, record.serializedKeySize());
    assertEquals(ConsumerRecord.NULL_SIZE, record.serializedValueSize());
    assertEquals(Optional.empty(), record.leaderEpoch());
    assertEquals(new RecordHeaders(), record.headers());
}