/**
 * Returns the string form of the first header carrying the given key on the
 * supplied record, or {@code null} when no header with that key exists.
 */
private String getSourceRecordHeaderByKey(SourceRecord record, String headerKey) {
    Iterator<Header> matching = record.headers().allWithName(headerKey);
    if (matching.hasNext()) {
        return matching.next().value().toString();
    }
    return null;
}
@Test
@FixFor("DBZ-971")
public void testUnwrapPropagatesRecordHeaders() {
    try (final UnwrapFromEnvelope<SourceRecord> transform = new UnwrapFromEnvelope<>()) {
        // Default configuration — header propagation requires no special options.
        final Map<String, String> config = new HashMap<>();
        transform.configure(config);

        final SourceRecord createRecord = createCreateRecord();
        createRecord.headers().addString("application/debezium-test-header", "shouldPropagatePreviousRecordHeaders");

        final SourceRecord unwrapped = transform.apply(createRecord);

        // The unwrapped value still exposes the original payload ...
        assertThat(((Struct) unwrapped.value()).getInt8("id")).isEqualTo((byte) 1);
        // ... and the custom header added before the transform survives unchanged.
        assertThat(unwrapped.headers()).hasSize(1);
        Iterator<Header> propagated = unwrapped.headers().allWithName("application/debezium-test-header");
        assertThat(propagated.hasNext()).isTrue();
        assertThat(propagated.next().value().toString()).isEqualTo("shouldPropagatePreviousRecordHeaders");
    }
}
}
@Test
@FixFor("DBZ-971")
// Verifies that headers added to a MongoDB-sourced record survive the transformation.
public void shouldPropagatePreviousRecordHeaders() throws InterruptedException {
    BsonTimestamp ts = new BsonTimestamp(1000, 1);
    CollectionId collectionId = new CollectionId("rs0", "dbA", "c1");
    ObjectId objId = new ObjectId();
    Document obj = new Document().append("$set", new Document("name", "Sally"));
    // given
    // Build a MongoDB oplog-style update event for collection dbA.c1.
    Document event = new Document().append("o", obj)
            .append("o2", objId)
            .append("ns", "dbA.c1")
            .append("ts", ts)
            .append("h", Long.valueOf(12345678))
            .append("op", "u");
    RecordsForCollection records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1002);
    // Exactly one source record should have been produced for the event.
    assertThat(produced.size()).isEqualTo(1);
    SourceRecord record = produced.get(0);
    // Attach a custom header to the raw record before transforming it.
    record.headers().addString("application/debezium-test-header", "shouldPropagatePreviousRecordHeaders");
    // when
    SourceRecord transformedRecord = transformation.apply(record);
    // then: the custom header is propagated unchanged onto the transformed record.
    assertThat(transformedRecord.headers()).hasSize(1);
    Iterator<Header> headers = transformedRecord.headers().allWithName("application/debezium-test-header");
    assertThat(headers.hasNext()).isTrue();
    assertThat(headers.next().value().toString()).isEqualTo("shouldPropagatePreviousRecordHeaders");
}
@Test
public void testDeleteForwardConfigured() {
    try (final UnwrapFromEnvelope<SourceRecord> transform = new UnwrapFromEnvelope<>()) {
        // Forward deletes (do not drop them) and record the operation type in a header.
        final Map<String, String> config = new HashMap<>();
        config.put(DROP_DELETES, "false");
        config.put(OPERATION_HEADER, "true");
        transform.configure(config);

        final SourceRecord deleteRecord = createDeleteRecord();
        final SourceRecord tombstone = transform.apply(deleteRecord);

        // A forwarded delete becomes a tombstone (null value) ...
        assertThat(tombstone.value()).isNull();
        // ... with a single header identifying the operation as a delete.
        assertThat(tombstone.headers()).hasSize(1);
        String operation = getSourceRecordHeaderByKey(tombstone, transform.DEBEZIUM_OPERATION_HEADER_KEY);
        assertThat(operation).isEqualTo(Envelope.Operation.DELETE.code());
    }
}
@Test
public void testHandleCreateRewrite() {
    try (final UnwrapFromEnvelope<SourceRecord> transform = new UnwrapFromEnvelope<>()) {
        // Rewrite mode adds a "__deleted" marker field; also emit the operation header.
        final Map<String, String> config = new HashMap<>();
        config.put(HANDLE_DELETES, "rewrite");
        config.put(OPERATION_HEADER, "true");
        transform.configure(config);

        final SourceRecord createRecord = createCreateRecord();
        final SourceRecord unwrapped = transform.apply(createRecord);

        // A create is not a delete, so the rewrite marker must read "false" ...
        assertThat(((Struct) unwrapped.value()).getString("__deleted")).isEqualTo("false");
        // ... and a single header must identify the operation as a create.
        assertThat(unwrapped.headers()).hasSize(1);
        String operation = getSourceRecordHeaderByKey(unwrapped, transform.DEBEZIUM_OPERATION_HEADER_KEY);
        assertThat(operation).isEqualTo(Envelope.Operation.CREATE.code());
    }
}
// Verify the transformation attached the Debezium operation header identifying a DELETE.
Iterator<Header> operationHeader = transformed.headers().allWithName(transformation.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.DELETE.code());
// The operation header must be present and must carry the DELETE operation code.
Iterator<Header> operationHeader = transformed.headers().allWithName(transformation.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.DELETE.code());
// The operation header must be present and must carry the UPDATE operation code.
Iterator<Header> operationHeader = transformed.headers().allWithName(transformation.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.UPDATE.code());
// The operation header must be present and must carry the CREATE operation code.
Iterator<Header> operationHeader = transformed.headers().allWithName(transformation.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.CREATE.code());
/**
 * Creates a derived record with the given coordinates and payload, delegating to the
 * headers-accepting overload while carrying over a duplicate of this record's headers.
 */
@Override
public SourceRecord newRecord(String topic, Integer kafkaPartition, Schema keySchema, Object key,
        Schema valueSchema, Object value, Long timestamp) {
    // duplicate() so the new record does not share a mutable Headers instance with this one.
    return newRecord(topic, kafkaPartition, keySchema, key, valueSchema, value, timestamp, headers().duplicate());
}
// Build the outgoing record keyed off the topic selected for this string payload.
SourceRecord sourceRecord = new SourceRecord(sourcePartition, sourceOffset, topicSelector.getTopic(string), Schema.STRING_SCHEMA, string);
// Copy every response header onto the record; each value is a list of strings,
// so the header schema is an array of STRING. (Fragment — the loop closes outside this view.)
for (Map.Entry<String, List<String>> header : response.getHeaders().entrySet()) {
    sourceRecord.headers().add(header.getKey(), header.getValue(), SchemaBuilder.array(Schema.STRING_SCHEMA).build());
/**
 * Serializes a {@link SourceRecord} by flattening it into an intermediate
 * {@code Storage} POJO and writing that object through the generator.
 */
@Override
public void serialize(SourceRecord record, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException, JsonProcessingException {
    Storage storage = new Storage();
    storage.sourcePartition = record.sourcePartition();
    storage.sourceOffset = record.sourceOffset();
    storage.topic = record.topic();
    storage.kafkaPartition = record.kafkaPartition();
    storage.keySchema = record.keySchema();
    storage.key = record.key();
    storage.valueSchema = record.valueSchema();
    storage.value = record.value();
    storage.timestamp = record.timestamp();
    if (record.headers() != null) {
        // Copy the headers into a plain list so Jackson can serialize them.
        List<Header> headerList = new ArrayList<>();
        for (Header header : record.headers()) {
            headerList.add(header);
        }
        storage.headers = headerList;
    }
    jsonGenerator.writeObject(storage);
}
}
/**
 * Converts the Connect headers of the given record into Kafka {@link RecordHeaders},
 * serializing each value via the configured header converter. Returns an empty
 * container when the record exposes no headers.
 */
private RecordHeaders convertHeaderFor(SourceRecord record) {
    RecordHeaders converted = new RecordHeaders();
    Headers connectHeaders = record.headers();
    if (connectHeaders == null) {
        return converted;
    }
    String topic = record.topic();
    for (Header header : connectHeaders) {
        String headerKey = header.key();
        byte[] serialized = headerConverter.fromConnectHeader(topic, headerKey, header.schema(), header.value());
        converted.add(headerKey, serialized);
    }
    return converted;
}
@Test
// With header inclusion disabled, polled records must expose correct partition/offset
// metadata and an empty header set.
public void testPollRecordReturnedNoIncludeHeaders() throws Exception {
    mockConsumerInitialization();
    EasyMock.expect(consumer.poll(Duration.ofMillis(POLL_LOOP_TIMEOUT_MS_VALUE))).andReturn(createTestRecords());
    replayAll();
    objectUnderTest.start(opts);
    List<SourceRecord> records = objectUnderTest.poll();
    SourceRecord testRecord = records.get(0);
    // Source partition is encoded as "topic:partition".
    assertEquals(String.format("%s:%d", FIRST_TOPIC, FIRST_PARTITION), testRecord.sourcePartition().get(TOPIC_PARTITION_KEY));
    assertEquals(FIRST_OFFSET, testRecord.sourceOffset().get(OFFSET_KEY));
    // No headers should be copied when inclusion is not configured.
    assertEquals(0, testRecord.headers().size());
    verifyAll();
}
testRecord.sourcePartition().get(TOPIC_PARTITION_KEY));
assertEquals(FIRST_OFFSET, testRecord.sourceOffset().get(OFFSET_KEY));
// Exactly one header expected — presumably header inclusion is enabled in this test; confirm against the full method.
assertEquals(1, testRecord.headers().size());