/**
 * Deserializes an Avro-encoded message and caches the decoded record's schema,
 * keyed by topic, in the {@code schemas} map.
 *
 * @param topic   the Kafka topic the message was read from
 * @param message the raw serialized payload
 * @return the decoded Avro {@link GenericRecord}
 */
public GenericRecord decodeMessage(String topic, byte[] message) {
  final GenericRecord decoded = (GenericRecord) deserializer.deserialize(topic, message);
  // Remember the latest schema seen for this topic.
  schemas.put(topic, decoded.getSchema());
  return decoded;
}
/**
 * Probes whether the record's value is Avro-decodable for this topic; if so,
 * returns a formatter for it, otherwise an empty {@link Optional}.
 *
 * @param topicName        topic the record belongs to
 * @param record           the record whose value is probed
 * @param avroDeserializer deserializer used for the trial decode
 * @param dateFormat       date format handed to the created formatter
 * @return a formatter when the payload is Avro, else {@code Optional.empty()}
 */
@Override
public Optional<Formatter> maybeGetFormatter(
    final String topicName,
    final ConsumerRecord<String, Bytes> record,
    final KafkaAvroDeserializer avroDeserializer,
    final DateFormat dateFormat) {
  try {
    // Trial decode: a failure of any kind just means "this is not our format".
    avroDeserializer.deserialize(topicName, record.value().get());
    final Formatter formatter = createFormatter(topicName, avroDeserializer, dateFormat);
    return Optional.of(formatter);
  } catch (final Throwable t) {
    // Intentionally broad catch — format detection must never propagate errors.
    return Optional.empty();
  }
}
// Round-trip check: decode the serialized row with a raw Confluent Avro deserializer
// and compare a field value. NOTE(review): this fragment is truncated mid-assertion
// ("equalTo" has no argument) — the remainder lies outside this view.
final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow); Assert.assertNotNull(genericRecord); assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
// Round-trip check: decode the serialized row with a raw Confluent Avro deserializer
// and verify the upper-cased "ORDERTIME" field. NOTE(review): fragment is truncated
// mid-assertion ("equalTo" has no argument) — the rest is outside this view.
final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow); Assert.assertNotNull(genericRecord); assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
// Decode the previously serialized bytes back into a raw Avro record for inspection.
// ("deserializer" and "bytes" are declared outside this fragment.)
final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);
/**
 * Verifies that for internal topics the '.' delimiter in a source-qualified field
 * name is rewritten to '_' in the Avro schema, and that the serde round-trips the row.
 */
@Test
public void shouldTransformSourceNameDelimiterForInternal() {
  // Single field whose name carries the "source.field" delimiter.
  final Schema schema = SchemaBuilder.struct()
      .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
      .build();
  final GenericRow row = new GenericRow(ImmutableList.of(123));

  final Serde<GenericRow> serde =
      new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
          schema,
          new KsqlConfig(Collections.emptyMap()),
          true, // internal topic: '.' is expected to become '_'
          () -> schemaRegistryClient,
          "loggerName");

  final byte[] serialized = serde.serializer().serialize("topic", row);

  // Inspect the raw Avro view: the delimiter must have been transformed.
  final KafkaAvroDeserializer rawDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  final GenericRecord avroRecord = (GenericRecord) rawDeserializer.deserialize("topic", serialized);
  assertThat(avroRecord.getSchema().getFields().size(), equalTo(1));
  assertThat(avroRecord.getSchema().getFields().get(0).name(), equalTo("source_field0"));
  assertThat(avroRecord.get("source_field0"), equalTo(123));

  // The serde itself must still round-trip to the original row.
  final GenericRow roundTripped = serde.deserializer().deserialize("topic", serialized);
  assertThat(roundTripped, equalTo(row));
}
/**
 * Verifies that for non-internal topics the "source." prefix is stripped from the
 * field name in the Avro schema, and that the serde round-trips the row.
 */
@Test
public void shouldRemoveSourceName() {
  // Single field carrying a source-qualified name.
  final Schema schema = SchemaBuilder.struct()
      .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
      .build();
  final GenericRow row = new GenericRow(ImmutableList.of(123));

  final Serde<GenericRow> serde =
      new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
          schema,
          new KsqlConfig(Collections.emptyMap()),
          false, // not internal: the source prefix is expected to be removed
          () -> schemaRegistryClient,
          "loggerName");

  final byte[] serialized = serde.serializer().serialize("topic", row);

  // Inspect the raw Avro view: only the bare field name should remain.
  final KafkaAvroDeserializer rawDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  final GenericRecord avroRecord = (GenericRecord) rawDeserializer.deserialize("topic", serialized);
  assertThat(avroRecord.getSchema().getFields().size(), equalTo(1));
  assertThat(avroRecord.get("field0"), equalTo(123));

  // The serde itself must still round-trip to the original row.
  final GenericRow roundTripped = serde.deserializer().deserialize("topic", serialized);
  assertThat(roundTripped, equalTo(row));
}
// Decode the serialized bytes with a raw Avro deserializer to inspect the wire form.
// ("deserializer" and "bytes" are declared outside this fragment.)
final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);
/**
 * Delegates to the wrapped deserializer and casts the result to the expected type.
 * The unchecked cast is safe only when the inner deserializer produces T instances.
 *
 * @param topic topic the payload came from
 * @param data  raw serialized bytes
 * @return the deserialized value, cast to T
 */
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
  final Object value = inner.deserialize(topic, data);
  return (T) value;
}
/**
 * Delegates to the wrapped deserializer, narrowing the result to {@link GenericRecord}.
 *
 * @param topic topic the payload came from
 * @param data  raw serialized bytes
 * @return the deserialized Avro record
 */
@Override
public GenericRecord deserialize(String topic, byte[] data) {
  final Object value = inner.deserialize(topic, data);
  return (GenericRecord) value;
}
@Override public String deserializeMessage(ByteBuffer buffer) { KafkaAvroDeserializer deserializer = getDeserializer(); // Convert byte buffer to byte array byte[] bytes = ByteUtils.convertToByteArray(buffer); return formatJsonMessage(deserializer.deserialize(topicName, bytes).toString()); }
/**
 * Round-trips a generic Avro record through the Confluent serializer/deserializer
 * pair against the embedded schema-registry endpoint and logs the decoded result.
 */
@Test
public void testConfluentSerDes() throws Exception {
  final org.apache.avro.Schema schema =
      new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
  final GenericRecord record = new GenericRecordBuilder(schema)
      .set("field1", "some value")
      .set("field2", "some other value")
      .build();

  // Point both ends of the serde at the test registry URL.
  final Map<String, Object> config = new HashMap<>();
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());

  final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
  serializer.configure(config, false);
  final byte[] bytes = serializer.serialize("topic", record);

  final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
  deserializer.configure(config, false);
  final GenericRecord result = (GenericRecord) deserializer.deserialize("topic", bytes);
  LOG.info(result.toString());
}
// Configure the Confluent deserializer (value mode), decode the bytes, and log the result.
// ("confluentKafkaAvroDeserializer", "confluentConfig" and "bytes" are declared outside this fragment.)
confluentKafkaAvroDeserializer.configure(confluentConfig, false); GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes); LOG.info(confluentResult.toString());
// Configure the Confluent deserializer (value mode), decode the bytes, and log the result.
// ("confluentKafkaAvroDeserializer", "confluentConfig" and "bytes" are declared outside this fragment.)
confluentKafkaAvroDeserializer.configure(confluentConfig, false); GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes); LOG.info(confluentResult.toString());