protected void init(SecorConfig config) {
    deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
}
public RecordFormatter(final SchemaRegistryClient schemaRegistryClient, final String topicName) {
    this.topicName = Objects.requireNonNull(topicName, "topicName");
    this.avroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
}
private void initKafka() {
    schemaRegistryClient = new MockSchemaRegistryClient();
    kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    // schema.registry.url is a mandatory serde config even though the mock client
    // never performs network calls, so a placeholder value is supplied. The original
    // snippet built this config but never passed it anywhere; it is wired into the
    // serializer here via the two-argument constructor.
    Map<String, Object> defaultConfig = new HashMap<>();
    defaultConfig.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "bogus");
    avroSerializer = new KafkaAvroSerializer(schemaRegistryClient, defaultConfig);
}
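Several of the snippets below rely on the client-injecting constructors seen above, which make registry-free unit testing possible. A minimal self-contained sketch of that round trip, assuming only the Confluent serde and Avro libraries (the schema, field, and topic names are illustrative):

// Serializer and deserializer share one in-memory registry, so the schema id the
// serializer embeds in the payload resolves locally, with no network calls.
SchemaRegistryClient registry = new MockSchemaRegistryClient();
KafkaAvroSerializer serializer = new KafkaAvroSerializer(registry);
KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(registry);

Schema schema = SchemaBuilder.record("Example").fields().requiredString("name").endRecord();
GenericRecord record = new GenericRecordBuilder(schema).set("name", "value").build();

byte[] bytes = serializer.serialize("example-topic", record);
GenericRecord roundTripped = (GenericRecord) deserializer.deserialize("example-topic", bytes);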
@Test
public void testConfluentShouldNotQuerySchemaRegistryWhenTheGapIsZero()
        throws IOException, RestClientException, SchemaRegistryException {
    WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 0L);
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState,
        Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
    kafkaDecoderExtractor.getSchema();
    // The verification must follow getSchema(): with a zero gap the extractor
    // should never fall back to fetching the latest schema from the registry.
    verify(mockKafkaSchemaRegistry, never()).getLatestSchemaByTopic(any());
}
final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);
@Test
public void shouldTransformSourceNameDelimiterForInternal() {
    final Schema ksqlRecordSchema = SchemaBuilder.struct()
        .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
    final GenericRow ksqlRecord = new GenericRow(ImmutableList.of(123));
    final Serde<GenericRow> serde =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
            ksqlRecordSchema,
            new KsqlConfig(Collections.emptyMap()),
            true,
            () -> schemaRegistryClient,
            "loggerName");
    final byte[] bytes = serde.serializer().serialize("topic", ksqlRecord);
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);
    assertThat(avroRecord.getSchema().getFields().size(), equalTo(1));
    assertThat(avroRecord.getSchema().getFields().get(0).name(), equalTo("source_field0"));
    assertThat(avroRecord.get("source_field0"), equalTo(123));
    final GenericRow deserializedKsqlRecord = serde.deserializer().deserialize("topic", bytes);
    assertThat(deserializedKsqlRecord, equalTo(ksqlRecord));
}
@Test
public void shouldRemoveSourceName() {
    final Schema ksqlRecordSchema = SchemaBuilder.struct()
        .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
    final GenericRow ksqlRecord = new GenericRow(ImmutableList.of(123));
    final Serde<GenericRow> serde =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
            ksqlRecordSchema,
            new KsqlConfig(Collections.emptyMap()),
            false,
            () -> schemaRegistryClient,
            "loggerName");
    final byte[] bytes = serde.serializer().serialize("topic", ksqlRecord);
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);
    assertThat(avroRecord.getSchema().getFields().size(), equalTo(1));
    assertThat(avroRecord.get("field0"), equalTo(123));
    final GenericRow deserializedKsqlRecord = serde.deserializer().deserialize("topic", bytes);
    assertThat(deserializedKsqlRecord, equalTo(ksqlRecord));
}
@Test
public void testConfluentAvroDeserializer() throws IOException, RestClientException {
    WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
    Schema schema = SchemaBuilder.record(TEST_RECORD_NAME)
        .namespace(TEST_NAMESPACE).fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .endRecord();
    GenericRecord testGenericRecord = new GenericRecordBuilder(schema).set(TEST_FIELD_NAME, "testValue").build();
    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schema);
    Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
    ByteBuffer testGenericRecordByteBuffer =
        ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState,
        Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
    ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
    Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testGenericRecord);
}
@Test
public void testConfluentAvroDeserializerForSchemaEvolution()
        throws IOException, RestClientException, SchemaRegistryException {
    WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
    Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME)
        .namespace(TEST_NAMESPACE).fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .endRecord();
    Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME)
        .namespace(TEST_NAMESPACE).fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .optionalString(TEST_FIELD_NAME2)
        .endRecord();
    GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
    Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
    ByteBuffer testGenericRecordByteBuffer =
        ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
    // when(...) cannot stub a method on a plain object, so the extractor is wrapped
    // in a spy and getSchema() is forced to return the evolved reader schema.
    KafkaDeserializerExtractor kafkaDecoderExtractor = spy(new KafkaDeserializerExtractor(mockWorkUnitState,
        Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry));
    doReturn(schemaV2).when(kafkaDecoderExtractor).getSchema();
    ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
    GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
    Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
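Both Gobblin tests hinge on Avro's writer/reader schema resolution: bytes encoded with schemaV1 are decoded against schemaV2, and the added optional field falls back to its default. A Kafka-free sketch of that mechanism, with illustrative names:

// Sketch: plain Avro schema resolution, the mechanism behind the evolution test above.
Schema writer = SchemaBuilder.record("TestRecord").fields()
    .requiredString("testField").endRecord();
Schema reader = SchemaBuilder.record("TestRecord").fields()
    .requiredString("testField").optionalString("testField2").endRecord();

GenericRecord original = new GenericRecordBuilder(writer).set("testField", "testValue").build();

ByteArrayOutputStream out = new ByteArrayOutputStream();
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
new GenericDatumWriter<GenericRecord>(writer).write(original, encoder);
encoder.flush();

BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
GenericRecord evolved = new GenericDatumReader<GenericRecord>(writer, reader).read(null, decoder);
// The field absent from the writer schema resolves to its default:
// {"testField": "testValue", "testField2": null}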
/**
 * Constructor used by Kafka Streams.
 */
public GenericAvroDeserializer() {
    inner = new KafkaAvroDeserializer();
}

/**
 * Constructor used by Kafka Streams.
 */
public SpecificAvroDeserializer() {
    inner = new KafkaAvroDeserializer();
}

public GenericAvroDeserializer(SchemaRegistryClient client) {
    inner = new KafkaAvroDeserializer(client);
}

public SpecificAvroDeserializer(SchemaRegistryClient client) {
    inner = new KafkaAvroDeserializer(client);
}

public SpecificAvroDeserializer(SchemaRegistryClient client, Map<String, ?> props) {
    inner = new KafkaAvroDeserializer(client, props);
}

public GenericAvroDeserializer(SchemaRegistryClient client, Map<String, ?> props) {
    inner = new KafkaAvroDeserializer(client, props);
}
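These wrappers offer two paths: the no-arg constructors exist because Kafka Streams instantiates serdes reflectively and calls configure() afterwards, while the client-injecting constructors serve tests. A minimal sketch of the configure-then-use flow; the registry URL is a placeholder and serializedBytes stands in for a real payload:

// Sketch: configuring the no-arg wrapper by hand, the same way Kafka Streams
// would after reflective instantiation.
GenericAvroDeserializer deserializer = new GenericAvroDeserializer();
Map<String, Object> serdeConfig = new HashMap<>();
serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
deserializer.configure(serdeConfig, false); // false: configuring a value deserializer
GenericRecord value = deserializer.deserialize("example-topic", serializedBytes);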
private KafkaAvroDeserializer getDeserializer() {
    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
    kafkaAvroDeserializer.configure(config, false);
    return kafkaAvroDeserializer;
}
@Test
public void testConfluentSerDes() throws Exception {
    org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    GenericRecord record = new GenericRecordBuilder(schema)
        .set("field1", "some value")
        .set("field2", "some other value")
        .build();
    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());
    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
    kafkaAvroSerializer.configure(config, false);
    byte[] bytes = kafkaAvroSerializer.serialize("topic", record);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
    kafkaAvroDeserializer.configure(config, false);
    GenericRecord result = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
    LOG.info(result.toString());
}
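The same property-driven path applies when a consumer instantiates the deserializer reflectively rather than by hand. A sketch of that wiring; the bootstrap server, registry URL, group id, and topic are all placeholders:

// Sketch: letting the consumer construct and configure KafkaAvroDeserializer from
// properties, which is why schema.registry.url must appear in the consumer config.
Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");

try (KafkaConsumer<String, GenericRecord> consumer = new KafkaConsumer<>(props)) {
    consumer.subscribe(Collections.singletonList("example-topic"));
    for (ConsumerRecord<String, GenericRecord> record : consumer.poll(Duration.ofSeconds(1))) {
        System.out.println(record.value());
    }
}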