/**
 * Decodes a single Kafka message payload into a {@link GenericRecord}.
 *
 * <p>The writer schema is resolved per message via {@code getRecordSchema(payload)} and set on
 * the shared datum reader before reading, then the decoded record is converted to this
 * extractor's target schema via {@code convertRecord}.
 *
 * @param messageAndOffset the raw Kafka record whose message bytes are decoded
 * @return the decoded and converted Avro record
 * @throws IOException if Avro decoding fails; the error is logged with context first
 */
@Override
protected GenericRecord decodeRecord(ByteArrayBasedKafkaRecord messageAndOffset) throws IOException {
  byte[] payload = messageAndOffset.getMessageBytes();
  Schema recordSchema = getRecordSchema(payload);
  Decoder decoder = getDecoder(payload);
  this.reader.get().setSchema(recordSchema);
  try {
    GenericRecord record = this.reader.get().read(null, decoder);
    record = convertRecord(record);
    return record;
  } catch (IOException e) {
    // Pass the exception to the logger so the stack trace is preserved; the original call
    // formatted a message ending in ": " but never attached the cause.
    log.error(String.format("Error during decoding record for partition %s: ", this.getCurrentPartition()), e);
    throw e;
  }
}
/**
 * Constructs the extractor: wires up the optional schema registry from the work-unit state,
 * resolves the extractor's target schema, and builds a datum reader for it.
 *
 * <p>If no schema can be resolved for the topic, an error is logged and the reader is left
 * absent so the topic is skipped.
 *
 * @param state the work-unit state carrying configuration properties
 */
public KafkaAvroExtractor(WorkUnitState state) {
  super(state);
  if (state.contains(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_CLASS)) {
    this.schemaRegistry = Optional.of(KafkaSchemaRegistry.<K, Schema> get(state.getProperties()));
  } else {
    this.schemaRegistry = Optional.<KafkaSchemaRegistry<K, Schema>> absent();
  }
  this.schema = getExtractorSchema();
  if (!this.schema.isPresent()) {
    log.error(String.format("Cannot find latest schema for topic %s. This topic will be skipped", this.topicName));
    this.reader = Optional.absent();
  } else {
    this.reader = Optional.of(new GenericDatumReader<Record>(this.schema.get()));
  }
}
/**
 * Returns the schema this extractor emits; every extracted record with a different writer
 * schema is converted to this one.
 *
 * @return the latest schema for the topic, or absent if none can be found
 */
protected Optional<Schema> getExtractorSchema() {
  Schema latestSchema = getLatestSchemaByTopic(this.topicName);
  return Optional.fromNullable(latestSchema);
}
/**
 * Decodes a single Kafka message payload into a {@link GenericRecord}.
 *
 * <p>The writer schema is resolved per message via {@code getRecordSchema(payload)} and set on
 * the shared datum reader before reading, then the decoded record is converted to this
 * extractor's target schema via {@code convertRecord}.
 *
 * @param messageAndOffset the raw Kafka record whose message bytes are decoded
 * @return the decoded and converted Avro record
 * @throws IOException if Avro decoding fails; the error is logged with context first
 */
@Override
protected GenericRecord decodeRecord(ByteArrayBasedKafkaRecord messageAndOffset) throws IOException {
  byte[] payload = messageAndOffset.getMessageBytes();
  Schema recordSchema = getRecordSchema(payload);
  Decoder decoder = getDecoder(payload);
  this.reader.get().setSchema(recordSchema);
  try {
    GenericRecord record = this.reader.get().read(null, decoder);
    record = convertRecord(record);
    return record;
  } catch (IOException e) {
    // Pass the exception to the logger so the stack trace is preserved; the original call
    // formatted a message ending in ": " but never attached the cause.
    log.error(String.format("Error during decoding record for partition %s: ", this.getCurrentPartition()), e);
    throw e;
  }
}
/**
 * Returns the schema this extractor emits; records carrying any other schema are converted
 * to it during extraction.
 *
 * @return the latest schema for the topic, or absent if none can be found
 */
protected Optional<Schema> getExtractorSchema() {
  Schema resolved = getLatestSchemaByTopic(this.topicName);
  return resolved == null ? Optional.<Schema> absent() : Optional.of(resolved);
}
/**
 * Constructs the extractor: wires up the optional schema registry from the work-unit state,
 * resolves the extractor's target schema, and builds a datum reader for it.
 *
 * <p>If no schema can be resolved for the topic, an error is logged and the reader is left
 * absent so the topic is skipped.
 *
 * @param state the work-unit state carrying configuration properties
 */
public KafkaAvroExtractor(WorkUnitState state) {
  super(state);
  if (state.contains(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_CLASS)) {
    this.schemaRegistry = Optional.of(KafkaSchemaRegistry.<K, Schema> get(state.getProperties()));
  } else {
    this.schemaRegistry = Optional.<KafkaSchemaRegistry<K, Schema>> absent();
  }
  this.schema = getExtractorSchema();
  if (!this.schema.isPresent()) {
    log.error(String.format("Cannot find latest schema for topic %s. This topic will be skipped", this.topicName));
    this.reader = Optional.absent();
  } else {
    this.reader = Optional.of(new GenericDatumReader<Record>(this.schema.get()));
  }
}