/**
 * Builds a test {@link ConsumerRecord} at the given offset whose value is a
 * minimal serialized {@link Envelope} carrying only a dummy message id.
 */
private ConsumerRecord<String, byte[]> testRecordWithOffset(long offset) {
    Envelope.Builder builder = Envelope.newBuilder();
    builder.setMessageId("cruft");
    byte[] envelopeBytes = builder.build().toByteArray();
    return new ConsumerRecord<>(TOPIC, PARTITION, offset, KEY, envelopeBytes);
}
/**
 * Reconstructs a received {@link Message} from its Kafka representation.
 *
 * <p>Positional metadata (topic, key, partition, offset) comes from the consumer
 * record; identity and exchange-pattern metadata (ids, reply-to, type) comes from
 * the decoded envelope. {@code wasReceived} is always {@code true} on this path
 * because it only runs for records read from a broker.
 *
 * @param protoMessage the already-parsed inner payload
 * @param envelope     the decoded outer envelope
 * @param record       the raw consumer record the envelope was read from
 * @return the reassembled message with full metadata
 */
static Message<? extends com.google.protobuf.Message> fromKafka(com.google.protobuf.Message protoMessage, Envelope envelope, ConsumerRecord<String, byte[]> record) {
    Metadata metadata = new Metadata(
        true, // wasReceived: this factory is only used on the consume side
        new Topic(record.topic()),
        record.key(),
        record.partition(),
        record.offset(),
        envelope.getMessageId(),
        envelope.getCorrelationId(),
        envelope.getRequestCorrelationId(),
        new Topic(envelope.getReplyTo()),
        MessageType.of(protoMessage));
    return new Message<>(protoMessage, metadata);
}
/**
 * Decodes the current record into a {@link Message}, or {@code null} if it is
 * unparseable.
 *
 * <p>Parsing happens in two stages: first the outer {@link Envelope}, then the
 * inner payload via the parser registered for the envelope's declared type. A
 * failure at either stage is unrecoverable for this record, so the offset is
 * marked as consumed and the failure is reported before returning {@code null}.
 */
private Message<? extends com.google.protobuf.Message> parseMessage() {
    // Stage 1: the outer envelope. If this fails we have no metadata at all.
    Envelope envelope = null;
    try {
        envelope = Envelope.parseFrom(record.value());
    } catch (InvalidProtocolBufferException parseError) {
        markAsConsumed(record.offset());
        parsingFailed(envelope, parseError); // envelope is still null here
        return null;
    }
    // Stage 2: the inner payload, using the type declared by the envelope.
    try {
        MessageType declaredType = new MessageType(envelope.getMessageType());
        Parser<com.google.protobuf.Message> payloadParser = typeDictionary.parserFor(declaredType);
        if (payloadParser == null) {
            throw new UnknownMessageTypeException(declaredType);
        }
        com.google.protobuf.Message payload = payloadParser.parseFrom(envelope.getInnerMessage());
        return Messages.fromKafka(payload, envelope, record);
    } catch (InvalidProtocolBufferException | UnknownMessageTypeException unrecoverableParsingError) {
        markAsConsumed(record.offset());
        parsingFailed(envelope, unrecoverableParsingError);
        return null;
    }
}
/**
 * Builds a structured log marker carrying the record's Kafka coordinates, plus
 * the envelope's ids and type when the envelope is available.
 *
 * @param envelope the decoded envelope, or {@code null} when envelope parsing
 *                 itself failed (only record-level fields are attached then)
 * @return a marker suitable for passing to the SLF4J logging calls in this class
 */
private Marker logMarkerFromRecordAndEnvelope(Envelope envelope) {
    LogstashMarker logMarker = append("topic", record.topic())
        .and(append("partitionId", record.partition()))
        .and(append("distributionKey", record.key()))
        .and(append("offset", record.offset()));
    if (envelope != null) {
        // The result of this chain is intentionally discarded: LogstashMarker.and()
        // appears to mutate the receiver and return it, so logMarker still gains the
        // references. NOTE(review): relies on that mutation semantic of
        // logstash-logback-encoder — confirm it still holds on library upgrades.
        logMarker
            .and(append("messageId", envelope.getMessageId()))
            .and(append("correlationId", envelope.getCorrelationId()))
            .and(append("messageType", envelope.getMessageType()));
    }
    return logMarker;
}
}
// Serialize the message into its wire-format Envelope, then wrap the bytes in a
// producer record keyed by the partitioning key (so related messages hash to the
// same partition).
Envelope envelope = Messages.toKafka(message);
ProducerRecord<String, byte[]> record = new ProducerRecord<>(destinationTopic, partitioningKey, envelope.toByteArray());
static Envelope toKafka(Message message) { Envelope.Builder envelope = Envelope.newBuilder(); Metadata meta = message.getMetadata(); envelope.setMessageId(meta.getMessageId()); // Correlation ids are set when building the message if (!Strings.isNullOrEmpty(meta.getCorrelationId())) { envelope.setCorrelationId(meta.getCorrelationId()); } // Message exchange pattern headers if (meta.getReplyTo() != null) { envelope.setReplyTo(meta.getReplyTo().toString()); } if (!Strings.isNullOrEmpty(meta.getRequestCorrelationId())) { envelope.setRequestCorrelationId(meta.getRequestCorrelationId()); } // Payload (mandatory fields!) envelope.setMessageType(meta.getType().toString()); envelope.setInnerMessage(message.getPayload().toByteString()); // Serialize the proto payload to bytes return envelope.build(); } }
/**
 * Reports an unrecoverable parse failure for the current record: logs a warning
 * with the cause, logs at debug that the offset was marked consumed, and bumps
 * the parse-failure counter when metrics are configured.
 *
 * @param envelope       the decoded envelope, or {@code null} when the envelope
 *                       itself could not be parsed
 * @param parseException the underlying failure
 */
private void parsingFailed(Envelope envelope, Exception parseException) {
    String topic = record.topic();
    // Without an envelope the declared type is unknown; fall back to a sentinel.
    String messageType = "NoValidEnvelope";
    String warnMsg = "Cannot parse Envelope from raw record.";
    if (envelope != null) {
        messageType = envelope.getMessageType();
        warnMsg = "Cannot parse inner payload message.";
    }
    logger.warn(logMarkerFromRecordAndEnvelope(envelope), warnMsg, parseException);
    logger.debug(logMarkerFromRecordAndEnvelope(envelope),
        "Message {} with offset {} in {}-{} marked as consumed.",
        messageType, record.offset(), topic, record.partition());
    // Surface parse failures in metrics dashboards, tagged by type and topic.
    if (metricsBuilderFactory != null) {
        GoCounter parsingFailureCounter = metricsBuilderFactory
            .newMetric("messaging_consumer_parse_failures")
            .withTag("messageType", messageType)
            .withTag("topic", topic)
            .buildCounter();
        parsingFailureCounter.incFailure();
    }
}
/**
 * Round-trip test helper: serializes the message exactly as the producer would,
 * then wraps the resulting bytes in a {@link ConsumerRecord} as if they had been
 * read back from the broker at the given offset.
 */
private ConsumerRecord<String, byte[]> simulateKafkaInTheLoop(Message message, long offset) {
    Metadata metadata = message.getMetadata();
    byte[] wireBytes = Messages.toKafka(message).toByteArray();
    return new ConsumerRecord<>(
        metadata.getTopic().toString(),
        PARTITION,
        offset,
        metadata.getPartitioningKey(),
        wireBytes);
}