@Override
public Message next() {
    MessageAndMetadata<byte[], byte[]> kafkaMessage;
    try {
        kafkaMessage = mIterator.next();
    } catch (ConsumerTimeoutException e) {
        throw new LegacyConsumerTimeoutException(e);
    }
    long timestamp = 0L;
    if (mConfig.useKafkaTimestamp()) {
        timestamp = mKafkaMessageTimestampFactory.getKafkaMessageTimestamp()
                .getTimestamp(kafkaMessage);
    }
    return new Message(kafkaMessage.topic(), kafkaMessage.partition(), kafkaMessage.offset(),
            kafkaMessage.key(), kafkaMessage.message(), timestamp);
}
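// A minimal sketch (an assumption, not the snippet's actual definition) of the timestamp
// hook used above. Kafka 0.10+ brokers attach a timestamp to each message; the snippet
// falls back to 0L when the feature is disabled.
public interface KafkaMessageTimestamp {
    long getTimestamp(MessageAndMetadata<byte[], byte[]> kafkaMessage);
}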
private void processAuditMsg(final MessageAndMetadata<byte[], byte[]> mm) throws Exception {
    // Parameterizing MessageAndMetadata makes message() return byte[] directly,
    // so the raw-type cast in the original is no longer needed.
    JSONObject record = JSON.parseObject(
            StringUtils.toEncodedString(mm.message(), StandardCharsets.UTF_8));
    String topicName = record.getString(AuditMsgField.TOPICNAME.getName());
    if (blacklistedTopics.contains(topicName)) {
        logger.debug("Topic={} is blacklisted", topicName);
        return;
    }
    if (deduplicator != null) {
        String uuid = record.getString(AuditMsgField.UUID.getName());
        String host = record.getString(AuditMsgField.HOSTNAME.getName());
        if (deduplicator.isDuplicated(topicName, mm.partition(), mm.offset(), host, uuid)) {
            return;
        }
    }
    if (enablePersistentStore) {
        auditReporter.submit(mm.topic(), mm.partition(), mm.offset(), record);
    }
}
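// Hedged sketch of the field-name enum the snippet reads from. The constants match the
// usages above, but the JSON key strings are illustrative assumptions, not the original
// definition.
public enum AuditMsgField {
    TOPICNAME("topicName"),
    HOSTNAME("hostname"),
    UUID("uuid");

    private final String name;

    AuditMsgField(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }
}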
@Override
public String topic() {
    return rec.topic();
}
public List<ProducerRecord<byte[], byte[]>> handle(MessageAndMetadata<byte[], byte[]> record) {
    // Mirror the consumed record to the same partition of a prefixed destination topic.
    return Collections.singletonList(new ProducerRecord<byte[], byte[]>(
            topicPrefix + "." + record.topic(), record.partition(), record.key(), record.message()));
}
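// Hedged usage sketch: one plausible way a mirroring loop could drive handle().
// The "handler" and "producer" names are assumptions for illustration, not part of
// the original snippet; "producer" is presumed to be a KafkaProducer<byte[], byte[]>.
for (ProducerRecord<byte[], byte[]> pr : handler.handle(record)) {
    producer.send(pr);
}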
public Message(MessageAndMetadata<byte[], byte[]> message) {
    this.topic = message.topic();
    this.key = message.key() != null ? new String(message.key(), StandardCharsets.UTF_8) : null;
    this.message = new String(message.message(), StandardCharsets.UTF_8);
    this.partition = message.partition();
    this.offset = message.offset();
}
@Override
public void run() {
    ConsumerIterator<byte[], byte[]> it = _stream.iterator();
    while (it.hasNext()) {
        Thread.yield();
        if (Thread.currentThread().isInterrupted()) {
            _logger.info("Interrupted... Will exit now.");
            break;
        }
        MessageAndMetadata<byte[], byte[]> m = it.next();
        try {
            // Guard the payload itself: new String(...) never returns null, so the
            // original null check on the decoded string could never fire.
            if (m.message() != null) {
                String topic = m.topic();
                String message = new String(m.message());
                _topics.get(topic).getMessages().put(message);
                long c = count.incrementAndGet();
                if (c % 50000 == 0) {
                    _logger.debug("Read {} messages.", c);
                }
                if (_topics.get(topic).getMessages().size() % 1000 == 0) {
                    _logger.debug("Message queued. Queue size = {}",
                            _topics.get(topic).getMessages().size());
                }
            }
        } catch (InterruptedException ie) {
            _logger.debug("Interrupted while consuming message.");
            Thread.currentThread().interrupt();
        }
    }
}
@Override
@SuppressWarnings("unchecked")
public void run() {
    String subprotocol = session.getNegotiatedSubprotocol();
    for (MessageAndMetadata<byte[], byte[]> messageAndMetadata
            : (Iterable<MessageAndMetadata<byte[], byte[]>>) stream) {
        String topic = messageAndMetadata.topic();
        byte[] message = messageAndMetadata.message();
        switch (subprotocol) {
            case "kafka-binary":
                sendBinary(topic, message);
                break;
            default:
                sendText(topic, message);
                break;
        }
        if (Thread.currentThread().isInterrupted()) {
            try {
                session.close();
            } catch (IOException e) {
                LOG.error("Error terminating session: {}", e.getMessage());
            }
            return;
        }
    }
}
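// Hedged sketch of the two send helpers the loop above dispatches to, assuming a
// javax.websocket Session field named "session". These bodies are illustrative
// assumptions, not the original implementations; the UTF-8 decoding for text frames
// is also an assumption.
private void sendBinary(String topic, byte[] message) {
    try {
        session.getBasicRemote().sendBinary(ByteBuffer.wrap(message));
    } catch (IOException e) {
        LOG.error("Error sending binary message for topic {}: {}", topic, e.getMessage());
    }
}

private void sendText(String topic, byte[] message) {
    try {
        session.getBasicRemote().sendText(new String(message, StandardCharsets.UTF_8));
    } catch (IOException e) {
        LOG.error("Error sending text message for topic {}: {}", topic, e.getMessage());
    }
}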
String message = String.format(
        "Topic:%s, GroupID:%s, Consumer ID:%s, PartitionID:%s, Offset:%s, Message Key:%s, Message Payload: %s",
        messageAndMetadata.topic(), groupid, consumerid,
        messageAndMetadata.partition(), messageAndMetadata.offset(),
        // Kafka keys may be null; guard before decoding to avoid a NullPointerException.
        messageAndMetadata.key() != null ? new String(messageAndMetadata.key()) : null,
        new String(messageAndMetadata.message()));
void accept(MessageAndMetadata<byte[], byte[]> rec) {
    try {
        trace.trace("{} received rec for topic:{} partition:{} offset:{}",
                id(), rec.topic(), rec.partition(), rec.offset());
        T tuple;
        if (stringToTupleFn != null)
            tuple = stringToTupleFn.apply(new StringConsumerRecord(rec));
        else
            tuple = byteToTupleFn.apply(new ByteConsumerRecord(rec));
        eventSubmitter.accept(tuple);
    } catch (Exception e) {
        String tp = String.format("[%s,%d]", rec.topic(), rec.partition());
        trace.error("{} failure processing record from {}", id(), tp, e);
    }
}
@Override
public void run() {
    try {
        long start = logger.isDebugEnabled() ? System.currentTimeMillis() : 0;
        messageHandler.p2Process(message);
        if (logger.isDebugEnabled()) {
            long useTime = System.currentTimeMillis() - start;
            if (useTime > 1000) {
                logger.debug("received_topic_useTime [{}]process topic:{} use time {} ms",
                        processorName, topicName, useTime);
            }
        }
        // Send the consumer acknowledgement if this message requires one.
        if (message.isConsumerAckRequired()) {
            consumerContext.sendConsumerAck(message.getMsgId());
        }
        consumerContext.saveOffsetsAfterProcessed(messageAndMeta.topic(),
                messageAndMeta.partition(), messageAndMeta.offset());
    } catch (Exception e) {
        boolean processed = messageHandler.onProcessError(message);
        if (!processed) {
            consumerContext.processErrorMessage(topicName, message);
        }
        logger.error("received_topic_process_error [" + processorName
                + "]processMessage error,topic:" + topicName, e);
    }
    consumerContext.updateConsumerStats(messageAndMeta.topic(), -1);
}
message = new DefaultMessage(messageAndMeta.key(), (Serializable) _message);
message.setTopicMetadata(messageAndMeta.topic(), messageAndMeta.partition(), messageAndMeta.offset());
consumerContext.updateConsumerStats(messageAndMeta.topic(), 1);
consumerContext.saveOffsetsBeforeProcessed(messageAndMeta.topic(), messageAndMeta.partition(), messageAndMeta.offset());
public void run() {
    ConsumerIterator<byte[], byte[]> it = stream.iterator();
    while (it.hasNext()) {
        try {
            // Call it.next() exactly once per message: each call advances the iterator,
            // so the original chained it.next() calls mixed fields from five different
            // messages into one KafkaMsg.
            MessageAndMetadata<byte[], byte[]> mam = it.next();
            KafkaMsg msg = KafkaMsg.createBuilder()
                    .key(new String(mam.key()))
                    .val(mam.message())
                    .offset(mam.offset())
                    .partition(mam.partition())
                    .topic(mam.topic())
                    .build();
            while (true) { // retry put until it succeeds
                try {
                    queue.put(msg);
                    break;
                } catch (InterruptedException e) {
                    logger.error(e.getMessage(), e);
                    try {
                        Thread.sleep(SLEEPING_INTERVAL);
                    } catch (InterruptedException ee) {
                        logger.error(ee.getMessage(), ee);
                    }
                }
            }
        } catch (Throwable e) {
            logger.error(e.getMessage(), e);
        }
    }
}
new Thread(new Runnable() {
    @Override
    public void run() {
        ConsumerConfig cfg = new ConsumerConfig(props);
        consumer = Consumer.createJavaConsumerConnector(cfg);
        TopicFilter topicFilter = new Whitelist(topic);
        List<KafkaStream<byte[], byte[]>> partitions = consumer.createMessageStreamsByFilter(topicFilter);
        while (!Thread.interrupted()) {
            for (KafkaStream<byte[], byte[]> partition : partitions) {
                ConsumerIterator<byte[], byte[]> it = partition.iterator();
                while (it.hasNext()) {
                    MessageAndMetadata<byte[], byte[]> msg = it.next();
                    onMessage(msg.topic(), new String(msg.message()));
                }
            }
        }
    }
}, "consumer-" + topic);
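// A minimal, self-contained sketch of the old high-level consumer setup that every
// snippet above assumes (Kafka 0.8.x-era API, removed in Kafka 2.0). The ZooKeeper
// address, group id, and topic name are placeholder assumptions.
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

public class LegacyConsumerExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("zookeeper.connect", "localhost:2181"); // assumption: local ZooKeeper
        props.put("group.id", "example-group");
        ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

        // Request one stream for one topic; createMessageStreams maps topic -> stream list.
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                connector.createMessageStreams(Collections.singletonMap("example-topic", 1));
        ConsumerIterator<byte[], byte[]> it = streams.get("example-topic").get(0).iterator();

        // hasNext() blocks until a message arrives (or consumer.timeout.ms, if set, expires).
        while (it.hasNext()) {
            MessageAndMetadata<byte[], byte[]> mam = it.next();
            System.out.printf("%s-%d@%d: %s%n", mam.topic(), mam.partition(), mam.offset(),
                    new String(mam.message(), StandardCharsets.UTF_8));
        }
        connector.shutdown();
    }
}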