@Test public void test() throws IOException { // Test that the scoped config overrides the generic config Pusher pusher = new KafkaKeyValueProducerPusher<byte[], byte[]>("localhost:dummy", TOPIC, Optional.of(ConfigFactory.parseMap(ImmutableMap.of( ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + this.kafkaTestHelper.getKafkaServerPort())))); String msg1 = "msg1"; String msg2 = "msg2"; pusher.pushMessages(Lists.newArrayList(Pair.of("key1", msg1.getBytes()), Pair.of("key2", msg2.getBytes()))); try { Thread.sleep(1000); } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } ConsumerIterator<byte[], byte[]> iterator = this.kafkaTestHelper.getIteratorForTopic(TOPIC); assert(iterator.hasNext()); MessageAndMetadata<byte[], byte[]> messageAndMetadata = iterator.next(); Assert.assertEquals(new String(messageAndMetadata.key()), "key1"); Assert.assertEquals(new String(messageAndMetadata.message()), msg1); assert(iterator.hasNext()); messageAndMetadata = iterator.next(); Assert.assertEquals(new String(messageAndMetadata.key()), "key2"); Assert.assertEquals(new String(messageAndMetadata.message()), msg2); pusher.close(); }
// Raw key bytes of the consumed record; may be null for unkeyed messages
// (legacy Kafka consumer contract) — TODO confirm caller handles null.
final byte[] key = mam.key();
@Override
public Message next() {
  // Pull the next record from the legacy consumer iterator, translating its
  // timeout into the wrapper exception callers of this API expect.
  final MessageAndMetadata<byte[], byte[]> record;
  try {
    record = mIterator.next();
  } catch (ConsumerTimeoutException e) {
    throw new LegacyConsumerTimeoutException(e);
  }
  // Timestamp is only populated when the config opts into Kafka timestamps;
  // otherwise it stays at the 0L sentinel.
  final long timestamp = mConfig.useKafkaTimestamp()
      ? mKafkaMessageTimestampFactory.getKafkaMessageTimestamp().getTimestamp(record)
      : 0L;
  return new Message(record.topic(), record.partition(), record.offset(),
      record.key(), record.message(), timestamp);
}
// Returns the record's raw key bytes straight from the wrapped record.
// (Trailing @Override belongs to the next method, outside this view.)
@Override public byte[] key() { return rec.key(); } @Override
// Returns the record's raw key bytes straight from the wrapped record.
// (Trailing @Override belongs to the next method, outside this view.)
@Override public byte[] key() { return rec.key(); } @Override
// Decodes the record's key bytes as UTF-8, propagating null for unkeyed
// records. (Trailing @Override belongs to the next method, outside this view.)
@Override public String key() { byte[] key = rec.key(); if (key == null) return null; return new String(key, StandardCharsets.UTF_8); } @Override
// Decodes the record's key bytes as UTF-8, propagating null for unkeyed
// records. (Trailing @Override belongs to the next method, outside this view.)
@Override public String key() { byte[] key = rec.key(); if (key == null) return null; return new String(key, StandardCharsets.UTF_8); } @Override
public Message(MessageAndMetadata<byte[], byte[]> message) { this.topic = message.topic(); this.key = message.key() != null ? new String(message.key(), Charset.forName("utf-8")) : null; this.message = new String(message.message(), Charset.forName("utf-8")); this.partition = message.partition(); this.offset = message.offset(); } }
@Override
public KeyedMessage<String, String> extract(MessageAndMetadata<String, String> messageAndMetadata) {
  // Re-wrap the consumed record as a producer message for topicName.
  // FIX: parameterize the constructor — the original raw `new KeyedMessage(...)`
  // compiles with an unchecked-conversion warning; behavior is unchanged.
  return new KeyedMessage<String, String>(topicName, messageAndMetadata.key(), messageAndMetadata.message());
}
});
@Override
public KeyedMessage<String, String> extract(MessageAndMetadata<String, String> messageAndMetadata) {
  // Re-wrap the consumed record as a producer message for topicName.
  // FIX: parameterize the constructor — the original raw `new KeyedMessage(...)`
  // compiles with an unchecked-conversion warning; behavior is unchanged.
  return new KeyedMessage<String, String>(topicName, messageAndMetadata.key(), messageAndMetadata.message());
}
});
/**
 * Mirrors a consumed record onto the prefixed destination topic, preserving
 * its partition, key, and payload.
 *
 * @param record the consumed source record.
 * @return a single-element list holding the mirrored producer record.
 */
public List<ProducerRecord<byte[], byte[]>> handle(MessageAndMetadata<byte[], byte[]> record) {
  final String destinationTopic = topicPrefix + "." + record.topic();
  final ProducerRecord<byte[], byte[]> mirrored = new ProducerRecord<byte[], byte[]>(
      destinationTopic, record.partition(), record.key(), record.message());
  return Collections.singletonList(mirrored);
}
/**
 * Renders a record as {@code "key=payload"}, decoding both halves via
 * {@code asUtf8}.
 *
 * @param mamd the consumed record to render.
 * @return the {@code key=payload} string form.
 */
public static String keyAndMessage(MessageAndMetadata<byte[], byte[]> mamd) {
  final String key = asUtf8(mamd.key());
  final String payload = asUtf8(mamd.message());
  return key + "=" + payload;
}
// Format arguments for a diagnostic dump of the consumed record; the format
// call itself starts on a line outside this view. NOTE(review): key/message
// are decoded with the platform default charset — presumably UTF-8; confirm.
"Topic:%s, GroupID:%s, Consumer ID:%s, PartitionID:%s, Offset:%s, Message Key:%s, Message Payload: %s", messageAndMetadata.topic(), groupid, consumerid, messageAndMetadata.partition(), messageAndMetadata.offset(), new String(messageAndMetadata.key()), new String(messageAndMetadata.message())); System.out.println(message);
// Fragment: pulls the next record and casts key/value; the enclosing loop and
// the matching catch block are outside this view. NOTE(review): the raw
// MessageAndMetadata forces the unchecked casts below — presumably an
// Avro-deserializing consumer; confirm against the stream's decoder setup.
MessageAndMetadata messageAndMetadata = it.next(); try { String key = (String) messageAndMetadata.key(); IndexedRecord value = (IndexedRecord) messageAndMetadata.message();
/** * Processes a {@link Iterable} by iteratively processing each message. * * @param stream the stream of messages to process. * @param topic the topic the {@code stream} belongs to. * * @see StreamProcessor#process(Iterable, String) */ public void process(final Iterable<MessageAndMetadata<K, V>> stream, final String topic) { for (final MessageAndMetadata<K, V> entry : stream) { // final Timer.Context context = processed.time(); process(entry.key(), entry.message(), topic, entry.partition(), entry.offset()); // context.stop(); } } }
// Fragment: extracts key and payload, then guards against either being null;
// the guarded branch's body closes outside this view.
final String key = messageAndMetadata.key(); final String message = messageAndMetadata.message(); if (key == null || message == null) {
public void run() { ConsumerIterator<byte[], byte[]> it = stream.iterator(); while (it.hasNext()) { try { KafkaMsg msg = KafkaMsg.createBuilder() .key(new String(it.next().key())) .val(it.next().message()) .offset(it.next().offset()) .partition(it.next().partition()) .topic(it.next().topic()) .build(); while (true) {//retry put try { queue.put(msg); break; } catch (InterruptedException e) { logger.error(e.getMessage(), e); try { Thread.sleep(SLEEPING_INTERVAL); } catch (InterruptedException ee) { logger.error(e.getMessage(), e); } } } } catch (Throwable e) { logger.error(e.getMessage(), e); } } } }
// Raw key bytes of the consumed record; may be null for unkeyed messages
// (legacy Kafka consumer contract) — TODO confirm caller handles null.
final byte[] key = mam.key();
// Fragment: tries a direct cast to DefaultMessage and, on failure, wraps the
// payload in a new DefaultMessage keyed by the record's key. The opening try
// is outside this view.
message = (DefaultMessage) _message; } catch (ClassCastException e) { message = new DefaultMessage(messageAndMeta.key(),(Serializable) _message);
/**
 * Drains the consumer stream up to the configured message limit (0 or less
 * means unlimited), forwarding each record to messageReceived, then commits
 * offsets and marks the step's output done.
 *
 * @return always null (Callable contract).
 * @throws KettleException if the consumer times out while stop-on-empty is
 *         disabled, i.e. the timeout is unexpected.
 */
public Object call() throws KettleException {
  try {
    long limit;
    String strData = meta.getLimit();
    limit = getLimit(strData);
    if (limit > 0) {
      step.logDebug("Collecting up to " + limit + " messages");
    } else {
      step.logDebug("Collecting unlimited messages");
    }
    // Stop on: stream exhausted, user cancel, or limit reached (when set).
    while (data.streamIterator.hasNext() && !data.canceled && (limit <= 0 || data.processed < limit)) {
      MessageAndMetadata<byte[], byte[]> messageAndMetadata = data.streamIterator.next();
      messageReceived(messageAndMetadata.key(), messageAndMetadata.message());
      ++data.processed;
    }
  } catch (ConsumerTimeoutException cte) {
    step.logDebug("Received a consumer timeout after " + data.processed + " messages");
    if (!meta.isStopOnEmptyTopic()) {
      // Because we're not set to stop on empty, this is an abnormal
      // timeout
      throw new KettleException("Unexpected consumer timeout!", cte);
    }
  }
  // Notify that all messages were read successfully
  // NOTE(review): offsets are also committed after a tolerated timeout
  // (stop-on-empty enabled) — presumably intentional; confirm.
  data.consumer.commitOffsets();
  step.setOutputDone();
  return null;
}