/**
 * Opens the Kafka message streams for {@code this.topic}.
 *
 * @return the streams Kafka allocated for the topic — {@code this.numThreads} of them,
 *         one per consumer thread
 */
protected List<KafkaStream<byte[], byte[]>> createStreams() {
  // Request `numThreads` parallel streams for our single topic.
  Map<String, Integer> streamsPerTopic = Maps.newHashMap();
  streamsPerTopic.put(this.topic, this.numThreads);
  Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
      this.consumer.createMessageStreams(streamsPerTopic);
  return streamsByTopic.get(this.topic);
}
/**
 * Builds a high-level (ZooKeeper-based) Kafka consumer for the topic named in
 * {@code props} and positions {@code iterator} at the head of the topic's single
 * stream, deserializing with a LiAvro deserializer.
 *
 * NOTE(review): every instance uses a fresh group.id ("gobblin-tool-" + nanoTime)
 * with auto.offset.reset=smallest, so each run re-reads the topic from the start.
 * NOTE(review): the {@code checkpoint} parameter is not used in this visible body —
 * confirm whether it is consumed elsewhere or dead.
 */
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint) {
  Config config = ConfigFactory.parseProperties(props);
  topic = config.getString("topic");
  String zkConnect = config.getString("zookeeper.connect");
  // Schema registry backs the Avro deserializer below.
  schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
  deserializer = new LiAvroDeserializer(schemaRegistry);
  /** TODO: Make Confluent schema registry integration configurable
   * HashMap<String, String> avroSerDeConfig = new HashMap<>();
   * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
   * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
   * deserializer.configure(avroSerDeConfig, false);
   * **/
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnect);
  // Unique group per instance: never shares committed offsets with other runs.
  consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  // Start from the earliest available offset when no committed offset exists.
  consumeProps.put("auto.offset.reset", "smallest");
  // Offsets are managed manually; Kafka must not auto-commit.
  consumeProps.put("auto.commit.enable", "false");
  //consumeProps.put("consumer.timeout.ms", "10000");
  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  // One stream for the one topic; take its iterator for sequential consumption.
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(ImmutableMap.of(topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
  stream = streams.get(0);
  iterator = stream.iterator();
}
/**
 * Wires up a high-level Kafka consumer for {@code topic} and parks an iterator
 * on its single stream. A nanoTime-suffixed group id keeps each suite instance
 * from sharing offsets with any other.
 *
 * NOTE(review): "_consumer.timeout.ms" (leading underscore) is not a Kafka
 * property name, so no consumer timeout is actually applied — confirm whether
 * this deliberately disables the timeout or is a typo for "consumer.timeout.ms".
 */
KafkaConsumerSuite(String zkConnectString, String topic) {
  _topic = topic;
  Properties props = new Properties();
  props.put("zookeeper.connect", zkConnectString);
  props.put("group.id", _topic + "-" + System.nanoTime());
  props.put("zookeeper.session.timeout.ms", "10000");
  props.put("zookeeper.sync.time.ms", "10000");
  props.put("auto.commit.interval.ms", "10000");
  props.put("_consumer.timeout.ms", "10000");
  _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
  Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
      _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
  _stream = streamsByTopic.get(this._topic).get(0);
  _iterator = _stream.iterator();
}
/**
 * Creates the test consumer for {@code topic}: connects via ZooKeeper, asks
 * Kafka for exactly one stream, and holds its iterator for the suite to drain.
 *
 * NOTE(review): "_consumer.timeout.ms" is not a recognized Kafka property
 * (leading underscore), so the 10s consumer timeout is effectively off —
 * verify intent versus "consumer.timeout.ms".
 */
KafkaConsumerSuite(String zkConnectString, String topic) {
  _topic = topic;
  Properties consumerProps = new Properties();
  consumerProps.put("zookeeper.connect", zkConnectString);
  // Fresh group id per instance so offsets are never shared between runs.
  consumerProps.put("group.id", _topic + "-" + System.nanoTime());
  consumerProps.put("zookeeper.session.timeout.ms", "10000");
  consumerProps.put("zookeeper.sync.time.ms", "10000");
  consumerProps.put("auto.commit.interval.ms", "10000");
  consumerProps.put("_consumer.timeout.ms", "10000");
  _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));
  Map<String, List<KafkaStream<byte[], byte[]>>> perTopicStreams =
      _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
  List<KafkaStream<byte[], byte[]>> topicStreams = perTopicStreams.get(this._topic);
  _stream = topicStreams.get(0);
  _iterator = _stream.iterator();
}
// Fragment (the enclosing statement starts outside this chunk — its assignment
// target is not visible): requests one stream for the configured topic and
// takes an iterator over it.
consumer.createMessageStreams(kafkaHighLevelStreamConfig.getTopicMap(1)). get(kafkaHighLevelStreamConfig.getKafkaTopicName()).get(0).iterator();
// Fragment (statement continues past this chunk): opening message streams for a
// single feed; the stream count and closing arguments are not visible here.
final Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams( ImmutableMap.of( feed,
/**
 * Starts an embedded Kafka broker, creates {@code topic} (1 partition,
 * replication factor 1), waits for its metadata to propagate, and attaches a
 * high-level consumer iterator to the topic's single stream.
 *
 * Fix: dropped the redundant {@code RuntimeException} from the throws clause —
 * unchecked exceptions need not (and should not) be declared.
 *
 * @param topic the topic to create and consume
 * @throws InterruptedException if interrupted while the broker starts or
 *         metadata propagation is awaited
 */
public KafkaTestBase(String topic) throws InterruptedException {
  startServer();
  this.topic = topic;
  AdminUtils.createTopic(zkClient, topic, 1, 1, new Properties());
  List<KafkaServer> servers = new ArrayList<>();
  servers.add(kafkaServer);
  // Block (up to 5s) until partition 0's metadata is visible on the broker.
  TestUtils.waitUntilMetadataIsPropagated(
      scala.collection.JavaConversions.asScalaBuffer(servers), topic, 0, 5000);
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnect);
  consumeProps.put("group.id", "testConsumer");
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  // Make iterator.hasNext() fail with ConsumerTimeoutException after 10s
  // instead of blocking a test forever.
  consumeProps.put("consumer.timeout.ms", "10000");
  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  Map<String, Integer> topicCountMap = new HashMap<>();
  topicCountMap.put(this.topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(topicCountMap);
  stream = consumerMap.get(this.topic).get(0);
  iterator = stream.iterator();
}
// Fragment (the enclosing method is not visible in this chunk): fans the
// topic-count map out into Kafka streams, then pulls the stream list for a
// single topic.
final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
/**
 * Asks Kafka for this consumer's message streams on {@code this.topic}.
 *
 * @return the topic's stream list, sized by {@code this.numThreads}
 */
protected List<KafkaStream<byte[], byte[]>> createStreams() {
  Map<String, Integer> topicThreadCounts = Maps.newHashMap();
  topicThreadCounts.put(this.topic, this.numThreads);
  // Kafka keys the returned streams by topic name.
  return this.consumer.createMessageStreams(topicThreadCounts).get(this.topic);
}
/**
 * Creates {@code this.numThreads} Kafka streams for {@code this.topic} and
 * returns them for the worker threads to consume.
 */
protected List<KafkaStream<byte[], byte[]>> createStreams() {
  // Single-entry map: our one topic, read by numThreads streams.
  Map<String, Integer> requestedCounts = Maps.newHashMap();
  requestedCounts.put(this.topic, this.numThreads);
  Map<String, List<KafkaStream<byte[], byte[]>>> allocated =
      this.consumer.createMessageStreams(requestedCounts);
  return allocated.get(this.topic);
}
/**
 * Starts consumption: opens {@code THREAD_COUNT} streams on {@code topic} and
 * hands each stream to a ConsumerTh worker on a fixed-size thread pool.
 */
public void start() {
  Map<String, Integer> streamCounts = new HashMap<>();
  streamCounts.put(topic, THREAD_COUNT);
  List<KafkaStream<byte[], byte[]>> topicStreams =
      consumer.createMessageStreams(streamCounts).get(topic);
  executor = Executors.newFixedThreadPool(THREAD_COUNT);
  // The list index doubles as the worker's thread number.
  for (int i = 0; i < topicStreams.size(); i++) {
    executor.submit(new ConsumerTh(topicStreams.get(i), i));
  }
}
/**
 * Consumer loop: reads every message on {@code topic} from its single stream
 * and prints the decoded payload to stdout until the stream ends.
 *
 * Fix: replaced the deprecated {@code new Integer(int)} boxing constructor with
 * {@code Integer.valueOf}, and braced the while body.
 */
public void run() {
  Map<String, Integer> topicCountMap = new HashMap<>();
  topicCountMap.put(topic, Integer.valueOf(1));
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
  ConsumerIterator<byte[], byte[]> it = stream.iterator();
  while (it.hasNext()) {
    // NOTE(review): new String(byte[]) uses the platform default charset;
    // consider specifying UTF-8 explicitly if message encoding is known.
    System.out.println(new String(it.next().message()));
  }
}
/**
 * Connects the high-level consumer, expects Kafka to hand back exactly one
 * stream for {@code topic}, binds its iterator, and starts taking traffic.
 *
 * @throws Exception if the connector cannot be created; a RuntimeException is
 *         raised when the topic does not yield exactly one stream
 */
@Override
public void start() throws Exception {
  connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));
  Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
      connector.createMessageStreams(ImmutableMap.of(topic, 1));
  List<KafkaStream<byte[], byte[]>> topicStreams = streamsByTopic.get(topic);
  // Guard: anything other than exactly one stream means the topic is unusable.
  boolean exactlyOne = topicStreams != null && topicStreams.size() == 1;
  if (!exactlyOne) {
    throw new RuntimeException(topic + " is not valid");
  }
  stream = topicStreams.get(0).iterator();
  startTakingTraffic();
}
/**
 * Connects the consumer and binds {@code stream} to the single message stream
 * of {@code topic}, with keys and values decoded as Strings.
 *
 * Fix: replaced the deprecated {@code new Integer(int)} boxing constructor with
 * {@code Integer.valueOf}.
 *
 * @param topic the Kafka topic to attach to
 */
private void start(String topic) {
  consumer = Consumer.createJavaConsumerConnector(config);
  // We tell Kafka how many threads will read each topic: one topic, one thread.
  Map<String, Integer> topicCountMap = new HashMap<>();
  topicCountMap.put(topic, Integer.valueOf(1));
  // StringDecoder converts message bytes to Strings; "deserializer.encoding"
  // could override the charset, and the UTF-8 default works for us.
  StringDecoder decoder = new StringDecoder(new VerifiableProperties());
  // Kafka returns a list of streams per topic; with one topic and one thread
  // the single stream sits at index 0.
  stream = consumer.createMessageStreams(topicCountMap, decoder, decoder).get(topic).get(0);
}
/**
 * {@inheritDoc}
 *
 * Opens the consumer connector and positions {@code consumerIterator} at the
 * head of the topic's first stream.
 */
@Override
public void initialize() throws StreamingException {
  ConsumerConfig config = new ConsumerConfig(kafkaProperties);
  consumerConnector = Consumer.createJavaConsumerConnector(config);
  Map<String, Integer> countsByTopic = Maps.newHashMap();
  countsByTopic.put(topic, TOPIC_COUNT);
  Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
      consumerConnector.createMessageStreams(countsByTopic);
  consumerIterator = streamsByTopic.get(topic).get(0).iterator();
}
/**
 * Blocks on the calling thread, printing every payload from the topic's single
 * stream to stdout. Note the payload is a raw byte[], so println emits the
 * array's toString form, not decoded text — behavior kept as-is.
 */
public void start() {
  Map<String, Integer> countsByTopic = new HashMap<String, Integer>();
  countsByTopic.put(topics, 1);
  KafkaStream<byte[], byte[]> singleStream =
      consumer.createMessageStreams(countsByTopic).get(topics).get(0);
  ConsumerIterator<byte[], byte[]> messages = singleStream.iterator();
  while (messages.hasNext()) {
    System.out.println(messages.next().message());
  }
}
/**
 * Opens one stream per subscribed topic and schedules a KafkaConsumerTask for
 * each stream on the executor service, forwarding to the session's endpoint.
 */
public void start() {
  LOG.debug("Starting consumer for {}", session.getId());
  this.connector = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);
  // One reader thread per topic.
  Map<String, Integer> countsByTopic = new HashMap<>();
  for (String topic : topics) {
    countsByTopic.put(topic, 1);
  }
  Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
      connector.createMessageStreams(countsByTopic);
  // Iterate the original topic list (not the map keys) to keep submission order.
  for (String topic : topics) {
    LOG.debug("Adding stream for session {}, topic {}", session.getId(), topic);
    for (KafkaStream<byte[], byte[]> stream : streamsByTopic.get(topic)) {
      executorService.submit(new KafkaConsumerTask(stream, remoteEndpoint, transform, session));
    }
  }
}
/**
 * Consumer loop: reads every message on {@code topic} and dispatches its
 * payload to {@code receive}. Any failure is logged with group:topic context
 * and ends the loop.
 *
 * Fix: replaced the deprecated {@code new Integer(int)} boxing constructor with
 * {@code Integer.valueOf}.
 */
@Override
public void run() {
  try {
    logger.info("[KafkaConsumer][{}][run] ", groupId + ":" + topic);
    Map<String, Integer> topicCountMap = new HashMap<>();
    topicCountMap.put(topic, Integer.valueOf(1));
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
        consumer.createMessageStreams(topicCountMap);
    KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
    ConsumerIterator<byte[], byte[]> it = stream.iterator();
    while (it.hasNext()) {
      receive(it.next().message());
    }
  } catch (Exception e) {
    // Broad catch is acceptable at this Runnable boundary; the cause is preserved.
    logger.error("[KafkaConsumer][{}][run] " + e.getMessage(), groupId + ":" + topic, e);
  }
}
/**
 * Opens a consumer connector for the river's topic and keeps the resulting
 * streams (one per consumer thread) for the reader workers.
 *
 * @param riverConfig source of the topic name, index name and consumer settings
 */
public KafkaConsumer(final RiverConfig riverConfig) {
  consumerConnector =
      kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig(riverConfig));
  Map<String, Integer> threadsPerTopic = new HashMap<String, Integer>();
  threadsPerTopic.put(riverConfig.getTopic(), AMOUNT_OF_THREADS_PER_CONSUMER);
  streams = consumerConnector.createMessageStreams(threadsPerTopic).get(riverConfig.getTopic());
  logger.debug("Index: {}: Started kafka consumer for topic: {} with {} partitions in it.",
      riverConfig.getIndexName(), riverConfig.getTopic(), streams.size());
}
/**
 * Starts the listener: builds the connector, opens the configured streams for
 * every topic in the topic-count map, and submits one ConsumerListenTask per
 * stream to the listener executor.
 */
public void start() {
  ConsumerConnector consumer = createJavaConsumerConnector(getConsumerConfig());
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(getTopicCountMap());
  for(Map.Entry<String, List<KafkaStream<byte[], byte[]>>> entry : consumerMap.entrySet()) {
    List<KafkaStream<byte[], byte[]>> streams = entry.getValue();
    // Thread number is per-topic: it restarts at 0 for each map entry.
    int threadNumber = 0;
    for (final KafkaStream<byte[], byte[]> stream : streams) {
      getListenerExecService().submit( new ConsumerListenTask(stream, threadNumber, getHandlerTaskFactory(entry.getKey()), getHandlerExecService()));
      threadNumber++;
    }
  }
}
// Closing brace of the enclosing class (its declaration is outside this chunk).
}