protected final JavaInputDStream<ConsumerRecord<K,M>> buildInputDStream( JavaStreamingContext streamingContext) { Preconditions.checkArgument( KafkaUtils.topicExists(inputTopicLockMaster, inputTopic), "Topic %s does not exist; did you create it?", inputTopic); if (updateTopic != null && updateTopicLockMaster != null) { Preconditions.checkArgument( KafkaUtils.topicExists(updateTopicLockMaster, updateTopic), "Topic %s does not exist; did you create it?", updateTopic); } String groupID = getGroupID(); Map<String,Object> kafkaParams = new HashMap<>(); kafkaParams.put("group.id", groupID); // Don't re-consume old messages from input by default kafkaParams.put("auto.offset.reset", "latest"); // Ignored by Kafka 0.10 Spark integration kafkaParams.put("bootstrap.servers", inputBroker); kafkaParams.put("key.deserializer", keyDecoderClass.getName()); kafkaParams.put("value.deserializer", messageDecoderClass.getName()); LocationStrategy locationStrategy = LocationStrategies.PreferConsistent(); ConsumerStrategy<K,M> consumerStrategy = ConsumerStrategies.Subscribe( Collections.singleton(inputTopic), kafkaParams, Collections.emptyMap()); return org.apache.spark.streaming.kafka010.KafkaUtils.createDirectStream( streamingContext, locationStrategy, consumerStrategy); }
// Fail fast if the configured input topic is missing before doing any further setup.
Preconditions.checkArgument(KafkaUtils.topicExists(inputTopicLockMaster, inputTopic), "Topic %s does not exist; did you create it?", inputTopic);
// NOTE(review): unlike buildInputDStream, there is no null guard on updateTopic /
// updateTopicLockMaster here — presumably this code path requires the update topic
// to be configured; confirm against the enclosing method's contract.
Preconditions.checkArgument(KafkaUtils.topicExists(updateTopicLockMaster, updateTopic), "Topic %s does not exist; did you create it?", updateTopic);
// Producer used to write messages into the input topic.
inputProducer = new TopicProducerImpl<>(inputTopicBroker, inputTopic);
protected final JavaInputDStream<ConsumerRecord<K,M>> buildInputDStream( JavaStreamingContext streamingContext) { Preconditions.checkArgument( KafkaUtils.topicExists(inputTopicLockMaster, inputTopic), "Topic %s does not exist; did you create it?", inputTopic); if (updateTopic != null && updateTopicLockMaster != null) { Preconditions.checkArgument( KafkaUtils.topicExists(updateTopicLockMaster, updateTopic), "Topic %s does not exist; did you create it?", updateTopic); } String groupID = getGroupID(); Map<String,Object> kafkaParams = new HashMap<>(); kafkaParams.put("group.id", groupID); // Don't re-consume old messages from input by default kafkaParams.put("auto.offset.reset", "latest"); // Ignored by Kafka 0.10 Spark integration kafkaParams.put("bootstrap.servers", inputBroker); kafkaParams.put("key.deserializer", keyDecoderClass.getName()); kafkaParams.put("value.deserializer", messageDecoderClass.getName()); LocationStrategy locationStrategy = LocationStrategies.PreferConsistent(); ConsumerStrategy<K,M> consumerStrategy = ConsumerStrategies.Subscribe( Collections.singleton(inputTopic), kafkaParams, Collections.emptyMap()); return org.apache.spark.streaming.kafka010.KafkaUtils.createDirectStream( streamingContext, locationStrategy, consumerStrategy); }