/**
 * Builds the Kafka {@link ConsumerConfig} for the old (ZooKeeper-based) consumer API.
 * Default properties are applied first and then overlaid with the consumer-specific
 * properties, so the latter win on key collisions; group id and ZooKeeper connect
 * string are always set last from this object's fields.
 *
 * @return a ConsumerConfig assembled from defaults, overrides, group id and ZK URL.
 */
public ConsumerConfig getKafkaConsumerConfig() {
  Properties props = new Properties();
  // putAll replaces the original manual keySet()/get() copy loops; insertion order
  // preserves the override semantics (consumer-specific entries beat defaults).
  props.putAll(defaultProps);
  props.putAll(_kafkaConsumerProperties);
  props.put("group.id", _groupId);
  props.put("zookeeper.connect", _zkBrokerUrl);
  return new ConsumerConfig(props);
}
/**
 * Converts the Typesafe {@link Config} into consumer properties and wraps them in a
 * {@link ConsumerConfig}, supplying the default group id when none was configured.
 *
 * @param config source configuration to translate into Kafka consumer properties.
 * @return the resulting ConsumerConfig.
 */
protected ConsumerConfig createConsumerConfig(Config config) {
  Properties consumerProps = ConfigUtils.configToProperties(config);
  boolean groupIdConfigured = consumerProps.containsKey(GROUP_ID_KEY);
  if (!groupIdConfigured) {
    // Fall back to the shared default so the consumer always has a group identity.
    consumerProps.setProperty(GROUP_ID_KEY, DEFAULT_GROUP_ID);
  }
  return new ConsumerConfig(consumerProps);
}
/**
 * Builds a simple old-API (ZooKeeper-based) Kafka consumer for a single topic.
 * Reads "topic" and "zookeeper.connect" from the supplied properties and wires up a
 * LiAvroDeserializer backed by the configured schema registry.
 * NOTE(review): the 'checkpoint' parameter is not used in this visible snippet —
 * confirm whether it is consumed elsewhere or dead.
 */
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint) {
  Config config = ConfigFactory.parseProperties(props);
  topic = config.getString("topic");
  String zkConnect = config.getString("zookeeper.connect");
  schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
  deserializer = new LiAvroDeserializer(schemaRegistry);
  /** TODO: Make Confluent schema registry integration configurable
   * HashMap<String, String> avroSerDeConfig = new HashMap<>();
   * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
   * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
   * deserializer.configure(avroSerDeConfig, false);
   * **/
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnect);
  // Unique group id per invocation so repeated tool runs don't share committed offsets.
  consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  // "smallest" = start from the earliest available offset when no committed offset exists.
  consumeProps.put("auto.offset.reset", "smallest");
  // Auto-commit disabled: offsets are presumably managed via the checkpoint — confirm.
  consumeProps.put("auto.commit.enable", "false");
  //consumeProps.put("consumer.timeout.ms", "10000");
  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  // Request exactly one stream for the topic and keep its iterator for sequential reads.
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(ImmutableMap.of(topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
  stream = streams.get(0);
  iterator = stream.iterator();
}
/**
 * Test helper: builds an old-API consumer connected to the given ZooKeeper and attaches
 * a single message stream for {@code topic}.
 */
KafkaConsumerSuite(String zkConnectString, String topic) {
  _topic = topic;
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnectString);
  // Unique group id per instantiation so each suite consumes independently.
  consumeProps.put("group.id", _topic+"-"+System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  // NOTE(review): the leading underscore makes this key unrecognized by Kafka, which
  // effectively disables the consumer timeout (the consumer blocks indefinitely).
  // This looks intentional for tests, but confirm it isn't a typo for "consumer.timeout.ms".
  consumeProps.put("_consumer.timeout.ms", "10000");
  _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  // One stream for the topic; keep its iterator for sequential message reads.
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this._topic);
  _stream = streams.get(0);
  _iterator = _stream.iterator();
}
// Fragment of a larger method: wraps the assembled 'props' in an old-API ConsumerConfig
// and creates the high-level consumer connector from it.
final ConsumerConfig consumerConfig = new ConsumerConfig(props); cc = Consumer.createJavaConsumerConnector(consumerConfig);
/**
 * Test helper: builds an old-API consumer connected to the given ZooKeeper and attaches
 * a single message stream for {@code topic}.
 */
KafkaConsumerSuite(String zkConnectString, String topic) {
  _topic = topic;
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnectString);
  // Unique group id per instantiation so each suite consumes independently.
  consumeProps.put("group.id", _topic+"-"+System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  // NOTE(review): the leading underscore makes this key unrecognized by Kafka, which
  // effectively disables the consumer timeout (the consumer blocks indefinitely).
  // This looks intentional for tests, but confirm it isn't a typo for "consumer.timeout.ms".
  consumeProps.put("_consumer.timeout.ms", "10000");
  _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  // One stream for the topic; keep its iterator for sequential message reads.
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this._topic);
  _stream = streams.get(0);
  _iterator = _stream.iterator();
}
// Fragment: returns the assembled properties wrapped in an old-API ConsumerConfig.
return new ConsumerConfig(props);
// Fragment: creates a high-level consumer connector from 'consumerProps'.
final ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));
/**
 * Test fixture: boots the embedded Kafka broker, creates {@code topic} with one
 * partition and replication factor one, waits for metadata propagation, then attaches
 * a single high-level consumer stream to it.
 *
 * @param topic topic to create and consume from.
 * @throws InterruptedException if metadata propagation is interrupted.
 */
public KafkaTestBase(String topic) throws InterruptedException, RuntimeException {
  startServer();
  this.topic = topic;
  AdminUtils.createTopic(zkClient, topic, 1, 1, new Properties());
  List<KafkaServer> brokers = new ArrayList<>();
  brokers.add(kafkaServer);
  // Block (up to 5s) until the new topic's metadata reaches the broker.
  TestUtils.waitUntilMetadataIsPropagated(scala.collection.JavaConversions.asScalaBuffer(brokers), topic, 0, 5000);
  Properties props = new Properties();
  props.put("zookeeper.connect", zkConnect);
  props.put("group.id", "testConsumer");
  props.put("zookeeper.session.timeout.ms", "10000");
  props.put("zookeeper.sync.time.ms", "10000");
  props.put("auto.commit.interval.ms", "10000");
  // Don't let test reads block forever if no messages arrive.
  props.put("consumer.timeout.ms", "10000");
  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
  // Request exactly one stream for this topic and keep its iterator.
  Map<String, Integer> streamCounts = new HashMap<>();
  streamCounts.put(this.topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic = consumer.createMessageStreams(streamCounts);
  stream = streamsByTopic.get(this.topic).get(0);
  iterator = stream.iterator();
}
// Fragment (NiFi processor): trailing arguments of a call that begins above — the
// ZooKeeper connect string and topic are resolved from evaluated processor properties —
// followed by construction of the old-API consumer connector from 'props'.
context.getProperty(ZOOKEEPER_CONNECTION_STRING).evaluateAttributeExpressions().getValue(), context.getProperty(TOPIC).evaluateAttributeExpressions().getValue()); final ConsumerConfig consumerConfig = new ConsumerConfig(props); consumer = Consumer.createJavaConsumerConnector(consumerConfig);
/** * Create a consumer configuration with all the configured static & runtime * properties. * * @return ConsumerConfig object. */ private ConsumerConfig createConsumerConfig() { Properties props = new Properties(); props.put("zookeeper.connect", AuditConfig.INGESTER_ZK_CONNECT); props.put("group.id", kafkaIngester.getConsumerGroupId()); props.put("zookeeper.session.timeout.ms", AuditConfig.INGESTER_ZK_SESSION_TIMEOUT_MS); props.put("zookeeper.sync.time.ms", AuditConfig.INGESTER_ZK_SYNC_TIME_MS); props.put("auto.offset.reset", AuditConfig.INGESTER_START_FROM_TAIL ? "largest" : AuditConfig.INGESTER_AUTO_OFFSET_RESET); // Offsets are managed by IAuditReporter that puts offsets in DB along with audit msg props.put("auto.commit.enable", AuditConfig.INGESTER_AUTO_COMMIT_ENABLE); props.put("auto.commit.interval.ms", AuditConfig.INGESTER_AUTO_COMMIT_INTERVAL_MS); return new ConsumerConfig(props); } }
private void configure(String zkUrl, String groupId) { kafkaProps.put("zookeeper.connect", zkUrl); kafkaProps.put("group.id",groupId); kafkaProps.put("auto.commit.interval.ms","1000"); kafkaProps.put("auto.offset.reset","largest"); // un-comment this if you want to commit offsets manually //kafkaProps.put("auto.commit.enable","false"); // un-comment this if you don't want to wait for data indefinitely kafkaProps.put("consumer.timeout.ms",waitTime); config = new ConsumerConfig(kafkaProps); }
/**
 * Builds a high-level consumer connector from the supplied Kafka properties.
 *
 * @param kafkaProps consumer properties (must satisfy ConsumerConfig's requirements).
 * @return a ready-to-use ConsumerConnector.
 */
public static ConsumerConnector getConsumer(Properties kafkaProps) {
  return Consumer.createJavaConsumerConnector(new ConsumerConfig(kafkaProps));
}
/**
 * Creates a high-level consumer for the given topic(s).
 *
 * @param servers ZooKeeper connect string.
 * @param topics  topic name(s) this consumer will read.
 * @param group   consumer group id.
 */
public LogConsumer(String servers, String topics, String group) {
  this.topics = topics;
  Properties props = new Properties();
  props.put("group.id", group);
  props.put("zookeeper.connect", servers);
  props.put("zookeeper.sync.time.ms", "200");
  props.put("auto.commit.interval.ms", "1000");
  // Fix: the original set this property twice in a row; one assignment suffices.
  props.put("zookeeper.session.timeout.ms", "400");
  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
}
/**
 * Builds a {@code ConsumerConfig} from the default consumer properties, giving the
 * optional callback a chance to customize them first.
 *
 * @param customizer optional mutator applied to the default properties; may be null.
 * @return the resulting ConsumerConfig.
 */
public ConsumerConfig createConsumerConfig(Consumer<Properties> customizer) {
  Properties consumerProps = defaultConsumerProperties();
  if (customizer != null) {
    customizer.accept(consumerProps);
  }
  return new ConsumerConfig(consumerProps);
}
/**
 * Converts the Typesafe {@link Config} into consumer properties and wraps them in a
 * {@link ConsumerConfig}, supplying the default group id when none was configured.
 *
 * @param config source configuration to translate into Kafka consumer properties.
 * @return the resulting ConsumerConfig.
 */
protected ConsumerConfig createConsumerConfig(Config config) {
  Properties consumerProps = ConfigUtils.configToProperties(config);
  // putIfAbsent is equivalent to the containsKey/setProperty pair (Properties never
  // holds null values), and keeps any configured group id untouched.
  consumerProps.putIfAbsent(GROUP_ID_KEY, DEFAULT_GROUP_ID);
  return new ConsumerConfig(consumerProps);
}
@SuppressWarnings("unchecked") public OldApiTopicConsumer(ConsumerContext context) { super(context); try { Class<?> deserializerClass = Class.forName(context.getProperties().getProperty("value.deserializer")); deserializer = (Deserializer<Object>) deserializerClass.newInstance(); } catch (Exception e) {} this.connector = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(context.getProperties())); }
/**
 * Starts consumption: connects the high-level consumer, requests exactly one message
 * stream for the configured topic, and begins taking traffic from its iterator.
 *
 * @throws RuntimeException if Kafka does not return exactly one stream for the topic.
 */
@Override
public void start() throws Exception {
  connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));
  final Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
      connector.createMessageStreams(ImmutableMap.of(topic, 1));
  final List<KafkaStream<byte[], byte[]>> topicStreams = streamsByTopic.get(topic);
  // We asked for exactly one stream; anything else means the topic is unusable.
  final boolean exactlyOneStream = topicStreams != null && topicStreams.size() == 1;
  if (!exactlyOneStream) {
    throw new RuntimeException(topic + " is not valid");
  }
  stream = topicStreams.get(0).iterator();
  startTakingTraffic();
}
/**
 * Wires up a high-level consumer connector from the configured Kafka options.
 *
 * @return a ready-to-use ConsumerConnector.
 */
private ConsumerConnector buildKafkaConsumer() {
  return Consumer.createJavaConsumerConnector(new ConsumerConfig(kafkaOptions.getProperties()));
}
/**
 * Constructor with kafka conf: creates the high-level consumer connector from the
 * supplied configuration and records the configured high-priority topic.
 *
 * @param conf kafka conf holding the consumer properties and the topic name.
 */
public KafkaHighConsumer(KafkaConf conf) {
  // Independent of consumer creation; resolved first for clarity.
  topic = conf.getProp(KafkaConf.HIGH_TOPIC);
  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(conf.getProps()));
}