public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint) {
  Config config = ConfigFactory.parseProperties(props);
  topic = config.getString("topic");
  String zkConnect = config.getString("zookeeper.connect");
  schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
  deserializer = new LiAvroDeserializer(schemaRegistry);
  /** TODO: Make Confluent schema registry integration configurable
   * HashMap<String, String> avroSerDeConfig = new HashMap<>();
   * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
   * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
   * deserializer.configure(avroSerDeConfig, false);
   **/

  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnect);
  consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  // Start from the earliest available offset and leave committing to the caller.
  consumeProps.put("auto.offset.reset", "smallest");
  consumeProps.put("auto.commit.enable", "false");
  //consumeProps.put("consumer.timeout.ms", "10000");

  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(ImmutableMap.of(topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
  stream = streams.get(0);
  iterator = stream.iterator();
}
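For context, a minimal sketch of how the iterator built above would typically be drained. nextRecord is a hypothetical helper, and the GenericRecord return type assumes LiAvroDeserializer decodes payloads to Avro generic records:

  // Hypothetical helper: pull one message off the blocking iterator and
  // Avro-decode its payload (assumes LiAvroDeserializer yields GenericRecord).
  public GenericRecord nextRecord() {
    MessageAndMetadata<byte[], byte[]> mam = iterator.next(); // blocks until a message arrives
    return deserializer.deserialize(topic, mam.message());
  }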
@Override
public void close() {
  connector.shutdown();
}
@OnStopped
public void shutdownConsumer() {
  this.consumerStreamsReady.set(false);
  if (consumer != null) {
    try {
      consumer.commitOffsets();
    } finally {
      consumer.shutdown();
    }
  }
  if (this.executor != null) {
    this.executor.shutdown();
    try {
      if (!this.executor.awaitTermination(30000, TimeUnit.MILLISECONDS)) {
        this.executor.shutdownNow();
        getLogger().warn("Executor did not stop in 30 sec. Terminated.");
      }
      this.executor = null;
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
}
@GET
@Timed
public Response consume(
    @QueryParam("topic") String topic,
    @QueryParam("timeout") Integer timeout) {
  if (Strings.isNullOrEmpty(topic))
    return Response.status(400)
        .entity(new String[]{"Undefined topic"})
        .build();

  Properties props = (Properties) consumerCfg.clone();
  if (timeout != null) props.put("consumer.timeout.ms", "" + timeout);
  ConsumerConfig config = new ConsumerConfig(props);
  ConsumerConnector connector = Consumer.createJavaConsumerConnector(config);

  Map<String, Integer> streamCounts = Collections.singletonMap(topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams(streamCounts);
  KafkaStream<byte[], byte[]> stream = streams.get(topic).get(0);

  List<Message> messages = new ArrayList<>();
  try {
    // With consumer.timeout.ms set, the iterator throws ConsumerTimeoutException
    // once no message arrives within the timeout, which bounds this request.
    for (MessageAndMetadata<byte[], byte[]> messageAndMetadata : stream)
      messages.add(new Message(messageAndMetadata));
  } catch (ConsumerTimeoutException ignore) {
  } finally {
    connector.commitOffsets();
    connector.shutdown();
  }
  return Response.ok(messages).build();
}
try {
  ConsumerConfig ccfg = new ConsumerConfig(props);
  jcc = Consumer.createJavaConsumerConnector(ccfg);
  Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
  topicCountMap.put(TEST_TOPIC_NAME, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> topicMap = jcc.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> cstrm = topicMap.get(TEST_TOPIC_NAME).get(0);
  for (MessageAndMetadata<byte[], byte[]> mm : cstrm) {
    // Shutting down inside the loop ends consumption after the first message.
    // (Loop body and closing braces reconstructed; the excerpt was cut off here.)
    jcc.shutdown();
    break;
  }
} finally {
  if (jcc != null) {
    jcc.shutdown(); // idempotent; safe even if already shut down above
  }
}
protected List<KafkaStream<byte[], byte[]>> createStreams() {
  Map<String, Integer> topicCountMap = Maps.newHashMap();
  topicCountMap.put(this.topic, this.numThreads);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      this.consumer.createMessageStreams(topicCountMap);
  return consumerMap.get(this.topic);
}
@Override
public void init(SecorConfig config) throws UnknownHostException {
  this.mConfig = config;
  mConsumerConnector = Consumer.createJavaConsumerConnector(createConsumerConfig());

  if (!mConfig.getKafkaTopicBlacklist().isEmpty() && !mConfig.getKafkaTopicFilter().isEmpty()) {
    throw new RuntimeException("Topic filter and blacklist cannot both be specified.");
  }
  TopicFilter topicFilter = !mConfig.getKafkaTopicBlacklist().isEmpty()
      ? new Blacklist(mConfig.getKafkaTopicBlacklist())
      : new Whitelist(mConfig.getKafkaTopicFilter());
  LOG.debug("Using TopicFilter {}({})", topicFilter.getClass(), topicFilter);

  List<KafkaStream<byte[], byte[]>> streams =
      mConsumerConnector.createMessageStreamsByFilter(topicFilter);
  KafkaStream<byte[], byte[]> stream = streams.get(0);
  mIterator = stream.iterator();
  mKafkaMessageTimestampFactory = new KafkaMessageTimestampFactory(mConfig.getKafkaMessageTimestampClass());
}
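Both Whitelist and Blacklist take a regular expression over topic names. A standalone sketch with an illustrative prefix pattern (the "events_" prefix and the connector variable are made up):

  // Illustrative only: subscribe to every topic whose name starts with "events_".
  TopicFilter filter = new Whitelist("events_.*");
  List<KafkaStream<byte[], byte[]>> streams =
      connector.createMessageStreamsByFilter(filter); // single stream by default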
ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
    consumerConnector.createMessageStreams(topicCountMap);
// Loop reconstructed from the truncated excerpt ("topic" assumed from context):
// decode each payload as a String and print it before committing offsets.
for (MessageAndMetadata<byte[], byte[]> messageAndMetadata : consumerMap.get(topic).get(0)) {
  String message = new String(messageAndMetadata.message());
  System.out.println(message);
}
consumerConnector.commitOffsets();
final ConsumerIterator<byte[], byte[]> streamIterator =
    consumer.createMessageStreams(topicCountMap)
        .get(config.healthCheckTopic)
        .get(0)
        .iterator();
// ...
consumer.commitOffsets();
try {
  consumer.shutdown();
} catch (Exception ignore) {
}
@Override
public void commit(TopicPartition topicPartition, long offset) {
  // The old high-level consumer can only commit all owned partitions at once,
  // so the topic-partition and offset arguments are effectively ignored.
  mConsumerConnector.commitOffsets();
}
final List<KafkaStream<String, String>> streams =
    consumerConnector.createMessageStreamsByFilter(
        new Whitelist(Pattern.quote(topic)), 1,
        DEFAULT_STRING_DECODER, DEFAULT_STRING_DECODER);
consumerConnector.shutdown();
private List<KafkaStream<byte[], byte[]>> _createStreams(ConsumerConnector consumer, String topicName) {
  // Use at least two streams per topic, honoring the configured value when larger.
  int numStreams = Math.max(
      Integer.parseInt(_configuration.getValue(
          Property.KAFKA_CONSUMER_STREAMS_PER_TOPIC.getName(),
          Property.KAFKA_CONSUMER_STREAMS_PER_TOPIC.getDefaultValue())),
      2);
  return consumer.createMessageStreamsByFilter(new Whitelist(topicName), numStreams);
}
cc = Consumer.createJavaConsumerConnector(consumerConfig);
final List<KafkaStream<byte[], byte[]>> streams =
    cc.createMessageStreamsByFilter(filter, numThreads);
final ExecutorService executor = executorService(numThreads);
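The excerpt stops right after the executor is created; what usually follows is one worker per stream, sketched below with a hypothetical handleMessage callback:

  // One blocking consumer loop per stream; handleMessage is a made-up callback.
  for (final KafkaStream<byte[], byte[]> stream : streams) {
    executor.submit(new Runnable() {
      @Override
      public void run() {
        for (MessageAndMetadata<byte[], byte[]> record : stream) { // blocks awaiting messages
          handleMessage(record.message());
        }
      }
    });
  }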
@Override
public void commit() {
  consumer.commitOffsets();
  _serverMetrics.addMeteredTableValue(_tableAndStreamName, ServerMeter.REALTIME_OFFSET_COMMITS, 1L);
  _serverMetrics.addMeteredGlobalValue(ServerMeter.REALTIME_OFFSET_COMMITS, 1L);
}
KafkaConsumerSuite(String zkConnectString, String topic) {
  _topic = topic;
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnectString);
  consumeProps.put("group.id", _topic + "-" + System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  // The leading underscore leaves this key unrecognized, effectively disabling
  // the consumer timeout, so the stream iterator blocks indefinitely.
  consumeProps.put("_consumer.timeout.ms", "10000");

  _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this._topic);
  _stream = streams.get(0);
  _iterator = _stream.iterator();
}
public void close() {
  consumer.shutdown();
}
private void init() {
  // Register Kafka offset-lag metrics; one Gauge per consumer gives
  // per-consumer granularity.
  MetricRegistry registry = Metrics.getRegistry();
  try {
    fetchedMsgCounter = registry.meter("kafkaIngesterConsumer." + this.getName() + "-msgFetchRate");
    failedToIngestCounter = registry.meter("kafkaIngesterConsumer." + this.getName() + "-failedToIngest");
    kafkaOffsetLagGauge = registry.register(
        "kafkaIngesterConsumer." + this.getName() + "-kafkaOffsetLag",
        new JmxAttributeGauge(new ObjectName(maxLagMetricName), "Value"));
  } catch (MalformedObjectNameException | IllegalArgumentException e) {
    logger.error("Registration failure for metrics of KafkaIngesterConsumer", e);
  }

  TopicFilter topicFilter = new Whitelist(AuditConfig.AUDIT_TOPIC_NAME);
  logger.info("{}: Topic filter is {}", getName(), AuditConfig.AUDIT_TOPIC_NAME);
  this.consumer = Consumer.createJavaConsumerConnector(createConsumerConfig());
  KafkaStream<byte[], byte[]> stream = consumer.createMessageStreamsByFilter(topicFilter, 1).get(0);
  iterator = stream.iterator();
  logger.info("KafkaIngesterConsumer thread {} initialized successfully", getName());

  if (AuditConfig.INGESTER_ENABLE_DEDUP) {
    deduplicator = new Deduplicator(threadId, AuditConfig.INGESTER_REDIS_HOST,
        AuditConfig.INGESTER_REDIS_PORT, AuditConfig.INGESTER_REDIS_KEY_TTL_SEC,
        AuditConfig.INGESTER_DUP_HOST_PREFIX, AuditConfig.INGESTER_HOSTS_WITH_DUP);
    deduplicator.open();
  } else {
    deduplicator = null;
  }
}