public static Properties extractKafkaConfigToProperties(Configuration configuration) { Set<String> configNames = new HashSet<String>(); try { configNames = ConsumerConfig.configNames(); } catch (Exception e) { // the Kafka configNames api is supported on 0.10.1.0+, in case NoSuchMethodException which is an Error, not Exception String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms," + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type," + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password," + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms," + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes," + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms," + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset").split(","); configNames.addAll(Arrays.asList(configNamesArray)); } Properties result = new Properties(); for (Iterator<Map.Entry<String, String>> it = configuration.iterator(); it.hasNext();) { Map.Entry<String, String> entry = it.next(); String key = entry.getKey(); String value = entry.getValue(); if (configNames.contains(key)) { result.put(key, value); } } return result; }
/**
 * A consumer is instantiated by providing a {@link java.util.Properties} object as configuration, and a
 * key and a value {@link Deserializer}.
 * <p>
 * Valid configuration strings are documented at {@link ConsumerConfig}.
 * <p>
 * Note: after creating a {@code KafkaConsumer} you must always {@link #close()} it to avoid resource leaks.
 *
 * @param properties The consumer configuration properties
 * @param keyDeserializer The deserializer for key that implements {@link Deserializer}. The configure() method
 *            won't be called in the consumer when the deserializer is passed in directly.
 * @param valueDeserializer The deserializer for value that implements {@link Deserializer}. The configure() method
 *            won't be called in the consumer when the deserializer is passed in directly.
 */
public KafkaConsumer(Properties properties, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
    // Delegate to the private constructor, first folding the supplied deserializers
    // into the config so that a complete ConsumerConfig can be built.
    this(new ConsumerConfig(ConsumerConfig.addDeserializerToConfig(properties, keyDeserializer, valueDeserializer)), keyDeserializer, valueDeserializer);
}
// NOTE(review): this fragment appears truncated and whitespace-mangled — braces opened by
// the try block and the "groupId == null" branch are never closed in the visible text, the
// constructor body is cut off mid-initialization, and an embedded "// overwrite..." line
// comment sits in the middle of the collapsed line. Left byte-for-byte unchanged; restore
// and review against the complete original source before editing.
@SuppressWarnings("unchecked") private KafkaConsumer(ConsumerConfig config, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) { try { String clientId = config.getString(ConsumerConfig.CLIENT_ID_CONFIG); if (clientId.isEmpty()) clientId = "consumer-" + CONSUMER_CLIENT_ID_SEQUENCE.getAndIncrement(); this.clientId = clientId; this.groupId = config.getString(ConsumerConfig.GROUP_ID_CONFIG); LogContext logContext = new LogContext("[Consumer clientId=" + clientId + ", groupId=" + groupId + "] "); this.log = logContext.logger(getClass()); boolean enableAutoCommit = config.getBoolean(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG); if (groupId == null) { // overwrite in case of default group id where the config is not explicitly provided if (!config.originals().containsKey(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG)) enableAutoCommit = false; else if (enableAutoCommit) this.requestTimeoutMs = config.getInt(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG); this.defaultApiTimeoutMs = config.getInt(ConsumerConfig.DEFAULT_API_TIMEOUT_MS_CONFIG); this.time = Time.SYSTEM; MetricConfig metricConfig = new MetricConfig().samples(config.getInt(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG)) .timeWindow(config.getLong(ConsumerConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS) .recordLevel(Sensor.RecordingLevel.forName(config.getString(ConsumerConfig.METRICS_RECORDING_LEVEL_CONFIG))) .tags(metricsTags); List<MetricsReporter> reporters = config.getConfiguredInstances(ConsumerConfig.METRIC_REPORTER_CLASSES_CONFIG, MetricsReporter.class, Collections.singletonMap(ConsumerConfig.CLIENT_ID_CONFIG, clientId)); reporters.add(new JmxReporter(JMX_PREFIX)); this.metrics = new Metrics(metricConfig, reporters, time); this.retryBackoffMs = config.getLong(ConsumerConfig.RETRY_BACKOFF_MS_CONFIG);
/**
 * Shared private constructor: initializes metrics, subscription state, buffer sizing and
 * the key/value deserializers from the supplied {@link ConsumerConfig}.
 * <p>
 * NOTE(review): the {@code callback} parameter is not referenced anywhere in this visible
 * body — presumably it is consumed elsewhere or was intended to be stored; confirm against
 * the full class.
 */
private KafkaConsumer(ConsumerConfig config, ConsumerRebalanceCallback callback, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
    log.trace("Starting the Kafka consumer");
    subscribedTopics = new HashSet<String>();
    subscribedPartitions = new HashSet<TopicPartition>();
    // Metrics are reported via JMX under the "kafka.consumer." domain.
    this.metrics = new Metrics(new MetricConfig(), Collections.singletonList((MetricsReporter) new JmxReporter("kafka.consumer.")), new SystemTime());
    this.metadataFetchTimeoutMs = config.getLong(ConsumerConfig.METADATA_FETCH_TIMEOUT_CONFIG);
    this.totalMemorySize = config.getLong(ConsumerConfig.TOTAL_BUFFER_MEMORY_CONFIG);
    // Parses and validates the bootstrap server list; the resulting addresses are not
    // used further in this visible fragment — looks like validation for its side effect.
    // TODO(review): confirm against the complete constructor.
    List<InetSocketAddress> addresses = ClientUtils.parseAndValidateAddresses(config.getList(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG));
    // Deserializers passed in directly take precedence; otherwise instantiate the
    // classes named in the config.
    if (keyDeserializer == null)
        this.keyDeserializer = config.getConfiguredInstance(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, Deserializer.class);
    else
        this.keyDeserializer = keyDeserializer;
    if (valueDeserializer == null)
        this.valueDeserializer = config.getConfiguredInstance(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Deserializer.class);
    else
        this.valueDeserializer = valueDeserializer;
    // Log any config entries that were supplied but never read.
    config.logUnused();
    log.debug("Kafka consumer started");
}
/**
 * Verifies that {@code ConsumerConfig.addDeserializerToConfig} fills in the key/value
 * deserializer entries of a Properties object, taking each value either from the
 * existing properties or from the explicitly supplied {@link Deserializer} instance.
 */
@Test
public void testDeserializerToPropertyConfig() {
    // Both deserializers come from the properties; nothing supplied directly.
    Properties properties = new Properties();
    properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializerClassName);
    properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializerClassName);
    Properties newProperties = ConsumerConfig.addDeserializerToConfig(properties, null, null);
    // JUnit's assertEquals contract is (expected, actual); the previous argument order
    // was reversed, which produced misleading failure messages.
    assertEquals(keyDeserializerClassName, newProperties.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClassName, newProperties.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));

    // Key deserializer supplied directly; value taken from the properties.
    properties.clear();
    properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializerClassName);
    newProperties = ConsumerConfig.addDeserializerToConfig(properties, keyDeserializer, null);
    assertEquals(keyDeserializerClassName, newProperties.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClassName, newProperties.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));

    // Value deserializer supplied directly; key taken from the properties.
    properties.clear();
    properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializerClassName);
    newProperties = ConsumerConfig.addDeserializerToConfig(properties, null, valueDeserializer);
    assertEquals(keyDeserializerClassName, newProperties.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClassName, newProperties.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));

    // Both deserializers supplied directly; the properties start empty.
    properties.clear();
    newProperties = ConsumerConfig.addDeserializerToConfig(properties, keyDeserializer, valueDeserializer);
    assertEquals(keyDeserializerClassName, newProperties.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClassName, newProperties.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));
}
/**
 * A consumer is instantiated by providing a set of key-value pairs as configuration, a {@link ConsumerRebalanceCallback}
 * implementation, a key and a value {@link Deserializer}.
 * <p>
 * Valid configuration strings are documented at {@link ConsumerConfig}
 * @param configs The consumer configs
 * @param callback A callback interface that the user can implement to manage customized offsets on the start and end of
 *            every rebalance operation.
 * @param keyDeserializer The deserializer for key that implements {@link Deserializer}. The configure() method won't
 *            be called when the deserializer is passed in directly.
 * @param valueDeserializer The deserializer for value that implements {@link Deserializer}. The configure() method
 *            won't be called when the deserializer is passed in directly.
 */
public KafkaConsumer(Map<String, Object> configs, ConsumerRebalanceCallback callback, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
    // Delegate to the private constructor, first folding the supplied deserializers
    // into the config map so that a complete ConsumerConfig can be built.
    this(new ConsumerConfig(addDeserializerToConfig(configs, keyDeserializer, valueDeserializer)), callback, keyDeserializer, valueDeserializer);
}
/**
 * Verifies that {@code ConsumerConfig.addDeserializerToConfig} fills in the key/value
 * deserializer entries of a config Map, taking each value either from the existing map
 * or from the explicitly supplied {@link Deserializer} instance.
 */
@Test
public void testDeserializerToMapConfig() {
    // Both deserializers come from the config map; nothing supplied directly.
    Map<String, Object> configs = new HashMap<>();
    configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializerClass);
    configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializerClass);
    Map<String, Object> newConfigs = ConsumerConfig.addDeserializerToConfig(configs, null, null);
    // JUnit's assertEquals contract is (expected, actual); the previous argument order
    // was reversed, which produced misleading failure messages.
    assertEquals(keyDeserializerClass, newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClass, newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));

    // Key deserializer supplied directly; value taken from the config map.
    configs.clear();
    configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializerClass);
    newConfigs = ConsumerConfig.addDeserializerToConfig(configs, keyDeserializer, null);
    assertEquals(keyDeserializerClass, newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClass, newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));

    // Value deserializer supplied directly; key taken from the config map.
    configs.clear();
    configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializerClass);
    newConfigs = ConsumerConfig.addDeserializerToConfig(configs, null, valueDeserializer);
    assertEquals(keyDeserializerClass, newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClass, newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));

    // Both deserializers supplied directly; the config map starts empty.
    configs.clear();
    newConfigs = ConsumerConfig.addDeserializerToConfig(configs, keyDeserializer, valueDeserializer);
    assertEquals(keyDeserializerClass, newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    assertEquals(valueDeserializerClass, newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));
}
}
/**
 * A consumer is instantiated by providing a {@link java.util.Properties} object as configuration and a
 * {@link ConsumerRebalanceCallback} implementation, a key and a value {@link Deserializer}.
 * <p>
 * Valid configuration strings are documented at {@link ConsumerConfig}
 * @param properties The consumer configuration properties
 * @param callback A callback interface that the user can implement to manage customized offsets on the start and end of
 *            every rebalance operation.
 * @param keyDeserializer The deserializer for key that implements {@link Deserializer}. The configure() method won't
 *            be called when the deserializer is passed in directly.
 * @param valueDeserializer The deserializer for value that implements {@link Deserializer}. The configure() method
 *            won't be called when the deserializer is passed in directly.
 */
public KafkaConsumer(Properties properties, ConsumerRebalanceCallback callback, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
    // Delegate to the private constructor, first folding the supplied deserializers
    // into the properties so that a complete ConsumerConfig can be built.
    this(new ConsumerConfig(addDeserializerToConfig(properties, keyDeserializer, valueDeserializer)), callback, keyDeserializer, valueDeserializer);
}
public static Properties extractKafkaConfigToProperties(Configuration configuration) { Set<String> configNames = new HashSet<String>(); try { configNames = ConsumerConfig.configNames(); } catch (Exception e) { // the Kafka configNames api is supported on 0.10.1.0+, in case NoSuchMethodException which is an Error, not Exception String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms," + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type," + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password," + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms," + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes," + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms," + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset").split(","); configNames.addAll(Arrays.asList(configNamesArray)); } Properties result = new Properties(); for (Iterator<Map.Entry<String, String>> it = configuration.iterator(); it.hasNext();) { Map.Entry<String, String> entry = it.next(); String key = entry.getKey(); String value = entry.getValue(); if (configNames.contains(key)) { result.put(key, value); } } return result; }
/**
 * A consumer is instantiated by providing a set of key-value pairs as configuration, and a key and a value {@link Deserializer}.
 * <p>
 * Valid configuration strings are documented at {@link ConsumerConfig}.
 * <p>
 * Note: after creating a {@code KafkaConsumer} you must always {@link #close()} it to avoid resource leaks.
 *
 * @param configs The consumer configs
 * @param keyDeserializer The deserializer for key that implements {@link Deserializer}. The configure() method
 *            won't be called in the consumer when the deserializer is passed in directly.
 * @param valueDeserializer The deserializer for value that implements {@link Deserializer}. The configure() method
 *            won't be called in the consumer when the deserializer is passed in directly.
 */
public KafkaConsumer(Map<String, Object> configs, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
    // Delegate to the private constructor, first folding the supplied deserializers
    // into the config map so that a complete ConsumerConfig can be built.
    this(new ConsumerConfig(ConsumerConfig.addDeserializerToConfig(configs, keyDeserializer, valueDeserializer)), keyDeserializer, valueDeserializer);
}
// Builds a consumer config from arbitrary properties after forcing byte-array
// deserializers for both key and value. The trailing "false" is forwarded to the
// superclass constructor — presumably it suppresses logging of the config; confirm
// against the superclass (likely AbstractConfig/ConsumerConfig) before relying on it.
private InternalConsumerConfig(final Map<String, Object> props) {
    super(ConsumerConfig.addDeserializerToConfig(props, new ByteArrayDeserializer(), new ByteArrayDeserializer()), false);
}
}
/** * Merge boot consumer properties, general properties from * {@link #setConfiguration(Map)} that apply to consumers, properties from * {@link #setConsumerProperties(Map)}, in that order. * @return the merged properties. */ public Map<String, Object> mergedConsumerConfiguration() { Map<String, Object> consumerConfiguration = new HashMap<>(); consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties()); // Copy configured binder properties that apply to consumers for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) { if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) { consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue()); } } consumerConfiguration.putAll(this.consumerProperties); // Override Spring Boot bootstrap server setting if left to default with the value // configured in the binder return getConfigurationWithBootstrapServer(consumerConfiguration, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); }
/** * Merge boot consumer properties, general properties from * {@link #setConfiguration(Map)} that apply to consumers, properties from * {@link #setConsumerProperties(Map)}, in that order. * @return the merged properties. */ public Map<String, Object> mergedConsumerConfiguration() { Map<String, Object> consumerConfiguration = new HashMap<>(); consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties()); // Copy configured binder properties that apply to consumers for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) { if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) { consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue()); } } consumerConfiguration.putAll(this.consumerProperties); // Override Spring Boot bootstrap server setting if left to default with the value // configured in the binder return getConfigurationWithBootstrapServer(consumerConfiguration, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); }
private Map<String, Object> getCommonConsumerConfigs() { final Map<String, Object> clientProvidedProps = getClientPropsWithPrefix(CONSUMER_PREFIX, ConsumerConfig.configNames()); checkIfUnexpectedUserSpecifiedConsumerConfig(clientProvidedProps, NON_CONFIGURABLE_CONSUMER_DEFAULT_CONFIGS); checkIfUnexpectedUserSpecifiedConsumerConfig(clientProvidedProps, NON_CONFIGURABLE_CONSUMER_EOS_CONFIGS); final Map<String, Object> consumerProps = new HashMap<>(eosEnabled ? CONSUMER_EOS_OVERRIDES : CONSUMER_DEFAULT_OVERRIDES); consumerProps.putAll(getClientCustomProps()); consumerProps.putAll(clientProvidedProps); // bootstrap.servers should be from StreamsConfig consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, originals().get(BOOTSTRAP_SERVERS_CONFIG)); return consumerProps; }
public Map<String, Object> getConsumerConfiguration() { Map<String, Object> consumerConfiguration = new HashMap<>(); // If Spring Boot Kafka properties are present, add them with lowest precedence if (this.kafkaProperties != null) { consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties()); } // Copy configured binder properties for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) { if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) { consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue()); } } // Override Spring Boot bootstrap server setting if left to default with the value // configured in the binder if (ObjectUtils.isEmpty(consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))) { consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString()); } else { Object boostrapServersConfig = consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); if (boostrapServersConfig instanceof List) { @SuppressWarnings("unchecked") List<String> bootStrapServers = (List<String>) consumerConfiguration .get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) { consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString()); } } } return Collections.unmodifiableMap(consumerConfiguration); }
/**
 * Get a map of custom configs by removing from the originals all the Streams, Consumer,
 * Producer, and AdminClient configs. Prefixed properties are also removed because they
 * are already added by {@link #getClientPropsWithPrefix(String, Set)}. This allows a
 * custom property to be set for a specific client alone via its prefix, or for all
 * clients when no prefix is used.
 *
 * @return a map containing only the custom properties
 */
private Map<String, Object> getClientCustomProps() {
    final Map<String, Object> customProps = originals();
    // Drop every key that belongs to a recognized config definition...
    customProps.keySet().removeAll(CONFIG.names());
    customProps.keySet().removeAll(ConsumerConfig.configNames());
    customProps.keySet().removeAll(ProducerConfig.configNames());
    customProps.keySet().removeAll(AdminClientConfig.configNames());
    // ...and every key already handled through a client-specific prefix.
    customProps.keySet().removeAll(originalsWithPrefix(CONSUMER_PREFIX, false).keySet());
    customProps.keySet().removeAll(originalsWithPrefix(PRODUCER_PREFIX, false).keySet());
    customProps.keySet().removeAll(originalsWithPrefix(ADMIN_CLIENT_PREFIX, false).keySet());
    return customProps;
}