public static Properties extractKafkaConfigToProperties(Configuration configuration) { Set<String> configNames = new HashSet<String>(); try { configNames = ConsumerConfig.configNames(); } catch (Exception e) { // the Kafka configNames api is supported on 0.10.1.0+, in case NoSuchMethodException which is an Error, not Exception String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms," + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type," + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password," + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms," + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes," + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms," + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset").split(","); configNames.addAll(Arrays.asList(configNamesArray)); } Properties result = new Properties(); for (Iterator<Map.Entry<String, String>> it = configuration.iterator(); it.hasNext();) { Map.Entry<String, String> entry = it.next(); String key = entry.getKey(); String value = entry.getValue(); if (configNames.contains(key)) { result.put(key, value); } } return result; }
public static Properties extractKafkaConfigToProperties(Configuration configuration) { Set<String> configNames = new HashSet<String>(); try { configNames = ConsumerConfig.configNames(); } catch (Exception e) { // the Kafka configNames api is supported on 0.10.1.0+, in case NoSuchMethodException which is an Error, not Exception String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms," + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type," + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password," + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms," + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes," + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms," + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset").split(","); configNames.addAll(Arrays.asList(configNamesArray)); } Properties result = new Properties(); for (Iterator<Map.Entry<String, String>> it = configuration.iterator(); it.hasNext();) { Map.Entry<String, String> entry = it.next(); String key = entry.getKey(); String value = entry.getValue(); if (configNames.contains(key)) { result.put(key, value); } } return result; }
/** * Merge boot consumer properties, general properties from * {@link #setConfiguration(Map)} that apply to consumers, properties from * {@link #setConsumerProperties(Map)}, in that order. * @return the merged properties. */ public Map<String, Object> mergedConsumerConfiguration() { Map<String, Object> consumerConfiguration = new HashMap<>(); consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties()); // Copy configured binder properties that apply to consumers for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) { if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) { consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue()); } } consumerConfiguration.putAll(this.consumerProperties); // Override Spring Boot bootstrap server setting if left to default with the value // configured in the binder return getConfigurationWithBootstrapServer(consumerConfiguration, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); }
/** * Merge boot consumer properties, general properties from * {@link #setConfiguration(Map)} that apply to consumers, properties from * {@link #setConsumerProperties(Map)}, in that order. * @return the merged properties. */ public Map<String, Object> mergedConsumerConfiguration() { Map<String, Object> consumerConfiguration = new HashMap<>(); consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties()); // Copy configured binder properties that apply to consumers for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) { if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) { consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue()); } } consumerConfiguration.putAll(this.consumerProperties); // Override Spring Boot bootstrap server setting if left to default with the value // configured in the binder return getConfigurationWithBootstrapServer(consumerConfiguration, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); }
private Map<String, Object> getCommonConsumerConfigs() { final Map<String, Object> clientProvidedProps = getClientPropsWithPrefix(CONSUMER_PREFIX, ConsumerConfig.configNames()); checkIfUnexpectedUserSpecifiedConsumerConfig(clientProvidedProps, NON_CONFIGURABLE_CONSUMER_DEFAULT_CONFIGS); checkIfUnexpectedUserSpecifiedConsumerConfig(clientProvidedProps, NON_CONFIGURABLE_CONSUMER_EOS_CONFIGS); final Map<String, Object> consumerProps = new HashMap<>(eosEnabled ? CONSUMER_EOS_OVERRIDES : CONSUMER_DEFAULT_OVERRIDES); consumerProps.putAll(getClientCustomProps()); consumerProps.putAll(clientProvidedProps); // bootstrap.servers should be from StreamsConfig consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, originals().get(BOOTSTRAP_SERVERS_CONFIG)); return consumerProps; }
public Map<String, Object> getConsumerConfiguration() { Map<String, Object> consumerConfiguration = new HashMap<>(); // If Spring Boot Kafka properties are present, add them with lowest precedence if (this.kafkaProperties != null) { consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties()); } // Copy configured binder properties for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) { if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) { consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue()); } } // Override Spring Boot bootstrap server setting if left to default with the value // configured in the binder if (ObjectUtils.isEmpty(consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))) { consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString()); } else { Object boostrapServersConfig = consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); if (boostrapServersConfig instanceof List) { @SuppressWarnings("unchecked") List<String> bootStrapServers = (List<String>) consumerConfiguration .get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) { consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString()); } } } return Collections.unmodifiableMap(consumerConfiguration); }
/**
 * Get a map of custom configs by removing from the originals all the Streams, Consumer, Producer, and AdminClient configs.
 * Prefixed properties are also removed because they are already added by {@link #getClientPropsWithPrefix(String, Set)}.
 * This allows to set a custom property for a specific client alone if specified using a prefix, or for all
 * when no prefix is used.
 *
 * @return a map with the custom properties
 */
private Map<String, Object> getClientCustomProps() {
    final Map<String, Object> customProps = originals();
    // Accumulate every known (non-custom) key once, then strip them in a
    // single removeAll rather than seven separate passes.
    final Set<String> knownKeys = new HashSet<>(CONFIG.names());
    knownKeys.addAll(ConsumerConfig.configNames());
    knownKeys.addAll(ProducerConfig.configNames());
    knownKeys.addAll(AdminClientConfig.configNames());
    knownKeys.addAll(originalsWithPrefix(CONSUMER_PREFIX, false).keySet());
    knownKeys.addAll(originalsWithPrefix(PRODUCER_PREFIX, false).keySet());
    knownKeys.addAll(originalsWithPrefix(ADMIN_CLIENT_PREFIX, false).keySet());
    customProps.keySet().removeAll(knownKeys);
    return customProps;
}