// Translate the desired config entries for this resource into the
// AlterConfigsRequest wire representation, then register the request
// and a pending future keyed by the same resource.
List<AlterConfigsRequest.ConfigEntry> configEntries = new ArrayList<>();
configs.get(resource).entries().forEach(entry ->
        configEntries.add(new AlterConfigsRequest.ConfigEntry(entry.name(), entry.value())));
requestMap.put(resource, new AlterConfigsRequest.Config(configEntries));
futures.put(resource, new KafkaFutureImpl<>());
@Override public void initializeStorage() { super.initializeStorage(); try (AdminClient admin = AdminClient.create(this.producerConfig.asProperties())) { // Find default replication factor Config brokerConfig = getKafkaBrokerConfig(admin); final short replicationFactor = Short.parseShort(brokerConfig.get(DEFAULT_TOPIC_REPLICATION_FACTOR_PROP_NAME).value()); // Create topic final NewTopic topic = new NewTopic(topicName, (short)1, replicationFactor); topic.configs(Collect.hashMapOf("cleanup.policy", "delete", "retention.ms", Long.toString(Long.MAX_VALUE), "retention.bytes", "-1")); admin.createTopics(Collections.singleton(topic)); logger.info("Database history topic '{}' created", topic); } catch (Exception e) { throw new ConnectException("Creation of database history topic failed, please create the topic manually", e); } }
/**
 * Fetches the explicitly-set (non-default, writable) configuration of the
 * given topic.
 *
 * @param topicName name of the topic to describe
 * @return map of config name to value; empty if retrieval fails
 */
private Map<String, String> topicConfiguration(String topicName) {
    Map<String, String> configMap = Collections.emptyMap();
    try {
        ConfigResource cr = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
        DescribeConfigsResult dc = adminClient.describeConfigs(singleton(cr));
        Map<ConfigResource, Config> configs = dc.all().get();
        Config config = configs.get(cr);
        // Keep only entries the user explicitly set and that can be changed.
        configMap = config.entries().stream()
                .filter(x -> !x.isDefault() && !x.isReadOnly())
                .collect(toMap(x -> x.name(), x -> x.value()));
        log.debug("Existing configuration for topic {} is {}", topicName, configMap);
    }
    catch (InterruptedException e) {
        // BUGFIX: restore the interrupt flag so callers can observe the
        // interruption instead of it being silently swallowed.
        Thread.currentThread().interrupt();
        log.warn("Exception occurred during topic configuration retrieval. name: {}", topicName, e);
    }
    catch (ExecutionException e) {
        log.warn("Exception occurred during topic configuration retrieval. name: {}", topicName, e);
    }
    return configMap;
}
private List<ConfigItem> describeResource(final ConfigResource configResource) { final DescribeConfigsResult result = adminClient.describeConfigs(Collections.singleton(configResource)); final List<ConfigItem> configItems = new ArrayList<>(); try { final Map<ConfigResource, Config> configMap = result.all().get(); final Config config = configMap.get(configResource); for (final ConfigEntry configEntry : config.entries()) { // Skip sensitive entries if (configEntry.isSensitive()) { continue; } configItems.add( new ConfigItem(configEntry.name(), configEntry.value(), configEntry.isDefault()) ); } return configItems; } catch (InterruptedException | ExecutionException e) { // TODO Handle this throw new RuntimeException(e.getMessage(), e); } }
@Override public void initializeStorage() { super.initializeStorage(); try (AdminClient admin = AdminClient.create(this.producerConfig.asProperties())) { // Find default replication factor Config brokerConfig = getKafkaBrokerConfig(admin); final short replicationFactor = Short.parseShort(brokerConfig.get(DEFAULT_TOPIC_REPLICATION_FACTOR_PROP_NAME).value()); // Create topic final NewTopic topic = new NewTopic(topicName, (short)1, replicationFactor); topic.configs(Collect.hashMapOf("cleanup.policy", "delete", "retention.ms", Long.toString(Long.MAX_VALUE), "retention.bytes", "-1")); admin.createTopics(Collections.singleton(topic)); logger.info("Database history topic '{}' created", topic); } catch (Exception e) { throw new ConnectException("Creation of database history topic failed, please create the topic manually", e); } }
/**
 * Builds a {@link Topic} mirroring the given {@link TopicMetadata}:
 * topic name, partition count, replica count, and every explicitly-set
 * (non-default) config entry are carried over.
 *
 * @param meta metadata to mirror; may be {@code null}
 * @return the corresponding Topic, or {@code null} if {@code meta} is {@code null}
 */
public static Topic fromTopicMetadata(TopicMetadata meta) {
    if (meta == null) {
        return null;
    }
    // NOTE(review): replica count is read from partition 0 — this assumes at
    // least one partition exists and that all partitions share the same
    // replication factor; confirm with callers.
    Topic.Builder builder = new Topic.Builder()
            .withTopicName(meta.getDescription().name())
            .withNumPartitions(meta.getDescription().partitions().size())
            .withNumReplicas((short) meta.getDescription().partitions().get(0).replicas().size())
            .withMetadata(null);
    // Only non-default entries represent configuration the user actually set.
    meta.getConfig().entries().stream()
            .filter(entry -> !entry.isDefault())
            .forEach(entry -> builder.withConfigEntry(entry.name(), entry.value()));
    return builder.build();
}