/**
 * Get the configuration for the specified resources with the default options.
 *
 * This is a convenience method for #{@link AdminClient#describeConfigs(Collection, DescribeConfigsOptions)}
 * with default options. See the overload for more details.
 *
 * This operation is supported by brokers with version 0.11.0.0 or higher.
 *
 * @param resources The resources (topic and broker resource types are currently supported)
 * @return The DescribeConfigsResult
 */
public DescribeConfigsResult describeConfigs(Collection<ConfigResource> resources) {
    // Delegate to the full overload with a freshly constructed default options object.
    final DescribeConfigsOptions defaultOptions = new DescribeConfigsOptions();
    return describeConfigs(resources, defaultOptions);
}
/**
 * Fetches the live configuration of one broker in the cluster (the first node
 * returned by {@code describeCluster()}), to be used as default settings.
 *
 * @param admin the AdminClient used to query the cluster
 * @return the Config of an arbitrary broker
 * @throws ConnectException if no brokers are reachable or no configs come back
 * @throws Exception on timeout or lookup failure
 */
private Config getKafkaBrokerConfig(AdminClient admin) throws Exception {
    final long timeoutMs = KAFKA_QUERY_TIMEOUT.toMillis();
    final Collection<Node> nodes = admin.describeCluster().nodes().get(timeoutMs, TimeUnit.MILLISECONDS);
    if (nodes.isEmpty()) {
        throw new ConnectException("No brokers available to obtain default settings");
    }
    // Any broker will do; take the first one reported by the cluster.
    final String brokerId = nodes.iterator().next().idString();
    final ConfigResource brokerResource = new ConfigResource(ConfigResource.Type.BROKER, brokerId);
    final Map<ConfigResource, Config> configs = admin
            .describeConfigs(Collections.singleton(brokerResource))
            .all()
            .get(timeoutMs, TimeUnit.MILLISECONDS);
    if (configs.isEmpty()) {
        throw new ConnectException("No configs have been received");
    }
    return configs.values().iterator().next();
}
}
/**
 * Verifies that describeConfigs for a broker resource completes successfully
 * when the mock client returns an error-free (empty) config response.
 */
@Test
public void testDescribeConfigs() throws Exception {
    try (AdminClientUnitTestEnv env = mockClientEnv()) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        // Queue a successful, empty config response for broker "0".
        env.kafkaClient().prepareResponse(new DescribeConfigsResponse(0,
                Collections.singletonMap(
                        new ConfigResource(ConfigResource.Type.BROKER, "0"),
                        new DescribeConfigsResponse.Config(ApiError.NONE, Collections.emptySet()))));
        final DescribeConfigsResult describeResult = env.adminClient()
                .describeConfigs(Collections.singleton(new ConfigResource(ConfigResource.Type.BROKER, "0")));
        // Must complete without throwing.
        describeResult.all().get();
    }
}
// NOTE(review): fragment — the enclosing try/test method begins outside this view; kept byte-identical.
// Issues a broker-config describe that the surrounding test presumably primes to fail
// with an authentication error — TODO confirm against the full test body.
env.adminClient().describeConfigs(Collections.singleton(new ConfigResource(ConfigResource.Type.BROKER, "0"))).all().get();
fail("Expected an authentication error.");
} catch (ExecutionException e) {
// NOTE(review): fragment — the enclosing test method begins and ends outside this view; kept byte-identical.
new DescribeConfigsResponse.Config(ApiError.NONE, Collections.emptySet()))));
// Issues a topic-config describe, then advances time by 5s — presumably a MockTime
// used to trigger a request timeout; verify against the surrounding test.
DescribeConfigsResult result2 = env.adminClient().describeConfigs(Collections.singleton(
        new ConfigResource(ConfigResource.Type.TOPIC, "foo")));
time.sleep(5000);
/**
 * Retrieves the explicitly-set, writable configuration entries of a topic.
 *
 * Default and read-only entries are filtered out so the result can be re-applied
 * to another topic. On failure the error is logged and an empty map is returned
 * (best-effort behavior preserved).
 *
 * @param topicName the topic whose configuration is fetched
 * @return a name-to-value map of non-default, non-read-only config entries;
 *         empty if the lookup fails
 */
private Map<String, String> topicConfiguration(String topicName) {
    Map<String, String> configMap = Collections.emptyMap();
    try {
        ConfigResource cr = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
        DescribeConfigsResult dc = adminClient.describeConfigs(singleton(cr));
        Map<ConfigResource, Config> configs = dc.all().get();
        Config config = configs.get(cr);
        // Keep only entries that were explicitly set and can be re-applied elsewhere.
        configMap = config.entries().stream()
                .filter(x -> !x.isDefault() && !x.isReadOnly())
                .collect(toMap(x -> x.name(), x -> x.value()));
        log.debug("Existing configuration for topic {} is {}", topicName, configMap);
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can observe the interruption
        // (it was previously swallowed by the multi-catch).
        Thread.currentThread().interrupt();
        log.warn("Exception occurred during topic configuration retrieval. name: {}", topicName, e);
    } catch (ExecutionException e) {
        log.warn("Exception occurred during topic configuration retrieval. name: {}", topicName, e);
    }
    return configMap;
}
/**
 * Looks up the configuration of a single broker (the first node the cluster
 * reports) so its values can serve as defaults.
 *
 * @param admin the AdminClient used for the cluster queries
 * @return the Config of the queried broker
 * @throws ConnectException if the cluster has no reachable brokers or returns no configs
 * @throws Exception on timeout or lookup failure
 */
private Config getKafkaBrokerConfig(AdminClient admin) throws Exception {
    final Collection<Node> clusterNodes =
            admin.describeCluster().nodes().get(KAFKA_QUERY_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    if (clusterNodes.isEmpty()) {
        throw new ConnectException("No brokers available to obtain default settings");
    }
    // The first reported node is as good as any for reading broker-level defaults.
    final Node firstNode = clusterNodes.iterator().next();
    final Set<ConfigResource> query =
            Collections.singleton(new ConfigResource(ConfigResource.Type.BROKER, firstNode.idString()));
    final Map<ConfigResource, Config> brokerConfigs =
            admin.describeConfigs(query).all().get(KAFKA_QUERY_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    if (brokerConfigs.isEmpty()) {
        throw new ConnectException("No configs have been received");
    }
    return brokerConfigs.values().iterator().next();
}
}
/**
 * Get a topic config via the Kafka AdminClient API, calling the given handler
 * (in a different thread) with the result.
 */
@Override
public void topicMetadata(TopicName topicName, Handler<AsyncResult<TopicMetadata>> handler) {
    LOGGER.debug("Getting metadata for topic {}", topicName);
    final String name = topicName.toString();
    final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, name);
    // Kick off the description and config lookups; both complete asynchronously.
    final KafkaFuture<TopicDescription> descriptionFuture =
            adminClient.describeTopics(Collections.singleton(name)).values().get(name);
    final KafkaFuture<Config> configFuture =
            adminClient.describeConfigs(Collections.singleton(resource)).values().get(resource);
    queueWork(new MetadataWork(descriptionFuture, configFuture, handler::handle));
}
private List<ConfigItem> describeResource(final ConfigResource configResource) { final DescribeConfigsResult result = adminClient.describeConfigs(Collections.singleton(configResource)); final List<ConfigItem> configItems = new ArrayList<>(); try { final Map<ConfigResource, Config> configMap = result.all().get(); final Config config = configMap.get(configResource); for (final ConfigEntry configEntry : config.entries()) { // Skip sensitive entries if (configEntry.isSensitive()) { continue; } configItems.add( new ConfigItem(configEntry.name(), configEntry.value(), configEntry.isDefault()) ); } return configItems; } catch (InterruptedException | ExecutionException e) { // TODO Handle this throw new RuntimeException(e.getMessage(), e); } }