[ https://issues.apache.org/jira/browse/KAFKA-14111?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

David Arthur resolved KAFKA-14111.
----------------------------------
    Resolution: Fixed

> Dynamic config update fails for "password" configs in KRaft
> -----------------------------------------------------------
>
>                 Key: KAFKA-14111
>                 URL: https://issues.apache.org/jira/browse/KAFKA-14111
>             Project: Kafka
>          Issue Type: Bug
>          Components: kraft
>            Reporter: David Arthur
>            Assignee: David Arthur
>            Priority: Major
>             Fix For: 3.3.0, 3.4.0
>
>
> Two related bugs were found while updating DynamicBrokerReconfigurationTest
> for KRaft.
> First, if we issue an AlterConfigs (or IncrementalAlterConfigs) request for a
> broker config that is defined as a "password", it succeeds on the controller
> but throws an error when the broker handles it.
> For example, on a vanilla cluster running "config/kraft/server.properties"
> {code}
> ./bin/kafka-configs.sh --bootstrap-server localhost:9092 --alter --broker 1 \
>   --add-config listener.name.external.ssl.key.password=foo
> {code}
> results in
> {code}
> [2022-07-26 16:24:05,049] ERROR Dynamic password config listener.name.external.ssl.key.password could not be decoded, ignoring. (kafka.server.DynamicBrokerConfig)
> org.apache.kafka.common.config.ConfigException: Password encoder secret not configured
>       at kafka.server.DynamicBrokerConfig.$anonfun$passwordEncoder$1(DynamicBrokerConfig.scala:352)
>       at scala.Option.getOrElse(Option.scala:201)
>       at kafka.server.DynamicBrokerConfig.passwordEncoder(DynamicBrokerConfig.scala:352)
>       at kafka.server.DynamicBrokerConfig.decodePassword$1(DynamicBrokerConfig.scala:393)
>       at kafka.server.DynamicBrokerConfig.$anonfun$fromPersistentProps$5(DynamicBrokerConfig.scala:404)
>       at kafka.server.DynamicBrokerConfig.$anonfun$fromPersistentProps$5$adapted(DynamicBrokerConfig.scala:402)
>       at kafka.utils.Implicits$MapExtensionMethods$.$anonfun$forKeyValue$1(Implicits.scala:62)
>       at scala.collection.MapOps.foreachEntry(Map.scala:244)
>       at scala.collection.MapOps.foreachEntry$(Map.scala:240)
>       at scala.collection.AbstractMap.foreachEntry(Map.scala:405)
>       at kafka.server.DynamicBrokerConfig.fromPersistentProps(DynamicBrokerConfig.scala:402)
>       at kafka.server.DynamicBrokerConfig.$anonfun$updateBrokerConfig$1(DynamicBrokerConfig.scala:300)
>       at kafka.server.DynamicBrokerConfig.updateBrokerConfig(DynamicBrokerConfig.scala:299)
>       at kafka.server.BrokerConfigHandler.processConfigChanges(ConfigHandler.scala:221)
>       at kafka.server.metadata.BrokerMetadataPublisher.$anonfun$publish$15(BrokerMetadataPublisher.scala:212)
>       at java.base/java.util.HashMap$KeySet.forEach(HashMap.java:1008)
>       at kafka.server.metadata.BrokerMetadataPublisher.$anonfun$publish$14(BrokerMetadataPublisher.scala:190)
>       at kafka.server.metadata.BrokerMetadataPublisher.$anonfun$publish$14$adapted(BrokerMetadataPublisher.scala:189)
>       at scala.Option.foreach(Option.scala:437)
>       at kafka.server.metadata.BrokerMetadataPublisher.publish(BrokerMetadataPublisher.scala:189)
>       at kafka.server.metadata.BrokerMetadataListener.kafka$server$metadata$BrokerMetadataListener$$publish(BrokerMetadataListener.scala:293)
>       at kafka.server.metadata.BrokerMetadataListener$HandleCommitsEvent.$anonfun$run$2(BrokerMetadataListener.scala:126)
>       at kafka.server.metadata.BrokerMetadataListener$HandleCommitsEvent.$anonfun$run$2$adapted(BrokerMetadataListener.scala:126)
>       at scala.Option.foreach(Option.scala:437)
>       at kafka.server.metadata.BrokerMetadataListener$HandleCommitsEvent.run(BrokerMetadataListener.scala:126)
>       at org.apache.kafka.queue.KafkaEventQueue$EventContext.run(KafkaEventQueue.java:121)
>       at org.apache.kafka.queue.KafkaEventQueue$EventHandler.handleEvents(KafkaEventQueue.java:200)
>       at org.apache.kafka.queue.KafkaEventQueue$EventHandler.run(KafkaEventQueue.java:173)
>       at java.base/java.lang.Thread.run(Thread.java:833)
> {code}
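> The same update can also be issued programmatically. A minimal sketch using the Java Admin client (the bootstrap address, broker id, and config value simply mirror the CLI example above; this only reproduces the report, it is not part of the fix):
> {code}
> import java.util.List;
> import java.util.Map;
> import java.util.Properties;
> import org.apache.kafka.clients.admin.Admin;
> import org.apache.kafka.clients.admin.AdminClientConfig;
> import org.apache.kafka.clients.admin.AlterConfigOp;
> import org.apache.kafka.clients.admin.ConfigEntry;
> import org.apache.kafka.common.config.ConfigResource;
>
> public class AlterPasswordConfig {
>     public static void main(String[] args) throws Exception {
>         Properties props = new Properties();
>         props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
>         try (Admin admin = Admin.create(props)) {
>             // Same target as "--broker 1" in the CLI example
>             ConfigResource broker = new ConfigResource(ConfigResource.Type.BROKER, "1");
>             AlterConfigOp op = new AlterConfigOp(
>                 new ConfigEntry("listener.name.external.ssl.key.password", "foo"),
>                 AlterConfigOp.OpType.SET);
>             // The request succeeds here; the ERROR above is only logged later,
>             // when the broker applies the config record from the metadata log.
>             admin.incrementalAlterConfigs(Map.of(broker, List.of(op))).all().get();
>         }
>     }
> }
> {code}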
> If a {{password.encoder.secret}} is supplied, the update still fails, but with a different error:
> {code}
> [2022-07-26 16:27:23,247] ERROR Dynamic password config listener.name.external.ssl.key.password could not be decoded, ignoring. (kafka.server.DynamicBrokerConfig)
> java.lang.StringIndexOutOfBoundsException: begin 0, end -1, length 3
>       at java.base/java.lang.String.checkBoundsBeginEnd(String.java:4604)
>       at java.base/java.lang.String.substring(String.java:2707)
>       at kafka.utils.CoreUtils$.$anonfun$parseCsvMap$1(CoreUtils.scala:173)
>       at scala.collection.ArrayOps$.map$extension(ArrayOps.scala:929)
>       at kafka.utils.CoreUtils$.parseCsvMap(CoreUtils.scala:171)
>       at kafka.utils.PasswordEncoder.decode(PasswordEncoder.scala:88)
>       at kafka.server.DynamicBrokerConfig.decodePassword$1(DynamicBrokerConfig.scala:393)
>       at kafka.server.DynamicBrokerConfig.$anonfun$fromPersistentProps$5(DynamicBrokerConfig.scala:404)
>       at kafka.server.DynamicBrokerConfig.$anonfun$fromPersistentProps$5$adapted(DynamicBrokerConfig.scala:402)
>       at kafka.utils.Implicits$MapExtensionMethods$.$anonfun$forKeyValue$1(Implicits.scala:62)
>       at scala.collection.MapOps.foreachEntry(Map.scala:244)
>       at scala.collection.MapOps.foreachEntry$(Map.scala:240)
>       at scala.collection.AbstractMap.foreachEntry(Map.scala:405)
>       at kafka.server.DynamicBrokerConfig.fromPersistentProps(DynamicBrokerConfig.scala:402)
>       at kafka.server.DynamicBrokerConfig.$anonfun$updateBrokerConfig$1(DynamicBrokerConfig.scala:300)
>       at kafka.server.DynamicBrokerConfig.updateBrokerConfig(DynamicBrokerConfig.scala:299)
>       at kafka.server.BrokerConfigHandler.processConfigChanges(ConfigHandler.scala:221)
>       at kafka.server.metadata.BrokerMetadataPublisher.$anonfun$publish$15(BrokerMetadataPublisher.scala:212)
>       at java.base/java.util.HashMap$KeySet.forEach(HashMap.java:1008)
>       at kafka.server.metadata.BrokerMetadataPublisher.$anonfun$publish$14(BrokerMetadataPublisher.scala:190)
>       at kafka.server.metadata.BrokerMetadataPublisher.$anonfun$publish$14$adapted(BrokerMetadataPublisher.scala:189)
>       at scala.Option.foreach(Option.scala:437)
>       at kafka.server.metadata.BrokerMetadataPublisher.publish(BrokerMetadataPublisher.scala:189)
>       at kafka.server.metadata.BrokerMetadataListener.kafka$server$metadata$BrokerMetadataListener$$publish(BrokerMetadataListener.scala:293)
>       at kafka.server.metadata.BrokerMetadataListener$StartPublishingEvent.run(BrokerMetadataListener.scala:258)
>       at org.apache.kafka.queue.KafkaEventQueue$EventContext.run(KafkaEventQueue.java:121)
>       at org.apache.kafka.queue.KafkaEventQueue$EventHandler.handleEvents(KafkaEventQueue.java:200)
>       at org.apache.kafka.queue.KafkaEventQueue$EventHandler.run(KafkaEventQueue.java:173)
>       at java.base/java.lang.Thread.run(Thread.java:833)
> {code}
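> For what it's worth, the "length 3" in the second exception matches the three-character plaintext value "foo" from the command above, which suggests the raw value reaches the broker and is then decoded as if it were an encoded password. The trace shows {{PasswordEncoder.decode}} parsing its input with {{CoreUtils.parseCsvMap}}, which expects comma-separated "name:value" fields, so a value with no ':' makes the substring call fail. A minimal standalone sketch of that failure mode (illustrative only, not the Kafka source):
> {code}
> public class ParseFailureSketch {
>     public static void main(String[] args) {
>         // An encoded password is a comma-separated list of name:value fields.
>         // If the raw value "foo" reaches the decoder instead, the ':' lookup fails:
>         String token = "foo";
>         int sep = token.indexOf(":");          // -1, no separator present
>         String name = token.substring(0, sep); // StringIndexOutOfBoundsException:
>                                                //   begin 0, end -1, length 3
>         System.out.println(name);
>     }
> }
> {code}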



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
