gengliangwang commented on code in PR #47275: URL: https://github.com/apache/spark/pull/47275#discussion_r1671336647
########## core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala: ########## @@ -131,19 +131,23 @@ private[spark] abstract class BasePythonRunner[IN, OUT]( private val daemonModule = conf.get(PYTHON_DAEMON_MODULE).map { value => logInfo( - s"Python daemon module in PySpark is set to [$value] in '${PYTHON_DAEMON_MODULE.key}', " + - "using this to start the daemon up. Note that this configuration only has an effect when " + - s"'${PYTHON_USE_DAEMON.key}' is enabled and the platform is not Windows.") + log"Python daemon module in PySpark is set to " + + log"[${MDC(LogKeys.VALUE, value)}] in '${MDC(LogKeys.PYTHON_DAEMON_MODULE, + PYTHON_DAEMON_MODULE.key)}', using this to start the daemon up. Note that this " + + log"configuration only has an effect when '${MDC(LogKeys.PYTHON_USE_DAEMON, + PYTHON_USE_DAEMON.key)}' is enabled and the platform is not Windows.") value }.getOrElse("pyspark.daemon") // This configuration indicates the module to run each Python worker. private val workerModule = conf.get(PYTHON_WORKER_MODULE).map { value => logInfo( - s"Python worker module in PySpark is set to [$value] in '${PYTHON_WORKER_MODULE.key}', " + - "using this to start the worker up. Note that this configuration only has an effect when " + - s"'${PYTHON_USE_DAEMON.key}' is disabled or the platform is Windows.") + log"Python worker module in PySpark is set to ${MDC(LogKeys.VALUE, value)} " + + log"in ${MDC(LogKeys.PYTHON_WORKER_MODULE, PYTHON_WORKER_MODULE.key)}, " + Review Comment: ```suggestion log"in ${MDC(LogKeys.CONFIG, PYTHON_WORKER_MODULE.key)}, " + ``` -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org