mridulm commented on code in PR #46634:
URL: https://github.com/apache/spark/pull/46634#discussion_r1610164692


##########
common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java:
##########
@@ -28,6 +28,50 @@
 import org.slf4j.Logger;
 // checkstyle.on: RegexpSinglelineJava
 
+// checkstyle.off: RegexpSinglelineJava
+/**
+ * Guidelines for the Structured Logging Framework - Java Logging
+ * <p>
+ *
+ * Use the `org.apache.spark.internal.SparkLoggerFactory` to get the logger 
instance in Java code:
+ * Getting Logger Instance:
+ *   Instead of using `org.slf4j.LoggerFactory`, use 
`org.apache.spark.internal.SparkLoggerFactory`
+ *   to ensure structured logging.
+ * <p>
+ *
+ * import org.apache.spark.internal.SparkLogger;
+ * import org.apache.spark.internal.SparkLoggerFactory;
+ * private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(JavaUtils.class);
+ * <p>
+ *
+ * Logging Messages with Variables:
+ *   When logging messages with variables, wrap all the variables with `MDC`s 
and they will be
+ *   automatically added to the Mapped Diagnostic Context (MDC).
+ * <p>
+ *
+ * import org.apache.spark.internal.LogKeys;
+ * import org.apache.spark.internal.MDC;
+ * logger.error("Unable to delete file for partition {}", 
MDC.of(LogKeys.PARTITION_ID$.MODULE$, i));
+ * <p>
+ *
+ * Constant String Messages:
+ *   For logging constant string messages, use the standard logging methods.
+ * <p>
+ *
+ * logger.error("Failed to abort the writer after failing to write map 
output.", e);
+ * <p>
+ *
+ * External third-party ecosystem access:

Review Comment:
   ```suggestion
   ```



##########
common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java:
##########
@@ -28,6 +28,50 @@
 import org.slf4j.Logger;
 // checkstyle.on: RegexpSinglelineJava
 
+// checkstyle.off: RegexpSinglelineJava
+/**
+ * Guidelines for the Structured Logging Framework - Java Logging
+ * <p>
+ *
+ * Use the `org.apache.spark.internal.SparkLoggerFactory` to get the logger 
instance in Java code:
+ * Getting Logger Instance:
+ *   Instead of using `org.slf4j.LoggerFactory`, use 
`org.apache.spark.internal.SparkLoggerFactory`
+ *   to ensure structured logging.
+ * <p>
+ *
+ * import org.apache.spark.internal.SparkLogger;
+ * import org.apache.spark.internal.SparkLoggerFactory;
+ * private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(JavaUtils.class);
+ * <p>
+ *
+ * Logging Messages with Variables:
+ *   When logging messages with variables, wrap all the variables with `MDC`s 
and they will be
+ *   automatically added to the Mapped Diagnostic Context (MDC).
+ * <p>
+ *
+ * import org.apache.spark.internal.LogKeys;
+ * import org.apache.spark.internal.MDC;
+ * logger.error("Unable to delete file for partition {}", 
MDC.of(LogKeys.PARTITION_ID$.MODULE$, i));
+ * <p>
+ *
+ * Constant String Messages:
+ *   For logging constant string messages, use the standard logging methods.
+ * <p>
+ *
+ * logger.error("Failed to abort the writer after failing to write map 
output.", e);
+ * <p>
+ *
+ * External third-party ecosystem access:
+ *   If you want to output logs in `java code` through the structured log 
framework,
+ *   you can define `custom LogKey` and use it in `java` code as follows:
+ * <p>
+ *
+ * // External third-party ecosystem `custom LogKey` must be `implements 
LogKey`

Review Comment:
   ```suggestion
    * // To add a `custom LogKey`, implement `LogKey`
   ```



##########
common/utils/src/test/java/org/apache/spark/util/SparkLoggerSuiteBase.java:
##########
@@ -90,6 +94,9 @@ private String basicMsg() {
   // test for external system custom LogKey
   abstract String expectedPatternForExternalSystemCustomLogKey(Level level);
 
+  // test for external system java custom LogKey
+  abstract String expectedPatternForExternalSystemJavaCustomLogKey(Level 
level);

Review Comment:
   ```suggestion
     abstract String expectedPatternForJavaCustomLogKey(Level level);
   ```



##########
common/utils/src/test/java/org/apache/spark/util/StructuredSparkLoggerSuite.java:
##########
@@ -161,5 +161,19 @@ String expectedPatternForExternalSystemCustomLogKey(Level 
level) {
         "logger": "<className>"
       }""");
   }
+
+  @Override
+  String expectedPatternForExternalSystemJavaCustomLogKey(Level level) {

Review Comment:
   ```suggestion
     String expectedPatternForJavaCustomLogKey(Level level) {
   ```



##########
common/utils/src/test/java/org/apache/spark/util/PatternSparkLoggerSuite.java:
##########
@@ -87,4 +87,10 @@ String expectedPatternForMsgWithMDCValueIsNull(Level level) {
   String expectedPatternForExternalSystemCustomLogKey(Level level) {
     return toRegexPattern(level, ".*<level> <className>: External system 
custom log message.\n");
   }
+
+  @Override
+  String expectedPatternForExternalSystemJavaCustomLogKey(Level level) {
+    return toRegexPattern(level,
+ ".*<level> <className>: External system custom log message.\n");

Review Comment:
   ```suggestion
    ".*<level> <className>: custom log message.\n");
   ```



##########
common/utils/src/test/java/org/apache/spark/util/SparkLoggerSuiteBase.java:
##########
@@ -69,6 +70,9 @@ private String basicMsg() {
   private final MDC externalSystemCustomLog =
     MDC.of(CustomLogKeys.CUSTOM_LOG_KEY$.MODULE$, "External system custom log 
message.");
 
+  private final MDC externalSystemJavaCustomLog =
+    MDC.of(JavaCustomLogKeys.CUSTOM_LOG_KEY, "External system custom log 
message.");

Review Comment:
   ```suggestion
     private final MDC javaCustomLogMdc =
      MDC.of(JavaCustomLogKeys.CUSTOM_LOG_KEY, "custom log message.");
   ```
   



##########
common/utils/src/test/java/org/apache/spark/util/SparkLoggerSuiteBase.java:
##########
@@ -227,4 +232,30 @@ public void testLoggerWithExternalSystemCustomLogKey() {
       }
     });
   }
+
+  @Test
+  public void testLoggerWithExternalSystemJavaCustomLogKey() {

Review Comment:
   ```suggestion
     public void testLoggerWithJavaCustomLogKey() {
   ```



##########
common/utils/src/main/scala/org/apache/spark/internal/Logging.scala:
##########
@@ -29,6 +29,45 @@ import org.slf4j.{Logger, LoggerFactory}
 import org.apache.spark.internal.Logging.SparkShellLoggingFilter
 import org.apache.spark.util.SparkClassUtils
 
+/**
+ * Guidelines for the Structured Logging Framework - Scala Logging
+ * <p>
+ *
+ * Use the `org.apache.spark.internal.Logging` trait for logging in Scala code:
+ * Logging Messages with Variables:
+ *   When logging a message with variables, wrap all the variables with `MDC`s 
and they will be
+ *   automatically added to the Mapped Diagnostic Context (MDC).
+ * This allows for structured logging and better log analysis.
+ * <p>
+ *
+ * logInfo(log"Trying to recover app: ${MDC(LogKeys.APP_ID, app.id)}")
+ * <p>
+ *
+ * Constant String Messages:
+ *   If you are logging a constant string message, use the log methods that 
accept a constant
+ *   string.
+ * <p>
+ *
+ * logInfo("StateStore stopped")
+ * <p>
+ *
+ * Exceptions:
+ *   To ensure logs are compatible with Spark SQL and log analysis tools, avoid
+ *   `Exception.printStackTrace()`. Use `logError`, `logWarning`, and 
`logInfo` methods from
+ *   the `Logging` trait to log exceptions, maintaining structured and 
parsable logs.
+ * <p>
+ *
+ * External third-party ecosystem access:
+ *   If you want to output logs in `scala code` through the structured log 
framework,
+ *   you can define `custom LogKey` and use it in `scala` code as follows:
+ * <p>
+ *
+ * // External third-party ecosystem `custom LogKey` must be `extends LogKey`

Review Comment:
   ```suggestion
   * // To add a `custom LogKey`, extend `LogKey`
   ```



##########
common/utils/src/test/java/org/apache/spark/util/PatternSparkLoggerSuite.java:
##########
@@ -87,4 +87,10 @@ String expectedPatternForMsgWithMDCValueIsNull(Level level) {
   String expectedPatternForExternalSystemCustomLogKey(Level level) {
     return toRegexPattern(level, ".*<level> <className>: External system 
custom log message.\n");
   }
+
+  @Override
+  String expectedPatternForExternalSystemJavaCustomLogKey(Level level) {

Review Comment:
   ```suggestion
     String expectedPatternForJavaCustomLogKey(Level level) {
   ```



##########
common/utils/src/test/java/org/apache/spark/util/SparkLoggerSuiteBase.java:
##########
@@ -69,6 +70,9 @@ private String basicMsg() {
   private final MDC externalSystemCustomLog =
     MDC.of(CustomLogKeys.CUSTOM_LOG_KEY$.MODULE$, "External system custom log 
message.");
 

Review Comment:
   ```suggestion
     private final MDC customLog =
       MDC.of(CustomLogKeys.CUSTOM_LOG_KEY$.MODULE$, "custom log message.");
   
   ```



##########
common/utils/src/test/java/org/apache/spark/util/PatternSparkLoggerSuite.java:
##########
@@ -87,4 +87,10 @@ String expectedPatternForMsgWithMDCValueIsNull(Level level) {
   String expectedPatternForExternalSystemCustomLogKey(Level level) {
     return toRegexPattern(level, ".*<level> <className>: External system 
custom log message.\n");
   }

Review Comment:
   ```suggestion
     String expectedPatternForCustomLogKey(Level level) {
       return toRegexPattern(level, ".*<level> <className>: custom log 
message.\n");
     }
   ```



##########
common/utils/src/main/scala/org/apache/spark/internal/Logging.scala:
##########
@@ -29,6 +29,45 @@ import org.slf4j.{Logger, LoggerFactory}
 import org.apache.spark.internal.Logging.SparkShellLoggingFilter
 import org.apache.spark.util.SparkClassUtils
 
+/**
+ * Guidelines for the Structured Logging Framework - Scala Logging
+ * <p>
+ *
+ * Use the `org.apache.spark.internal.Logging` trait for logging in Scala code:
+ * Logging Messages with Variables:
+ *   When logging a message with variables, wrap all the variables with `MDC`s 
and they will be
+ *   automatically added to the Mapped Diagnostic Context (MDC).
+ * This allows for structured logging and better log analysis.
+ * <p>
+ *
+ * logInfo(log"Trying to recover app: ${MDC(LogKeys.APP_ID, app.id)}")
+ * <p>
+ *
+ * Constant String Messages:
+ *   If you are logging a constant string message, use the log methods that 
accept a constant
+ *   string.
+ * <p>
+ *
+ * logInfo("StateStore stopped")
+ * <p>
+ *
+ * Exceptions:
+ *   To ensure logs are compatible with Spark SQL and log analysis tools, avoid
+ *   `Exception.printStackTrace()`. Use `logError`, `logWarning`, and 
`logInfo` methods from
+ *   the `Logging` trait to log exceptions, maintaining structured and 
parsable logs.
+ * <p>
+ *
+ * External third-party ecosystem access:

Review Comment:
   ```suggestion
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to