dtenedor commented on code in PR #45729:
URL: https://github.com/apache/spark/pull/45729#discussion_r1540233859
########## common/utils/src/main/scala/org/apache/spark/internal/Logging.scala: ##########

```
@@ -17,17 +17,39 @@
 package org.apache.spark.internal
 
+import java.util.Locale
+
 import scala.jdk.CollectionConverters._
 
-import org.apache.logging.log4j.{Level, LogManager}
+import org.apache.logging.log4j.{CloseableThreadContext, Level, LogManager}
+import org.apache.logging.log4j.CloseableThreadContext.Instance
 import org.apache.logging.log4j.core.{Filter, LifeCycle, LogEvent, Logger => Log4jLogger, LoggerContext}
 import org.apache.logging.log4j.core.appender.ConsoleAppender
 import org.apache.logging.log4j.core.config.DefaultConfiguration
 import org.apache.logging.log4j.core.filter.AbstractFilter
 import org.slf4j.{Logger, LoggerFactory}
 
+import org.apache.spark.SparkException
 import org.apache.spark.internal.Logging.SparkShellLoggingFilter
-import org.apache.spark.util.SparkClassUtils
+import org.apache.spark.util.{SparkClassUtils, SparkEnvUtils}
+
+// Mapped Diagnostic Context (MDC) that will be used in log messages.
+// The values of the MDC will be inline in the log message, while the key-value pairs will be
+// part of the ThreadContext.
+case class MDC(key: LogKey.Value, value: String)
```

Review Comment:
   This would probably be more readable fully spelled out (`MappedDiagnosticContext`) than as an acronym, no?

########## common/utils/src/main/scala/org/apache/spark/internal/Logging.scala: ##########

```
@@ -55,6 +77,43 @@ trait Logging {
     log_
   }
 
+  implicit class LogStringContext(val sc: StringContext) {
+    def log(args: Any*): (String, Option[Instance]) = {
+      val processedParts = sc.parts.iterator
+      val sb = new StringBuilder(processedParts.next())
+      lazy val map = new java.util.HashMap[String, String]()
+
+      args.foreach { arg =>
+        arg match {
```

Review Comment:
   You can skip this line and the `arg =>` at the end of the previous line, and just list the cases directly instead.
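   A minimal, self-contained sketch of the idiom being suggested (simplified types, not the PR's actual code): a pattern-matching anonymous function passed to `foreach` replaces both the `arg =>` binder and the inner `match`.

   ```scala
   object ForeachCasesExample {
     // Simplified stand-in for the PR's MDC case class.
     case class MDC(key: String, value: String)

     def main(args: Array[String]): Unit = {
       val logArgs: Seq[Any] = Seq(MDC("EXECUTOR_ID", "1"), "plain value")
       // Cases listed directly: no `arg => arg match { ... }` needed.
       logArgs.foreach {
         case mdc: MDC => println(s"${mdc.key} -> ${mdc.value}")
         case other    => println(s"non-MDC argument: $other")
       }
     }
   }
   ```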
########## common/utils/src/main/scala/org/apache/spark/internal/Logging.scala: ##########

```
@@ -17,17 +17,39 @@
 package org.apache.spark.internal
 
+import java.util.Locale
+
 import scala.jdk.CollectionConverters._
 
-import org.apache.logging.log4j.{Level, LogManager}
+import org.apache.logging.log4j.{CloseableThreadContext, Level, LogManager}
+import org.apache.logging.log4j.CloseableThreadContext.Instance
 import org.apache.logging.log4j.core.{Filter, LifeCycle, LogEvent, Logger => Log4jLogger, LoggerContext}
 import org.apache.logging.log4j.core.appender.ConsoleAppender
 import org.apache.logging.log4j.core.config.DefaultConfiguration
 import org.apache.logging.log4j.core.filter.AbstractFilter
 import org.slf4j.{Logger, LoggerFactory}
 
+import org.apache.spark.SparkException
 import org.apache.spark.internal.Logging.SparkShellLoggingFilter
-import org.apache.spark.util.SparkClassUtils
+import org.apache.spark.util.{SparkClassUtils, SparkEnvUtils}
+
+// Mapped Diagnostic Context (MDC) that will be used in log messages.
+// The values of the MDC will be inline in the log message, while the key-value pairs will be
+// part of the ThreadContext.
+case class MDC(key: LogKey.Value, value: String)
+
+class LogEntry(entry: => (String, Option[Instance])) {
```

Review Comment:
   This pair of `(String, Option[Instance])` might be more readable using a helper case class, since we return it frequently.
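   As a rough sketch of that suggestion, with `MessageWithContext` as a made-up name rather than anything this diff defines:

   ```scala
   import org.apache.logging.log4j.CloseableThreadContext.Instance

   // Hypothetical helper: gives each half of the (String, Option[Instance])
   // pair a descriptive name so signatures read as documentation.
   case class MessageWithContext(message: String, context: Option[Instance])

   // The interpolator's signature could then become, for example:
   //   def log(args: Any*): MessageWithContext
   // and LogEntry could take `entry: => MessageWithContext`.
   ```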
########## common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala: ##########

```
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.internal
+
+object LogKey extends Enumeration {
```

Review Comment:
   Let's add a comment saying what this represents?

########## common/utils/src/test/scala/org/apache/spark/util/StructuredLoggingSuite.scala: ##########

```
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.util
+
+import java.io.{ByteArrayOutputStream, PrintStream}
+
+import org.apache.commons.io.output.TeeOutputStream
+import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
+import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
+
+import org.apache.spark.internal.{Logging, MDC}
+import org.apache.spark.internal.LogKey.EXECUTOR_ID
+
+abstract class LoggingSuiteBase
+    extends AnyFunSuite // scalastyle:ignore funsuite
+    with BeforeAndAfterAll
+    with BeforeAndAfterEach
+    with Logging {
+  protected val outContent = new ByteArrayOutputStream()
+  protected val originalErr = System.err
+
+  override def beforeAll(): Unit = {
+    val teeStream = new TeeOutputStream(originalErr, outContent)
+    System.setErr(new PrintStream(teeStream))
+  }
+
+  override def afterAll(): Unit = {
+    System.setErr(originalErr)
+  }
+
+  override def afterEach(): Unit = {
+    outContent.reset()
+  }
+}
+
+class StructuredLoggingSuite extends LoggingSuiteBase {
+  val className = this.getClass.getName.stripSuffix("$")
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+    Logging.enableStructuredLogging()
+  }
+
+  test("Structured logging") {
+    val msg = "This is a log message"
+    logError(msg)
+
+    val logOutput = outContent.toString.split("\n").filter(_.contains(msg)).head
+    assert(logOutput.nonEmpty)
+    // scalastyle:off line.size.limit
+    val pattern = s"""\\{"ts":"[^"]+","level":"ERROR","msg":"This is a log message","logger":"$className"}""".r
```

Review Comment:
   Thanks for adding this test suite. It looks like it will become the main means of verifying that the structured logging framework works as intended as we develop it.

   I know we want to regex out spurious text in the result here, but the set of expected test cases might be easier to read if we keep the whitespace formatting in each expected result. Then, when we compare expected results against actual results, we can strip the whitespace from both. For example, this becomes something like:

   ```
   val x = s"""
     |{
     |  "ts": [^"]+,
     |  "level": "ERROR",
     |  "msg": "This is a log message",
     |  "logger": $className
     |}
     |""".stripMargin
   ```
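   To make the comparison step concrete, here is a minimal sketch, assuming the `className` and `logOutput` vals from the test above, and adding the regex quoting/escaping that the illustrative snippet leaves out:

   ```scala
   // Sketch only: keep the expected JSON readable with whitespace, then strip
   // whitespace from both the expected pattern and the actual log line.
   val expected = s"""
     |\\{
     |  "ts": "[^"]+",
     |  "level": "ERROR",
     |  "msg": "This is a log message",
     |  "logger": "$className"
     |}
     |""".stripMargin

   // String.matches anchors at both ends, so this is a full-line comparison.
   assert(logOutput.replaceAll("\\s", "").matches(expected.replaceAll("\\s", "")))
   ```

   Note that stripping whitespace from both sides also removes the spaces inside the message text, so the two sides still agree there.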