This is an automated email from the ASF dual-hosted git repository. srowen pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 3faced8 [SPARK-37256][SQL] Replace `ScalaObjectMapper` with `ClassTagExtensions` to fix compilation warning 3faced8 is described below commit 3faced8b2ab922c2a3f25bdf393aa0511d87ccc8 Author: yangjie01 <yangji...@baidu.com> AuthorDate: Tue Jan 11 09:25:49 2022 -0600 [SPARK-37256][SQL] Replace `ScalaObjectMapper` with `ClassTagExtensions` to fix compilation warning ### What changes were proposed in this pull request? There are some compilation warning logs like the following: ``` [WARNING] [Warn] /spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala:268: [deprecation org.apache.spark.sql.catalyst.util.RebaseDateTime.loadRebaseRecords.mapper.$anon | origin=com.fasterxml.jackson.module.scala.ScalaObjectMapper | version=2.12.1] trait ScalaObjectMapper in package scala is deprecated (since 2.12.1): ScalaObjectMapper is deprecated because Manifests are not supported in Scala3 ``` Following the recommendations of `jackson-module-scala`, this PR uses `ClassTagExtensions` instead of `ScalaObjectMapper` to fix this compilation warning ### Why are the changes needed? Fix compilation warning ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Pass the Jenkins or GitHub Action Closes #34532 from LuciferYang/fix-ScalaObjectMapper. 
Authored-by: yangjie01 <yangji...@baidu.com> Signed-off-by: Sean Owen <sro...@gmail.com> --- .../scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala | 4 ++-- .../org/apache/spark/sql/catalyst/util/RebaseDateTimeSuite.scala | 4 ++-- .../spark/sql/execution/streaming/state/RocksDBFileManager.scala | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala index 72bb43b..dc1c4db 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala @@ -25,7 +25,7 @@ import java.util.Calendar.{DAY_OF_MONTH, DST_OFFSET, ERA, HOUR_OF_DAY, MINUTE, M import scala.collection.mutable.AnyRefMap import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.module.scala.{DefaultScalaModule, ScalaObjectMapper} +import com.fasterxml.jackson.module.scala.{ClassTagExtensions, DefaultScalaModule} import org.apache.spark.sql.catalyst.util.DateTimeConstants._ import org.apache.spark.sql.catalyst.util.DateTimeUtils._ @@ -273,7 +273,7 @@ object RebaseDateTime { // it is 2 times faster in DateTimeRebaseBenchmark. 
private[sql] def loadRebaseRecords(fileName: String): AnyRefMap[String, RebaseInfo] = { val file = Utils.getSparkClassLoader.getResource(fileName) - val mapper = new ObjectMapper() with ScalaObjectMapper + val mapper = new ObjectMapper() with ClassTagExtensions mapper.registerModule(DefaultScalaModule) val jsonRebaseRecords = mapper.readValue[Seq[JsonRebaseRecord]](file) val anyRefMap = new AnyRefMap[String, RebaseInfo]((3 * jsonRebaseRecords.size) / 2) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/RebaseDateTimeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/RebaseDateTimeSuite.scala index 428a0c0..0d3f681c 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/RebaseDateTimeSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/RebaseDateTimeSuite.scala @@ -252,7 +252,7 @@ class RebaseDateTimeSuite extends SparkFunSuite with Matchers with SQLHelper { import scala.collection.mutable.ArrayBuffer import com.fasterxml.jackson.databind.ObjectMapper - import com.fasterxml.jackson.module.scala.{DefaultScalaModule, ScalaObjectMapper} + import com.fasterxml.jackson.module.scala.{ClassTagExtensions, DefaultScalaModule} case class RebaseRecord(tz: String, switches: Array[Long], diffs: Array[Long]) val rebaseRecords = ThreadUtils.parmap(ALL_TIMEZONES, "JSON-rebase-gen", 16) { zid => @@ -296,7 +296,7 @@ class RebaseDateTimeSuite extends SparkFunSuite with Matchers with SQLHelper { } val result = new ArrayBuffer[RebaseRecord]() rebaseRecords.sortBy(_.tz).foreach(result.append(_)) - val mapper = (new ObjectMapper() with ScalaObjectMapper) + val mapper = (new ObjectMapper() with ClassTagExtensions) .registerModule(DefaultScalaModule) .writerWithDefaultPrettyPrinter() mapper.writeValue( diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala index 23cdbd0..4f2ce9b 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala @@ -30,7 +30,7 @@ import scala.collection.mutable import com.fasterxml.jackson.annotation.JsonInclude.Include import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} import com.fasterxml.jackson.databind.annotation.JsonDeserialize -import com.fasterxml.jackson.module.scala.{DefaultScalaModule, ScalaObjectMapper} +import com.fasterxml.jackson.module.scala.{ClassTagExtensions, DefaultScalaModule} import org.apache.commons.io.{FilenameUtils, IOUtils} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{Path, PathFilter} @@ -572,7 +572,7 @@ object RocksDBCheckpointMetadata { /** Used to convert between classes and JSON. */ lazy val mapper = { - val _mapper = new ObjectMapper with ScalaObjectMapper + val _mapper = new ObjectMapper with ClassTagExtensions _mapper.setSerializationInclusion(Include.NON_ABSENT) _mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) _mapper.registerModule(DefaultScalaModule) --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org