This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b7e480a43d6 [SPARK-46047][BUILD][SQL] Fix the usage of Scala 2.13 
deprecated APIs and make the usage of Scala 2.13 deprecated APIs as compilation 
error
b7e480a43d6 is described below

commit b7e480a43d6bc99d66d67a345f707a3560173810
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Wed Nov 22 17:25:20 2023 +0800

    [SPARK-46047][BUILD][SQL] Fix the usage of Scala 2.13 deprecated APIs and 
make the usage of Scala 2.13 deprecated APIs as compilation error
    
    ### What changes were proposed in this pull request?
    This PR fixes the remaining usage of Scala 2.13 deprecated APIs and 
converts the following compilation rule from `silent` to a compilation `error` 
to avoid using the related Scala 2.13 deprecated APIs.
    
    ```
    
-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since
 2.13).+$:s
    ```
    
    to
    
    ```
    
-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since
 2.13).+$:e
    ```
    
    From now on, new cases will become compilation errors, and developers need 
to avoid using them.
    
    ### Why are the changes needed?
    Avoid using Scala 2.13 deprecated APIs.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #43950 from LuciferYang/SPARK-46047.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 pom.xml                                                              | 2 +-
 project/SparkBuild.scala                                             | 2 +-
 .../apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala | 2 +-
 sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala      | 5 +++--
 .../scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala    | 2 +-
 5 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/pom.xml b/pom.xml
index fc653b6ffee..c65794a25c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2977,7 +2977,7 @@
                 SPARK-33775 Suppress compilation warnings that contain the 
following contents.
                 TODO(SPARK-33805): Undo the corresponding deprecated usage 
suppression rule after fixed.
               -->
-              
<arg>-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since
 2.13).+$:s</arg>
+              
<arg>-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since
 2.13).+$:e</arg>
               <arg>-Wconf:msg=^(?=.*?Widening conversion from)(?=.*?is 
deprecated because it loses precision).+$:s</arg>
               <!-- SPARK-45610 Convert "Auto-application to `()` is 
deprecated" to compile error, as it will become a compile error in Scala 3. -->
               <arg>-Wconf:cat=deprecation&amp;msg=Auto-application to \`\(\)\` 
is deprecated:e</arg>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 74c0a975dca..5d2a9cfef98 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -235,7 +235,7 @@ object SparkBuild extends PomBuild {
         // SPARK-33775 Suppress compilation warnings that contain the 
following contents.
         // TODO(SPARK-33805): Undo the corresponding deprecated usage 
suppression rule after
         //  fixed.
-        
"-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since
 2.13).+$:s",
+        
"-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since
 2.13).+$:e",
         "-Wconf:msg=^(?=.*?Widening conversion from)(?=.*?is deprecated 
because it loses precision).+$:s",
         // SPARK-45610 Convert "Auto-application to `()` is deprecated" to 
compile error, as it will become a compile error in Scala 3.
         "-Wconf:cat=deprecation&msg=Auto-application to \\`\\(\\)\\` is 
deprecated:e",
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala
index d499f8f4a96..26cdbcab79f 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala
@@ -251,7 +251,7 @@ private[ui] class StreamingQueryStatisticsPage(parent: 
StreamingQueryTab)
         new GraphUIData(
           "aggregated-num-removed-state-rows-timeline",
           "aggregated-num-removed-state-rows-histogram",
-          numRowsRemovedData,
+          numRowsRemovedData.toImmutableArraySeq,
           minBatchTime,
           maxBatchTime,
           0,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala
index 58e0d7eeef3..35a1444f0e9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala
@@ -24,6 +24,7 @@ import scala.util.Random
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.unsafe.types.VariantVal
+import org.apache.spark.util.ArrayImplicits._
 
 class VariantSuite extends QueryTest with SharedSparkSession {
   test("basic tests") {
@@ -72,13 +73,13 @@ class VariantSuite extends QueryTest with 
SharedSparkSession {
     def prepareAnswer(values: Seq[VariantVal]): Seq[String] = {
       values.map(v => if (v == null) "null" else v.debugString()).sorted
     }
-    assert(prepareAnswer(input) == prepareAnswer(result))
+    assert(prepareAnswer(input) == prepareAnswer(result.toImmutableArraySeq))
 
     withTempDir { dir =>
       val tempDir = new File(dir, "files").getCanonicalPath
       df.write.parquet(tempDir)
       val readResult = 
spark.read.parquet(tempDir).collect().map(_.get(0).asInstanceOf[VariantVal])
-      assert(prepareAnswer(input) == prepareAnswer(readResult))
+      assert(prepareAnswer(input) == 
prepareAnswer(readResult.toImmutableArraySeq))
     }
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
index a3804516017..4692c685c80 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
@@ -893,7 +893,7 @@ class StreamingOuterJoinSuite extends StreamingJoinSuite {
     ("left_outer", Row(3, null, 5, null)),
     ("right_outer", Row(null, 2, null, 5))
   ).foreach { case (joinType: String, outerResult) =>
-    test(s"${joinType.replaceAllLiterally("_", " ")} with watermark range 
condition") {
+    test(s"${joinType.replace("_", " ")} with watermark range condition") {
       val (leftInput, rightInput, joined) = 
setupJoinWithRangeCondition(joinType)
 
       testStream(joined)(


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to