This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e1bc48b729e [SPARK-45704][BUILD] Fix compile warning - using symbols inherited from a superclass shadow symbols defined in an outer scope
e1bc48b729e is described below

commit e1bc48b729e40390a4b0f977eec4a9050c7cac77
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Tue Oct 31 22:02:39 2023 -0700

    [SPARK-45704][BUILD] Fix compile warning - using symbols inherited from a superclass shadow symbols defined in an outer scope
    
    ### What changes were proposed in this pull request?
    After the upgrade to Scala 2.13, when a symbol inherited from a superclass shadows a symbol defined in an outer scope, the following warning appears:
    ```
    [error] /Users/panbingkun/Developer/spark/spark-community/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala:1315:39: reference to child is ambiguous;
    [error] it is both defined in the enclosing method apply and inherited in the enclosing anonymous class as value child (defined in class IsNull)
    [error] In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope.
    [error] Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.child`.
    [error] Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable]
    [error] Applicable -Wconf / nowarn filters for this fatal warning: msg=<part of the message>, cat=other, site=org.apache.spark.sql.catalyst.expressions.IsUnknown.apply
    [error]       override def sql: String = s"(${child.sql} IS UNKNOWN)"
    [error]                                       ^
    ```
    This PR fixes the warning by explicitly qualifying the ambiguous references with `this.`, and removes the `-Wconf:msg=legacy-binding:s` suppression from `pom.xml` and `project/SparkBuild.scala`. An illustrative sketch of the pattern follows.
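    
    As an illustration only (hypothetical `Base`/`IsUnknownLike` names, not code from this PR): a method parameter shares its name with a member inherited by an anonymous subclass, and qualifying the reference with `this.` keeps the inherited binding explicit.
    ```
    abstract class Base(val child: String)
    
    object IsUnknownLike {
      def apply(child: String): Base = {
        new Base(child) {
          // A bare `child` here is ambiguous: it may be the parameter of `apply`
          // or the member inherited from `Base`. In Scala 2 the inherited member
          // shadows the outer one (the legacy-binding warning above), while
          // Scala 3 rejects the reference; `this.child` spells the choice out.
          def sqlText: String = s"(${this.child} IS UNKNOWN)"
        }
      }
    }
    ```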
    
    ### Why are the changes needed?
    Prepare for upgrading to Scala 3.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    - Pass GitHub Actions (GA)
    - Manually test:
       ```
       build/sbt -Phadoop-3 -Pdocker-integration-tests -Pspark-ganglia-lgpl -Pkinesis-asl -Pkubernetes -Phive-thriftserver -Pconnect -Pyarn -Phive -Phadoop-cloud -Pvolcano -Pkubernetes-integration-tests Test/package streaming-kinesis-asl-assembly/assembly connect/assembly
       ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #43593 from panbingkun/SPARK-45704.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../spark/deploy/client/StandaloneAppClient.scala    |  6 +++---
 .../cluster/CoarseGrainedSchedulerBackend.scala      |  2 +-
 .../apache/spark/storage/DiskBlockObjectWriter.scala |  2 +-
 .../executor/CoarseGrainedExecutorBackendSuite.scala | 20 ++++++++++----------
 pom.xml                                              |  7 -------
 project/SparkBuild.scala                             |  5 -----
 .../spark/sql/catalyst/expressions/predicates.scala  |  4 ++--
 .../sql/connector/catalog/InMemoryBaseTable.scala    |  2 +-
 .../datasources/parquet/ParquetRowConverter.scala    | 18 +++++++++---------
 .../apache/spark/sql/execution/python/RowQueue.scala |  2 +-
 .../spark/sql/internal/BaseSessionStateBuilder.scala |  2 +-
 .../command/AlignAssignmentsSuiteBase.scala          |  2 +-
 .../sql/execution/command/PlanResolutionSuite.scala  |  2 +-
 .../spark/sql/hive/HiveSessionStateBuilder.scala     |  2 +-
 14 files changed, 32 insertions(+), 44 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/client/StandaloneAppClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/StandaloneAppClient.scala
index a7e4c1fbab2..b0ee6018970 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/StandaloneAppClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/StandaloneAppClient.scala
@@ -90,7 +90,7 @@ private[spark] class StandaloneAppClient(
         case e: Exception =>
           logWarning("Failed to connect to master", e)
           markDisconnected()
-          stop()
+          this.stop()
       }
     }
 
@@ -168,7 +168,7 @@ private[spark] class StandaloneAppClient(
 
       case ApplicationRemoved(message) =>
         markDead("Master removed our application: %s".format(message))
-        stop()
+        this.stop()
 
       case ExecutorAdded(id: Int, workerId: String, hostPort: String, cores: Int, memory: Int) =>
         val fullId = s"$appId/$id"
@@ -203,7 +203,7 @@ private[spark] class StandaloneAppClient(
         markDead("Application has been stopped.")
         sendToMaster(UnregisterApplication(appId.get))
         context.reply(true)
-        stop()
+        this.stop()
 
       case r: RequestExecutors =>
         master match {
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index c49b2411e76..e02dd279370 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -319,7 +319,7 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: Rp
 
       case StopDriver =>
         context.reply(true)
-        stop()
+        this.stop()
 
       case UpdateExecutorsLogLevel(logLevel) =>
         currentLogLevel = Some(logLevel)
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala
index f8bd73e6561..2096da2fca0 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala
@@ -63,7 +63,7 @@ private[spark] class DiskBlockObjectWriter(
    */
   private trait ManualCloseOutputStream extends OutputStream {
     abstract override def close(): Unit = {
-      flush()
+      this.flush()
     }
 
     def manualClose(): Unit = {
diff --git a/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala b/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala
index 3ef4da6d3d3..28af0656869 100644
--- a/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala
@@ -326,11 +326,11 @@ class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
       def getFakeTaskRunner(taskDescription: TaskDescription): Executor#TaskRunner = {
         new executor.TaskRunner(backend, taskDescription, None) {
           override def run(): Unit = {
-            logInfo(s"task ${taskDescription.taskId} runs.")
+            logInfo(s"task ${this.taskDescription.taskId} runs.")
           }
 
           override def kill(interruptThread: Boolean, reason: String): Unit = {
-            logInfo(s"task ${taskDescription.taskId} killed.")
+            logInfo(s"task ${this.taskDescription.taskId} killed.")
           }
         }
       }
@@ -434,13 +434,13 @@ class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
       def getFakeTaskRunner(taskDescription: TaskDescription): Executor#TaskRunner = {
         new executor.TaskRunner(backend, taskDescription, None) {
           override def run(): Unit = {
-            tasksExecuted.put(taskDescription.taskId, true)
-            logInfo(s"task ${taskDescription.taskId} runs.")
+            tasksExecuted.put(this.taskDescription.taskId, true)
+            logInfo(s"task ${this.taskDescription.taskId} runs.")
           }
 
           override def kill(interruptThread: Boolean, reason: String): Unit = {
-            logInfo(s"task ${taskDescription.taskId} killed.")
-            tasksKilled.put(taskDescription.taskId, true)
+            logInfo(s"task ${this.taskDescription.taskId} killed.")
+            tasksKilled.put(this.taskDescription.taskId, true)
           }
         }
       }
@@ -523,13 +523,13 @@ class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
       def getFakeTaskRunner(taskDescription: TaskDescription): Executor#TaskRunner = {
         new executor.TaskRunner(backend, taskDescription, None) {
           override def run(): Unit = {
-            tasksExecuted.put(taskDescription.taskId, true)
-            logInfo(s"task ${taskDescription.taskId} runs.")
+            tasksExecuted.put(this.taskDescription.taskId, true)
+            logInfo(s"task ${this.taskDescription.taskId} runs.")
           }
 
           override def kill(interruptThread: Boolean, reason: String): Unit = {
-            logInfo(s"task ${taskDescription.taskId} killed.")
-            tasksKilled.put(taskDescription.taskId, true)
+            logInfo(s"task ${this.taskDescription.taskId} killed.")
+            tasksKilled.put(this.taskDescription.taskId, true)
           }
         }
       }
diff --git a/pom.xml b/pom.xml
index d545c743928..e29d81f6887 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2985,13 +2985,6 @@
                 SPARK-40497 Upgrade Scala to 2.13.11 and suppress `Implicit definition should have explicit type`
               -->
               <arg>-Wconf:msg=Implicit definition should have explicit type:s</arg>
-              <!--
-                SPARK-45331 Upgrade Scala to 2.13.12 and suppress "In Scala 2, symbols inherited
-                from a superclass shadow symbols defined in an outer scope. Such references are
-                ambiguous in Scala 3. To continue using the inherited symbol, write `this.stop`.
-                Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable]"
-              -->
-              <arg>-Wconf:msg=legacy-binding:s</arg>
               <!--
                 SPARK-45627 Symbol literals are deprecated in Scala 2.13 and it's a compile error in Scala 3.
               -->
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d9d4a836ab5..d76af6a06cf 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -249,11 +249,6 @@ object SparkBuild extends PomBuild {
         "-Wconf:cat=deprecation&msg=procedure syntax is deprecated:e",
         // SPARK-40497 Upgrade Scala to 2.13.11 and suppress `Implicit definition should have explicit type`
         "-Wconf:msg=Implicit definition should have explicit type:s",
-        // SPARK-45331 Upgrade Scala to 2.13.12 and suppress "In Scala 2, symbols inherited
-        // from a superclass shadow symbols defined in an outer scope. Such references are
-        // ambiguous in Scala 3. To continue using the inherited symbol, write `this.stop`.
-        // Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable]"
-        "-Wconf:msg=legacy-binding:s",
         // SPARK-45627 Symbol literals are deprecated in Scala 2.13 and it's a compile error in Scala 3.
         "-Wconf:cat=deprecation&msg=symbol literal is deprecated:e",
         // SPARK-45627 `enum`, `export` and `given` will become keywords in Scala 3,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 9eefcef8e17..761bd3f3358 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -1312,7 +1312,7 @@ object IsUnknown {
   def apply(child: Expression): Predicate = {
     new IsNull(child) with ExpectsInputTypes {
       override def inputTypes: Seq[DataType] = Seq(BooleanType)
-      override def sql: String = s"(${child.sql} IS UNKNOWN)"
+      override def sql: String = s"(${this.child.sql} IS UNKNOWN)"
     }
   }
 }
@@ -1321,7 +1321,7 @@ object IsNotUnknown {
   def apply(child: Expression): Predicate = {
     new IsNotNull(child) with ExpectsInputTypes {
       override def inputTypes: Seq[DataType] = Seq(BooleanType)
-      override def sql: String = s"(${child.sql} IS NOT UNKNOWN)"
+      override def sql: String = s"(${this.child.sql} IS NOT UNKNOWN)"
     }
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryBaseTable.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryBaseTable.scala
index 7765bc26741..cd7f7295d5c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryBaseTable.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryBaseTable.scala
@@ -452,7 +452,7 @@ abstract class InMemoryBaseTable(
             val matchingKeys = values.map { value =>
               if (value != null) value.toString else null
             }.toSet
-            data = data.filter(partition => {
+            this.data = this.data.filter(partition => {
               val rows = partition.asInstanceOf[BufferedRows]
               rows.key match {
                 // null partitions are represented as Seq(null)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
index 936339e091d..89c7cae175a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
@@ -311,7 +311,7 @@ private[parquet] class ParquetRowConverter(
       case LongType if isUnsignedIntTypeMatched(32) =>
         new ParquetPrimitiveConverter(updater) {
           override def addInt(value: Int): Unit =
-            updater.setLong(Integer.toUnsignedLong(value))
+            this.updater.setLong(Integer.toUnsignedLong(value))
         }
       case BooleanType | IntegerType | LongType | FloatType | DoubleType | BinaryType |
         _: AnsiIntervalType =>
@@ -320,13 +320,13 @@ private[parquet] class ParquetRowConverter(
       case ByteType =>
         new ParquetPrimitiveConverter(updater) {
           override def addInt(value: Int): Unit =
-            updater.setByte(value.asInstanceOf[PhysicalByteType#InternalType])
+            this.updater.setByte(value.asInstanceOf[PhysicalByteType#InternalType])
         }
 
       case ShortType =>
         new ParquetPrimitiveConverter(updater) {
           override def addInt(value: Int): Unit =
-            updater.setShort(value.asInstanceOf[PhysicalShortType#InternalType])
+            this.updater.setShort(value.asInstanceOf[PhysicalShortType#InternalType])
         }
 
       // For INT32 backed decimals
@@ -346,7 +346,7 @@ private[parquet] class ParquetRowConverter(
       case _: DecimalType if isUnsignedIntTypeMatched(64) =>
         new ParquetPrimitiveConverter(updater) {
           override def addLong(value: Long): Unit = {
-            updater.set(Decimal(java.lang.Long.toUnsignedString(value)))
+            this.updater.set(Decimal(java.lang.Long.toUnsignedString(value)))
           }
         }
 
@@ -391,7 +391,7 @@ private[parquet] class ParquetRowConverter(
              .asInstanceOf[TimestampLogicalTypeAnnotation].getUnit == TimeUnit.MICROS =>
         new ParquetPrimitiveConverter(updater) {
           override def addLong(value: Long): Unit = {
-            updater.setLong(timestampRebaseFunc(value))
+            this.updater.setLong(timestampRebaseFunc(value))
           }
         }
 
@@ -404,7 +404,7 @@ private[parquet] class ParquetRowConverter(
         new ParquetPrimitiveConverter(updater) {
           override def addLong(value: Long): Unit = {
             val micros = DateTimeUtils.millisToMicros(value)
-            updater.setLong(timestampRebaseFunc(micros))
+            this.updater.setLong(timestampRebaseFunc(micros))
           }
         }
 
@@ -417,7 +417,7 @@ private[parquet] class ParquetRowConverter(
             val gregorianMicros = int96RebaseFunc(julianMicros)
             val adjTime = convertTz.map(DateTimeUtils.convertTz(gregorianMicros, _, ZoneOffset.UTC))
               .getOrElse(gregorianMicros)
-            updater.setLong(adjTime)
+            this.updater.setLong(adjTime)
           }
         }
 
@@ -434,14 +434,14 @@ private[parquet] class ParquetRowConverter(
         new ParquetPrimitiveConverter(updater) {
           override def addLong(value: Long): Unit = {
             val micros = DateTimeUtils.millisToMicros(value)
-            updater.setLong(micros)
+            this.updater.setLong(micros)
           }
         }
 
       case DateType =>
         new ParquetPrimitiveConverter(updater) {
           override def addInt(value: Int): Unit = {
-            updater.set(dateRebaseFunc(value))
+            this.updater.set(dateRebaseFunc(value))
           }
         }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/RowQueue.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/RowQueue.scala
index 0e3243eac62..5e0c5ff92fd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/RowQueue.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/RowQueue.scala
@@ -233,7 +233,7 @@ private[python] case class HybridRowQueue(
     val buffer = if (page != null) {
       new InMemoryRowQueue(page, numFields) {
         override def close(): Unit = {
-          freePage(page)
+          freePage(this.page)
         }
       }
     } else {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
index 5543b409d17..1d496b027ef 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
@@ -188,7 +188,7 @@ abstract class BaseSessionStateBuilder(
         new ResolveSQLOnFile(session) +:
         new FallBackFileSourceV2(session) +:
         ResolveEncodersInScalaAgg +:
-        new ResolveSessionCatalog(catalogManager) +:
+        new ResolveSessionCatalog(this.catalogManager) +:
         ResolveWriteToStream +:
         new EvalSubqueriesForTimeTravel +:
         customResolutionRules
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlignAssignmentsSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlignAssignmentsSuiteBase.scala
index 6f9cc66f247..2bc747c0abe 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlignAssignmentsSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlignAssignmentsSuiteBase.scala
@@ -191,7 +191,7 @@ abstract class AlignAssignmentsSuiteBase extends AnalysisTest {
   protected def parseAndResolve(query: String): LogicalPlan = {
     val analyzer = new CustomAnalyzer(catalogManager) {
       override val extendedResolutionRules: Seq[Rule[LogicalPlan]] = Seq(
-        new ResolveSessionCatalog(catalogManager))
+        new ResolveSessionCatalog(this.catalogManager))
     }
     val analyzed = analyzer.execute(CatalystSqlParser.parsePlan(query))
     analyzer.checkAnalysis(analyzed)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
index 4eb65305de8..e39cc91d5f0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
@@ -233,7 +233,7 @@ class PlanResolutionSuite extends AnalysisTest {
     }
     val analyzer = new Analyzer(catalogManager) {
       override val extendedResolutionRules: Seq[Rule[LogicalPlan]] = Seq(
-        new ResolveSessionCatalog(catalogManager))
+        new ResolveSessionCatalog(this.catalogManager))
     }
     // We don't check analysis here by default, as we expect the plan to be unresolved
     // such as `CreateTable`.
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala
index 2d0bcdff071..0b5e98d0a3e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala
@@ -123,7 +123,7 @@ class HiveSessionStateBuilder(
    */
   override protected def planner: SparkPlanner = {
     new SparkPlanner(session, experimentalMethods) with HiveStrategies {
-      override val sparkSession: SparkSession = session
+      override val sparkSession: SparkSession = this.session
 
       override def extraPlanningStrategies: Seq[Strategy] =
         super.extraPlanningStrategies ++ customPlanningStrategies ++


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
