This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3cc55f6  [SPARK-29392][CORE][SQL][FOLLOWUP] More removal of 'foo 
Symbol syntax for Scala 2.13
3cc55f6 is described below

commit 3cc55f6a0a560782f6e20296ac716ef68a412d26
Author: Sean Owen <sro...@gmail.com>
AuthorDate: Tue Dec 10 19:41:24 2019 -0800

    [SPARK-29392][CORE][SQL][FOLLOWUP] More removal of 'foo Symbol syntax for 
Scala 2.13
    
    ### What changes were proposed in this pull request?
    
    Another continuation of https://github.com/apache/spark/pull/26748
    
    ### Why are the changes needed?
    
    To cleanly cross compile with Scala 2.13.
    
    ### Does this PR introduce any user-facing change?
    
    None.
    
    ### How was this patch tested?
    
    Existing tests
    
    Closes #26842 from srowen/SPARK-29392.4.
    
    Authored-by: Sean Owen <sro...@gmail.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../spark/sql/catalyst/analysis/DSLHintSuite.scala |   6 +-
 .../analysis/ExpressionTypeCheckingSuite.scala     | 193 +++++++++++----------
 .../spark/sql/DataFrameWindowFramesSuite.scala     |   4 +-
 .../spark/sql/DataFrameWindowFunctionsSuite.scala  |  22 +--
 .../org/apache/spark/sql/DatasetCacheSuite.scala   |  18 +-
 .../scala/org/apache/spark/sql/DatasetSuite.scala  |  14 +-
 .../spark/sql/DynamicPartitionPruningSuite.scala   |   5 +-
 .../apache/spark/sql/GeneratorFunctionSuite.scala  |  37 ++--
 .../scala/org/apache/spark/sql/JoinHintSuite.scala |  22 +--
 9 files changed, 168 insertions(+), 153 deletions(-)

diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DSLHintSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DSLHintSuite.scala
index 388eb23..c316e04 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DSLHintSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DSLHintSuite.scala
@@ -22,9 +22,9 @@ import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
 
 class DSLHintSuite extends AnalysisTest {
-  lazy val a = 'a.int
-  lazy val b = 'b.string
-  lazy val c = 'c.string
+  lazy val a = Symbol("a").int
+  lazy val b = Symbol("b").string
+  lazy val c = Symbol("c").string
   lazy val r1 = LocalRelation(a, b, c)
 
   test("various hint parameters") {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
index c83759e..f944b4a 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
@@ -29,12 +29,12 @@ import org.apache.spark.sql.types._
 class ExpressionTypeCheckingSuite extends SparkFunSuite {
 
   val testRelation = LocalRelation(
-    'intField.int,
-    'stringField.string,
-    'booleanField.boolean,
-    'decimalField.decimal(8, 0),
-    'arrayField.array(StringType),
-    'mapField.map(StringType, LongType))
+    Symbol("intField").int,
+    Symbol("stringField").string,
+    Symbol("booleanField").boolean,
+    Symbol("decimalField").decimal(8, 0),
+    Symbol("arrayField").array(StringType),
+    Symbol("mapField").map(StringType, LongType))
 
   def assertError(expr: Expression, errorMessage: String): Unit = {
     val e = intercept[AnalysisException] {
@@ -56,83 +56,92 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
   }
 
   test("check types for unary arithmetic") {
-    assertError(BitwiseNot('stringField), "requires integral type")
+    assertError(BitwiseNot(Symbol("stringField")), "requires integral type")
   }
 
   test("check types for binary arithmetic") {
     // We will cast String to Double for binary arithmetic
-    assertSuccess(Add('intField, 'stringField))
-    assertSuccess(Subtract('intField, 'stringField))
-    assertSuccess(Multiply('intField, 'stringField))
-    assertSuccess(Divide('intField, 'stringField))
-    assertSuccess(Remainder('intField, 'stringField))
-    // checkAnalysis(BitwiseAnd('intField, 'stringField))
-
-    assertErrorForDifferingTypes(Add('intField, 'booleanField))
-    assertErrorForDifferingTypes(Subtract('intField, 'booleanField))
-    assertErrorForDifferingTypes(Multiply('intField, 'booleanField))
-    assertErrorForDifferingTypes(Divide('intField, 'booleanField))
-    assertErrorForDifferingTypes(Remainder('intField, 'booleanField))
-    assertErrorForDifferingTypes(BitwiseAnd('intField, 'booleanField))
-    assertErrorForDifferingTypes(BitwiseOr('intField, 'booleanField))
-    assertErrorForDifferingTypes(BitwiseXor('intField, 'booleanField))
-
-    assertError(Add('booleanField, 'booleanField), "requires (numeric or 
interval) type")
-    assertError(Subtract('booleanField, 'booleanField),
+    assertSuccess(Add(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(Subtract(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(Multiply(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(Divide(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(Remainder(Symbol("intField"), Symbol("stringField")))
+    // checkAnalysis(BitwiseAnd(Symbol("intField"), Symbol("stringField")))
+
+    assertErrorForDifferingTypes(Add(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(Subtract(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(Multiply(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(Divide(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(Remainder(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(BitwiseAnd(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(BitwiseOr(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(BitwiseXor(Symbol("intField"), 
Symbol("booleanField")))
+
+    assertError(Add(Symbol("booleanField"), Symbol("booleanField")),
       "requires (numeric or interval) type")
-    assertError(Multiply('booleanField, 'booleanField), "requires numeric 
type")
-    assertError(Divide('booleanField, 'booleanField), "requires (double or 
decimal) type")
-    assertError(Remainder('booleanField, 'booleanField), "requires numeric 
type")
-
-    assertError(BitwiseAnd('booleanField, 'booleanField), "requires integral 
type")
-    assertError(BitwiseOr('booleanField, 'booleanField), "requires integral 
type")
-    assertError(BitwiseXor('booleanField, 'booleanField), "requires integral 
type")
+    assertError(Subtract(Symbol("booleanField"), Symbol("booleanField")),
+      "requires (numeric or interval) type")
+    assertError(Multiply(Symbol("booleanField"), Symbol("booleanField")), 
"requires numeric type")
+    assertError(Divide(Symbol("booleanField"), Symbol("booleanField")),
+      "requires (double or decimal) type")
+    assertError(Remainder(Symbol("booleanField"), Symbol("booleanField")), 
"requires numeric type")
+
+    assertError(BitwiseAnd(Symbol("booleanField"), Symbol("booleanField")),
+      "requires integral type")
+    assertError(BitwiseOr(Symbol("booleanField"), Symbol("booleanField")), 
"requires integral type")
+    assertError(BitwiseXor(Symbol("booleanField"), Symbol("booleanField")),
+      "requires integral type")
   }
 
   test("check types for predicates") {
     // We will cast String to Double for binary comparison
-    assertSuccess(EqualTo('intField, 'stringField))
-    assertSuccess(EqualNullSafe('intField, 'stringField))
-    assertSuccess(LessThan('intField, 'stringField))
-    assertSuccess(LessThanOrEqual('intField, 'stringField))
-    assertSuccess(GreaterThan('intField, 'stringField))
-    assertSuccess(GreaterThanOrEqual('intField, 'stringField))
+    assertSuccess(EqualTo(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(EqualNullSafe(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(LessThan(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(LessThanOrEqual(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(GreaterThan(Symbol("intField"), Symbol("stringField")))
+    assertSuccess(GreaterThanOrEqual(Symbol("intField"), 
Symbol("stringField")))
 
     // We will transform EqualTo with numeric and boolean types to CaseKeyWhen
-    assertSuccess(EqualTo('intField, 'booleanField))
-    assertSuccess(EqualNullSafe('intField, 'booleanField))
-
-    assertErrorForDifferingTypes(EqualTo('intField, 'mapField))
-    assertErrorForDifferingTypes(EqualNullSafe('intField, 'mapField))
-    assertErrorForDifferingTypes(LessThan('intField, 'booleanField))
-    assertErrorForDifferingTypes(LessThanOrEqual('intField, 'booleanField))
-    assertErrorForDifferingTypes(GreaterThan('intField, 'booleanField))
-    assertErrorForDifferingTypes(GreaterThanOrEqual('intField, 'booleanField))
-
-    assertError(EqualTo('mapField, 'mapField), "EqualTo does not support 
ordering on type map")
-    assertError(EqualNullSafe('mapField, 'mapField),
+    assertSuccess(EqualTo(Symbol("intField"), Symbol("booleanField")))
+    assertSuccess(EqualNullSafe(Symbol("intField"), Symbol("booleanField")))
+
+    assertErrorForDifferingTypes(EqualTo(Symbol("intField"), 
Symbol("mapField")))
+    assertErrorForDifferingTypes(EqualNullSafe(Symbol("intField"), 
Symbol("mapField")))
+    assertErrorForDifferingTypes(LessThan(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(LessThanOrEqual(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(GreaterThan(Symbol("intField"), 
Symbol("booleanField")))
+    assertErrorForDifferingTypes(GreaterThanOrEqual(Symbol("intField"), 
Symbol("booleanField")))
+
+    assertError(EqualTo(Symbol("mapField"), Symbol("mapField")),
+      "EqualTo does not support ordering on type map")
+    assertError(EqualNullSafe(Symbol("mapField"), Symbol("mapField")),
       "EqualNullSafe does not support ordering on type map")
-    assertError(LessThan('mapField, 'mapField),
+    assertError(LessThan(Symbol("mapField"), Symbol("mapField")),
       "LessThan does not support ordering on type map")
-    assertError(LessThanOrEqual('mapField, 'mapField),
+    assertError(LessThanOrEqual(Symbol("mapField"), Symbol("mapField")),
       "LessThanOrEqual does not support ordering on type map")
-    assertError(GreaterThan('mapField, 'mapField),
+    assertError(GreaterThan(Symbol("mapField"), Symbol("mapField")),
       "GreaterThan does not support ordering on type map")
-    assertError(GreaterThanOrEqual('mapField, 'mapField),
+    assertError(GreaterThanOrEqual(Symbol("mapField"), Symbol("mapField")),
       "GreaterThanOrEqual does not support ordering on type map")
 
-    assertError(If('intField, 'stringField, 'stringField),
+    assertError(If(Symbol("intField"), Symbol("stringField"), 
Symbol("stringField")),
       "type of predicate expression in If should be boolean")
-    assertErrorForDifferingTypes(If('booleanField, 'intField, 'booleanField))
+    assertErrorForDifferingTypes(
+      If(Symbol("booleanField"), Symbol("intField"), Symbol("booleanField")))
 
     assertError(
-      CaseWhen(Seq(('booleanField.attr, 'intField.attr), ('booleanField.attr, 
'mapField.attr))),
+      CaseWhen(Seq((Symbol("booleanField").attr, Symbol("intField").attr),
+        (Symbol("booleanField").attr, Symbol("mapField").attr))),
       "THEN and ELSE expressions should all be same type or coercible to a 
common type")
     assertError(
-      CaseKeyWhen('intField, Seq('intField, 'stringField, 'intField, 
'mapField)),
+      CaseKeyWhen(Symbol("intField"), Seq(Symbol("intField"), 
Symbol("stringField"),
+        Symbol("intField"), Symbol("mapField"))),
       "THEN and ELSE expressions should all be same type or coercible to a 
common type")
     assertError(
-      CaseWhen(Seq(('booleanField.attr, 'intField.attr), ('intField.attr, 
'intField.attr))),
+      CaseWhen(Seq((Symbol("booleanField").attr, Symbol("intField").attr),
+        (Symbol("intField").attr, Symbol("intField").attr))),
       "WHEN expressions in CaseWhen should all be boolean type")
   }
 
@@ -141,29 +150,29 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
     // instead of from AggregateExpression, which is the wrapper of an 
AggregateFunction.
 
     // We will cast String to Double for sum and average
-    assertSuccess(Sum('stringField))
-    assertSuccess(Average('stringField))
-    assertSuccess(Min('arrayField))
-    assertSuccess(new BoolAnd("bool_and", 'booleanField))
-    assertSuccess(new BoolOr("bool_or", 'booleanField))
-
-    assertError(Min('mapField), "min does not support ordering on type")
-    assertError(Max('mapField), "max does not support ordering on type")
-    assertError(Sum('booleanField), "requires (numeric or interval) type")
-    assertError(Average('booleanField), "requires (numeric or interval) type")
+    assertSuccess(Sum(Symbol("stringField")))
+    assertSuccess(Average(Symbol("stringField")))
+    assertSuccess(Min(Symbol("arrayField")))
+    assertSuccess(new BoolAnd("bool_and", Symbol("booleanField")))
+    assertSuccess(new BoolOr("bool_or", Symbol("booleanField")))
+
+    assertError(Min(Symbol("mapField")), "min does not support ordering on 
type")
+    assertError(Max(Symbol("mapField")), "max does not support ordering on 
type")
+    assertError(Sum(Symbol("booleanField")), "requires (numeric or interval) 
type")
+    assertError(Average(Symbol("booleanField")), "requires (numeric or 
interval) type")
   }
 
   test("check types for others") {
-    assertError(CreateArray(Seq('intField, 'booleanField)),
+    assertError(CreateArray(Seq(Symbol("intField"), Symbol("booleanField"))),
       "input to function array should all be the same type")
-    assertError(Coalesce(Seq('intField, 'booleanField)),
+    assertError(Coalesce(Seq(Symbol("intField"), Symbol("booleanField"))),
       "input to function coalesce should all be the same type")
     assertError(Coalesce(Nil), "function coalesce requires at least one 
argument")
     assertError(new Murmur3Hash(Nil), "function hash requires at least one 
argument")
     assertError(new XxHash64(Nil), "function xxhash64 requires at least one 
argument")
-    assertError(Explode('intField),
+    assertError(Explode(Symbol("intField")),
       "input to function explode should be array or map type")
-    assertError(PosExplode('intField),
+    assertError(PosExplode(Symbol("intField")),
       "input to function explode should be array or map type")
   }
 
@@ -174,7 +183,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
       CreateNamedStruct(Seq(1, "a", "b", 2.0)),
       "Only foldable string expressions are allowed to appear at odd position")
     assertError(
-      CreateNamedStruct(Seq('a.string.at(0), "a", "b", 2.0)),
+      CreateNamedStruct(Seq(Symbol("a").string.at(0), "a", "b", 2.0)),
       "Only foldable string expressions are allowed to appear at odd position")
     assertError(
       CreateNamedStruct(Seq(Literal.create(null, StringType), "a")),
@@ -184,36 +193,42 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
   test("check types for CreateMap") {
     assertError(CreateMap(Seq("a", "b", 2.0)), "even number of arguments")
     assertError(
-      CreateMap(Seq('intField, 'stringField, 'booleanField, 'stringField)),
+      CreateMap(Seq(Symbol("intField"), Symbol("stringField"),
+        Symbol("booleanField"), Symbol("stringField"))),
       "keys of function map should all be the same type")
     assertError(
-      CreateMap(Seq('stringField, 'intField, 'stringField, 'booleanField)),
+      CreateMap(Seq(Symbol("stringField"), Symbol("intField"),
+        Symbol("stringField"), Symbol("booleanField"))),
       "values of function map should all be the same type")
   }
 
   test("check types for ROUND/BROUND") {
     assertSuccess(Round(Literal(null), Literal(null)))
-    assertSuccess(Round('intField, Literal(1)))
+    assertSuccess(Round(Symbol("intField"), Literal(1)))
 
-    assertError(Round('intField, 'intField), "Only foldable Expression is 
allowed")
-    assertError(Round('intField, 'booleanField), "requires int type")
-    assertError(Round('intField, 'mapField), "requires int type")
-    assertError(Round('booleanField, 'intField), "requires numeric type")
+    assertError(Round(Symbol("intField"), Symbol("intField")),
+      "Only foldable Expression is allowed")
+    assertError(Round(Symbol("intField"), Symbol("booleanField")), "requires 
int type")
+    assertError(Round(Symbol("intField"), Symbol("mapField")), "requires int 
type")
+    assertError(Round(Symbol("booleanField"), Symbol("intField")), "requires 
numeric type")
 
     assertSuccess(BRound(Literal(null), Literal(null)))
-    assertSuccess(BRound('intField, Literal(1)))
+    assertSuccess(BRound(Symbol("intField"), Literal(1)))
 
-    assertError(BRound('intField, 'intField), "Only foldable Expression is 
allowed")
-    assertError(BRound('intField, 'booleanField), "requires int type")
-    assertError(BRound('intField, 'mapField), "requires int type")
-    assertError(BRound('booleanField, 'intField), "requires numeric type")
+    assertError(BRound(Symbol("intField"), Symbol("intField")),
+      "Only foldable Expression is allowed")
+    assertError(BRound(Symbol("intField"), Symbol("booleanField")), "requires 
int type")
+    assertError(BRound(Symbol("intField"), Symbol("mapField")), "requires int 
type")
+    assertError(BRound(Symbol("booleanField"), Symbol("intField")), "requires 
numeric type")
   }
 
   test("check types for Greatest/Least") {
     for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) {
-      assertError(operator(Seq('booleanField)), "requires at least two 
arguments")
-      assertError(operator(Seq('intField, 'stringField)), "should all have the 
same type")
-      assertError(operator(Seq('mapField, 'mapField)), "does not support 
ordering")
+      assertError(operator(Seq(Symbol("booleanField"))), "requires at least 
two arguments")
+      assertError(operator(Seq(Symbol("intField"), Symbol("stringField"))),
+        "should all have the same type")
+      assertError(operator(Seq(Symbol("mapField"), Symbol("mapField"))),
+        "does not support ordering")
     }
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFramesSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFramesSuite.scala
index fbd3999..8c99829 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFramesSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFramesSuite.scala
@@ -111,7 +111,7 @@ class DataFrameWindowFramesSuite extends QueryTest with 
SharedSparkSession {
 
     checkAnswer(
       df.select(
-        'key,
+        $"key",
         first("value").over(
           window.rowsBetween(Window.unboundedPreceding, 
Window.unboundedFollowing)),
         first("value").over(
@@ -226,7 +226,7 @@ class DataFrameWindowFramesSuite extends QueryTest with 
SharedSparkSession {
 
     checkAnswer(
       df.select(
-        'key,
+        $"key",
         sum("value").over(window.
           rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
         sum("value").over(window.
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
index 835630b..696b056 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
@@ -633,20 +633,20 @@ class DataFrameWindowFunctionsSuite extends QueryTest 
with SharedSparkSession {
       assert(thrownException.message.contains("window functions inside WHERE 
and HAVING clauses"))
     }
 
-    
checkAnalysisError(testData2.select('a).where(rank().over(Window.orderBy('b)) 
=== 1))
-    checkAnalysisError(testData2.where('b === 2 && 
rank().over(Window.orderBy('b)) === 1))
+    
checkAnalysisError(testData2.select("a").where(rank().over(Window.orderBy($"b"))
 === 1))
+    checkAnalysisError(testData2.where($"b" === 2 && 
rank().over(Window.orderBy($"b")) === 1))
     checkAnalysisError(
-      testData2.groupBy('a)
-        .agg(avg('b).as("avgb"))
-        .where('a > 'avgb && rank().over(Window.orderBy('a)) === 1))
+      testData2.groupBy($"a")
+        .agg(avg($"b").as("avgb"))
+        .where($"a" > $"avgb" && rank().over(Window.orderBy($"a")) === 1))
     checkAnalysisError(
-      testData2.groupBy('a)
-        .agg(max('b).as("maxb"), sum('b).as("sumb"))
-        .where(rank().over(Window.orderBy('a)) === 1))
+      testData2.groupBy($"a")
+        .agg(max($"b").as("maxb"), sum($"b").as("sumb"))
+        .where(rank().over(Window.orderBy($"a")) === 1))
     checkAnalysisError(
-      testData2.groupBy('a)
-        .agg(max('b).as("maxb"), sum('b).as("sumb"))
-        .where('sumb === 5 && rank().over(Window.orderBy('a)) === 1))
+      testData2.groupBy($"a")
+        .agg(max($"b").as("maxb"), sum($"b").as("sumb"))
+        .where($"sumb" === 5 && rank().over(Window.orderBy($"a")) === 1))
 
     checkAnalysisError(sql("SELECT a FROM testData2 WHERE RANK() OVER(ORDER BY 
b) = 1"))
     checkAnalysisError(sql("SELECT * FROM testData2 WHERE b = 2 AND RANK() 
OVER(ORDER BY b) = 1"))
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
index 33d9def..5dba9aa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
@@ -158,8 +158,8 @@ class DatasetCacheSuite extends QueryTest with 
SharedSparkSession with TimeLimit
 
   test("SPARK-24596 Non-cascading Cache Invalidation") {
     val df = Seq(("a", 1), ("b", 2)).toDF("s", "i")
-    val df2 = df.filter('i > 1)
-    val df3 = df.filter('i < 2)
+    val df2 = df.filter($"i" > 1)
+    val df3 = df.filter($"i" < 2)
 
     df2.cache()
     df.cache()
@@ -178,8 +178,8 @@ class DatasetCacheSuite extends QueryTest with 
SharedSparkSession with TimeLimit
     val expensiveUDF = udf({ x: Int => Thread.sleep(5000); x })
     val df = spark.range(0, 5).toDF("a")
     val df1 = df.withColumn("b", expensiveUDF($"a"))
-    val df2 = df1.groupBy('a).agg(sum('b))
-    val df3 = df.agg(sum('a))
+    val df2 = df1.groupBy($"a").agg(sum($"b"))
+    val df3 = df.agg(sum($"a"))
 
     df1.cache()
     df2.cache()
@@ -192,16 +192,16 @@ class DatasetCacheSuite extends QueryTest with 
SharedSparkSession with TimeLimit
 
     // df1 un-cached; df2's cache plan stays the same
     assert(df1.storageLevel == StorageLevel.NONE)
-    assertCacheDependency(df1.groupBy('a).agg(sum('b)))
+    assertCacheDependency(df1.groupBy($"a").agg(sum($"b")))
 
-    val df4 = df1.groupBy('a).agg(sum('b)).agg(sum("sum(b)"))
+    val df4 = df1.groupBy($"a").agg(sum($"b")).agg(sum("sum(b)"))
     assertCached(df4)
     // reuse loaded cache
     failAfter(3.seconds) {
       checkDataset(df4, Row(10))
     }
 
-    val df5 = df.agg(sum('a)).filter($"sum(a)" > 1)
+    val df5 = df.agg(sum($"a")).filter($"sum(a)" > 1)
     assertCached(df5)
     // first time use, load cache
     checkDataset(df5, Row(10))
@@ -209,8 +209,8 @@ class DatasetCacheSuite extends QueryTest with 
SharedSparkSession with TimeLimit
 
   test("SPARK-26708 Cache data and cached plan should stay consistent") {
     val df = spark.range(0, 5).toDF("a")
-    val df1 = df.withColumn("b", 'a + 1)
-    val df2 = df.filter('a > 1)
+    val df1 = df.withColumn("b", $"a" + 1)
+    val df2 = df.filter($"a" > 1)
 
     df.cache()
     // Add df1 to the CacheManager; the buffer is currently empty.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index 9a367d4..91d4a1c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
@@ -980,7 +980,7 @@ class DatasetSuite extends QueryTest with 
SharedSparkSession {
   }
 
   test("SPARK-14554: Dataset.map may generate wrong java code for wide table") 
{
-    val wideDF = spark.range(10).select(Seq.tabulate(1000) {i => ('id + 
i).as(s"c$i")} : _*)
+    val wideDF = spark.range(10).select(Seq.tabulate(1000) {i => ($"id" + 
i).as(s"c$i")} : _*)
     // Make sure the generated code for this plan can compile and execute.
     checkDataset(wideDF.map(_.getLong(0)), 0L until 10 : _*)
   }
@@ -1390,7 +1390,7 @@ class DatasetSuite extends QueryTest with 
SharedSparkSession {
       }
 
       testCheckpointing("basic") {
-        val ds = spark.range(10).repartition('id % 2).filter('id > 
5).orderBy('id.desc)
+        val ds = spark.range(10).repartition($"id" % 2).filter($"id" > 
5).orderBy($"id".desc)
         val cp = if (reliable) ds.checkpoint(eager) else 
ds.localCheckpoint(eager)
 
         val logicalRDD = cp.logicalPlan match {
@@ -1425,10 +1425,10 @@ class DatasetSuite extends QueryTest with 
SharedSparkSession {
       }
 
       testCheckpointing("should preserve partitioning information") {
-        val ds = spark.range(10).repartition('id % 2)
+        val ds = spark.range(10).repartition($"id" % 2)
         val cp = if (reliable) ds.checkpoint(eager) else 
ds.localCheckpoint(eager)
 
-        val agg = cp.groupBy('id % 2).agg(count('id))
+        val agg = cp.groupBy($"id" % 2).agg(count($"id"))
 
         agg.queryExecution.executedPlan.collectFirst {
           case ShuffleExchangeExec(_, _: RDDScanExec, _) =>
@@ -1440,7 +1440,7 @@ class DatasetSuite extends QueryTest with 
SharedSparkSession {
           )
         }
 
-        checkAnswer(agg, ds.groupBy('id % 2).agg(count('id)))
+        checkAnswer(agg, ds.groupBy($"id" % 2).agg(count($"id")))
       }
     }
   }
@@ -1538,11 +1538,9 @@ class DatasetSuite extends QueryTest with 
SharedSparkSession {
     checkAnswer(df.sort("id"), expected)
     checkAnswer(df.sort(col("id")), expected)
     checkAnswer(df.sort($"id"), expected)
-    checkAnswer(df.sort('id), expected)
     checkAnswer(df.orderBy("id"), expected)
     checkAnswer(df.orderBy(col("id")), expected)
     checkAnswer(df.orderBy($"id"), expected)
-    checkAnswer(df.orderBy('id), expected)
   }
 
   test("SPARK-21567: Dataset should work with type alias") {
@@ -1697,7 +1695,7 @@ class DatasetSuite extends QueryTest with 
SharedSparkSession {
 
   test("SPARK-24571: filtering of string values by char literal") {
     val df = Seq("Amsterdam", "San Francisco", "X").toDF("city")
-    checkAnswer(df.where('city === 'X'), Seq(Row("X")))
+    checkAnswer(df.where($"city" === 'X'), Seq(Row("X")))
     checkAnswer(
       df.where($"city".contains(java.lang.Character.valueOf('A'))),
       Seq(Row("Amsterdam")))
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/DynamicPartitionPruningSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/DynamicPartitionPruningSuite.scala
index e0ea361..a54528f 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/DynamicPartitionPruningSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/DynamicPartitionPruningSuite.scala
@@ -97,7 +97,8 @@ class DynamicPartitionPruningSuite
       (6, 60)
     )
 
-    spark.range(1000).select('id as 'product_id, ('id % 10) as 'store_id, ('id 
+ 1) as 'code)
+    spark.range(1000)
+      .select($"id" as "product_id", ($"id" % 10) as "store_id", ($"id" + 1) 
as "code")
       .write
       .format(tableFormat)
       .mode("overwrite")
@@ -1037,7 +1038,7 @@ class DynamicPartitionPruningSuite
   test("no partition pruning when the build side is a stream") {
     withTable("fact") {
       val input = MemoryStream[Int]
-      val stream = input.toDF.select('value as "one", ('value * 3) as "code")
+      val stream = input.toDF.select($"value" as "one", ($"value" * 3) as 
"code")
       spark.range(100).select(
         $"id",
         ($"id" + 1).as("one"),
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
index 4edce3b..96a0eb3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
@@ -88,28 +88,28 @@ class GeneratorFunctionSuite extends QueryTest with 
SharedSparkSession {
   test("single explode") {
     val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
     checkAnswer(
-      df.select(explode('intList)),
+      df.select(explode($"intList")),
       Row(1) :: Row(2) :: Row(3) :: Nil)
   }
 
   test("single explode_outer") {
     val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
     checkAnswer(
-      df.select(explode_outer('intList)),
+      df.select(explode_outer($"intList")),
       Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)
   }
 
   test("single posexplode") {
     val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
     checkAnswer(
-      df.select(posexplode('intList)),
+      df.select(posexplode($"intList")),
       Row(0, 1) :: Row(1, 2) :: Row(2, 3) :: Nil)
   }
 
   test("single posexplode_outer") {
     val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
     checkAnswer(
-      df.select(posexplode_outer('intList)),
+      df.select(posexplode_outer($"intList")),
       Row(0, 1) :: Row(1, 2) :: Row(2, 3) :: Row(null, null) :: Nil)
   }
 
@@ -117,13 +117,13 @@ class GeneratorFunctionSuite extends QueryTest with 
SharedSparkSession {
     val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
 
     checkAnswer(
-      df.select($"a", explode('intList)),
+      df.select($"a", explode($"intList")),
       Row(1, 1) ::
       Row(1, 2) ::
       Row(1, 3) :: Nil)
 
     checkAnswer(
-      df.select($"*", explode('intList)),
+      df.select($"*", explode($"intList")),
       Row(1, Seq(1, 2, 3), 1) ::
       Row(1, Seq(1, 2, 3), 2) ::
       Row(1, Seq(1, 2, 3), 3) :: Nil)
@@ -133,7 +133,7 @@ class GeneratorFunctionSuite extends QueryTest with 
SharedSparkSession {
     val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
 
     checkAnswer(
-      df.select($"a", explode_outer('intList)),
+      df.select($"a", explode_outer($"intList")),
       Row(1, 1) ::
         Row(1, 2) ::
         Row(1, 3) ::
@@ -141,7 +141,7 @@ class GeneratorFunctionSuite extends QueryTest with 
SharedSparkSession {
         Nil)
 
     checkAnswer(
-      df.select($"*", explode_outer('intList)),
+      df.select($"*", explode_outer($"intList")),
       Row(1, Seq(1, 2, 3), 1) ::
         Row(1, Seq(1, 2, 3), 2) ::
         Row(1, Seq(1, 2, 3), 3) ::
@@ -153,11 +153,11 @@ class GeneratorFunctionSuite extends QueryTest with 
SharedSparkSession {
     val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
 
     checkAnswer(
-      df.select(explode('intList).as('int)).select('int),
+      df.select(explode($"intList").as("int")).select($"int"),
       Row(1) :: Row(2) :: Row(3) :: Nil)
 
     checkAnswer(
-      df.select(explode('intList).as('int)).select(sum('int)),
+      df.select(explode($"intList").as("int")).select(sum($"int")),
       Row(6) :: Nil)
   }
 
@@ -165,11 +165,11 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
     val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
 
     checkAnswer(
-      df.select(explode_outer('intList).as('int)).select('int),
+      df.select(explode_outer($"intList").as("int")).select($"int"),
       Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)
 
     checkAnswer(
-      df.select(explode('intList).as('int)).select(sum('int)),
+      df.select(explode($"intList").as("int")).select(sum($"int")),
       Row(6) :: Nil)
   }
 
@@ -177,7 +177,7 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
     val df = Seq((1, Map("a" -> "b"))).toDF("a", "map")
 
     checkAnswer(
-      df.select(explode('map)),
+      df.select(explode($"map")),
       Row("a", "b"))
   }
 
@@ -186,7 +186,7 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
       (3, Map("c" -> "d"))).toDF("a", "map")
 
     checkAnswer(
-      df.select(explode_outer('map)),
+      df.select(explode_outer($"map")),
       Row("a", "b") :: Row(null, null) :: Row("c", "d") :: Nil)
   }
 
@@ -194,7 +194,7 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
     val df = Seq((1, Map("a" -> "b"))).toDF("a", "map")
 
     checkAnswer(
-      df.select(explode('map).as("key1" :: "value1" :: Nil)).select("key1", "value1"),
+      df.select(explode($"map").as("key1" :: "value1" :: Nil)).select("key1", "value1"),
       Row("a", "b"))
   }
 
@@ -202,13 +202,13 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
     val df = Seq((3, None), (1, Some(Map("a" -> "b")))).toDF("a", "map")
 
     checkAnswer(
-      df.select(explode_outer('map).as("key1" :: "value1" :: Nil)).select("key1", "value1"),
+      df.select(explode_outer($"map").as("key1" :: "value1" :: Nil)).select("key1", "value1"),
       Row("a", "b") :: Row(null, null) :: Nil)
   }
 
   test("self join explode") {
     val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
-    val exploded = df.select(explode('intList).as('i))
+    val exploded = df.select(explode($"intList").as("i"))
 
     checkAnswer(
       exploded.join(exploded, exploded("i") === exploded("i")).agg(count("*")),
@@ -277,7 +277,8 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
 
   test("inline_outer") {
     val df = Seq((1, "2"), (3, "4"), (5, "6")).toDF("col1", "col2")
-    val df2 = df.select(when('col1 === 1, null).otherwise(array(struct('col1, 'col2))).as("col1"))
+    val df2 = df.select(
+      when($"col1" === 1, null).otherwise(array(struct($"col1", $"col2"))).as("col1"))
     checkAnswer(
       df2.selectExpr("inline(col1)"),
       Row(3, "4") :: Row(5, "6") :: Nil
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
index 6b15425..e405864 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
@@ -99,7 +99,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession {
 
   test("multiple joins") {
     verifyJoinHint(
-      df1.join(df2.hint("broadcast").join(df3, 'b1 === 'c1).hint("broadcast"), 'a1 === 'c1),
+      df1.join(df2.hint("broadcast").join(df3, $"b1" === $"c1").hint("broadcast"), $"a1" === $"c1"),
       JoinHint(
         None,
         Some(HintInfo(strategy = Some(BROADCAST)))) ::
@@ -108,7 +108,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession {
           None) :: Nil
     )
     verifyJoinHint(
-      df1.hint("broadcast").join(df2, 'a1 === 'b1).hint("broadcast").join(df3, 'a1 === 'c1),
+      df1.hint("broadcast").join(df2, $"a1" === $"b1").hint("broadcast").join(df3, $"a1" === $"c1"),
       JoinHint(
         Some(HintInfo(strategy = Some(BROADCAST))),
         None) ::
@@ -180,8 +180,8 @@ class JoinHintSuite extends PlanTest with SharedSparkSession {
         )
 
         verifyJoinHint(
-          df1.join(df2, 'a1 === 'b1 && 'a1 > 5).hint("broadcast")
-            .join(df3, 'b1 === 'c1 && 'a1 < 10),
+          df1.join(df2, $"a1" === $"b1" && $"a1" > 5).hint("broadcast")
+            .join(df3, $"b1" === $"c1" && $"a1" < 10),
           JoinHint(
             Some(HintInfo(strategy = Some(BROADCAST))),
             None) ::
@@ -189,9 +189,9 @@ class JoinHintSuite extends PlanTest with SharedSparkSession {
         )
 
         verifyJoinHint(
-          df1.join(df2, 'a1 === 'b1 && 'a1 > 5).hint("broadcast")
-            .join(df3, 'b1 === 'c1 && 'a1 < 10)
-            .join(df, 'b1 === 'id),
+          df1.join(df2, $"a1" === $"b1" && $"a1" > 5).hint("broadcast")
+            .join(df3, $"b1" === $"c1" && $"a1" < 10)
+            .join(df, $"b1" === $"id"),
           JoinHint.NONE ::
             JoinHint(
               Some(HintInfo(strategy = Some(BROADCAST))),
@@ -222,7 +222,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession {
 
   test("hint merge") {
     verifyJoinHintWithWarnings(
-      df.hint("broadcast").filter('id > 2).hint("broadcast").join(df, "id"),
+      df.hint("broadcast").filter($"id" > 2).hint("broadcast").join(df, "id"),
       JoinHint(
         Some(HintInfo(strategy = Some(BROADCAST))),
         None) :: Nil,
@@ -236,7 +236,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession {
       Nil
     )
     verifyJoinHintWithWarnings(
-      df.hint("merge").filter('id > 2).hint("shuffle_hash").join(df, "id").hint("broadcast"),
+      df.hint("merge").filter($"id" > 2).hint("shuffle_hash").join(df, "id").hint("broadcast"),
       JoinHint(
         Some(HintInfo(strategy = Some(SHUFFLE_HASH))),
         None) :: Nil,
@@ -312,13 +312,13 @@ class JoinHintSuite extends PlanTest with SharedSparkSession {
 
   test("nested hint") {
     verifyJoinHint(
-      df.hint("broadcast").hint("broadcast").filter('id > 2).join(df, "id"),
+      df.hint("broadcast").hint("broadcast").filter($"id" > 2).join(df, "id"),
       JoinHint(
         Some(HintInfo(strategy = Some(BROADCAST))),
         None) :: Nil
     )
     verifyJoinHint(
-      df.hint("shuffle_hash").hint("broadcast").hint("merge").filter('id > 2).join(df, "id"),
+      df.hint("shuffle_hash").hint("broadcast").hint("merge").filter($"id" > 2).join(df, "id"),
       JoinHint(
         Some(HintInfo(strategy = Some(SHUFFLE_MERGE))),
         None) :: Nil


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to