This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new aabe3a75454 [SPARK-39138][SQL] Add ANSI general value specification 
and function - user
aabe3a75454 is described below

commit aabe3a75454462c9ce665a3d947a7edb91f37d13
Author: Kent Yao <y...@apache.org>
AuthorDate: Sat May 14 00:03:23 2022 +0800

    [SPARK-39138][SQL] Add ANSI general value specification and function - user
    
    ### What changes were proposed in this pull request?
    
    Add ANSI general value specification and function - user
    
    ### Why are the changes needed?
    
    According to ANSI SQL,
    ```
    CURRENT_USER and USER are semantically the same
    ```
    
    USER is also supported by other systems like MySQL, PG, Hive, etc.
    
    ### Does this PR introduce _any_ user-facing change?
    
    A new function is added.
    
    In ANSI mode, if enforceReservedKeywords is enabled, USER is always reserved;
    
    otherwise, it will be resolved as a literal if no attribute matches.
    
    ### How was this patch tested?
    
    new tests
    
    Closes #36497 from yaooqinn/SPARK-39138.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../spark/sql/catalyst/parser/SqlBaseParser.g4     |  2 +-
 .../spark/sql/catalyst/analysis/Analyzer.scala     |  1 +
 .../sql/catalyst/analysis/FunctionRegistry.scala   |  1 +
 .../spark/sql/catalyst/parser/AstBuilder.scala     |  2 +-
 .../sql-functions/sql-expression-schema.md         |  1 +
 .../org/apache/spark/sql/MiscFunctionsSuite.scala  | 15 +++++++------
 .../ThriftServerWithSparkContextSuite.scala        | 25 ++++++++++++++--------
 7 files changed, 30 insertions(+), 17 deletions(-)

diff --git 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index 7eace05b6b2..ed57e9062c1 100644
--- 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -816,7 +816,7 @@ datetimeUnit
     ;
 
 primaryExpression
-    : name=(CURRENT_DATE | CURRENT_TIMESTAMP | CURRENT_USER)                   
                #currentLike
+    : name=(CURRENT_DATE | CURRENT_TIMESTAMP | CURRENT_USER | USER)            
                       #currentLike
     | name=(TIMESTAMPADD | DATEADD) LEFT_PAREN unit=datetimeUnit COMMA 
unitsAmount=valueExpression COMMA timestamp=valueExpression RIGHT_PAREN         
    #timestampadd
     | name=(TIMESTAMPDIFF | DATEDIFF) LEFT_PAREN unit=datetimeUnit COMMA 
startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN   
 #timestampdiff
     | CASE whenClause+ (ELSE elseExpression=expression)? END                   
                #searchedCase
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 817a62fd1d8..20c1756ef4e 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -1682,6 +1682,7 @@ class Analyzer(override val catalogManager: 
CatalogManager)
     (CurrentDate().prettyName, () => CurrentDate(), toPrettySQL(_)),
     (CurrentTimestamp().prettyName, () => CurrentTimestamp(), toPrettySQL(_)),
     (CurrentUser().prettyName, () => CurrentUser(), toPrettySQL),
+    ("user", () => CurrentUser(), toPrettySQL),
     (VirtualColumn.hiveGroupingIdName, () => GroupingID(Nil), _ => 
VirtualColumn.hiveGroupingIdName)
   )
 
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 50f376c0ce6..5084753d2d4 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -714,6 +714,7 @@ object FunctionRegistry {
     expression[CurrentDatabase]("current_database"),
     expression[CurrentCatalog]("current_catalog"),
     expression[CurrentUser]("current_user"),
+    expression[CurrentUser]("user", setAlias = true),
     expression[CallMethodViaReflection]("reflect"),
     expression[CallMethodViaReflection]("java_method", true),
     expression[SparkVersion]("version"),
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index e6f7dba863b..ff3b99fb815 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -1776,7 +1776,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] 
with SQLConfHelper wit
           CurrentDate()
         case SqlBaseParser.CURRENT_TIMESTAMP =>
           CurrentTimestamp()
-        case SqlBaseParser.CURRENT_USER =>
+        case SqlBaseParser.CURRENT_USER | SqlBaseParser.USER =>
           CurrentUser()
       }
     } else {
diff --git a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md 
b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
index 93c12487200..ca31d169692 100644
--- a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
+++ b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
@@ -92,6 +92,7 @@
 | org.apache.spark.sql.catalyst.expressions.CurrentTimeZone | current_timezone 
| SELECT current_timezone() | struct<current_timezone():string> |
 | org.apache.spark.sql.catalyst.expressions.CurrentTimestamp | 
current_timestamp | SELECT current_timestamp() | 
struct<current_timestamp():timestamp> |
 | org.apache.spark.sql.catalyst.expressions.CurrentUser | current_user | 
SELECT current_user() | struct<current_user():string> |
+| org.apache.spark.sql.catalyst.expressions.CurrentUser | user | SELECT user() 
| struct<current_user():string> |
 | org.apache.spark.sql.catalyst.expressions.DateAdd | date_add | SELECT 
date_add('2016-07-30', 1) | struct<date_add(2016-07-30, 1):date> |
 | org.apache.spark.sql.catalyst.expressions.DateDiff | datediff | SELECT 
datediff('2009-07-31', '2009-07-30') | struct<datediff(2009-07-31, 
2009-07-30):int> |
 | org.apache.spark.sql.catalyst.expressions.DateFormatClass | date_format | 
SELECT date_format('2016-04-08', 'y') | struct<date_format(2016-04-08, 
y):string> |
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala
index 37ba52023dd..e1b7f7f57b6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala
@@ -47,15 +47,18 @@ class MiscFunctionsSuite extends QueryTest with 
SharedSparkSession {
   test("SPARK-21957: get current_user in normal spark apps") {
     val user = spark.sparkContext.sparkUser
     withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
-      val df = sql("select current_user(), current_user")
-      checkAnswer(df, Row(user, user))
+      val df = sql("select current_user(), current_user, user, user()")
+      checkAnswer(df, Row(user, user, user, user))
     }
     withSQLConf(SQLConf.ANSI_ENABLED.key -> "true",
       SQLConf.ENFORCE_RESERVED_KEYWORDS.key -> "true") {
-      val df = sql("select current_user")
-      checkAnswer(df, Row(spark.sparkContext.sparkUser))
-      val e = intercept[ParseException](sql("select current_user()"))
-      assert(e.getMessage.contains("current_user"))
+      Seq("user", "current_user").foreach { func =>
+        checkAnswer(sql(s"select $func"), Row(user))
+      }
+      Seq("user()", "current_user()").foreach { func =>
+        val e = intercept[ParseException](sql(s"select $func"))
+        assert(e.getMessage.contains(func))
+      }
     }
   }
 
diff --git 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
index 94d7318e620..639a5e3a598 100644
--- 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
+++ 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
@@ -126,20 +126,27 @@ trait ThriftServerWithSparkContextSuite extends 
SharedThriftServer {
 
       exec(s"set ${SQLConf.ANSI_ENABLED.key}=false")
 
-      val opHandle1 = exec("select current_user(), current_user")
-      val rowSet1 = client.fetchResults(opHandle1)
-      rowSet1.getColumns.forEach { col =>
-        assert(col.getStringVal.getValues.get(0) === clientUser)
+      val userFuncs = Seq("user", "current_user")
+      userFuncs.foreach { func =>
+        val opHandle1 = exec(s"select $func(), $func")
+        val rowSet1 = client.fetchResults(opHandle1)
+        rowSet1.getColumns.forEach { col =>
+          assert(col.getStringVal.getValues.get(0) === clientUser)
+        }
       }
 
       exec(s"set ${SQLConf.ANSI_ENABLED.key}=true")
       exec(s"set ${SQLConf.ENFORCE_RESERVED_KEYWORDS.key}=true")
-      val opHandle2 = exec("select current_user")
-      assert(client.fetchResults(opHandle2).getColumns.get(0)
-        .getStringVal.getValues.get(0) === clientUser)
+      userFuncs.foreach { func =>
+        val opHandle2 = exec(s"select $func")
+        assert(client.fetchResults(opHandle2)
+          .getColumns.get(0).getStringVal.getValues.get(0) === clientUser)
+      }
 
-      val e = intercept[HiveSQLException](exec("select current_user()"))
-      assert(e.getMessage.contains("current_user"))
+      userFuncs.foreach { func =>
+        val e = intercept[HiveSQLException](exec(s"select $func()"))
+        assert(e.getMessage.contains(func))
+      }
     }
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to