This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-2.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-2.3 by this push:
     new 1201a0a  [SPARK-28093][SPARK-28109][SQL][2.3] Fix TRIM/LTRIM/RTRIM function parameter order issue
1201a0a is described below

commit 1201a0a7580a8c41d0501c95826c900f84e1db45
Author: Yuming Wang <yumw...@ebay.com>
AuthorDate: Fri Jun 21 18:40:23 2019 -0700

    [SPARK-28093][SPARK-28109][SQL][2.3] Fix TRIM/LTRIM/RTRIM function parameter order issue
    
    ## What changes were proposed in this pull request?
    
    This PR backports #24902 and #24911 to branch-2.3.
    
    ## How was this patch tested?
    
    unit tests
    
    Closes #24908 from wangyum/SPARK-28093-branch-2.3.
    
    Authored-by: Yuming Wang <yumw...@ebay.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../apache/spark/sql/catalyst/parser/SqlBase.g4    |  6 +-
 .../catalyst/expressions/stringExpressions.scala   |  6 +-
 .../spark/sql/catalyst/parser/AstBuilder.scala     | 43 +++++++-------
 .../expressions/StringExpressionsSuite.scala       | 11 ++++
 .../sql/catalyst/parser/PlanParserSuite.scala      | 24 +++++---
 .../parser/TableIdentifierParserSuite.scala        |  2 +-
 .../sql-tests/inputs/string-functions.sql          | 12 +++-
 .../sql-tests/results/string-functions.sql.out     | 66 +++++++++++++++++++++-
 8 files changed, 130 insertions(+), 40 deletions(-)
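
For readers skimming the diff: the user-visible effect is that the two-argument forms of trim/ltrim/rtrim now take the source string first and the characters to strip second, consistent with the ANSI TRIM ... FROM syntax. A minimal sketch, assuming spark is the SparkSession provided by spark-shell, with the expected value taken from the regenerated golden file further down:

    // After this patch both spellings agree; before it, the two-argument form
    // applied its arguments in the reverse order.
    spark.sql("SELECT trim('yxTomxx', 'xyz'), trim(BOTH 'xyz' FROM 'yxTomxx')").show()
    // expected: both columns return Tom (see the regenerated golden file below)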

diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 5fa75fe..15fd48b 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -582,12 +582,12 @@ primaryExpression
     | '(' query ')'                                                                              #subqueryExpression
     | qualifiedName '(' (setQuantifier? argument+=expression (',' argument+=expression)*)? ')'
        (OVER windowSpec)?                                                                        #functionCall
-    | qualifiedName '(' trimOption=(BOTH | LEADING | TRAILING) argument+=expression
-      FROM argument+=expression ')'                                                              #functionCall
     | value=primaryExpression '[' index=valueExpression ']'                                      #subscript
     | identifier                                                                                 #columnReference
     | base=primaryExpression '.' fieldName=identifier                                            #dereference
     | '(' expression ')'                                                                         #parenthesizedExpression
+    | TRIM '(' trimOption=(BOTH | LEADING | TRAILING) (trimStr=valueExpression)?
+       FROM srcStr=valueExpression ')'                                                           #trim
     ;
 
 constant
@@ -735,6 +735,7 @@ nonReserved
     | VIEW | REPLACE
     | IF
     | POSITION
+    | TRIM
     | NO | DATA
     | START | TRANSACTION | COMMIT | ROLLBACK | IGNORE
     | SORT | CLUSTER | DISTRIBUTE | UNSET | TBLPROPERTIES | SKEWED | STORED | DIRECTORIES | LOCATION
@@ -872,6 +873,7 @@ TRAILING: 'TRAILING';
 
 IF: 'IF';
 POSITION: 'POSITION';
+TRIM: 'TRIM';
 
 EQ  : '=' | '==';
 NSEQ: '<=>';
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index c855581..1166e77 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -714,7 +714,7 @@ case class StringTrim(
     trimStr: Option[Expression] = None)
   extends String2TrimExpression {
 
-  def this(trimStr: Expression, srcStr: Expression) = this(srcStr, Option(trimStr))
+  def this(srcStr: Expression, trimStr: Expression) = this(srcStr, Option(trimStr))
 
   def this(srcStr: Expression) = this(srcStr, None)
 
@@ -814,7 +814,7 @@ case class StringTrimLeft(
     trimStr: Option[Expression] = None)
   extends String2TrimExpression {
 
-  def this(trimStr: Expression, srcStr: Expression) = this(srcStr, Option(trimStr))
+  def this(srcStr: Expression, trimStr: Expression) = this(srcStr, Option(trimStr))
 
   def this(srcStr: Expression) = this(srcStr, None)
 
@@ -917,7 +917,7 @@ case class StringTrimRight(
     trimStr: Option[Expression] = None)
   extends String2TrimExpression {
 
-  def this(trimStr: Expression, srcStr: Expression) = this(srcStr, Option(trimStr))
+  def this(srcStr: Expression, trimStr: Expression) = this(srcStr, Option(trimStr))
 
   def this(srcStr: Expression) = this(srcStr, None)
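
These auxiliary constructors are what the function registry resolves for the two-argument SQL calls, so the parameter swap above is the heart of the fix. A small illustrative sketch, not part of the patch, with the expected value taken from the test additions further below:

    import org.apache.spark.sql.catalyst.expressions.{Literal, StringTrim}

    // With the corrected constructor the source string comes first and the
    // characters to strip come second (previously reversed).
    val e = new StringTrim(Literal("yxTomxx"), Literal("xyz"))
    // e.eval() is expected to evaluate to the UTF8String "Tom".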
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index bdc357d..418e029 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -1186,29 +1186,28 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   }
 
   /**
+   * Create a Trim expression.
+   */
+  override def visitTrim(ctx: TrimContext): Expression = withOrigin(ctx) {
+    val srcStr = expression(ctx.srcStr)
+    val trimStr = Option(ctx.trimStr).map(expression)
+    ctx.trimOption.getType match {
+      case SqlBaseParser.BOTH =>
+        StringTrim(srcStr, trimStr)
+      case SqlBaseParser.LEADING =>
+        StringTrimLeft(srcStr, trimStr)
+      case SqlBaseParser.TRAILING =>
+        StringTrimRight(srcStr, trimStr)
+      case other =>
+        throw new ParseException("Function trim doesn't support with " +
+          s"type $other. Please use BOTH, LEADING or TRAILING as trim type", 
ctx)
+    }
+  }
+
+  /**
    * Create a (windowed) Function expression.
    */
   override def visitFunctionCall(ctx: FunctionCallContext): Expression = withOrigin(ctx) {
-    def replaceFunctions(
-        funcID: FunctionIdentifier,
-        ctx: FunctionCallContext): FunctionIdentifier = {
-      val opt = ctx.trimOption
-      if (opt != null) {
-        if (ctx.qualifiedName.getText.toLowerCase(Locale.ROOT) != "trim") {
-          throw new ParseException(s"The specified function ${ctx.qualifiedName.getText} " +
-            s"doesn't support with option ${opt.getText}.", ctx)
-        }
-        opt.getType match {
-          case SqlBaseParser.BOTH => funcID
-          case SqlBaseParser.LEADING => funcID.copy(funcName = "ltrim")
-          case SqlBaseParser.TRAILING => funcID.copy(funcName = "rtrim")
-          case _ => throw new ParseException("Function trim doesn't support with " +
-            s"type ${opt.getType}. Please use BOTH, LEADING or Trailing as trim type", ctx)
-        }
-      } else {
-        funcID
-      }
-    }
     // Create the function call.
     val name = ctx.qualifiedName.getText
     val isDistinct = Option(ctx.setQuantifier()).exists(_.DISTINCT != null)
@@ -1220,9 +1219,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
       case expressions =>
         expressions
     }
-    val funcId = replaceFunctions(visitFunctionName(ctx.qualifiedName), ctx)
-    val function = UnresolvedFunction(funcId, arguments, isDistinct)
-
+    val function = UnresolvedFunction(visitFunctionName(ctx.qualifiedName), arguments, isDistinct)
 
     // Check if the function is evaluated in a windowed context.
     ctx.windowSpec match {
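
With the dedicated grammar rule and the visitTrim method above, the ANSI TRIM syntax is parsed straight into the trim expression classes instead of being rewritten to a function name. An illustrative sketch, not part of the patch, with the expected result taken from the PlanParserSuite changes below:

    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

    // TRIM(LEADING ... FROM ...) should now parse to a StringTrimLeft with the
    // source string first and the optional trim string second.
    val parsed = CatalystSqlParser.parseExpression("TRIM(LEADING 'c []' FROM '[ ccccbcc ')")
    // expected: StringTrimLeft(Literal("[ ccccbcc "), Some(Literal("c []")))
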
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index 97ddbeb..45b93ec 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -466,6 +466,9 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     // scalastyle:on
     checkEvaluation(StringTrim(Literal("a"), Literal.create(null, 
StringType)), null)
     checkEvaluation(StringTrim(Literal.create(null, StringType), 
Literal("a")), null)
+
+    checkEvaluation(StringTrim(Literal("yxTomxx"), Literal("xyz")), "Tom")
+    checkEvaluation(StringTrim(Literal("xxxbarxxx"), Literal("x")), "bar")
   }
 
   test("LTRIM") {
@@ -490,6 +493,10 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     // scalastyle:on
     checkEvaluation(StringTrimLeft(Literal.create(null, StringType), Literal("a")), null)
     checkEvaluation(StringTrimLeft(Literal("a"), Literal.create(null, StringType)), null)
+
+    checkEvaluation(StringTrimLeft(Literal("zzzytest"), Literal("xyz")), "test")
+    checkEvaluation(StringTrimLeft(Literal("zzzytestxyz"), Literal("xyz")), "testxyz")
+    checkEvaluation(StringTrimLeft(Literal("xyxXxyLAST WORD"), Literal("xy")), "XxyLAST WORD")
   }
 
   test("RTRIM") {
@@ -515,6 +522,10 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     // scalastyle:on
     checkEvaluation(StringTrimRight(Literal("a"), Literal.create(null, 
StringType)), null)
     checkEvaluation(StringTrimRight(Literal.create(null, StringType), 
Literal("a")), null)
+
+    checkEvaluation(StringTrimRight(Literal("testxxzx"), Literal("xyz")), 
"test")
+    checkEvaluation(StringTrimRight(Literal("xyztestxxzx"), Literal("xyz")), 
"xyztest")
+    checkEvaluation(StringTrimRight(Literal("TURNERyxXxy"), Literal("xy")), 
"TURNERyxX")
   }
 
   test("FORMAT") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index 812bfdd..687f5fa 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.catalyst.parser
 
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedAttribute, UnresolvedFunction, UnresolvedGenerator, UnresolvedInlineTable, UnresolvedRelation, UnresolvedSubqueryColumnAliases, UnresolvedTableValuedFunction}
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction, UnresolvedGenerator, UnresolvedInlineTable, UnresolvedRelation, UnresolvedSubqueryColumnAliases, UnresolvedTableValuedFunction}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
@@ -652,20 +652,26 @@ class PlanParserSuite extends AnalysisTest {
   }
 
   test("TRIM function") {
-    intercept("select ltrim(both 'S' from 'SS abc S'", "missing ')' at 
'<EOF>'")
-    intercept("select rtrim(trailing 'S' from 'SS abc S'", "missing ')' at 
'<EOF>'")
+    def assertTrimPlans(inputSQL: String, expectedExpression: Expression): 
Unit = {
+      comparePlans(
+        parsePlan(inputSQL),
+        Project(Seq(UnresolvedAlias(expectedExpression)), OneRowRelation())
+      )
+    }
+    intercept("select ltrim(both 'S' from 'SS abc S'", "mismatched input 
'from' expecting {')'")
+    intercept("select rtrim(trailing 'S' from 'SS abc S'", "mismatched input 
'from' expecting {')'")
 
-    assertEqual(
+    assertTrimPlans(
       "SELECT TRIM(BOTH '@$%&( )abc' FROM '@ $ % & ()abc ' )",
-        OneRowRelation().select('TRIM.function("@$%&( )abc", "@ $ % & ()abc "))
+      StringTrim(Literal("@ $ % & ()abc "), Some(Literal("@$%&( )abc")))
     )
-    assertEqual(
+    assertTrimPlans(
       "SELECT TRIM(LEADING 'c []' FROM '[ ccccbcc ')",
-        OneRowRelation().select('ltrim.function("c []", "[ ccccbcc "))
+      StringTrimLeft(Literal("[ ccccbcc "), Some(Literal("c []")))
     )
-    assertEqual(
+    assertTrimPlans(
       "SELECT TRIM(TRAILING 'c&^,.' FROM 'bc...,,,&&&ccc')",
-      OneRowRelation().select('rtrim.function("c&^,.", "bc...,,,&&&ccc"))
+      StringTrimRight(Literal("bc...,,,&&&ccc"), Some(Literal("c&^,.")))
     )
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
index cc80a41..50e17b1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
@@ -51,7 +51,7 @@ class TableIdentifierParserSuite extends SparkFunSuite {
     "rollup", "row", "rows", "set", "smallint", "table", "timestamp", "to", 
"trigger",
     "true", "truncate", "update", "user", "values", "with", "regexp", "rlike",
     "bigint", "binary", "boolean", "current_date", "current_timestamp", 
"date", "double", "float",
-    "int", "smallint", "timestamp", "at", "position", "both", "leading", 
"trailing")
+    "int", "smallint", "timestamp", "at", "position", "both", "leading", 
"trailing", "trim")
 
   val hiveStrictNonReservedKeyword = Seq("anti", "full", "inner", "left", "semi", "right",
     "natural", "union", "intersect", "except", "database", "on", "join", "cross", "select", "from",
diff --git a/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
index 4113734..f63dbec 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
@@ -46,4 +46,14 @@ FROM (
     encode(string(id + 2), 'utf-8') col3,
     encode(string(id + 3), 'utf-8') col4
   FROM range(10)
-)
+);
+
+-- trim/ltrim/rtrim
+SELECT trim('yxTomxx', 'xyz'), trim(BOTH 'xyz' FROM 'yxTomxx');
+SELECT trim('xxxbarxxx', 'x'), trim(BOTH 'x' FROM 'xxxbarxxx');
+SELECT ltrim('zzzytest', 'xyz'), trim(LEADING 'xyz' FROM 'zzzytest');
+SELECT ltrim('zzzytestxyz', 'xyz'), trim(LEADING 'xyz' FROM 'zzzytestxyz');
+SELECT ltrim('xyxXxyLAST WORD', 'xy'), trim(LEADING 'xy' FROM 'xyxXxyLAST WORD');
+SELECT rtrim('testxxzx', 'xyz'), trim(TRAILING 'xyz' FROM 'testxxzx');
+SELECT rtrim('xyztestxxzx', 'xyz'), trim(TRAILING 'xyz' FROM 'xyztestxxzx');
+SELECT rtrim('TURNERyxXxy', 'xy'), trim(TRAILING 'xy' FROM 'TURNERyxXxy');
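
The new queries above pair the two-argument form with the equivalent TRIM ... FROM spelling, so the regenerated golden file below should show identical values in both columns. A quick sketch of the one-sided variants, assuming spark is the SparkSession from spark-shell, with expected values taken from that golden file:

    // ltrim/rtrim strip the given characters from one side only, so characters
    // from the set at the other end are kept.
    spark.sql("SELECT ltrim('zzzytestxyz', 'xyz'), rtrim('xyztestxxzx', 'xyz')").show()
    // expected row: testxyz | xyztest
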
diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index d5f8705..aace15f 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 15
+-- Number of queries: 23
 
 
 -- !query 0
@@ -161,3 +161,67 @@ struct<plan:string>
 == Physical Plan ==
 *Project [concat(cast(id#xL as string), cast(encode(cast((id#xL + 2) as string), utf-8) as string), cast(encode(cast((id#xL + 3) as string), utf-8) as string)) AS col#x]
 +- *Range (0, 10, step=1, splits=2)
+
+
+-- !query 15
+SELECT trim('yxTomxx', 'xyz'), trim(BOTH 'xyz' FROM 'yxTomxx')
+-- !query 15 schema
+struct<trim(yxTomxx, xyz):string,trim(yxTomxx, xyz):string>
+-- !query 15 output
+Tom    Tom
+
+
+-- !query 16
+SELECT trim('xxxbarxxx', 'x'), trim(BOTH 'x' FROM 'xxxbarxxx')
+-- !query 16 schema
+struct<trim(xxxbarxxx, x):string,trim(xxxbarxxx, x):string>
+-- !query 16 output
+bar    bar
+
+
+-- !query 17
+SELECT ltrim('zzzytest', 'xyz'), trim(LEADING 'xyz' FROM 'zzzytest')
+-- !query 17 schema
+struct<ltrim(zzzytest, xyz):string,ltrim(zzzytest, xyz):string>
+-- !query 17 output
+test   test
+
+
+-- !query 18
+SELECT ltrim('zzzytestxyz', 'xyz'), trim(LEADING 'xyz' FROM 'zzzytestxyz')
+-- !query 18 schema
+struct<ltrim(zzzytestxyz, xyz):string,ltrim(zzzytestxyz, xyz):string>
+-- !query 18 output
+testxyz        testxyz
+
+
+-- !query 19
+SELECT ltrim('xyxXxyLAST WORD', 'xy'), trim(LEADING 'xy' FROM 'xyxXxyLAST WORD')
+-- !query 19 schema
+struct<ltrim(xyxXxyLAST WORD, xy):string,ltrim(xyxXxyLAST WORD, xy):string>
+-- !query 19 output
+XxyLAST WORD   XxyLAST WORD
+
+
+-- !query 20
+SELECT rtrim('testxxzx', 'xyz'), trim(TRAILING 'xyz' FROM 'testxxzx')
+-- !query 20 schema
+struct<rtrim(testxxzx, xyz):string,rtrim(testxxzx, xyz):string>
+-- !query 20 output
+test   test
+
+
+-- !query 21
+SELECT rtrim('xyztestxxzx', 'xyz'), trim(TRAILING 'xyz' FROM 'xyztestxxzx')
+-- !query 21 schema
+struct<rtrim(xyztestxxzx, xyz):string,rtrim(xyztestxxzx, xyz):string>
+-- !query 21 output
+xyztest        xyztest
+
+
+-- !query 22
+SELECT rtrim('TURNERyxXxy', 'xy'), trim(TRAILING 'xy' FROM 'TURNERyxXxy')
+-- !query 22 schema
+struct<rtrim(TURNERyxXxy, xy):string,rtrim(TURNERyxXxy, xy):string>
+-- !query 22 output
+TURNERyxX      TURNERyxX


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
