This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new cc4ecb5104e [SPARK-45398][SQL] Append `ESCAPE` in `sql()` of the `Like` expression
cc4ecb5104e is described below

commit cc4ecb5104e37d5e530d44b41fc1d8f8116e37d8
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Wed Oct 4 11:35:05 2023 +0300

    [SPARK-45398][SQL] Append `ESCAPE` in `sql()` of the `Like` expression
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to fix the `sql()` method of the `Like` expression so that it appends the `ESCAPE` clause when `escapeChar` is not the default one (`\\`).
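
    For illustration, a simplified sketch of the new behavior (the actual change lives in `regexpExpressions.scala` and uses `Literal(escapeChar).sql` to quote the escape character):
    ```scala
    // Simplified sketch, not the exact Spark internals: the ESCAPE clause is
    // appended only when the escape character differs from the default '\\'.
    def likeSql(leftSql: String, rightSql: String, escapeChar: Char): String = {
      val escapeSuffix = if (escapeChar == '\\') "" else s" ESCAPE '$escapeChar'"
      s"$leftSql LIKE $rightSql" + escapeSuffix
    }

    likeSql("'a|_'", "'a||_'", '|')  // "'a|_' LIKE 'a||_' ESCAPE '|'"
    likeSql("'a|_'", "'a||_'", '\\') // "'a|_' LIKE 'a||_'"
    ```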
    
    ### Why are the changes needed?
    1. To be consistent with the `toString()` method
    2. To distinguish column names when the escape argument is set. Before the changes, the generated column names could conflict, as in the example below, which could confuse users:
    ```sql
    spark-sql (default)> create temp view tbl as (SELECT 'a|_' like 'a||_' escape '|', 'a|_' like 'a||_' escape 'a');
    [COLUMN_ALREADY_EXISTS] The column `a|_ like a||_` already exists. Consider to choose another name or rename the existing column.
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    Should not.
    
    ### How was this patch tested?
    Manually checking the column name by:
    ```sql
    spark-sql (default)> create temp view tbl as (SELECT 'a|_' like 'a||_' escape '|', 'a|_' like 'a||_' escape 'a');
    Time taken: 0.531 seconds
    spark-sql (default)> describe extended tbl;
    a|_ LIKE a||_ ESCAPE '|'        boolean
    a|_ LIKE a||_ ESCAPE 'a'        boolean
    ```
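
    A programmatic check of the same thing could look like the following (a hypothetical test-style snippet, not part of this patch; the expected string follows from the new `sql()` logic):
    ```scala
    import org.apache.spark.sql.catalyst.expressions.{Like, Literal}

    // With a non-default escape character, the ESCAPE clause must appear in sql().
    val e = Like(Literal("a|_"), Literal("a||_"), '|')
    assert(e.sql == "'a|_' LIKE 'a||_' ESCAPE '|'")
    ```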
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #43196 from MaxGekk/fix-like-sql.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../explain-results/function_like_with_escape.explain         |  2 +-
 .../spark/sql/catalyst/expressions/regexpExpressions.scala    | 11 +++++++----
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/connector/connect/common/src/test/resources/query-tests/explain-results/function_like_with_escape.explain b/connector/connect/common/src/test/resources/query-tests/explain-results/function_like_with_escape.explain
index 471a3a4bd52..1a15a27d97e 100644
--- a/connector/connect/common/src/test/resources/query-tests/explain-results/function_like_with_escape.explain
+++ b/connector/connect/common/src/test/resources/query-tests/explain-results/function_like_with_escape.explain
@@ -1,2 +1,2 @@
-Project [g#0 LIKE g#0 ESCAPE '/' AS g LIKE g#0]
+Project [g#0 LIKE g#0 ESCAPE '/' AS g LIKE g ESCAPE '/'#0]
 +- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
index 5ebfdd919b8..69d90296d7f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
@@ -133,12 +133,15 @@ case class Like(left: Expression, right: Expression, escapeChar: Char)
 
   final override val nodePatterns: Seq[TreePattern] = Seq(LIKE_FAMLIY)
 
-  override def toString: String = escapeChar match {
-    case '\\' => s"$left LIKE $right"
-    case c => s"$left LIKE $right ESCAPE '$c'"
+  override def toString: String = {
+    val escapeSuffix = if (escapeChar == '\\') "" else s" ESCAPE '$escapeChar'"
+    s"$left ${prettyName.toUpperCase(Locale.ROOT)} $right" + escapeSuffix
   }
 
-  override def sql: String = s"${left.sql} ${prettyName.toUpperCase(Locale.ROOT)} ${right.sql}"
+  override def sql: String = {
+    val escapeSuffix = if (escapeChar == '\\') "" else s" ESCAPE ${Literal(escapeChar).sql}"
+    s"${left.sql} ${prettyName.toUpperCase(Locale.ROOT)} ${right.sql}" + escapeSuffix
+  }
 
   override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val patternClass = classOf[Pattern].getName

