This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 90d83479a16 [SPARK-39319][FOLLOW-UP][SQL] Make TreeNode.context as lazy val
90d83479a16 is described below

commit 90d83479a16cb594aa1ee6c6a8219dbb7d859752
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Wed Jul 27 10:57:31 2022 +0900

    [SPARK-39319][FOLLOW-UP][SQL] Make TreeNode.context as lazy val
    
    ### What changes were proposed in this pull request?
    
    - Make TreeNode.context a lazy val
    - Code clean up in SQLQueryContext
    
    ### Why are the changes needed?
    
    Making TreeNode.context a lazy val saves memory: the context is only needed
    for certain expressions under ANSI SQL mode, as the sketch below illustrates.
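    
    A minimal, self-contained sketch of the idea (not Spark's actual classes;
    `FakeOrigin`, `FakeContext`, and the `built` counter are invented for
    illustration): with a plain `val`, a context object is allocated for every
    instance, while `lazy val` defers construction to the first access.
    
    ```scala
    object LazyValSketch {
      // Stand-in for SQLQueryContext; the companion counts how many get built.
      final case class FakeContext(sqlText: Option[String])
      object FakeContext { var built = 0 }
    
      // Stand-in for Origin. With a plain `val`, a FakeContext would be built
      // for every FakeOrigin; with `lazy val`, only when `context` is read.
      final case class FakeOrigin(sqlText: Option[String] = None) {
        lazy val context: FakeContext = {
          FakeContext.built += 1
          FakeContext(sqlText)
        }
      }
    
      def main(args: Array[String]): Unit = {
        val origins = Seq.fill(1000)(FakeOrigin(Some("SELECT 1")))
        println(s"contexts built before any access: ${FakeContext.built}") // 0
        origins.head.context                                               // force one
        println(s"contexts built after one access:  ${FakeContext.built}") // 1
      }
    }
    ```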
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Existing UT
    
    Closes #37307 from gengliangwang/lazyVal.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../apache/spark/sql/catalyst/trees/SQLQueryContext.scala | 15 +++++++++------
 .../org/apache/spark/sql/catalyst/trees/TreeNode.scala    |  2 +-
 2 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
index 8f75079fcf9..a8806dbad4d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
@@ -42,9 +42,7 @@ case class SQLQueryContext(
    */
   lazy val summary: String = {
     // If the query context is missing or incorrect, simply return an empty string.
-    if (sqlText.isEmpty || originStartIndex.isEmpty || originStopIndex.isEmpty ||
-      originStartIndex.get < 0 || originStopIndex.get >= sqlText.get.length ||
-      originStartIndex.get > originStopIndex.get) {
+    if (!isValid) {
       ""
     } else {
       val positionContext = if (line.isDefined && startPosition.isDefined) {
@@ -119,12 +117,17 @@ case class SQLQueryContext(
 
   /** Gets the textual fragment of a SQL query. */
   override lazy val fragment: String = {
-    if (sqlText.isEmpty || originStartIndex.isEmpty || originStopIndex.isEmpty ||
-      originStartIndex.get < 0 || originStopIndex.get >= sqlText.get.length ||
-      originStartIndex.get > originStopIndex.get) {
+    if (!isValid) {
       ""
     } else {
       sqlText.get.substring(originStartIndex.get, originStopIndex.get)
     }
   }
+
+  private def isValid: Boolean = {
+    sqlText.isDefined && originStartIndex.isDefined && originStopIndex.isDefined &&
+      originStartIndex.get >= 0 && originStopIndex.get < sqlText.get.length &&
+      originStartIndex.get <= originStopIndex.get
+
+  }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index b8cfdcdbe7f..8f5858d2f4d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -66,7 +66,7 @@ case class Origin(
     objectType: Option[String] = None,
     objectName: Option[String] = None) {
 
-  val context: SQLQueryContext = SQLQueryContext(
+  lazy val context: SQLQueryContext = SQLQueryContext(
     line, startPosition, startIndex, stopIndex, sqlText, objectType, objectName)
 }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
