This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new 1d6b2a07fe5 [SPARK-42163][SQL] Fix schema pruning for non-foldable array index or map key
1d6b2a07fe5 is described below

commit 1d6b2a07fe5d66832e740c4e9d669f9cfd51dab1
Author: cashmand <david.cash...@databricks.com>
AuthorDate: Tue Jan 31 10:16:15 2023 +0800

    [SPARK-42163][SQL] Fix schema pruning for non-foldable array index or map key
    
    ### What changes were proposed in this pull request?
    
    In Parquet schema pruning, we use SelectedField to try to extract the struct field
    that an expression accesses. It looks through GetArrayItem/GetMapItem, but when doing
    so it ignores the index/key, which may itself be a struct field. If that index/key
    field is not selected by some other expression, while another field of the same
    attribute is selected, pruning will drop the field, resulting in an optimizer error.
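
    For illustration, a query of the shape exercised by the new test (reusing the
    `contacts` data and its `friends`/`employer` columns from SchemaPruningSuite) would
    previously hit this:

        // The array index `employer.id` is itself a struct field. Before this fix,
        // pruning could drop `employer.id` from the scan schema while keeping
        // `employer.company`, resulting in an optimizer error.
        sql("SELECT employer.company, friends[employer.id].first FROM contacts")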
    
    This change modifies SelectedField to only look through GetArrayItem/GetMapItem
    if the index/key argument is foldable. The equivalent code for `ElementAt` was
    already doing the same thing, so this just makes them consistent.
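
    As a rough sketch (not part of the patch) of what "foldable" means here: Catalyst's
    `Expression.foldable` marks expressions that can be evaluated to a constant before
    execution.

        import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Literal}
        import org.apache.spark.sql.types.IntegerType

        Literal(0).foldable                               // true: pruning still looks through GetArrayItem
        AttributeReference("id", IntegerType)().foldable  // false: pruning now stops at GetArrayItem/GetMapValue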
    
    In principle, we could continue to traverse through these expressions; we'd just
    need to make sure that the index/key expression was also surfaced to column pruning
    as an expression that needs to be examined. But this seems like a fairly non-trivial
    change to the design of the SelectedField class.
    
    There is some risk that the current approach could cause a regression, e.g. if an
    existing GetArrayItem with a non-foldable index argument was previously being pruned
    successfully and happened not to trigger an error (because the index is not a struct
    field, or it is preserved due to some other expression).
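
    A hypothetical example of such a regression (the integer column `idx` is
    illustrative only and not part of the test data):

        // If `idx` is a plain integer column (not a struct field), the old code pruned
        // `friends` down to just `first` despite the non-foldable index and happened
        // not to fail; after this change the index is non-foldable, so the elements of
        // `friends` are no longer pruned.
        sql("SELECT friends[idx].first FROM contacts")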
    
    ### Why are the changes needed?
    
    Allows queries that previously failed in the optimizer to succeed.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes. As described above, there could be a performance regression if a query was
    previously pruning through a GetArrayItem/GetMapItem and happened not to fail.
    
    ### How was this patch tested?
    
    A unit test is included in the patch; it fails without the fix and passes with it.
    
    Closes #39718 from cashmand/fix_selected_field.
    
    Authored-by: cashmand <david.cash...@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit 16cfa092d2c23a1cca45acd62e2095a72cb27d86)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../sql/catalyst/expressions/SelectedField.scala   |  7 +++--
 .../execution/datasources/SchemaPruningSuite.scala | 30 ++++++++++++++++++++++
 2 files changed, 35 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SelectedField.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SelectedField.scala
index ee2b4969222..820dc452d7e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SelectedField.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SelectedField.scala
@@ -96,9 +96,10 @@ object SelectedField {
         }
         val newField = StructField(field.name, newFieldDataType, field.nullable)
         selectField(child, Option(ArrayType(struct(newField), containsNull)))
-      case GetMapValue(child, _) =>
+      case GetMapValue(child, key) if key.foldable =>
         // GetMapValue does not select a field from a struct (i.e. prune the struct) so it can't be
         // the top-level extractor. However it can be part of an extractor chain.
+        // See comment on GetArrayItem regarding the need for key.foldable
         val MapType(keyType, _, valueContainsNull) = child.dataType
         val opt = dataTypeOpt.map(dt => MapType(keyType, dt, valueContainsNull))
         selectField(child, opt)
@@ -124,9 +125,11 @@ object SelectedField {
             throw QueryCompilationErrors.dataTypeUnsupportedByClassError(x, "MapKeys")
         }
         selectField(child, opt)
-      case GetArrayItem(child, _, _) =>
+      case GetArrayItem(child, index, _) if index.foldable =>
         // GetArrayItem does not select a field from a struct (i.e. prune the struct) so it can't be
         // the top-level extractor. However it can be part of an extractor chain.
+        // If index is not foldable, we'd need to also return the field selected by index, which
+        // the SelectedField interface doesn't support, so only allow a foldable index for now.
         val ArrayType(_, containsNull) = child.dataType
         val opt = dataTypeOpt.map(dt => ArrayType(dt, containsNull))
         selectField(child, opt)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/SchemaPruningSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/SchemaPruningSuite.scala
index f9a8c67fc9f..bd9c79e5b96 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/SchemaPruningSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/SchemaPruningSuite.scala
@@ -1131,4 +1131,34 @@ abstract class SchemaPruningSuite
       .select($"id", $"name.first")
     checkScan(query, "struct<id:int, name:struct<first:string>>")
   }
+
+  testSchemaPruning("SPARK-42163: GetArrayItem and GetMapItem with 
non-foldable index") {
+    // Technically, there's no reason that we can't support a non-foldable index, it's just tricky
+    // with the existing pruning code. If we ever do support it, this test can be modified to check
+    // for a narrower scan schema.
+    val arrayQuery =
+      sql("""
+            |SELECT
+            |employer.company, friends[employer.id].first
+            |FROM contacts
+            |""".stripMargin)
+    checkScan(arrayQuery,
+        """struct<friends:array<struct<first:string,middle:string,last:string>>,
+          |employer:struct<id:int,company:struct<name:string,address:string>>>""".stripMargin)
+    checkAnswer(arrayQuery,
+      Row(Row("abc", "123 Business Street"), "Susan") ::
+      Row(null, null) :: Row(null, null) :: Row(null, null) :: Nil)
+
+    val mapQuery =
+      sql("""
+            |SELECT
+            |employer.id, relatives[employer.company.name].first
+            |FROM contacts
+            |""".stripMargin)
+    checkScan(mapQuery,
+        """struct<relatives:map<string,struct<first:string,middle:string,last:string>>,
+          |employer:struct<id:int,company:struct<name:string>>>""".stripMargin)
+    checkAnswer(mapQuery, Row(0, null) :: Row(1, null) ::
+      Row(null, null) :: Row(null, null) :: Nil)
+  }
 }

