This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e05441c  [SPARK-29519][SQL][FOLLOWUP] Keep output deterministic for SHOW TBLPROPERTIES
e05441c is described below

commit e05441c223bc3a0479bdae7969b4827c85fdf0ed
Author: PengLei <peng.8...@gmail.com>
AuthorDate: Thu Jul 15 21:44:10 2021 +0800

    [SPARK-29519][SQL][FOLLOWUP] Keep output deterministic for SHOW TBLPROPERTIES
    
    ### What changes were proposed in this pull request?
    Keep the output order deterministic for `SHOW TBLPROPERTIES`.
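
    A minimal standalone sketch (not part of this patch; the property names and values are made up for illustration) of why the change matters: iterating a `Map`'s keys carries no ordering guarantee, while sorting the entries by key yields a stable result, which is what `ShowTablePropertiesExec` now does.

    ```scala
    // Illustrative property map; keys and values are hypothetical.
    val properties = Map("user" -> "andrew", "status" -> "new", "provider" -> "foo")

    // Iteration order of a Map's keys is implementation-dependent and not guaranteed stable.
    val unordered: Seq[(String, String)] = properties.keys.map(k => (k, properties(k))).toSeq

    // Sorting the entries by key makes the output deterministic.
    val ordered: Seq[(String, String)] = properties.toSeq.sortBy(_._1)
    // ordered == Seq(("provider", "foo"), ("status", "new"), ("user", "andrew"))
    ```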
    
    ### Why are the changes needed?
    See the discussion in [#33343](https://github.com/apache/spark/pull/33343#issue-689828187).
    A deterministic output order is more meaningful and predictable for users.
    
    The existing test case for `SHOW TBLPROPERTIES` sorts the properties before comparing the result, so it does not fail, but ideally the output itself should be ordered and deterministic.
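
    With the output now key-sorted, a test can assert the ordering directly instead of calling `.orderBy("key")` first. A hedged sketch of such a check (the table name is illustrative, and this assumes the surrounding Spark SQL test suite provides `sql`):

    ```scala
    // Inside a Spark SQL test suite; the table name below is hypothetical.
    val rows = sql("SHOW TBLPROPERTIES testcat.ns1.tbl").collect()
    val keys = rows.map(_.getString(0)).toSeq
    // The keys should already come back sorted, with no extra orderBy("key") needed.
    assert(keys == keys.sorted)
    ```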
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Existing unit tests.
    
    Closes #33353 from Peng-Lei/order-ouput-properties.
    
    Authored-by: PengLei <peng.8...@gmail.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala  | 4 ++--
 .../scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala   | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala
index 33d7337..3a75259 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala
@@ -41,8 +41,8 @@ case class ShowTablePropertiesExec(
          .getOrElse(p, s"Table ${catalogTable.name} does not have property: $p")
         Seq(toCatalystRow(p, propValue))
       case None =>
-        properties.keys.map(k =>
-          toCatalystRow(k, properties(k))).toSeq
+        properties.toSeq.sortBy(_._1).map(kv =>
+          toCatalystRow(kv._1, kv._2)).toSeq
     }
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index ac82721..a326b82 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2129,7 +2129,7 @@ class DataSourceV2SQLSuite
       spark.sql(s"CREATE TABLE $t (id bigint, data string) USING $provider " +
         s"TBLPROPERTIES ('user'='$user', 'status'='$status')")
 
-      val properties = sql(s"SHOW TBLPROPERTIES $t").orderBy("key")
+      val properties = sql(s"SHOW TBLPROPERTIES $t")
 
       val schema = new StructType()
         .add("key", StringType, nullable = false)

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
