This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d5715a9  [SPARK-27772][SQL][TEST] Refactor SQLTestUtils to use `tryWithSafeFinally`
d5715a9 is described below

commit d5715a9b230624ea08db9a86556a1564b0d6e8b2
Author: williamwong <william1...@gmail.com>
AuthorDate: Tue Jun 4 09:26:24 2019 -0500

    [SPARK-27772][SQL][TEST] Refactor SQLTestUtils to use `tryWithSafeFinally`
    
    ## What changes were proposed in this pull request?
    
    The current `SQLTestUtils` defines many `withXXX` utility functions to clean up
    tables/views/caches created for testing purposes. Java's `try-with-resources` statement does
    something similar, but it does not mask an exception thrown in the try block with an exception
    thrown from the `close()` method; an exception thrown from `close()` is added as a suppressed
    exception instead.
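    
    For illustration, here is a minimal, hypothetical Scala sketch of that masking problem with a
    plain `try ... finally`; the object name and messages are made up for this example and are not
    part of the patch:
    
    ```scala
    // If both the body and the cleanup throw, a plain `finally` surfaces only the
    // cleanup error; the original failure is lost and nothing is recorded as suppressed.
    object FinallyMaskingDemo {
      def main(args: Array[String]): Unit = {
        try {
          try {
            throw new RuntimeException("test body failed")    // the interesting failure
          } finally {
            throw new IllegalStateException("cleanup failed") // replaces the failure above
          }
        } catch {
          case e: Throwable =>
            println(e.getMessage)           // prints "cleanup failed"
            println(e.getSuppressed.length) // prints 0: nothing was attached as suppressed
        }
      }
    }
    ```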
    
    This PR standardizes those `withXXX` functions on the `Utils.tryWithSafeFinally` function,
    which behaves similarly to Java's try-with-resources statement. The purpose of this change is
    to help developers identify what actually breaks their tests.
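    
    For context, the following is a rough, illustrative sketch of what such a helper can look like;
    the real implementation lives in `org.apache.spark.util.Utils` and may differ in detail, and the
    enclosing object name here is made up:
    
    ```scala
    object TryWithSafeFinallySketch {
      // Run `block`, always run `finallyBlock`; if both throw, rethrow the original
      // exception from `block` with the cleanup failure attached as a suppressed exception.
      def tryWithSafeFinally[T](block: => T)(finallyBlock: => Unit): T = {
        var originalThrowable: Throwable = null
        try {
          block
        } catch {
          case t: Throwable =>
            originalThrowable = t
            throw t
        } finally {
          try {
            finallyBlock
          } catch {
            case t: Throwable if originalThrowable != null && originalThrowable != t =>
              // Keep the failure from `block` visible instead of letting the
              // cleanup failure replace it.
              originalThrowable.addSuppressed(t)
              throw originalThrowable
          }
        }
      }
    }
    ```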
    
    ## How was this patch tested?
    Existing test cases.
    
    Closes #24747 from William1104/feature/SPARK-27772-2.
    
    Lead-authored-by: williamwong <william1...@gmail.com>
    Co-authored-by: William Wong <william1...@gmail.com>
    Signed-off-by: Sean Owen <sean.o...@databricks.com>
---
 .../org/apache/spark/sql/test/SQLTestUtils.scala   | 36 ++++++++++++----------
 1 file changed, 20 insertions(+), 16 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
index 9c0b43f..3d9d3b3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -255,11 +255,13 @@ private[sql] trait SQLTestUtilsBase
    * Drops temporary view `viewNames` after calling `f`.
    */
   protected def withTempView(viewNames: String*)(f: => Unit): Unit = {
-    try f finally {
-      // If the test failed part way, we don't want to mask the failure by failing to remove
-      // temp views that never got created.
-      try viewNames.foreach(spark.catalog.dropTempView) catch {
-        case _: NoSuchTableException =>
+    Utils.tryWithSafeFinally(f) {
+      viewNames.foreach { viewName =>
+        try spark.catalog.dropTempView(viewName) catch {
+          // If the test failed part way, we don't want to mask the failure by failing to remove
+          // temp views that never got created.
+          case _: NoSuchTableException =>
+        }
       }
     }
   }
@@ -268,11 +270,13 @@ private[sql] trait SQLTestUtilsBase
    * Drops global temporary view `viewNames` after calling `f`.
    */
   protected def withGlobalTempView(viewNames: String*)(f: => Unit): Unit = {
-    try f finally {
-      // If the test failed part way, we don't want to mask the failure by failing to remove
-      // global temp views that never got created.
-      try viewNames.foreach(spark.catalog.dropGlobalTempView) catch {
-        case _: NoSuchTableException =>
+    Utils.tryWithSafeFinally(f) {
+      viewNames.foreach { viewName =>
+        try spark.catalog.dropGlobalTempView(viewName) catch {
+          // If the test failed part way, we don't want to mask the failure by failing to remove
+          // global temp views that never got created.
+          case _: NoSuchTableException =>
+        }
       }
     }
   }
@@ -281,7 +285,7 @@ private[sql] trait SQLTestUtilsBase
    * Drops table `tableName` after calling `f`.
    */
   protected def withTable(tableNames: String*)(f: => Unit): Unit = {
-    try f finally {
+    Utils.tryWithSafeFinally(f) {
       tableNames.foreach { name =>
         spark.sql(s"DROP TABLE IF EXISTS $name")
       }
@@ -292,18 +296,18 @@ private[sql] trait SQLTestUtilsBase
    * Drops view `viewName` after calling `f`.
    */
   protected def withView(viewNames: String*)(f: => Unit): Unit = {
-    try f finally {
+    Utils.tryWithSafeFinally(f)(
       viewNames.foreach { name =>
         spark.sql(s"DROP VIEW IF EXISTS $name")
       }
-    }
+    )
   }
 
   /**
    * Drops cache `cacheName` after calling `f`.
    */
   protected def withCache(cacheNames: String*)(f: => Unit): Unit = {
-    try f finally {
+    Utils.tryWithSafeFinally(f) {
       cacheNames.foreach { cacheName =>
         try uncacheTable(cacheName) catch {
           case _: AnalysisException =>
@@ -350,7 +354,7 @@ private[sql] trait SQLTestUtilsBase
    * Drops database `dbName` after calling `f`.
    */
   protected def withDatabase(dbNames: String*)(f: => Unit): Unit = {
-    try f finally {
+    Utils.tryWithSafeFinally(f) {
       dbNames.foreach { name =>
         spark.sql(s"DROP DATABASE IF EXISTS $name CASCADE")
       }
@@ -379,7 +383,7 @@ private[sql] trait SQLTestUtilsBase
    */
   protected def activateDatabase(db: String)(f: => Unit): Unit = {
     spark.sessionState.catalog.setCurrentDatabase(db)
-    try f finally spark.sessionState.catalog.setCurrentDatabase("default")
+    Utils.tryWithSafeFinally(f)(spark.sessionState.catalog.setCurrentDatabase("default"))
   }
 
   /**

