This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch release-1.2
in repository https://gitbox.apache.org/repos/asf/paimon.git

commit 4efd31c85dda0e07e3d6723c8e09213e9080217c
Author: Zouxxyy <[email protected]>
AuthorDate: Thu Jun 12 18:12:32 2025 +0800

    [test][spark] Fix the unstable testHiveCatalogOptions (#5738)
---
 .../paimon/spark/SparkCatalogWithHiveTest.java     |  1 +
 .../spark/SparkGenericCatalogWithHiveTest.java     | 30 ++++++++++------------
 .../catalog/functions/BucketFunctionTest.java      |  6 +++++
 3 files changed, 20 insertions(+), 17 deletions(-)

diff --git a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkCatalogWithHiveTest.java b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkCatalogWithHiveTest.java
index 9f383a54c2..c7c07b1aef 100644
--- a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkCatalogWithHiveTest.java
+++ b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkCatalogWithHiveTest.java
@@ -109,6 +109,7 @@ public class SparkCatalogWithHiveTest {
                                     .collect(Collectors.toList()))
                     .containsExactlyInAnyOrder("[1,1,1]", "[2,2,2]");
         }
+        spark.stop();
     }
 
     @Test
diff --git a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkGenericCatalogWithHiveTest.java b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkGenericCatalogWithHiveTest.java
index 3bb013648e..9e13dd7be5 100644
--- a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkGenericCatalogWithHiveTest.java
+++ b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkGenericCatalogWithHiveTest.java
@@ -60,12 +60,15 @@ public class SparkGenericCatalogWithHiveTest {
                         .config(
                                 "spark.sql.catalog.spark_catalog",
                                 SparkGenericCatalog.class.getName())
+                        .config("spark.sql.catalog.paimon.warehouse", warehousePath.toString())
+                        .config("spark.sql.catalog.paimon", SparkCatalog.class.getName())
                         .config(
                                 "spark.sql.extensions",
                                 
"org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions")
                         .master("local[2]")
                         .getOrCreate();
 
+        spark.sql("USE spark_catalog");
         spark.sql("CREATE DATABASE my_db");
         spark.sql("USE my_db");
         spark.sql(
@@ -80,29 +83,22 @@ public class SparkGenericCatalogWithHiveTest {
                                 .map(s -> s.get(1))
                                 .map(Object::toString))
                 .containsExactlyInAnyOrder("t1");
-        spark.close();
 
-        // secondly, we close catalog with hive metastore, and start a filesystem metastore to check
-        // the result.
-        SparkSession spark2 =
-                SparkSession.builder()
-                        .config("spark.sql.catalog.paimon.warehouse", warehousePath.toString())
-                        .config("spark.sql.catalogImplementation", "in-memory")
-                        .config("spark.sql.catalog.paimon", SparkCatalog.class.getName())
-                        .config(
-                        .config(
-                                "spark.sql.extensions",
-                                "org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions")
-                        .master("local[2]")
-                        .getOrCreate();
-        spark2.sql("USE paimon");
-        spark2.sql("USE my_db");
-        assertThat(spark2.sql("SHOW NAMESPACES").collectAsList().stream().map(Object::toString))
+        // secondly, use filesystem metastore to check the result.
+        spark.sql("USE paimon");
+        spark.sql("USE my_db");
+        assertThat(spark.sql("SHOW NAMESPACES").collectAsList().stream().map(Object::toString))
                 .containsExactlyInAnyOrder("[default]", "[my_db]");
         assertThat(
-                        spark2.sql("SHOW TABLES").collectAsList().stream()
+                        spark.sql("SHOW TABLES").collectAsList().stream()
                                 .map(s -> s.get(1))
                                 .map(Object::toString))
                 .containsExactlyInAnyOrder("t1");
+
+        spark.sql("USE spark_catalog");
+        spark.sql("DROP TABLE my_db.t1");
+        spark.sql("DROP DATABASE my_db");
+        spark.close();
     }
 
     @Test
diff --git a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/catalog/functions/BucketFunctionTest.java b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/catalog/functions/BucketFunctionTest.java
index cf02654063..b8fbcdae42 100644
--- a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/catalog/functions/BucketFunctionTest.java
+++ b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/catalog/functions/BucketFunctionTest.java
@@ -56,6 +56,7 @@ import org.apache.paimon.shade.guava30.com.google.common.collect.ImmutableMap;
 
 import org.apache.spark.sql.SparkSession;
 import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -190,6 +191,11 @@ public class BucketFunctionTest {
         spark.sql(String.format("DROP TABLE IF EXISTS %s", TABLE_NAME));
     }
 
+    @AfterAll
+    public static void tearDown() {
+        spark.stop();
+    }
+
     public static void setupTable(String... bucketColumns) {
         String commitUser = UUID.randomUUID().toString();
         try {
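
The patch follows a common pattern for stabilizing Spark tests: SparkSession.builder().getOrCreate() returns any already-active session, so a session left running by one test leaks its catalog configuration into the next test class, which is what makes tests like testHiveCatalogOptions flaky. A minimal sketch of the shared-session teardown applied above, assuming JUnit 5 and a local Spark master (the class and method names here are illustrative, not part of the patch):

    import org.apache.spark.sql.SparkSession;
    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.Test;

    class SharedSparkSessionTest {

        private static SparkSession spark;

        @BeforeAll
        static void startSpark() {
            // One session is shared by every test in this class.
            spark = SparkSession.builder().master("local[2]").getOrCreate();
        }

        @AfterAll
        static void stopSpark() {
            // Stop the session so its catalog state cannot leak into other
            // test classes whose getOrCreate() would otherwise reuse it.
            spark.stop();
        }

        @Test
        void usesSharedSession() {
            spark.sql("SELECT 1").show();
        }
    }

The SparkGenericCatalogWithHiveTest change applies the same idea from the other direction: instead of building a second session (spark2) for the filesystem catalog, it registers both catalogs on the single session up front and switches between them with USE, leaving only one session to clean up and dropping the created database and table before closing.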
