This is an automated email from the ASF dual-hosted git repository.

yuqi4733 pushed a commit to branch branch-1.1
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/branch-1.1 by this push:
     new cbb313b938 [#9498] Improvement (test): Add integration test for the 
Hive3 catalog (#9556)
cbb313b938 is described below

commit cbb313b93819dcf4a93f4ec121d22e7e8b58f31b
Author: github-actions[bot] 
<41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Thu Dec 25 17:08:32 2025 +0800

    [#9498] Improvement (test): Add integration test for the Hive3 catalog 
(#9556)
    
    ### What changes were proposed in this pull request?
    
    Add integration test for the Hive3 catalog
    
    ### Why are the changes needed?
    
    Fix: #9498
    
    ### Does this PR introduce _any_ user-facing change?
    
    NO
    
    ### How was this patch tested?
    
    Add UTs
    
    Co-authored-by: Yuhui <[email protected]>
---
 .../hive/integration/test/CatalogHive2IT.java      |  53 +++-
 .../hive/integration/test/CatalogHive3IT.java      |   4 +
 .../test/CatalogHive3ITWithCatalog.java            |  76 +++++
 .../hive/integration/test/CatalogHiveABSIT.java    |   2 +
 .../hive/integration/test/CatalogHiveGCSIT.java    |   2 +
 .../hive/integration/test/CatalogHiveS3IT.java     |   5 +-
 .../integration/test/HiveUserAuthenticationIT.java |   2 +
 .../test/MultipleHMSUserAuthenticationIT.java      |   2 +
 .../hive/integration/test/ProxyCatalogHiveIT.java  |   2 +
 catalogs/hive-metastore-common/build.gradle.kts    |   2 +
 .../apache/gravitino/hive/client/HiveClient.java   |   6 +
 .../gravitino/hive/client/HiveClientFactory.java   |   4 +-
 .../gravitino/hive/client/HiveClientImpl.java      |   5 +
 .../org/apache/gravitino/hive/client/HiveShim.java |   2 +
 .../apache/gravitino/hive/client/HiveShimV2.java   |   6 +
 .../apache/gravitino/hive/client/HiveShimV3.java   |  43 ++-
 .../gravitino/hive/kerberos/KerberosClient.java    |   1 -
 .../{TestHiveClient.java => TestHive2HMS.java}     | 350 ++++++++++-----------
 .../hive/client/TestHive2HMSWithKerberos.java      | 140 +++++++++
 .../gravitino/hive/client/TestHive3HMS.java}       |  30 +-
 .../hive/client/TestHive3HMSWithCatalog.java}      |  32 +-
 .../hive/client/TestHive3HMSWithKerberos.java      |  60 ++++
 .../integration/test/container/ContainerSuite.java |   7 +-
 23 files changed, 610 insertions(+), 226 deletions(-)

diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive2IT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive2IT.java
index c934589369..7f9eb0e771 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive2IT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive2IT.java
@@ -108,16 +108,19 @@ import org.apache.thrift.TException;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Assumptions;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 public class CatalogHive2IT extends BaseIT {
   private static final Logger LOG = 
LoggerFactory.getLogger(CatalogHive2IT.class);
-  private static final String HMS_CATALOG = "hive";
+
   public static final String metalakeName =
       GravitinoITUtils.genRandomName("CatalogHiveIT_metalake");
   public String catalogName = 
GravitinoITUtils.genRandomName("CatalogHiveIT_catalog");
@@ -131,13 +134,15 @@ public class CatalogHive2IT extends BaseIT {
   public static final String HIVE_COL_NAME2 = "hive_col_name2";
   public static final String HIVE_COL_NAME3 = "hive_col_name3";
   protected String hiveMetastoreUris;
+  protected String hmsCatalog = "";
   protected final String provider = "hive";
   protected final ContainerSuite containerSuite = ContainerSuite.getInstance();
-  private HiveClientPool hiveClientPool;
+  protected HiveClientPool hiveClientPool;
   protected GravitinoMetalake metalake;
   protected Catalog catalog;
   protected SparkSession sparkSession;
   protected FileSystem fileSystem;
+  protected boolean enableSparkTest = true;
   private final String SELECT_ALL_TEMPLATE = "SELECT * FROM %s.%s";
 
   private static String getInsertWithoutPartitionSql(
@@ -175,7 +180,7 @@ public class CatalogHive2IT extends BaseIT {
   }
 
   protected void initSparkSession() {
-    sparkSession =
+    SparkSession.Builder builder =
         SparkSession.builder()
             .master("local[1]")
             .appName("Hive Catalog integration test")
@@ -188,8 +193,8 @@ public class CatalogHive2IT extends BaseIT {
                     HiveContainer.HDFS_DEFAULTFS_PORT))
             .config("spark.sql.storeAssignmentPolicy", "LEGACY")
             .config("mapreduce.input.fileinputformat.input.dir.recursive", 
"true")
-            .enableHiveSupport()
-            .getOrCreate();
+            .enableHiveSupport();
+    sparkSession = builder.getOrCreate();
   }
 
   protected void initFileSystem() throws IOException {
@@ -215,10 +220,10 @@ public class CatalogHive2IT extends BaseIT {
 
     // Check if Hive client can connect to Hive metastore
     hiveClientPool = new HiveClientPool("hive", 1, hiveClientProperties);
-    List<String> dbs = hiveClientPool.run(client -> 
client.getAllDatabases(HMS_CATALOG));
-    Assertions.assertFalse(dbs.isEmpty());
 
-    initSparkSession();
+    if (enableSparkTest) {
+      initSparkSession();
+    }
     initFileSystem();
 
     createMetalake();
@@ -250,6 +255,8 @@ public class CatalogHive2IT extends BaseIT {
       sparkSession.close();
     }
 
+    ContainerSuite.getInstance().close();
+
     if (fileSystem != null) {
       fileSystem.close();
     }
@@ -266,7 +273,7 @@ public class CatalogHive2IT extends BaseIT {
     catalog.asSchemas().dropSchema(schemaName, true);
     assertThrows(
         NoSuchSchemaException.class,
-        () -> hiveClientPool.run(client -> client.getDatabase(HMS_CATALOG, 
schemaName)));
+        () -> hiveClientPool.run(client -> client.getDatabase(hmsCatalog, 
schemaName)));
     createSchema();
   }
 
@@ -387,27 +394,27 @@ public class CatalogHive2IT extends BaseIT {
   }
 
   private HiveTable loadHiveTable(String schema, String table) throws 
InterruptedException {
-    return hiveClientPool.run(client -> client.getTable(HMS_CATALOG, schema, 
table));
+    return hiveClientPool.run(client -> client.getTable(hmsCatalog, schema, 
table));
   }
 
   private HivePartition loadHivePartition(String schema, String table, String 
partition)
       throws InterruptedException {
     return hiveClientPool.run(
         client -> {
-          HiveTable hiveTable = client.getTable(HMS_CATALOG, schema, table);
+          HiveTable hiveTable = client.getTable(hmsCatalog, schema, table);
           return client.getPartition(hiveTable, partition);
         });
   }
 
   private HiveSchema loadHiveSchema(String schema) throws InterruptedException 
{
-    return hiveClientPool.run(client -> client.getDatabase(HMS_CATALOG, 
schema));
+    return hiveClientPool.run(client -> client.getDatabase(hmsCatalog, 
schema));
   }
 
   private boolean hiveTableExists(String schema, String table) throws 
InterruptedException {
     return hiveClientPool.run(
         client -> {
           try {
-            client.getTable(HMS_CATALOG, schema, table);
+            client.getTable(hmsCatalog, schema, table);
             return true;
           } catch (NoSuchTableException e) {
             return false;
@@ -419,7 +426,7 @@ public class CatalogHive2IT extends BaseIT {
       throws InterruptedException {
     hiveClientPool.run(
         client -> {
-          client.dropTable(HMS_CATALOG, schema, table, deleteData, ifPurge);
+          client.dropTable(hmsCatalog, schema, table, deleteData, ifPurge);
           return null;
         });
   }
@@ -481,7 +488,7 @@ public class CatalogHive2IT extends BaseIT {
   @Test
   public void testCreateHiveTableWithDistributionAndSortOrder()
       throws TException, InterruptedException {
-    // Create table from Gravitino API
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
 
     NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName);
@@ -574,7 +581,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testCreateHiveTable() throws TException, InterruptedException {
-    // Create table from Gravitino API
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
 
     NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName);
@@ -669,6 +676,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testHiveTableProperties() throws TException, 
InterruptedException {
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
     NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName);
     // test default properties
@@ -756,6 +764,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testHiveSchemaProperties() throws TException, 
InterruptedException {
+    Assumptions.assumeTrue(enableSparkTest);
     // test LOCATION property
     String schemaName = GravitinoITUtils.genRandomName(SCHEMA_PREFIX);
 
@@ -797,6 +806,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testCreatePartitionedHiveTable() throws TException, 
InterruptedException {
+    Assumptions.assumeTrue(enableSparkTest);
     // Create table from Gravitino API
     Column[] columns = createColumns();
 
@@ -844,6 +854,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testListPartitionNames() throws TException, InterruptedException 
{
+    Assumptions.assumeTrue(enableSparkTest);
     // test empty partitions
     Column[] columns = createColumns();
     NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName);
@@ -870,6 +881,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testListPartitions() throws TException, InterruptedException {
+    Assumptions.assumeTrue(enableSparkTest);
     // test empty partitions
     Column[] columns = createColumns();
     NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName);
@@ -924,6 +936,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testGetPartition() throws TException, InterruptedException {
+    Assumptions.assumeTrue(enableSparkTest);
     Table createdTable = preparePartitionedTable();
 
     String[] partitionNames = 
createdTable.supportPartitions().listPartitionNames();
@@ -946,6 +959,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testAddPartition() throws TException, InterruptedException {
+    Assumptions.assumeTrue(enableSparkTest);
     Table createdTable = preparePartitionedTable();
 
     // add partition 
"hive_col_name2=2023-01-02/hive_col_name3=gravitino_it_test2"
@@ -994,6 +1008,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testDropPartition() throws TException, InterruptedException, 
IOException {
+    Assumptions.assumeTrue(enableSparkTest);
     Table createdTable = preparePartitionedTable();
 
     // add partition 
"hive_col_name2=2023-01-02/hive_col_name3=gravitino_it_test2"
@@ -1088,6 +1103,7 @@ public class CatalogHive2IT extends BaseIT {
   @Test
   public void testPurgePartition()
       throws InterruptedException, UnsupportedOperationException, TException {
+    Assumptions.assumeTrue(enableSparkTest);
     Table createdTable = preparePartitionedTable();
     Assertions.assertThrows(
         UnsupportedOperationException.class,
@@ -1151,6 +1167,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testAlterHiveTable() throws TException, InterruptedException {
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
     Table createdTable =
         catalog
@@ -1532,6 +1549,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testDropHiveManagedTable() throws TException, 
InterruptedException, IOException {
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
     catalog
         .asTableCatalog()
@@ -1558,6 +1576,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testDropHiveExternalTable() throws TException, 
InterruptedException, IOException {
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
     Map<String, String> properties = createProperties();
     properties.put(TABLE_TYPE, EXTERNAL_TABLE.name().toLowerCase(Locale.ROOT));
@@ -1587,6 +1606,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testPurgeHiveManagedTable() throws TException, 
InterruptedException, IOException {
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
     catalog
         .asTableCatalog()
@@ -1619,6 +1639,7 @@ public class CatalogHive2IT extends BaseIT {
 
   @Test
   public void testPurgeHiveExternalTable() throws TException, 
InterruptedException, IOException {
+    Assumptions.assumeTrue(enableSparkTest);
     Column[] columns = createColumns();
     Map<String, String> properties = createProperties();
     properties.put(TABLE_TYPE, EXTERNAL_TABLE.name().toLowerCase(Locale.ROOT));
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
index 60e8facb4e..a678fa8c9d 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
@@ -21,11 +21,15 @@ package org.apache.gravitino.catalog.hive.integration.test;
 import com.google.common.collect.ImmutableMap;
 import org.apache.gravitino.integration.test.container.HiveContainer;
 import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInstance;
 
 @Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 public class CatalogHive3IT extends CatalogHive2IT {
+
   @Override
   protected void startNecessaryContainer() {
+    hmsCatalog = "hive";
     containerSuite.startHiveContainer(
         ImmutableMap.of(HiveContainer.HIVE_RUNTIME_VERSION, 
HiveContainer.HIVE3));
 
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3ITWithCatalog.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3ITWithCatalog.java
new file mode 100644
index 0000000000..79d8b07926
--- /dev/null
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3ITWithCatalog.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.catalog.hive.integration.test;
+
+import static org.apache.gravitino.catalog.hive.HiveConstants.DEFAULT_CATALOG;
+import static org.apache.gravitino.catalog.hive.HiveConstants.METASTORE_URIS;
+
+import com.google.common.collect.Maps;
+import java.util.Map;
+import org.apache.gravitino.Catalog;
+import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInstance;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class CatalogHive3ITWithCatalog extends CatalogHive3IT {
+
+  private static final Logger LOGGER = 
LoggerFactory.getLogger(CatalogHive3ITWithCatalog.class);
+
+  @Override
+  protected void startNecessaryContainer() {
+    super.startNecessaryContainer();
+    hmsCatalog = "mycatalog";
+    // spark does not support non-default catalog, disable spark test
+    enableSparkTest = false;
+  }
+
+  @Override
+  protected void createCatalog() {
+
+    String location =
+        String.format(
+            "hdfs://%s:%d/user/hive/warehouse/%s/%s/%s",
+            containerSuite.getHiveContainer().getContainerIpAddress(),
+            HiveContainer.HDFS_DEFAULTFS_PORT,
+            hmsCatalog,
+            catalogName.toLowerCase(),
+            schemaName.toLowerCase());
+    try {
+      hiveClientPool.run(
+          client1 -> {
+            client1.createCatalog(hmsCatalog, location);
+            return null;
+          });
+    } catch (Exception e) {
+      LOGGER.error("Error creating Hive catalog {}", hmsCatalog, e);
+      throw new RuntimeException(e);
+    }
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put(METASTORE_URIS, hiveMetastoreUris);
+    properties.put(DEFAULT_CATALOG, hmsCatalog);
+
+    metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, 
"comment", properties);
+
+    catalog = metalake.loadCatalog(catalogName);
+  }
+}
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveABSIT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveABSIT.java
index a225e0ca0d..c09e1ef8b2 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveABSIT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveABSIT.java
@@ -29,6 +29,7 @@ import 
org.apache.gravitino.integration.test.util.GravitinoITUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.TestInstance;
 import org.junit.jupiter.api.condition.EnabledIf;
 
 // Apart from the following dependencies on environment, this test also needs 
hadoop3-common, please
@@ -37,6 +38,7 @@ import org.junit.jupiter.api.condition.EnabledIf;
 @EnabledIf(
     value = "isAzureBlobStorageConfigured",
     disabledReason = "Azure Blob Storage is not prepared.")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 public class CatalogHiveABSIT extends CatalogHive2IT {
 
   private static final String ABS_BUCKET_NAME = 
System.getenv("ABS_CONTAINER_NAME");
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveGCSIT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveGCSIT.java
index d9dbc02503..2dee713bf4 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveGCSIT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveGCSIT.java
@@ -29,10 +29,12 @@ import 
org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.TestInstance;
 import org.junit.jupiter.api.condition.EnabledIf;
 import org.testcontainers.utility.MountableFile;
 
 @EnabledIf(value = "isGCSConfigured", disabledReason = "Google Cloud 
Storage(GCS) is not prepared.")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 public class CatalogHiveGCSIT extends CatalogHive2IT {
 
   private static final String GCS_BUCKET_NAME = 
System.getenv("GCS_BUCKET_NAME");
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveS3IT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveS3IT.java
index b8cf284eca..dc4ec212bc 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveS3IT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveS3IT.java
@@ -29,12 +29,14 @@ import 
org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.TestInstance;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.testcontainers.containers.Container;
 import org.testcontainers.shaded.org.awaitility.Awaitility;
 
-public class CatalogHiveS3IT extends CatalogHive2IT {
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class CatalogHiveS3IT extends CatalogHive3IT {
 
   private static final Logger LOGGER = 
LoggerFactory.getLogger(CatalogHiveS3IT.class);
 
@@ -51,6 +53,7 @@ public class CatalogHiveS3IT extends CatalogHive2IT {
 
   @Override
   protected void startNecessaryContainer() {
+    hmsCatalog = "hive";
     containerSuite.startLocalStackContainer();
     gravitinoLocalStackContainer = containerSuite.getLocalStackContainer();
 
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
index 7c3232f94f..23a23772b9 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
@@ -59,10 +59,12 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 public class HiveUserAuthenticationIT extends BaseIT {
   private static final Logger LOG = 
LoggerFactory.getLogger(HiveUserAuthenticationIT.class);
 
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/MultipleHMSUserAuthenticationIT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/MultipleHMSUserAuthenticationIT.java
index 4a6b54816d..f626569510 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/MultipleHMSUserAuthenticationIT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/MultipleHMSUserAuthenticationIT.java
@@ -24,8 +24,10 @@ import 
org.apache.gravitino.integration.test.util.GravitinoITUtils;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
 
 @Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 public class MultipleHMSUserAuthenticationIT extends HiveUserAuthenticationIT {
   @BeforeAll
   static void setHiveURI() {
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
index b82dba2bdb..43a5b7d852 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
+++ 
b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
@@ -65,8 +65,10 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
 
 @Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 public class ProxyCatalogHiveIT extends BaseIT {
 
   public static final String METALAKE_NAME =
diff --git a/catalogs/hive-metastore-common/build.gradle.kts 
b/catalogs/hive-metastore-common/build.gradle.kts
index bed4a3de95..406b0fd31c 100644
--- a/catalogs/hive-metastore-common/build.gradle.kts
+++ b/catalogs/hive-metastore-common/build.gradle.kts
@@ -121,6 +121,8 @@ dependencies {
   }
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.woodstox.core)
+  testImplementation(libs.testcontainers)
+  testImplementation(project(":integration-test-common", "testArtifacts"))
 
   testRuntimeOnly(libs.junit.jupiter.engine)
 }
diff --git 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClient.java
 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClient.java
index f7202049d6..251d241c83 100644
--- 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClient.java
+++ 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClient.java
@@ -85,6 +85,12 @@ public interface HiveClient extends AutoCloseable {
 
   List<String> getCatalogs();
 
+  default void createCatalog(String catalogName, String location) {
+    createCatalog(catalogName, location, "");
+  }
+
+  void createCatalog(String catalogName, String location, String description);
+
   void close();
 
   UserGroupInformation getUser();
diff --git 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientFactory.java
 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientFactory.java
index 62ff549dc1..c80c2590ce 100644
--- 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientFactory.java
+++ 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientFactory.java
@@ -80,8 +80,8 @@ public final class HiveClientFactory {
         kerberosClient.setHiveClient(client);
       }
     } catch (Exception e) {
-      throw new RuntimeException(
-          String.format("Failed to initialize HiveClientFactory %s", 
this.name), e);
+      throw HiveExceptionConverter.toGravitinoException(
+          e, HiveExceptionConverter.ExceptionTarget.other(this.name));
     }
   }
 
diff --git 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientImpl.java
 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientImpl.java
index 935f1c85c5..809f356eec 100644
--- 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientImpl.java
+++ 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveClientImpl.java
@@ -164,6 +164,11 @@ public class HiveClientImpl implements HiveClient {
     return shim.getCatalogs();
   }
 
+  @Override
+  public void createCatalog(String catalogName, String location, String 
description) {
+    shim.createCatalog(catalogName, location, description);
+  }
+
   @Override
   public void close() {
     try {
diff --git 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShim.java
 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShim.java
index dc477129ee..ba61ec65a4 100644
--- 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShim.java
+++ 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShim.java
@@ -108,6 +108,8 @@ public abstract class HiveShim {
 
   public abstract List<String> getCatalogs();
 
+  public abstract void createCatalog(String catalogName, String location, 
String description);
+
   public void close() throws Exception {
     if (client != null) {
       client.close();
diff --git 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV2.java
 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV2.java
index 383f5a1d85..c82967cb77 100644
--- 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV2.java
+++ 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV2.java
@@ -273,6 +273,12 @@ class HiveShimV2 extends HiveShim {
     return List.of();
   }
 
+  @Override
+  public void createCatalog(String catalogName, String location, String 
description) {
+    throw new UnsupportedOperationException(
+        "Catalog creation is not supported in Hive 2.x. Please upgrade to Hive 
3.x.");
+  }
+
   @Override
   public void close() throws Exception {
     client.close();
diff --git 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV3.java
 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV3.java
index 6409696a41..655981698c 100644
--- 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV3.java
+++ 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/client/HiveShimV3.java
@@ -20,9 +20,11 @@ package org.apache.gravitino.hive.client;
 import static 
org.apache.gravitino.hive.client.HiveClientClassLoader.HiveVersion.HIVE3;
 import static 
org.apache.gravitino.hive.client.Util.updateConfigurationFromProperties;
 
+import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
 import java.util.List;
 import java.util.Properties;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.reflect.MethodUtils;
 import org.apache.gravitino.hive.HivePartition;
 import org.apache.gravitino.hive.HiveSchema;
@@ -37,6 +39,8 @@ import org.apache.hadoop.hive.metastore.api.Table;
 
 class HiveShimV3 extends HiveShimV2 {
 
+  private static final String CATALOG_CLASS = 
"org.apache.hadoop.hive.metastore.api.Catalog";
+
   private final Method createDatabaseMethod;
   private final Method getDatabaseMethod;
   private final Method getAllDatabasesMethod;
@@ -57,8 +61,14 @@ class HiveShimV3 extends HiveShimV2 {
   private final Method getTableObjectsByNameMethod;
   private final Method databaseSetCatalogNameMethod;
   private final Method getCatalogsMethod;
+  private final Method createCatalogMethod;
+
   private final Method tableSetCatalogNameMethod;
   private final Method partitionSetCatalogNameMethod;
+  private final Method catalogSetDescriptionMethod;
+
+  private final Class<?> catalogClass;
+  private final Constructor<?> catalogCreator;
 
   HiveShimV3(Properties properties) {
     super(HIVE3, properties);
@@ -129,6 +139,11 @@ class HiveShimV3 extends HiveShimV2 {
               "getTableObjectsByName", String.class, String.class, List.class);
       this.getCatalogsMethod = IMetaStoreClient.class.getMethod("getCatalogs");
 
+      this.catalogClass = 
this.getClass().getClassLoader().loadClass(CATALOG_CLASS);
+      this.catalogCreator = 
this.catalogClass.getDeclaredConstructor(String.class, String.class);
+      this.createCatalogMethod = 
IMetaStoreClient.class.getMethod("createCatalog", catalogClass);
+      this.catalogSetDescriptionMethod = 
catalogClass.getMethod("setDescription", String.class);
+
       // SetCatalogName methods for Hive3
       this.databaseSetCatalogNameMethod =
           MethodUtils.getAccessibleMethod(Database.class, "setCatalogName", 
String.class);
@@ -294,7 +309,7 @@ class HiveShimV3 extends HiveShimV2 {
     String catalogName = hiveTable.catalogName();
     var tb = HiveTableConverter.toHiveTable(hiveTable);
     invoke(ExceptionTarget.other(""), tb, tableSetCatalogNameMethod, 
catalogName);
-    invoke(ExceptionTarget.schema(hiveTable.name()), client, 
createTableMethod, tb);
+    invoke(ExceptionTarget.table(hiveTable.name()), client, createTableMethod, 
tb);
   }
 
   @Override
@@ -424,6 +439,16 @@ class HiveShimV3 extends HiveShimV2 {
     return (List<String>) invoke(ExceptionTarget.other(""), client, 
getCatalogsMethod);
   }
 
+  @Override
+  public void createCatalog(String catalogName, String location, String 
description) {
+    Object catalog =
+        invoke(ExceptionTarget.other(catalogName), catalogCreator, 
catalogName, location);
+    if (StringUtils.isNotBlank(description)) {
+      invoke(ExceptionTarget.other(catalogName), catalog, 
catalogSetDescriptionMethod, description);
+    }
+    invoke(ExceptionTarget.catalog(catalogName), client, createCatalogMethod, 
catalog);
+  }
+
   /**
    * Invokes a method on an object and converts any exception to a Gravitino 
exception.
    *
@@ -441,6 +466,22 @@ class HiveShimV3 extends HiveShimV2 {
     }
   }
 
+  /**
+   * Creates an object using a constructor and converts any exception to a 
Gravitino exception.
+   *
+   * @param target Hive object info used in error messages and exception 
mapping
+   * @param constructor The constructor to use for creating the object
+   * @param args The arguments to pass to the constructor
+   * @return The created object
+   */
+  private Object invoke(ExceptionTarget target, Constructor<?> constructor, 
Object... args) {
+    try {
+      return constructor.newInstance(args);
+    } catch (Exception e) {
+      throw HiveExceptionConverter.toGravitinoException(e, target);
+    }
+  }
+
   /**
    * Converts pageSize from short to int if the method parameter expects int 
type.
    *
diff --git 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/kerberos/KerberosClient.java
 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/kerberos/KerberosClient.java
index 8ba4f18559..d1be53a115 100644
--- 
a/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/kerberos/KerberosClient.java
+++ 
b/catalogs/hive-metastore-common/src/main/java/org/apache/gravitino/hive/kerberos/KerberosClient.java
@@ -172,7 +172,6 @@ public class KerberosClient implements java.io.Closeable {
       keytabsDir.mkdir();
     }
     File keytabFile = new File(path);
-    keytabFile.deleteOnExit();
     if (keytabFile.exists() && !keytabFile.delete()) {
       throw new IllegalStateException(
           String.format("Fail to delete keytab file %s", 
keytabFile.getAbsolutePath()));
diff --git 
a/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHiveClient.java
 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive2HMS.java
similarity index 63%
rename from 
catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHiveClient.java
rename to 
catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive2HMS.java
index c238f33ce5..8c30e0b18a 100644
--- 
a/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHiveClient.java
+++ 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive2HMS.java
@@ -19,6 +19,7 @@
 
 package org.apache.gravitino.hive.client;
 
+import com.google.common.collect.ImmutableMap;
 import java.time.Instant;
 import java.util.HashMap;
 import java.util.List;
@@ -38,6 +39,10 @@ import 
org.apache.gravitino.exceptions.TableAlreadyExistsException;
 import org.apache.gravitino.hive.HivePartition;
 import org.apache.gravitino.hive.HiveSchema;
 import org.apache.gravitino.hive.HiveTable;
+import org.apache.gravitino.integration.test.container.ContainerSuite;
+import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.apache.gravitino.integration.test.util.CloseContainerExtension;
+import org.apache.gravitino.integration.test.util.PrintFuncNameExtension;
 import org.apache.gravitino.meta.AuditInfo;
 import org.apache.gravitino.rel.Column;
 import org.apache.gravitino.rel.expressions.literals.Literal;
@@ -45,50 +50,57 @@ import 
org.apache.gravitino.rel.expressions.literals.Literals;
 import org.apache.gravitino.rel.expressions.transforms.Transform;
 import org.apache.gravitino.rel.expressions.transforms.Transforms;
 import org.apache.gravitino.rel.types.Types;
+import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
-
-// This class is used for manual testing against real Hive Metastore instances.
-@Disabled
-public class TestHiveClient {
-
-  private static final String HIVE2_HMS_URL = "thrift://172.17.0.4:9083";
-  private static final String HIVE2_HDFS_URL = "hdfs://172.17.0.4:9000";
-  private static final String HIVE3_HMS_URL = "thrift://172.17.0.3:9083";
-  private static final String HIVE3_HDFS_URL = "hdfs://172.17.0.3:9000";
-
-  private static final String KERBEROS_HIVE2_HMS_URL = 
"thrift://172.17.0.2:9083";
-  private static final String KERBEROS_HIVE2_HDFS_URL = 
"hdfs://172.17.0.2:9000";
-  private static final String KERBEROS_PRINCIPAL = "cli@HADOOPKRB";
-  private static final String KERBEROS_KEYTAB = 
"/tmp/test4310082059861441407/client.keytab";
-  private static final String KERBEROS_METASTORE_PRINCIPAL = 
"hive/6b1955fcb754@HADOOPKRB";
-  private static final String KERBEROS_KRB5_CONF = 
"/tmp/test4310082059861441407/krb5.conf";
-
-  @Test
-  void testHive2Client() throws Exception {
-    runHiveClientTest("", "hive2", HIVE2_HMS_URL, HIVE2_HDFS_URL + 
"/tmp/gravitino_test");
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+// This class is used for testing against Hive 2.x Metastore instances using 
Docker containers.
+@Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+@ExtendWith({PrintFuncNameExtension.class, CloseContainerExtension.class})
+public class TestHive2HMS {
+
+  protected final ContainerSuite containerSuite = ContainerSuite.getInstance();
+
+  protected HiveContainer hiveContainer;
+  protected String testPrefix = "hive2";
+  protected String metastoreUri;
+  protected String hdfsBasePath;
+  protected String catalogName = ""; // Hive2 doesn't support catalog
+  protected HiveClient hiveClient;
+
+  @BeforeAll
+  public void startHiveContainer() {
+    containerSuite.startHiveContainer(
+        ImmutableMap.of(HiveContainer.HIVE_RUNTIME_VERSION, 
HiveContainer.HIVE2));
+    hiveContainer = containerSuite.getHiveContainer();
+    metastoreUri =
+        String.format(
+            "thrift://%s:%d",
+            hiveContainer.getContainerIpAddress(), 
HiveContainer.HIVE_METASTORE_PORT);
+    hdfsBasePath =
+        String.format(
+            "hdfs://%s:%d/tmp/gravitino_test",
+            hiveContainer.getContainerIpAddress(), 
HiveContainer.HDFS_DEFAULTFS_PORT);
+
+    hiveClient = new HiveClientFactory(createHiveProperties(), 
testPrefix).createHiveClient();
   }
 
-  @Test
-  void testHive3DefaultCatalog() throws Exception {
-    // Hive3 default catalog is "hive", not empty string
-    runHiveClientTest(
-        "hive", "hive3_default", HIVE3_HMS_URL, HIVE3_HDFS_URL + 
"/tmp/gravitino_test");
+  @AfterAll
+  public void stopHiveContainer() throws Exception {
+    containerSuite.close();
+    if (hiveClient != null) {
+      hiveClient.close();
+      hiveClient = null;
+    }
   }
 
   @Test
-  void testHive3SampleCatalog() throws Exception {
-    runHiveClientTest(
-        "sample_catalog", "hive3_sample", HIVE3_HMS_URL, HIVE3_HDFS_URL + 
"/tmp/gravitino_test");
-  }
-
-  private void runHiveClientTest(
-      String catalogName, String testPrefix, String metastoreUri, String 
hdfsBasePath) {
-    Properties properties = new Properties();
-    properties.setProperty("hive.metastore.uris", metastoreUri);
-    HiveClient client = new HiveClientFactory(properties, 
"").createHiveClient();
-
+  void runHiveClientTest() throws Exception {
     String dbName = "gt_" + testPrefix + "_db_" + 
UUID.randomUUID().toString().replace("-", "");
     String tableName = "gt_" + testPrefix + "_tbl_" + 
UUID.randomUUID().toString().replace("-", "");
     String partitionValue = "p_" + UUID.randomUUID().toString().replace("-", 
"");
@@ -103,26 +115,26 @@ public class TestHiveClient {
 
     try {
       // Test database operations
-      client.createDatabase(schema);
-      List<String> allDatabases = client.getAllDatabases(catalogName);
+      hiveClient.createDatabase(schema);
+      List<String> allDatabases = hiveClient.getAllDatabases(catalogName);
       Assertions.assertTrue(allDatabases.contains(dbName), "Database should be 
in the list");
 
-      HiveSchema loadedDb = client.getDatabase(catalogName, dbName);
+      HiveSchema loadedDb = hiveClient.getDatabase(catalogName, dbName);
       Assertions.assertNotNull(loadedDb, "Loaded database should not be null");
       Assertions.assertEquals(dbName, loadedDb.name(), "Database name should 
match");
       Assertions.assertEquals(
           schema.comment(), loadedDb.comment(), "Database comment should 
match");
 
-      client.alterDatabase(catalogName, dbName, schema);
-      HiveSchema alteredDb = client.getDatabase(catalogName, dbName);
+      hiveClient.alterDatabase(catalogName, dbName, schema);
+      HiveSchema alteredDb = hiveClient.getDatabase(catalogName, dbName);
       Assertions.assertNotNull(alteredDb, "Altered database should not be 
null");
 
       // Test table operations
-      client.createTable(table);
-      List<String> allTables = client.getAllTables(catalogName, dbName);
+      hiveClient.createTable(table);
+      List<String> allTables = hiveClient.getAllTables(catalogName, dbName);
       Assertions.assertTrue(allTables.contains(tableName), "Table should be in 
the list");
 
-      HiveTable loadedTable = client.getTable(catalogName, dbName, tableName);
+      HiveTable loadedTable = hiveClient.getTable(catalogName, dbName, 
tableName);
       Assertions.assertNotNull(loadedTable, "Loaded table should not be null");
       Assertions.assertEquals(tableName, loadedTable.name(), "Table name 
should match");
       Assertions.assertEquals(table.comment(), loadedTable.comment(), "Table 
comment should match");
@@ -130,47 +142,47 @@ public class TestHiveClient {
       Assertions.assertEquals(
           1, loadedTable.partitioning().length, "Table should have 1 partition 
key");
 
-      client.alterTable(catalogName, dbName, tableName, loadedTable);
-      HiveTable alteredTable = client.getTable(catalogName, dbName, tableName);
+      hiveClient.alterTable(catalogName, dbName, tableName, loadedTable);
+      HiveTable alteredTable = hiveClient.getTable(catalogName, dbName, 
tableName);
       Assertions.assertNotNull(alteredTable, "Altered table should not be 
null");
 
       List<String> filteredTables =
-          client.listTableNamesByFilter(catalogName, dbName, "", (short) 10);
+          hiveClient.listTableNamesByFilter(catalogName, dbName, "", (short) 
10);
       Assertions.assertTrue(
           filteredTables.contains(tableName), "Filtered tables should contain 
the table");
 
       List<HiveTable> tableObjects =
-          client.getTableObjectsByName(catalogName, dbName, 
List.of(tableName));
+          hiveClient.getTableObjectsByName(catalogName, dbName, 
List.of(tableName));
       Assertions.assertEquals(1, tableObjects.size(), "Should get exactly one 
table object");
       Assertions.assertEquals(
           tableName, tableObjects.get(0).name(), "Table object name should 
match");
 
       // Test partition operations
-      HivePartition addedPartition = client.addPartition(loadedTable, 
partition);
+      HivePartition addedPartition = hiveClient.addPartition(loadedTable, 
partition);
       Assertions.assertNotNull(addedPartition, "Added partition should not be 
null");
       Assertions.assertEquals(partitionName, addedPartition.name(), "Partition 
name should match");
 
-      List<String> partitionNames = client.listPartitionNames(loadedTable, 
(short) 10);
+      List<String> partitionNames = hiveClient.listPartitionNames(loadedTable, 
(short) 10);
       Assertions.assertTrue(
           partitionNames.contains(partitionName), "Partition should be in the 
list");
 
-      List<HivePartition> partitions = client.listPartitions(loadedTable, 
(short) 10);
+      List<HivePartition> partitions = hiveClient.listPartitions(loadedTable, 
(short) 10);
       Assertions.assertEquals(1, partitions.size(), "Should have exactly one 
partition");
       Assertions.assertEquals(
           partitionName, partitions.get(0).name(), "Partition name should 
match");
 
       List<HivePartition> filteredPartitions =
-          client.listPartitions(loadedTable, List.of(partitionValue), (short) 
10);
+          hiveClient.listPartitions(loadedTable, List.of(partitionValue), 
(short) 10);
       Assertions.assertEquals(
           1, filteredPartitions.size(), "Should have exactly one filtered 
partition");
 
-      HivePartition fetchedPartition = client.getPartition(loadedTable, 
addedPartition.name());
+      HivePartition fetchedPartition = hiveClient.getPartition(loadedTable, 
addedPartition.name());
       Assertions.assertNotNull(fetchedPartition, "Fetched partition should not 
be null");
       Assertions.assertEquals(
           partitionName, fetchedPartition.name(), "Fetched partition name 
should match");
 
-      client.dropPartition(catalogName, dbName, tableName, 
addedPartition.name(), true);
-      List<String> partitionNamesAfterDrop = 
client.listPartitionNames(loadedTable, (short) 10);
+      hiveClient.dropPartition(catalogName, dbName, tableName, 
addedPartition.name(), true);
+      List<String> partitionNamesAfterDrop = 
hiveClient.listPartitionNames(loadedTable, (short) 10);
       Assertions.assertFalse(
           partitionNamesAfterDrop.contains(partitionName),
           "Partition should not be in the list after drop");
@@ -178,7 +190,7 @@ public class TestHiveClient {
       // Test delegation token (may not be available in all environments)
       try {
         String token =
-            client.getDelegationToken(
+            hiveClient.getDelegationToken(
                 System.getProperty("user.name"), 
System.getProperty("user.name"));
         Assertions.assertNotNull(token, "Delegation token should not be null");
       } catch (Exception e) {
@@ -186,101 +198,23 @@ public class TestHiveClient {
       }
 
       // Cleanup
-      client.dropTable(catalogName, dbName, tableName, true, true);
-      List<String> tablesAfterDrop = client.getAllTables(catalogName, dbName);
+      hiveClient.dropTable(catalogName, dbName, tableName, true, true);
+      List<String> tablesAfterDrop = hiveClient.getAllTables(catalogName, 
dbName);
       Assertions.assertFalse(
           tablesAfterDrop.contains(tableName), "Table should not be in the 
list after drop");
 
-      client.dropDatabase(catalogName, dbName, true);
-      List<String> databasesAfterDrop = client.getAllDatabases(catalogName);
+      hiveClient.dropDatabase(catalogName, dbName, true);
+      List<String> databasesAfterDrop = 
hiveClient.getAllDatabases(catalogName);
       Assertions.assertFalse(
           databasesAfterDrop.contains(dbName), "Database should not be in the 
list after drop");
     } finally {
-      safelyDropTable(client, catalogName, dbName, tableName);
-      safelyDropDatabase(client, catalogName, dbName);
-    }
-  }
-
-  private HiveSchema createTestSchema(String catalogName, String dbName, 
String location) {
-    Map<String, String> properties = new HashMap<>();
-    properties.put(HiveConstants.LOCATION, location);
-    return HiveSchema.builder()
-        .withName(dbName)
-        .withComment("Test schema for HiveClient operations")
-        .withProperties(properties)
-        .withAuditInfo(defaultAudit())
-        .withCatalogName(catalogName)
-        .build();
-  }
-
-  private HiveTable createTestTable(
-      String catalogName, String databaseName, String tableName, String 
location) {
-    Column idColumn = Column.of("id", Types.IntegerType.get(), null, false, 
false, null);
-    Column dtColumn = Column.of("dt", Types.StringType.get());
-    Map<String, String> properties = new HashMap<>();
-    properties.put(HiveConstants.LOCATION, location);
-    return HiveTable.builder()
-        .withName(tableName)
-        .withColumns(new Column[] {idColumn, dtColumn})
-        .withComment("Test table for HiveClient operations")
-        .withProperties(properties)
-        .withAuditInfo(defaultAudit())
-        .withPartitioning(new Transform[] {Transforms.identity("dt")})
-        .withCatalogName(catalogName)
-        .withDatabaseName(databaseName)
-        .build();
-  }
-
-  private HivePartition createTestPartition(String partitionName, String 
value) {
-    HivePartition partition =
-        HivePartition.identity(
-            new String[][] {new String[] {"dt"}},
-            new Literal<?>[] {Literals.stringLiteral(value)},
-            Map.of());
-    Assertions.assertEquals(partitionName, partition.name());
-    return partition;
-  }
-
-  private AuditInfo defaultAudit() {
-    return AuditInfo.builder()
-        .withCreator(System.getProperty("user.name", "gravitino"))
-        .withCreateTime(Instant.now())
-        .build();
-  }
-
-  private void safelyDropTable(
-      HiveClient client, String catalogName, String dbName, String tableName) {
-    try {
-      client.dropTable(catalogName, dbName, tableName, true, true);
-    } catch (Exception ignored) {
-      // ignore cleanup failures
-    }
-  }
-
-  private void safelyDropDatabase(HiveClient client, String catalogName, 
String dbName) {
-    try {
-      client.dropDatabase(catalogName, dbName, true);
-    } catch (Exception ignored) {
-      // ignore cleanup failures
+      safelyDropTable(hiveClient, catalogName, dbName, tableName);
+      safelyDropDatabase(hiveClient, catalogName, dbName);
     }
   }
 
   @Test
   void testHiveExceptionHandling() throws Exception {
-    testHiveExceptionHandlingForVersion("", HIVE2_HMS_URL, HIVE2_HDFS_URL);
-  }
-
-  @Test
-  void testHive3ExceptionHandling() throws Exception {
-    testHiveExceptionHandlingForVersion("hive", HIVE3_HMS_URL, HIVE3_HDFS_URL);
-  }
-
-  private void testHiveExceptionHandlingForVersion(
-      String catalogName, String metastoreUri, String hdfsBasePath) throws 
Exception {
-    Properties properties = new Properties();
-    properties.setProperty("hive.metastore.uris", metastoreUri);
-    HiveClient client = new HiveClientFactory(properties, 
"").createHiveClient();
-
     String dbName = "gt_exception_test_db_" + 
UUID.randomUUID().toString().replace("-", "");
     String tableName = "gt_exception_test_tbl_" + 
UUID.randomUUID().toString().replace("-", "");
     String partitionValue = "p_" + UUID.randomUUID().toString().replace("-", 
"");
@@ -296,7 +230,7 @@ public class TestHiveClient {
     try {
       // Test SchemaAlreadyExistsException - create database twice
       try {
-        client.createDatabase(schema);
+        hiveClient.createDatabase(schema);
       } catch (GravitinoRuntimeException e) {
         // If permission error occurs, skip this test
         if (e.getCause() != null
@@ -307,38 +241,43 @@ public class TestHiveClient {
         throw e;
       }
       Assertions.assertThrows(
-          SchemaAlreadyExistsException.class, () -> 
client.createDatabase(schema));
+          SchemaAlreadyExistsException.class, () -> 
hiveClient.createDatabase(schema));
 
       // Test NoSuchSchemaException - get non-existent database
       Assertions.assertThrows(
           NoSuchSchemaException.class,
-          () -> client.getDatabase(catalogName, "non_existent_db_" + 
UUID.randomUUID()));
+          () -> hiveClient.getDatabase(catalogName, "non_existent_db_" + 
UUID.randomUUID()));
 
       // Test TableAlreadyExistsException - create table twice
-      client.createTable(table);
-      Assertions.assertThrows(TableAlreadyExistsException.class, () -> 
client.createTable(table));
+      hiveClient.createTable(table);
+      Assertions.assertThrows(
+          TableAlreadyExistsException.class, () -> 
hiveClient.createTable(table));
 
       // Test NoSuchTableException - get non-existent table
       Assertions.assertThrows(
           NoSuchTableException.class,
-          () -> client.getTable(catalogName, dbName, "non_existent_table_" + 
UUID.randomUUID()));
+          () ->
+              hiveClient.getTable(catalogName, dbName, "non_existent_table_" + 
UUID.randomUUID()));
 
       // Test PartitionAlreadyExistsException - add partition twice
-      HiveTable loadedTable = client.getTable(catalogName, dbName, tableName);
-      HivePartition addedPartition = client.addPartition(loadedTable, 
partition);
+      HiveTable loadedTable = hiveClient.getTable(catalogName, dbName, 
tableName);
+      HivePartition addedPartition = hiveClient.addPartition(loadedTable, 
partition);
       Assertions.assertNotNull(addedPartition, "Added partition should not be 
null");
       Assertions.assertThrows(
-          PartitionAlreadyExistsException.class, () -> 
client.addPartition(loadedTable, partition));
+          PartitionAlreadyExistsException.class,
+          () -> hiveClient.addPartition(loadedTable, partition));
 
       // Test NoSuchPartitionException - get non-existent partition
       Assertions.assertThrows(
           NoSuchPartitionException.class,
-          () -> client.getPartition(loadedTable, "dt=non_existent_partition_" 
+ UUID.randomUUID()));
+          () ->
+              hiveClient.getPartition(
+                  loadedTable, "dt=non_existent_partition_" + 
UUID.randomUUID()));
 
       // Test NonEmptySchemaException - try to drop database with tables 
(cascade=false)
       Exception exception =
           Assertions.assertThrows(
-              Exception.class, () -> client.dropDatabase(catalogName, dbName, 
false));
+              Exception.class, () -> hiveClient.dropDatabase(catalogName, 
dbName, false));
       // Hive may throw different exceptions for non-empty database
       // The converter should handle it appropriately
       Assertions.assertTrue(
@@ -348,19 +287,20 @@ public class TestHiveClient {
               + exception.getClass().getName());
 
       // Cleanup
-      client.dropPartition(catalogName, dbName, tableName, 
addedPartition.name(), true);
-      client.dropTable(catalogName, dbName, tableName, true, true);
-      client.dropDatabase(catalogName, dbName, true);
+      hiveClient.dropPartition(catalogName, dbName, tableName, 
addedPartition.name(), true);
+      hiveClient.dropTable(catalogName, dbName, tableName, true, true);
+      hiveClient.dropDatabase(catalogName, dbName, true);
     } finally {
-      safelyDropTable(client, catalogName, dbName, tableName);
-      safelyDropDatabase(client, catalogName, dbName);
+      safelyDropTable(hiveClient, catalogName, dbName, tableName);
+      safelyDropDatabase(hiveClient, catalogName, dbName);
     }
   }
 
-  private void testConnectionFailedExceptionForVersion(String catalogName) {
+  @Test
+  void testConnectionFailedException() {
     // Test with invalid/unreachable Hive Metastore URI
     String invalidMetastoreUri = "thrift://127.0.0.1:9999";
-    Properties properties = new Properties();
+    Properties properties = createHiveProperties();
     properties.setProperty("hive.metastore.uris", invalidMetastoreUri);
 
     // Connection failure may occur during client creation or operation
@@ -369,7 +309,7 @@ public class TestHiveClient {
         Assertions.assertThrows(
             Exception.class,
             () -> {
-              HiveClient client = new HiveClientFactory(properties, 
"").createHiveClient();
+              HiveClient client = new HiveClientFactory(properties, 
testPrefix).createHiveClient();
               client.getAllDatabases(catalogName);
             });
 
@@ -381,39 +321,73 @@ public class TestHiveClient {
         ((ConnectionFailedException) exception).getCause(), "Exception should 
have a cause");
   }
 
-  @Test
-  void testConnectionFailedException() throws Exception {
-    // Test with HIVE2
-    testConnectionFailedExceptionForVersion("");
+  private HiveSchema createTestSchema(String catalogName, String dbName, 
String location) {
+    Map<String, String> properties = new HashMap<>();
+    properties.put(HiveConstants.LOCATION, location);
+    return HiveSchema.builder()
+        .withName(dbName)
+        .withComment("Test schema for HiveClient operations")
+        .withProperties(properties)
+        .withAuditInfo(defaultAudit())
+        .withCatalogName(catalogName)
+        .build();
+  }
 
-    // Test with HIVE3
-    testConnectionFailedExceptionForVersion("hive");
+  private HiveTable createTestTable(
+      String catalogName, String databaseName, String tableName, String 
location) {
+    Column idColumn = Column.of("id", Types.IntegerType.get(), null, false, 
false, null);
+    Column dtColumn = Column.of("dt", Types.StringType.get());
+    Map<String, String> properties = new HashMap<>();
+    properties.put(HiveConstants.LOCATION, location);
+    return HiveTable.builder()
+        .withName(tableName)
+        .withColumns(new Column[] {idColumn, dtColumn})
+        .withComment("Test table for HiveClient operations")
+        .withProperties(properties)
+        .withAuditInfo(defaultAudit())
+        .withPartitioning(new Transform[] {Transforms.identity("dt")})
+        .withCatalogName(catalogName)
+        .withDatabaseName(databaseName)
+        .build();
   }
 
-  @Test
-  void testKerberosConnection() {
-    // This method can be implemented to test Kerberos authentication with 
Hive Metastore
-    // when a Kerberos-enabled environment is available.
-    Properties properties = new Properties();
-    properties.setProperty("hive.metastore.uris", KERBEROS_HIVE2_HMS_URL);
-    properties.setProperty("authentication.kerberos.principal", 
KERBEROS_PRINCIPAL);
-    properties.setProperty("authentication.impersonation-enable", "true");
-    properties.setProperty("authentication.kerberos.keytab-uri", 
KERBEROS_KEYTAB);
-    properties.setProperty("hive.metastore.kerberos.principal", 
KERBEROS_METASTORE_PRINCIPAL);
-    properties.setProperty("hive.metastore.sasl.enabled", "true");
-    properties.setProperty("hadoop.security.authentication", "kerberos");
+  private HivePartition createTestPartition(String partitionName, String 
value) {
+    HivePartition partition =
+        HivePartition.identity(
+            new String[][] {new String[] {"dt"}},
+            new Literal<?>[] {Literals.stringLiteral(value)},
+            Map.of());
+    Assertions.assertEquals(partitionName, partition.name());
+    return partition;
+  }
+
+  private AuditInfo defaultAudit() {
+    return AuditInfo.builder()
+        .withCreator(System.getProperty("user.name", "gravitino"))
+        .withCreateTime(Instant.now())
+        .build();
+  }
 
-    System.setProperty("java.security.krb5.conf", KERBEROS_KRB5_CONF);
+  protected Properties createHiveProperties() {
+    Properties properties = new Properties();
+    properties.setProperty("hive.metastore.uris", metastoreUri);
+    return properties;
+  }
 
-    String catalogName = "hive";
-    String dbName = "test_kerberos_db";
-    String dbLocation = KERBEROS_HIVE2_HDFS_URL + 
"/tmp/gravitino_kerberos_test/" + dbName;
+  private void safelyDropTable(
+      HiveClient client, String catalogName, String dbName, String tableName) {
+    try {
+      client.dropTable(catalogName, dbName, tableName, true, true);
+    } catch (Exception ignored) {
+      // ignore cleanup failures
+    }
+  }
 
-    HiveClient client = new HiveClientFactory(properties, 
"00").createHiveClient();
-    HiveSchema schema = createTestSchema(catalogName, dbName, dbLocation);
-    client.createDatabase(schema);
-    List<String> allDatabases = client.getAllDatabases(catalogName);
-    Assertions.assertTrue(allDatabases.contains(dbName), "Database should be 
in the list");
-    client.dropDatabase(catalogName, dbName, true);
+  private void safelyDropDatabase(HiveClient client, String catalogName, 
String dbName) {
+    try {
+      client.dropDatabase(catalogName, dbName, true);
+    } catch (Exception ignored) {
+      // ignore cleanup failures
+    }
   }
 }
diff --git 
a/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive2HMSWithKerberos.java
 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive2HMSWithKerberos.java
new file mode 100644
index 0000000000..53ef460da0
--- /dev/null
+++ 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive2HMSWithKerberos.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.gravitino.hive.client;
+
+import java.io.File;
+import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.util.Properties;
+import org.apache.commons.io.FileUtils;
+import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInstance;
+
+/** Kerberos-enabled Hive2 HMS tests. */
+@Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class TestHive2HMSWithKerberos extends TestHive2HMS {
+
+  private static final String CLIENT_PRINCIPAL = "cli@HADOOPKRB";
+  private File tempDir;
+  private String keytabPath;
+  private String krb5ConfPath;
+
+  @BeforeAll
+  @Override
+  public void startHiveContainer() {
+    // Per-suite prefix names temp artifacts and the client factory instance.
+    testPrefix = "hive2_kerberos";
+    // Empty: Hive2 suites do not target a specific HMS catalog.
+    catalogName = "";
+    containerSuite.startKerberosHiveContainer();
+    hiveContainer = containerSuite.getKerberosHiveContainer();
+
+    metastoreUri =
+        String.format(
+            "thrift://%s:%d",
+            hiveContainer.getContainerIpAddress(), 
 HiveContainer.HIVE_METASTORE_PORT);
+    hdfsBasePath =
+        String.format(
+            "hdfs://%s:%d/tmp/gravitino_test",
+            hiveContainer.getContainerIpAddress(), 
 HiveContainer.HDFS_DEFAULTFS_PORT);
+
+    // Must run before building the client: copies the keytab/krb5.conf out of
+    // the container and points this JVM at them.
+    prepareKerberosConfig();
+
+    // Initialize client with Kerberos-aware properties.
+    hiveClient = new HiveClientFactory(createHiveProperties(), 
 testPrefix).createHiveClient();
+  }
+
+  /**
+   * Copies the client keytab and krb5.conf out of the container, rewrites the
+   * KDC/admin-server addresses from "localhost" to the container IP, and points
+   * this JVM at the patched configuration.
+   */
+  protected void prepareKerberosConfig() {
+    try {
+      tempDir = Files.createTempDirectory(testPrefix).toFile();
+      tempDir.deleteOnExit();
+
+      String ip = hiveContainer.getContainerIpAddress();
+
+      // Copy client keytab (admin keytab is used for metastore client)
+      keytabPath = new File(tempDir, "admin.keytab").getAbsolutePath();
+      hiveContainer.getContainer().copyFileFromContainer("/etc/admin.keytab", 
 keytabPath);
+
+      // Copy and patch krb5.conf
+      String tmpKrb5Path = new File(tempDir, 
 "krb5.conf.tmp").getAbsolutePath();
+      krb5ConfPath = new File(tempDir, "krb5.conf").getAbsolutePath();
+      hiveContainer.getContainer().copyFileFromContainer("/etc/krb5.conf", 
 tmpKrb5Path);
+      String content = FileUtils.readFileToString(new File(tmpKrb5Path), 
 StandardCharsets.UTF_8);
+      content = content.replace("kdc = localhost:88", "kdc = " + ip + ":88");
+      content = content.replace("admin_server = localhost", "admin_server = " 
 + ip + ":749");
+      FileUtils.write(new File(krb5ConfPath), content, StandardCharsets.UTF_8);
+
+      System.setProperty("java.security.krb5.conf", krb5ConfPath);
+      // NOTE(review): "hadoop.security.authentication" is normally a Hadoop
+      // Configuration key, not a JVM system property — confirm the client
+      // factory actually reads it from System properties.
+      System.setProperty("hadoop.security.authentication", "kerberos");
+
+      // Force the JVM Kerberos stack to re-read the patched krb5.conf.
+      refreshKerberosConfig();
+      KerberosName.resetDefaultRealm();
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to prepare kerberos configuration for 
 Hive2 HMS tests", e);
+    }
+  }
+
+  /**
+   * Forces the in-process Kerberos configuration to be re-read after
+   * java.security.krb5.conf changed, via the vendor-specific internal Config
+   * class (IBM vs. HotSpot JDKs).
+   *
+   * <p>NOTE(review): relies on JDK-internal APIs; newer JDKs may need
+   * --add-opens/--add-exports flags — confirm build settings.
+   */
+  private static void refreshKerberosConfig() {
+    Class<?> classRef;
+    try {
+      if (System.getProperty("java.vendor").contains("IBM")) {
+        classRef = Class.forName("com.ibm.security.krb5.internal.Config");
+      } else {
+        classRef = Class.forName("sun.security.krb5.Config");
+      }
+
+      Method refreshMethod = classRef.getMethod("refresh");
+      refreshMethod.invoke(null);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Extends the base HMS properties with SASL/Kerberos client settings:
+   * server principal, client principal + keytab, and impersonation.
+   */
+  @Override
+  protected Properties createHiveProperties() {
+    Properties properties = super.createHiveProperties();
+    properties.setProperty("hive.metastore.sasl.enabled", "true");
+    // Server-side HMS principal; hostname must match the container's.
+    properties.setProperty(
+        "hive.metastore.kerberos.principal",
+        String.format("hive/%s@HADOOPKRB", hiveContainer.getHostName()));
+    properties.setProperty("authentication.kerberos.principal", 
 CLIENT_PRINCIPAL);
+    properties.setProperty("authentication.kerberos.keytab-uri", keytabPath);
+    properties.setProperty("authentication.impersonation-enable", "true");
+    properties.setProperty("hadoop.security.authentication", "kerberos");
+    return properties;
+  }
+
+  /** Stops the container via the parent, then removes the keytab/krb5 temp directory. */
+  @AfterAll
+  @Override
+  public void stopHiveContainer() throws Exception {
+    super.stopHiveContainer();
+    try {
+      if (tempDir != null && tempDir.exists()) {
+        FileUtils.deleteDirectory(tempDir);
+      }
+    } catch (Exception e) {
+      // NOTE(review): failing the class because temp-file cleanup failed is
+      // harsh; consider logging instead — confirm suite conventions.
+      throw new RuntimeException("Failed to clean up temporary files for 
 Kerberos config", e);
+    }
+  }
+}
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMS.java
similarity index 57%
copy from 
catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
copy to 
catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMS.java
index 60e8facb4e..a8b79e3913 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
+++ 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMS.java
@@ -16,23 +16,41 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.gravitino.catalog.hive.integration.test;
+
+package org.apache.gravitino.hive.client;
 
 import com.google.common.collect.ImmutableMap;
 import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInstance;
 
+/**
+ * Hive3 version of {@link TestHive2HMS}. Hive3 default catalog is "hive" and 
uses Hive3 container.
+ */
 @Tag("gravitino-docker-test")
-public class CatalogHive3IT extends CatalogHive2IT {
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class TestHive3HMS extends TestHive2HMS {
+
+  @BeforeAll
   @Override
-  protected void startNecessaryContainer() {
+  public void startHiveContainer() {
+    testPrefix = "hive3";
+    catalogName = "hive"; // Hive3 default catalog
+
     containerSuite.startHiveContainer(
         ImmutableMap.of(HiveContainer.HIVE_RUNTIME_VERSION, 
 HiveContainer.HIVE3));
+    hiveContainer = containerSuite.getHiveContainer();
 
-    hiveMetastoreUris =
+    metastoreUri =
         String.format(
             "thrift://%s:%d",
-            containerSuite.getHiveContainer().getContainerIpAddress(),
-            HiveContainer.HIVE_METASTORE_PORT);
+            hiveContainer.getContainerIpAddress(), 
 HiveContainer.HIVE_METASTORE_PORT);
+    hdfsBasePath =
+        String.format(
+            "hdfs://%s:%d/tmp/gravitino_test",
+            hiveContainer.getContainerIpAddress(), 
 HiveContainer.HDFS_DEFAULTFS_PORT);
+
+    // Plain (non-Kerberos) connection: only the metastore URI is required.
+    hiveClient = new HiveClientFactory(createHiveProperties(), 
 testPrefix).createHiveClient();
   }
 }
diff --git 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMSWithCatalog.java
similarity index 50%
copy from 
catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
copy to 
catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMSWithCatalog.java
index 60e8facb4e..a3b38347fe 100644
--- 
a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHive3IT.java
+++ 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMSWithCatalog.java
@@ -16,23 +16,35 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.gravitino.catalog.hive.integration.test;
 
-import com.google.common.collect.ImmutableMap;
+package org.apache.gravitino.hive.client;
+
 import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInstance;
 
+/** Hive3 HMS tests with an explicitly created catalog to validate catalog 
operations in Hive3. */
 @Tag("gravitino-docker-test")
-public class CatalogHive3IT extends CatalogHive2IT {
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class TestHive3HMSWithCatalog extends TestHive3HMS {
+
+  @BeforeAll
   @Override
-  protected void startNecessaryContainer() {
-    containerSuite.startHiveContainer(
-        ImmutableMap.of(HiveContainer.HIVE_RUNTIME_VERSION, HiveContainer.HIVE3));
-
-    hiveMetastoreUris =
-        String.format(
-            "thrift://%s:%d",
-            containerSuite.getHiveContainer().getContainerIpAddress(),
-            HiveContainer.HIVE_METASTORE_PORT);
+  public void startHiveContainer() {
+    // Start the Hive3 container via the parent first. BUG FIX: the parent
+    // unconditionally sets testPrefix = "hive3", so assigning our dedicated
+    // prefix before the super call (as before) was dead code — it must be
+    // assigned AFTER the super call.
+    super.startHiveContainer();
+    testPrefix = "hive3_mycatalog";
+
+    // Override catalog to a dedicated Hive3 catalog and ensure it exists.
+    catalogName = "mycatalog";
+    String catalogLocation =
+        String.format(
+            "hdfs://%s:%d/tmp/gravitino_test/catalogs/%s",
+            hiveContainer.getContainerIpAddress(), HiveContainer.HDFS_DEFAULTFS_PORT, catalogName);
+
+    // Ensure the catalog exists; only create if missing (idempotent across reruns).
+    if (!hiveClient.getCatalogs().contains(catalogName)) {
+      hiveClient.createCatalog(catalogName, catalogLocation, "Hive3 catalog for tests");
+    }
   }
 }
diff --git 
a/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMSWithKerberos.java
 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMSWithKerberos.java
new file mode 100644
index 0000000000..9bb1eb048d
--- /dev/null
+++ 
b/catalogs/hive-metastore-common/src/test/java/org/apache/gravitino/hive/client/TestHive3HMSWithKerberos.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.gravitino.hive.client;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInstance;
+
+/**
+ * Kerberos-enabled Hive3 HMS tests. Reuses Kerberos setup from {@link 
TestHive2HMSWithKerberos} but
+ * starts the Hive3 container and uses the default Hive3 catalog name "hive".
+ */
+@Tag("gravitino-docker-test")
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class TestHive3HMSWithKerberos extends TestHive2HMSWithKerberos {
+
+  @BeforeAll
+  @Override
+  public void startHiveContainer() {
+    testPrefix = "hive3_kerberos";
+    catalogName = "hive"; // Hive3 default catalog
+
+    // Start the Kerberos-enabled container with the Hive3 runtime.
+    containerSuite.startKerberosHiveContainer(
+        ImmutableMap.of(HiveContainer.HIVE_RUNTIME_VERSION, HiveContainer.HIVE3));
+    hiveContainer = containerSuite.getKerberosHiveContainer();
+
+    metastoreUri =
+        String.format(
+            "thrift://%s:%d",
+            hiveContainer.getContainerIpAddress(), HiveContainer.HIVE_METASTORE_PORT);
+    hdfsBasePath =
+        String.format(
+            "hdfs://%s:%d/tmp/gravitino_test",
+            hiveContainer.getContainerIpAddress(), HiveContainer.HDFS_DEFAULTFS_PORT);
+
+    // Prepare Kerberos config (keytab/krb5) for client connections.
+    prepareKerberosConfig();
+
+    // BUG FIX: pass the testPrefix variable, not the string literal
+    // "testPrefix", matching the other suites (see TestHive2HMSWithKerberos).
+    hiveClient = new HiveClientFactory(createHiveProperties(), testPrefix).createHiveClient();
+  }
+}
diff --git 
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java
 
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java
index 0127752068..51d0030c10 100644
--- 
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java
+++ 
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java
@@ -215,7 +215,7 @@ public class ContainerSuite implements Closeable {
     }
   }
 
-  public void startKerberosHiveContainer() {
+  public void startKerberosHiveContainer(Map<String, String> envVars) {
     if (kerberosHiveContainer == null) {
       synchronized (ContainerSuite.class) {
         if (kerberosHiveContainer == null) {
@@ -225,6 +225,7 @@ public class ContainerSuite implements Closeable {
               HiveContainer.builder()
                   .withHostName("gravitino-ci-kerberos-hive")
                   .withKerberosEnabled(true)
+                  .withEnvVars(envVars)
                   .withNetwork(network);
           HiveContainer container = closer.register(hiveBuilder.build());
           container.start();
@@ -234,6 +235,10 @@ public class ContainerSuite implements Closeable {
     }
   }
 
+  /**
+   * Starts the Kerberos Hive container with no extra environment variables.
+   *
+   * <p>NOTE(review): the container is a lazily-created singleton, so env vars
+   * passed by a LATER call to the parameterized overload are silently ignored
+   * once the container exists — confirm Hive2 and Hive3 Kerberos suites never
+   * share one JVM.
+   */
+  public void startKerberosHiveContainer() {
+    startKerberosHiveContainer(ImmutableMap.of());
+  }
+
   public void startSQLBaseAuthHiveContainer(Map<String, String> envVars) {
     // If you want to enable SQL based authorization, you need both set the
     // `ENABLE_SQL_BASE_AUTHORIZATION` environment.

Reply via email to