This is an automated email from the ASF dual-hosted git repository.

difin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new d46d900cba2 HIVE-29233: Iceberg: Validate HiveRESTCatalogClient with external RESTCatalogs like Gravitino (#6108)
d46d900cba2 is described below

commit d46d900cba2b484f033772ec42e62e8f96e0c07a
Author: Dmitriy Fingerman <[email protected]>
AuthorDate: Mon Oct 6 15:12:09 2025 -0400

    HIVE-29233: Iceberg: Validate HiveRESTCatalogClient with external RESTCatalogs like Gravitino (#6108)
---
 ..._catalog.q => iceberg_rest_catalog_gravitino.q} |   5 +
 ...g_rest_catalog.q => iceberg_rest_catalog_hms.q} |   0
 ....q.out => iceberg_rest_catalog_gravitino.q.out} |   2 -
 ...atalog.q.out => iceberg_rest_catalog_hms.q.out} |   0
 itests/qtest-iceberg/pom.xml                       |  29 +++
 ...bergRESTCatalogGravitinoLlapLocalCliDriver.java | 289 +++++++++++++++++++++
 ...stIcebergRESTCatalogHMSLlapLocalCliDriver.java} |  21 +-
 .../test/resources/gravitino-h2-test-template.conf |  14 +
 .../test/resources/testconfiguration.properties    |   7 +-
 .../apache/hadoop/hive/cli/control/CliConfigs.java |  31 ++-
 10 files changed, 380 insertions(+), 18 deletions(-)

diff --git a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog_gravitino.q
similarity index 89%
copy from iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q
copy to iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog_gravitino.q
index 27f23122240..81982ca44d9 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog_gravitino.q
@@ -66,6 +66,11 @@ show create table ice_orc2;
 insert into ice_orc2 partition (company_id=100) 
 VALUES ('fn1','ln1', 1, 10), ('fn2','ln2', 2, 20), ('fn3','ln3', 3, 30);
 
+--! In CI, Testcontainers' .withFileSystemBind() cannot reliably bind the same host path to the same container path,
+--! so as a workaround, the .metadata.json files from the container are manually synced by a daemon process.
+--! Since the sync can take some time, we need to wait for it to complete after the insert operation.
+! sleep 20;
+
 describe formatted ice_orc2;
 select * from ice_orc2;
 
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog_hms.q
similarity index 100%
rename from iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q
rename to iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog_hms.q
diff --git a/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out b/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog_gravitino.q.out
similarity index 99%
copy from iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out
copy to iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog_gravitino.q.out
index a814fe8f149..64a9d65f878 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog_gravitino.q.out
@@ -216,7 +216,6 @@ PREHOOK: query: show databases
 PREHOOK: type: SHOWDATABASES
 POSTHOOK: query: show databases
 POSTHOOK: type: SHOWDATABASES
-default
 ice_rest
 PREHOOK: query: drop database ice_rest
 PREHOOK: type: DROPDATABASE
@@ -230,4 +229,3 @@ PREHOOK: query: show databases
 PREHOOK: type: SHOWDATABASES
 POSTHOOK: query: show databases
 POSTHOOK: type: SHOWDATABASES
-default
diff --git a/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out b/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog_hms.q.out
similarity index 100%
rename from iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out
rename to iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog_hms.q.out
diff --git a/itests/qtest-iceberg/pom.xml b/itests/qtest-iceberg/pom.xml
index 6ac62e8fa0b..c7cd70d7479 100644
--- a/itests/qtest-iceberg/pom.xml
+++ b/itests/qtest-iceberg/pom.xml
@@ -475,6 +475,11 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.testcontainers</groupId>
+      <artifactId>testcontainers</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <build>
     <plugins>
@@ -525,6 +530,30 @@
           </additionalClasspathElements>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-h2-driver-for-test</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>com.h2database</groupId>
+                  <artifactId>h2</artifactId>
+                  <version>${h2database.version}</version>
+                  <destFileName>h2-driver.jar</destFileName>
+                </artifactItem>
+              </artifactItems>
+              <outputDirectory>${project.build.directory}/test-dependencies</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>
diff --git a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.java b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.java
new file mode 100644
index 00000000000..70b9985ce0f
--- /dev/null
+++ b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.java
@@ -0,0 +1,289 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.cli;
+
+import com.github.dockerjava.api.command.CopyArchiveFromContainerCmd;
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.cli.control.CliAdapter;
+import org.apache.hadoop.hive.cli.control.CliConfigs;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.iceberg.CatalogUtil;
+import org.apache.iceberg.hive.CatalogUtils;
+import org.apache.iceberg.hive.client.HiveRESTCatalogClient;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestRule;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
+import org.testcontainers.containers.wait.strategy.Wait;
+import org.testcontainers.containers.wait.strategy.WaitAllStrategy;
+import org.testcontainers.utility.DockerImageName;
+import org.testcontainers.utility.MountableFile;
+import org.testcontainers.containers.GenericContainer;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.Duration;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+@RunWith(Parameterized.class)
+public class TestIcebergRESTCatalogGravitinoLlapLocalCliDriver {
+
+  private static final CliAdapter CLI_ADAPTER =
+      new CliConfigs.TestIcebergRESTCatalogGravitinoLlapLocalCliDriver().getCliAdapter();
+  
+  private static final Logger LOG = LoggerFactory.getLogger(TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.class);
+  
+  private static final String CATALOG_NAME = "ice01";
+  private static final long GRAVITINO_STARTUP_TIMEOUT_MINUTES = 5L;
+  private static final int GRAVITINO_HTTP_PORT = 9001;
+  private static final String GRAVITINO_CONF_FILE_TEMPLATE = "gravitino-h2-test-template.conf";
+  private static final String GRAVITINO_ROOT_DIR = "/root/gravitino-iceberg-rest-server";
+  private static final String GRAVITINO_STARTUP_SCRIPT = GRAVITINO_ROOT_DIR + "/bin/start-iceberg-rest-server.sh";
+  private static final String GRAVITINO_H2_LIB = GRAVITINO_ROOT_DIR + "/libs/h2-driver.jar";
+  private static final String GRAVITINO_CONF_FILE = GRAVITINO_ROOT_DIR + "/conf/gravitino-iceberg-rest-server.conf";
+  private static final DockerImageName GRAVITINO_IMAGE =
+      DockerImageName.parse("apache/gravitino-iceberg-rest:1.0.0");
+
+  private final String name;
+  private final File qfile;
+
+  private GenericContainer<?> gravitinoContainer;
+  private Path warehouseDir;
+  private final ScheduledExecutorService fileSyncExecutor = Executors.newSingleThreadScheduledExecutor();
+
+  @Parameters(name = "{0}")
+  public static List<Object[]> getParameters() throws Exception {
+    return CLI_ADAPTER.getParameters();
+  }
+
+  @ClassRule
+  public static final TestRule CLI_CLASS_RULE = CLI_ADAPTER.buildClassRule();
+
+  @Rule
+  public final TestRule cliTestRule = CLI_ADAPTER.buildTestRule();
+
+  public TestIcebergRESTCatalogGravitinoLlapLocalCliDriver(String name, File qfile) {
+    this.name = name;
+    this.qfile = qfile;
+  }
+
+  @Before
+  public void setup() throws IOException {
+    createWarehouseDir();
+    prepareGravitinoConfig();
+    startGravitinoContainer();
+    fileSyncExecutor.scheduleAtFixedRate(this::syncWarehouseDir, 0, 5, TimeUnit.SECONDS);
+
+    String host = gravitinoContainer.getHost();
+    Integer port = gravitinoContainer.getMappedPort(GRAVITINO_HTTP_PORT);
+    String restCatalogPrefix = String.format("%s%s.", CatalogUtils.CATALOG_CONFIG_PREFIX, CATALOG_NAME);
+
+    // Suppress IntelliJ warning about using HTTP since this is a local test container connection
+    @SuppressWarnings("HttpUrlsUsage")
+    String restCatalogUri = String.format("http://%s:%d/iceberg", host, port);
+
+    Configuration conf = SessionState.get().getConf();
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_IMPL, HiveRESTCatalogClient.class.getName());
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CATALOG_DEFAULT, CATALOG_NAME);
+    conf.set(restCatalogPrefix + "uri", restCatalogUri);
+    conf.set(restCatalogPrefix + "type", CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
+  }
+
+  @After
+  public void teardown() throws IOException {
+    if (gravitinoContainer != null) {
+      gravitinoContainer.stop();
+    }
+
+    fileSyncExecutor.shutdownNow();
+    FileUtils.deleteDirectory(warehouseDir.toFile());
+  }
+
+  /**
+   * Starts a Gravitino container with the Iceberg REST server configured for testing.
+   *
+   * <p>This method configures the container to:
+   * <ul>
+   *   <li>Expose container REST port GRAVITINO_HTTP_PORT and map it to a host port.</li>
+   *   <li>Modify the container entrypoint to create the warehouse directory before startup.</li>
+   *   <li>Copy a dynamically prepared Gravitino configuration file into the container.</li>
+   *   <li>Copy the H2 driver JAR into the server's lib directory.</li>
+   *   <li>Wait for the Gravitino Iceberg REST server to finish starting (based on logs and port checks).</li>
+   *   <li>Stream container logs into the test logger for easier debugging.</li>
+   * </ul>
+   *
+   * <p>Note: The {@code @SuppressWarnings("resource")} annotation is applied because
+   * IntelliJ and some compilers flag {@link org.testcontainers.containers.GenericContainer}
+   * as a resource that should be managed with try-with-resources. In this test setup,
+   * the container lifecycle is managed explicitly: it is started here and stopped in
+   * {@code @After} (via {@code gravitinoContainer.stop()}). Using try-with-resources
+   * would not work in this context, since the container must remain running across
+   * multiple test methods rather than being confined to a single block scope.</p>
+   */
+  @SuppressWarnings("resource")
+  private void startGravitinoContainer() {
+    gravitinoContainer = new GenericContainer<>(GRAVITINO_IMAGE)
+        .withExposedPorts(GRAVITINO_HTTP_PORT)
+        // Update entrypoint to create the warehouse directory before starting the server
+        .withCreateContainerCmdModifier(cmd -> cmd.withEntrypoint("bash", "-c",
+            String.format("mkdir -p %s && exec %s", warehouseDir.toString(), GRAVITINO_STARTUP_SCRIPT)))
+        // Mount gravitino configuration file
+        .withCopyFileToContainer(
+            MountableFile.forHostPath(Paths.get(warehouseDir.toString(), GRAVITINO_CONF_FILE_TEMPLATE)),
+            GRAVITINO_CONF_FILE
+        )
+        // Mount the H2 driver JAR into the server's lib directory
+        .withCopyFileToContainer(
+            MountableFile.forHostPath(
+                Paths.get("target", "test-dependencies", "h2-driver.jar").toAbsolutePath()
+            ),
+            GRAVITINO_H2_LIB
+        )
+        // Wait for the server to be fully started
+        .waitingFor(
+            new WaitAllStrategy()
+                .withStrategy(Wait.forLogMessage(".*GravitinoIcebergRESTServer is running.*\\n", 1)
+                    .withStartupTimeout(Duration.ofMinutes(GRAVITINO_STARTUP_TIMEOUT_MINUTES)))
+                .withStrategy(Wait.forListeningPort()
+                    .withStartupTimeout(Duration.ofMinutes(GRAVITINO_STARTUP_TIMEOUT_MINUTES)))
+        )
+        .withLogConsumer(new Slf4jLogConsumer(LoggerFactory
+            .getLogger(TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.class)));
+
+    gravitinoContainer.start();
+  }
+
+  /**
+   * Starts a background daemon that continuously synchronizes the Iceberg warehouse
+   * directory from the running Gravitino container to the host file system.
+   *
+   * <p>In CI environments, Testcontainers' {@code .withFileSystemBind()} cannot reliably
+   * bind the same host path to the same path inside the container, especially when
+   * using remote Docker hosts or Docker-in-Docker setups. This causes the container's
+   * writes (e.g., Iceberg metadata files like {@code .metadata.json}) to be invisible
+   * on the host.</p>
+   *
+   * <p>This method works around that limitation by repeatedly copying new files from
+   * the container's warehouse directory to the corresponding host directory. Existing
+   * files on the host are preserved, and only files that do not yet exist are copied.
+   * The sync runs every 5 seconds while the container is running.</p>
+   *
+   * <p>Each archive copy from the container is extracted using a {@link TarArchiveInputStream},
+   * and directories are created as needed. Files that already exist on the host are skipped
+   * to avoid overwriting container data.</p>
+   */
+  private void syncWarehouseDir() {
+    if (gravitinoContainer.isRunning()) {
+      try (CopyArchiveFromContainerCmd copyArchiveFromContainerCmd = 
+               gravitinoContainer
+                   .getDockerClient()
+                   .copyArchiveFromContainerCmd(gravitinoContainer.getContainerId(), warehouseDir.toString());
+           InputStream tarStream = copyArchiveFromContainerCmd.exec();
+           TarArchiveInputStream tis = new TarArchiveInputStream(tarStream)) {
+
+        TarArchiveEntry entry;
+        while ((entry = tis.getNextEntry()) != null) {
+          // Skip directories because we only want to copy metadata files from the container.
+          if (entry.isDirectory()) {
+            continue;
+          }
+
+          /*
+           * Tar entry names include a container-specific top-level folder, e.g.:
+           *   iceberg-test-1759245909247/iceberg_warehouse/ice_rest/.../metadata.json
+           *
+           * Strip the first part so the relative path inside the warehouse is preserved
+           * when mapping to the host warehouseDir.
+           */
+          
+          String[] parts = entry.getName().split("/", 2);
+          if (parts.length < 2) {
+            continue; // defensive guard
+          }
+
+          Path relativePath = Paths.get(parts[1]);
+          Path outputPath = warehouseDir.resolve(relativePath);
+
+          // Skip if already present on host to avoid overwriting
+          if (Files.exists(outputPath)) {
+            continue;
+          }
+
+          Files.createDirectories(outputPath.getParent());
+          Files.copy(tis, outputPath);
+        }
+
+      } catch (Exception e) {
+        LOG.error("Warehouse folder sync failed: {}", e.getMessage());
+      }
+    }
+  }
+
+  private void createWarehouseDir() {
+    try {
+      warehouseDir = Paths.get("/tmp", "iceberg-test-" + System.currentTimeMillis()).toAbsolutePath();
+      Files.createDirectories(warehouseDir);
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to create the Iceberg warehouse directory", e);
+    }
+  }
+
+  private void prepareGravitinoConfig() throws IOException {
+    String content;
+    try (InputStream in = TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.class.getClassLoader()
+        .getResourceAsStream(GRAVITINO_CONF_FILE_TEMPLATE)) {
+      if (in == null) {
+        throw new IOException("Resource not found: " + GRAVITINO_CONF_FILE_TEMPLATE);
+      }
+      content = new String(in.readAllBytes(), StandardCharsets.UTF_8);
+    }
+
+    String updatedContent = content
+        .replace("/WAREHOUSE_DIR", warehouseDir.toString())
+        .replace("HTTP_PORT", String.valueOf(GRAVITINO_HTTP_PORT));
+
+    Path configFile = warehouseDir.resolve(GRAVITINO_CONF_FILE_TEMPLATE);
+    Files.writeString(configFile, updatedContent);
+  }
+
+  @Test
+  public void testCliDriver() throws Exception {
+    CLI_ADAPTER.runTest(name, qfile);
+  }
+}
diff --git a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogLlapLocalCliDriver.java b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogHMSLlapLocalCliDriver.java
similarity index 86%
rename from itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogLlapLocalCliDriver.java
rename to itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogHMSLlapLocalCliDriver.java
index ef28f6257cb..66a2985f4f9 100644
--- a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogLlapLocalCliDriver.java
+++ b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogHMSLlapLocalCliDriver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -47,12 +47,13 @@
 import java.util.stream.Stream;
 
 @RunWith(Parameterized.class)
-public class TestIcebergRESTCatalogLlapLocalCliDriver {
+public class TestIcebergRESTCatalogHMSLlapLocalCliDriver {
 
   private static final Logger LOG = LoggerFactory.getLogger(
-      org.apache.hadoop.hive.cli.TestIcebergRESTCatalogLlapLocalCliDriver.class);
+      TestIcebergRESTCatalogHMSLlapLocalCliDriver.class);
   private static final String CATALOG_NAME = "ice01";
-  private static final CliAdapter adapter = new CliConfigs.TestIcebergRESTCatalogLlapLocalCliDriver().getCliAdapter();
+  private static final CliAdapter CLI_ADAPTER =
+      new CliConfigs.TestIcebergRESTCatalogHMSLlapLocalCliDriver().getCliAdapter();
   
   private final String name;
   private final File qfile;
@@ -64,17 +65,17 @@ public class TestIcebergRESTCatalogLlapLocalCliDriver {
           .build();
 
   @ClassRule
-  public static final TestRule cliClassRule = adapter.buildClassRule();
+  public static final TestRule CLI_CLASS_RULE = CLI_ADAPTER.buildClassRule();
 
   @Rule
-  public final TestRule cliTestRule = adapter.buildTestRule();
+  public final TestRule cliTestRule = CLI_ADAPTER.buildTestRule();
 
   @Parameters(name = "{0}")
   public static List<Object[]> getParameters() throws Exception {
-    return adapter.getParameters();
+    return CLI_ADAPTER.getParameters();
   }
 
-  public TestIcebergRESTCatalogLlapLocalCliDriver(String name, File qfile) {
+  public TestIcebergRESTCatalogHMSLlapLocalCliDriver(String name, File qfile) {
     this.name = name;
     this.qfile = qfile;
   }
@@ -113,6 +114,6 @@ public void cleanUpRestCatalogServerTmpDir() throws IOException {
 
   @Test
   public void testCliDriver() throws Exception {
-    adapter.runTest(name, qfile);
+    CLI_ADAPTER.runTest(name, qfile);
   }
-}
\ No newline at end of file
+}
diff --git a/itests/qtest-iceberg/src/test/resources/gravitino-h2-test-template.conf b/itests/qtest-iceberg/src/test/resources/gravitino-h2-test-template.conf
new file mode 100644
index 00000000000..12009e7fa8f
--- /dev/null
+++ b/itests/qtest-iceberg/src/test/resources/gravitino-h2-test-template.conf
@@ -0,0 +1,14 @@
+gravitino.iceberg-rest.httpPort = HTTP_PORT
+
+# --- Iceberg REST Catalog Backend (set to JDBC) ---
+gravitino.iceberg-rest.catalog-backend = jdbc
+
+# --- H2 File-Based Database Connection Details ---
+gravitino.iceberg-rest.uri = jdbc:h2:file:/tmp/gravitino_h2_db;AUTO_SERVER=TRUE
+gravitino.iceberg-rest.jdbc-driver = org.h2.Driver
+gravitino.iceberg-rest.jdbc-user = sa
+gravitino.iceberg-rest.jdbc-password = ""
+gravitino.iceberg-rest.jdbc-initialize = true
+
+# --- Warehouse Location (where data files are stored) ---
+gravitino.iceberg-rest.warehouse = file:///WAREHOUSE_DIR/iceberg_warehouse
\ No newline at end of file
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index d253a2f93aa..14a8394d589 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -455,8 +455,11 @@ iceberg.llap.query.compactor.files=\
   iceberg_minor_compaction_partition_evolution.q,\
   iceberg_minor_compaction_unpartitioned.q
 
-iceberg.llap.query.rest.files=\
-  iceberg_rest_catalog.q
+iceberg.llap.query.rest.hms.files=\
+  iceberg_rest_catalog_hms.q
+
+iceberg.llap.query.rest.gravitino.files=\
+  iceberg_rest_catalog_gravitino.q
 
 iceberg.llap.only.query.files=\
   hadoop_catalog_create_table.q,\
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index 62cf1add01d..2b6bcd9015a 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -660,7 +660,8 @@ public IcebergCliConfig() {
         setQueryDir("iceberg/iceberg-handler/src/test/queries/positive");
         excludesFrom(testConfigProps, "iceberg.llap.only.query.files");
         excludesFrom(testConfigProps, "iceberg.llap.query.compactor.files");
-        excludesFrom(testConfigProps, "iceberg.llap.query.rest.files");
+        excludesFrom(testConfigProps, "iceberg.llap.query.rest.hms.files");
+        excludesFrom(testConfigProps, "iceberg.llap.query.rest.gravitino.files");
 
         setResultsDir("iceberg/iceberg-handler/src/test/results/positive");
         setLogDir("itests/qtest/target/qfile-results/iceberg-handler/positive");
@@ -714,13 +715,35 @@ public IcebergLlapLocalCliConfig() {
     }
   }
 
-  public static class TestIcebergRESTCatalogLlapLocalCliDriver extends AbstractCliConfig {
+  public static class TestIcebergRESTCatalogHMSLlapLocalCliDriver extends AbstractCliConfig {
 
-    public TestIcebergRESTCatalogLlapLocalCliDriver() {
+    public TestIcebergRESTCatalogHMSLlapLocalCliDriver() {
       super(CoreCliDriver.class);
       try {
         setQueryDir("iceberg/iceberg-handler/src/test/queries/positive");
-        includesFrom(testConfigProps, "iceberg.llap.query.rest.files");
+        includesFrom(testConfigProps, "iceberg.llap.query.rest.hms.files");
+
+        setResultsDir("iceberg/iceberg-handler/src/test/results/positive/llap");
+        setLogDir("itests/qtest/target/qfile-results/iceberg-handler/positive");
+
+        setInitScript("q_test_init_tez.sql");
+        setCleanupScript("q_test_cleanup_tez.sql");
+
+        setHiveConfDir("data/conf/iceberg/llap");
+        setClusterType(MiniClusterType.LLAP_LOCAL);
+      } catch (Exception e) {
+        throw new RuntimeException("can't construct cliconfig", e);
+      }
+    }
+  }
+
+  public static class TestIcebergRESTCatalogGravitinoLlapLocalCliDriver extends AbstractCliConfig {
+
+    public TestIcebergRESTCatalogGravitinoLlapLocalCliDriver() {
+      super(CoreCliDriver.class);
+      try {
+        setQueryDir("iceberg/iceberg-handler/src/test/queries/positive");
+        includesFrom(testConfigProps, "iceberg.llap.query.rest.gravitino.files");
 
         
setResultsDir("iceberg/iceberg-handler/src/test/results/positive/llap");
         
setLogDir("itests/qtest/target/qfile-results/iceberg-handler/positive");

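For context, the client-side wiring the new Gravitino driver performs in setup() can be summarized
in the sketch below. It is not part of the patch: the class and method names are hypothetical, the
endpoint URL and catalog name are placeholders, and it relies only on the configuration keys and
constants the test above already imports.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
    import org.apache.iceberg.CatalogUtil;
    import org.apache.iceberg.hive.CatalogUtils;
    import org.apache.iceberg.hive.client.HiveRESTCatalogClient;

    public class RestCatalogWiringSketch {

      // Hypothetical helper mirroring the test's setup(): build a Configuration that points
      // Hive's metastore client at an external Iceberg REST catalog such as Gravitino.
      static Configuration restCatalogConf(String catalogName, String restUri) {
        Configuration conf = new Configuration();
        // Route metastore client calls through the Iceberg REST catalog client.
        MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_IMPL,
            HiveRESTCatalogClient.class.getName());
        // Make the REST-backed catalog the session's default catalog.
        MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CATALOG_DEFAULT, catalogName);
        // Per-catalog properties follow the "<prefix><catalogName>." naming scheme.
        String prefix = CatalogUtils.CATALOG_CONFIG_PREFIX + catalogName + ".";
        conf.set(prefix + "uri", restUri); // e.g. http://localhost:9001/iceberg (placeholder)
        conf.set(prefix + "type", CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
        return conf;
      }
    }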