This is an automated email from the ASF dual-hosted git repository.

jianglongtao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new cb92febb312 Eliminate HiveServer2 integration's dependency on Hive Metastore Client (#36114)
cb92febb312 is described below

commit cb92febb312c733fb10fdc0be225420addfec9cf
Author: Ling Hengqian <linghengq...@outlook.com>
AuthorDate: Wed Jul 30 10:55:35 2025 +0800

    Eliminate HiveServer2 integration's dependency on Hive Metastore Client (#36114)
    
    * Eliminate HiveServer2 integration's dependency on Hive Metastore Client
    
    * Remove redundant dependency
    
    * Modify the scope of the thin jar containing the Hive CLI classes
---
 RELEASE-NOTES.md                                   |   1 +
 .../optional-plugins/hiveserver2/_index.cn.md      |  58 +-----
 .../optional-plugins/hiveserver2/_index.en.md      |  58 +-----
 infra/database/type/hive/pom.xml                   |   2 +-
 .../metadata/data/loader/HiveMetaDataLoader.java   | 116 +++++++----
 .../metadata/data/loader/PrestoMetaDataLoader.java |   4 +-
 .../hadoop-common/3.3.6/reflect-config.json        |   8 -
 .../hadoop-common/3.3.6/resource-config.json       |   3 +
 .../hive-jdbc/4.0.1/reflect-config.json            |  11 --
 .../reachability-metadata.json                     | 212 ++++-----------------
 .../reachability-metadata.json                     |  28 +--
 pom.xml                                            |   1 -
 test/native/native-image-filter/extra-filter.json  |   1 -
 test/native/pom.xml                                |  12 --
 ...neMetastoreTest.java => SystemSchemasTest.java} |  53 +++---
 .../reachability-metadata.json                     |  16 +-
 .../{standalone-hms.yaml => system-schemas.yaml}   |   0
 17 files changed, 165 insertions(+), 419 deletions(-)

diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index 9fa2e78fbd2..f50fb418d3b 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -30,6 +30,7 @@
 
 1. Build: Support compiling and using ShardingSphere under OpenJDK 24 - [#35145](https://github.com/apache/shardingsphere/pull/35145)
 1. Infra: Support for connecting to Presto's Memory Connector in ShardingSphere config - [#34432](https://github.com/apache/shardingsphere/pull/34432)
+1. Infra: Eliminate HiveServer2 integration's dependency on Hive Metastore Client - [#36114](https://github.com/apache/shardingsphere/pull/36114)
 1. Metadata: Add support for partition tables in PostgreSQL [#34346](https://github.com/apache/shardingsphere/pull/34346)
 1. SQL Parser: Support MySQL SELECT CAST AS YEAR statement parse - [#34638](https://github.com/apache/shardingsphere/pull/34638)
 1. SQL Parser: Support MySQL SELECT MAX(ALL expr) statement parse - [#34639](https://github.com/apache/shardingsphere/pull/34639)
diff --git a/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.cn.md b/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.cn.md
index 0d46a995c78..36d6b464f92 100644
--- a/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.cn.md
+++ b/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.cn.md
@@ -35,17 +35,6 @@ ShardingSphere 对 HiveServer2 JDBC Driver 的支持位于可选模块中。
         <artifactId>hive-service</artifactId>
         <version>4.0.1</version>
     </dependency>
-    <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-core</artifactId>
-        <version>3.3.6</version>
-        <exclusions>
-            <exclusion>
-                <groupId>*</groupId>
-                <artifactId>*</artifactId>
-            </exclusion>
-        </exclusions>
-    </dependency>
 </dependencies>
 ```
 
@@ -82,17 +71,6 @@ ShardingSphere 对 HiveServer2 JDBC Driver 的支持位于可选模块中。
             </exclusion>
         </exclusions>
     </dependency>
-    <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-core</artifactId>
-        <version>3.3.6</version>
-        <exclusions>
-            <exclusion>
-                <groupId>*</groupId>
-                <artifactId>*</artifactId>
-            </exclusion>
-        </exclusions>
-    </dependency>
 </dependencies>
 ```
 
@@ -147,8 +125,6 @@ CREATE TABLE IF NOT EXISTS t_order
     status     string,
     PRIMARY KEY (order_id) disable novalidate
 ) STORED BY ICEBERG STORED AS ORC TBLPROPERTIES ('format-version' = '2');
-
-TRUNCATE TABLE t_order;
 ```
 
 ### 在业务项目创建 ShardingSphere 数据源
@@ -209,9 +185,11 @@ public class ExampleUtils {
         try (HikariDataSource dataSource = new HikariDataSource(config);
              Connection connection = dataSource.getConnection();
              Statement statement = connection.createStatement()) {
+            statement.execute("TRUNCATE TABLE t_order");
             statement.execute("INSERT INTO t_order (user_id, order_type, address_id, status) VALUES (1, 1, 1, 'INSERT_TEST')");
             statement.executeQuery("SELECT * FROM t_order");
             statement.execute("DELETE FROM t_order WHERE user_id=1");
+            statement.execute("DROP TABLE IF EXISTS t_order");
         }
     }
 }
@@ -275,8 +253,6 @@ CREATE TABLE IF NOT EXISTS t_order
     status     string,
     PRIMARY KEY (order_id) disable novalidate
 ) STORED BY ICEBERG STORED AS ORC TBLPROPERTIES ('format-version' = '2');
-
-TRUNCATE TABLE t_order;
 ```
 
 在业务项目引入`前提条件`涉及的依赖后,在业务项目的 classpath 上编写 ShardingSphere 数据源的配置文件`demo.yaml`,
@@ -333,6 +309,7 @@ public class ExampleUtils {
         try (HikariDataSource dataSource = new HikariDataSource(config);
              Connection connection = dataSource.getConnection();
              Statement statement = connection.createStatement()) {
+            statement.execute("TRUNCATE TABLE t_order");
             statement.execute("INSERT INTO t_order (user_id, order_type, address_id, status) VALUES (1, 1, 1, 'INSERT_TEST')");
             statement.executeQuery("SELECT * FROM t_order");
             statement.execute("DELETE FROM t_order WHERE order_id=1");
@@ -436,35 +413,6 @@ ShardingSphere 仅针对 HiveServer2 `4.0.1` 进行集成测试。
 用户总应该通过 HiveServer2 的 Docker Image `apache/hive:4.0.1` 启动 HiveServer2。
 参考 https://issues.apache.org/jira/browse/HIVE-28418 。
 
-### Hadoop 限制
-
-用户仅可使用 Hadoop `3.3.6` 来作为 HiveServer2 JDBC Driver `4.0.1` 的底层 Hadoop 依赖。
-HiveServer2 JDBC Driver `4.0.1` 不支持 Hadoop `3.4.1`, 参考 https://github.com/apache/hive/pull/5500 。
-
-对于 HiveServer2 JDBC Driver `org.apache.hive:hive-jdbc:4.0.1` 或 `classifier` 为 `standalone` 的 `org.apache.hive:hive-jdbc:4.0.1`,
-实际上并不额外依赖 `org.apache.hadoop:hadoop-mapreduce-client-core:3.3.6`。
-
-但 `org.apache.shardingsphere:shardingsphere-infra-database-hive` 的
-`org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader` 会使用 `org.apache.hadoop.hive.conf.HiveConf`,
-这进一步使用了 `org.apache.hadoop:hadoop-mapreduce-client-core:3.3.6` 的 `org.apache.hadoop.mapred.JobConf` 类。
-
-ShardingSphere 仅需要使用 `org.apache.hadoop.mapred.JobConf` 类,
-因此排除 `org.apache.hadoop:hadoop-mapreduce-client-core:3.3.6` 的所有额外依赖是合理行为。
-
-```xml
-<dependency>
-    <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-mapreduce-client-core</artifactId>
-    <version>3.3.6</version>
-    <exclusions>
-        <exclusion>
-            <groupId>*</groupId>
-            <artifactId>*</artifactId>
-        </exclusion>
-    </exclusions>
-</dependency>
-```
-
 ### SQL 限制
 
 HiveServer2 并不能保证每一条 `insert` 相关的 DML SQL 都能成功执行,尽管可能没有任何异常被抛出。
diff --git a/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.en.md b/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.en.md
index 606a96e29d0..a9a4c45f5bf 100644
--- a/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.en.md
+++ b/docs/document/content/user-manual/shardingsphere-jdbc/optional-plugins/hiveserver2/_index.en.md
@@ -36,17 +36,6 @@ The possible Maven dependencies are as follows.
         <artifactId>hive-service</artifactId>
         <version>4.0.1</version>
     </dependency>
-    <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-core</artifactId>
-        <version>3.3.6</version>
-        <exclusions>
-            <exclusion>
-                <groupId>*</groupId>
-                <artifactId>*</artifactId>
-            </exclusion>
-        </exclusions>
-    </dependency>
 </dependencies>
 ```
 
@@ -84,17 +73,6 @@ The following is an example of a possible configuration,
             </exclusion>
         </exclusions>
     </dependency>
-    <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-core</artifactId>
-        <version>3.3.6</version>
-        <exclusions>
-            <exclusion>
-                <groupId>*</groupId>
-                <artifactId>*</artifactId>
-            </exclusion>
-        </exclusions>
-    </dependency>
 </dependencies>
 ```
 
@@ -279,8 +257,6 @@ CREATE TABLE IF NOT EXISTS t_order
     status     string,
     PRIMARY KEY (order_id) disable novalidate
 ) STORED BY ICEBERG STORED AS ORC TBLPROPERTIES ('format-version' = '2');
-
-TRUNCATE TABLE t_order;
 ```
 
 After the business project introduces the dependencies involved in the `prerequisites`,
@@ -338,6 +314,7 @@ public class ExampleUtils {
         try (HikariDataSource dataSource = new HikariDataSource(config);
              Connection connection = dataSource.getConnection();
              Statement statement = connection.createStatement()) {
+            statement.execute("TRUNCATE TABLE t_order");
             statement.execute("INSERT INTO t_order (user_id, order_type, address_id, status) VALUES (1, 1, 1, 'INSERT_TEST')");
             statement.executeQuery("SELECT * FROM t_order");
             statement.execute("DELETE FROM t_order WHERE order_id=1");
@@ -398,8 +375,6 @@ CREATE TABLE IF NOT EXISTS t_order
     status     string,
     PRIMARY KEY (order_id) disable novalidate
 ) STORED BY ICEBERG STORED AS ORC TBLPROPERTIES ('format-version' = '2');
-
-TRUNCATE TABLE t_order;
 ```
 
 At this point,
@@ -414,9 +389,11 @@ public class ExampleUtils {
     void test(HikariDataSource dataSource) throws SQLException {
         try (Connection connection = dataSource.getConnection();
              Statement statement = connection.createStatement()) {
+            statement.execute("TRUNCATE TABLE t_order");
             statement.execute("INSERT INTO t_order (user_id, order_type, address_id, status) VALUES (1, 1, 1, 'INSERT_TEST')");
             statement.executeQuery("SELECT * FROM t_order");
             statement.execute("DELETE FROM t_order WHERE order_id=1");
+            statement.execute("DROP TABLE IF EXISTS t_order");
         }
     }
 }
@@ -442,35 +419,6 @@ and users should not try to start embedded HiveServer2 through ShardingSphere's
 Users should always start HiveServer2 through HiveServer2's Docker Image `apache/hive:4.0.1`.
 Reference https://issues.apache.org/jira/browse/HIVE-28418 .
 
-### Hadoop Limitations
-
-Users can only use Hadoop `3.3.6` as the underlying Hadoop dependency of HiveServer2 JDBC Driver `4.0.1`.
-HiveServer2 JDBC Driver `4.0.1` does not support Hadoop `3.4.1`. Reference https://github.com/apache/hive/pull/5500 .
-
-For HiveServer2 JDBC Driver `org.apache.hive:hive-jdbc:4.0.1` or `org.apache.hive:hive-jdbc:4.0.1` with `classifier` as `standalone`,
-there is actually no additional dependency on `org.apache.hadoop:hadoop-mapreduce-client-core:3.3.6`.
-
-But `org.apache.shardingsphere:shardingsphere-infra-database-hive`'s
-`org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader` uses `org.apache.hadoop.hive.conf.HiveConf`,
-which further uses `org.apache.hadoop:hadoop-mapreduce-client-core:3.3.6`'s `org.apache.hadoop.mapred.JobConf` class.
-
-ShardingSphere only needs to use the `org.apache.hadoop.mapred.JobConf` class,
-so it is reasonable to exclude all additional dependencies of `org.apache.hadoop:hadoop-mapreduce-client-core:3.3.6`.
-
-```xml
-<dependency>
-    <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-mapreduce-client-core</artifactId>
-    <version>3.3.6</version>
-    <exclusions>
-        <exclusion>
-            <groupId>*</groupId>
-            <artifactId>*</artifactId>
-        </exclusion>
-    </exclusions>
-</dependency>
-```
-
 ### SQL Limitations
 
 HiveServer2 does not guarantee that every `insert` related DML SQL can be executed successfully, although no exception may be thrown.
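
Given that caveat, a defensive pattern (not part of the documentation above, merely a sketch that assumes the `t_order` example table and an already open `java.sql.Connection`) is to read the row back after writing:

```java
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public final class InsertVerifier {
    
    /**
     * Insert a row and verify it is actually visible, because HiveServer2 may report success without persisting it.
     * The table name and columns follow the t_order example above; adjust them for real schemas.
     */
    public static void insertAndVerify(final Connection connection) throws SQLException {
        try (Statement statement = connection.createStatement()) {
            statement.execute("INSERT INTO t_order (user_id, order_type, address_id, status) VALUES (1, 1, 1, 'INSERT_TEST')");
            try (ResultSet resultSet = statement.executeQuery("SELECT COUNT(*) FROM t_order WHERE status = 'INSERT_TEST'")) {
                resultSet.next();
                if (0L == resultSet.getLong(1)) {
                    throw new SQLException("HiveServer2 accepted the INSERT but the row is not visible");
                }
            }
        }
    }
}
```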
diff --git a/infra/database/type/hive/pom.xml b/infra/database/type/hive/pom.xml
index d16914d4772..22eee12cb24 100644
--- a/infra/database/type/hive/pom.xml
+++ b/infra/database/type/hive/pom.xml
@@ -44,7 +44,7 @@
             <groupId>io.github.linghengqian</groupId>
             <artifactId>hive-server2-jdbc-driver-thin</artifactId>
             <version>${hive-server2-jdbc-driver-thin.version}</version>
-            <scope>provided</scope>
+            <scope>test</scope>
             <optional>true</optional>
         </dependency>
         
diff --git a/infra/database/type/hive/src/main/java/org/apache/shardingsphere/infra/database/hive/metadata/data/loader/HiveMetaDataLoader.java b/infra/database/type/hive/src/main/java/org/apache/shardingsphere/infra/database/hive/metadata/data/loader/HiveMetaDataLoader.java
index 4a19723d04d..3db57f97535 100644
--- a/infra/database/type/hive/src/main/java/org/apache/shardingsphere/infra/database/hive/metadata/data/loader/HiveMetaDataLoader.java
+++ b/infra/database/type/hive/src/main/java/org/apache/shardingsphere/infra/database/hive/metadata/data/loader/HiveMetaDataLoader.java
@@ -17,11 +17,6 @@
 
 package org.apache.shardingsphere.infra.database.hive.metadata.data.loader;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.GetTableRequest;
-import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.shardingsphere.infra.database.core.metadata.data.loader.DialectMetaDataLoader;
 import org.apache.shardingsphere.infra.database.core.metadata.data.loader.MetaDataLoaderMaterial;
 import org.apache.shardingsphere.infra.database.core.metadata.data.loader.type.TableMetaDataLoader;
@@ -29,70 +24,107 @@ import org.apache.shardingsphere.infra.database.core.metadata.data.model.ColumnM
 import org.apache.shardingsphere.infra.database.core.metadata.data.model.SchemaMetaData;
 import org.apache.shardingsphere.infra.database.core.metadata.data.model.TableMetaData;
 import org.apache.shardingsphere.infra.database.core.metadata.database.datatype.DataTypeRegistry;
-import org.apache.thrift.TException;
 
+import java.sql.Connection;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.sql.Statement;
 import java.sql.Types;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
 
 /**
  * Hive meta data loader.
+ * As of the HiveServer2 of apache/hive 4.0.1, the table `INFORMATION_SCHEMA.INDEXES` does not exist,
+ * and `INFORMATION_SCHEMA.COLUMNS` does not have a column `IS_VISIBLE`.
+ * The current implementation does not record the table's index, primary keys, generated info, or column visibility.
  */
 public final class HiveMetaDataLoader implements DialectMetaDataLoader {
     
-    private static final String HIVE_METASTORE_URIS = "hive.metastore.uris";
-    
     @SuppressWarnings("SqlNoDataSourceInspection")
     @Override
     public Collection<SchemaMetaData> load(final MetaDataLoaderMaterial material) throws SQLException {
-        String hiveMetastoreUris;
-        try (Statement statement = material.getDataSource().getConnection().createStatement()) {
-            ResultSet resultSet = statement.executeQuery("SET hive.metastore.uris");
-            resultSet.next();
-            hiveMetastoreUris = resultSet.getString("set");
+        boolean informationSchemaFlag;
+        try (Connection connection = material.getDataSource().getConnection()) {
+            informationSchemaFlag = connection.createStatement().executeQuery("SHOW DATABASES LIKE 'INFORMATION_SCHEMA'").next();
         }
-        if ("hive.metastore.uris is undefined".equals(hiveMetastoreUris)) {
-            Collection<TableMetaData> tableMetaData = new LinkedList<>();
-            for (String each : material.getActualTableNames()) {
-                TableMetaDataLoader.load(material.getDataSource(), each, material.getStorageType()).ifPresent(tableMetaData::add);
+        Collection<TableMetaData> tableMetaData = new LinkedList<>();
+        if (informationSchemaFlag) {
+            try (Connection connection = material.getDataSource().getConnection()) {
+                Map<String, Collection<ColumnMetaData>> columnMetaDataMap = loadColumnMetaDataMap(connection, material.getActualTableNames());
+                for (Map.Entry<String, Collection<ColumnMetaData>> entry : columnMetaDataMap.entrySet()) {
+                    tableMetaData.add(new TableMetaData(entry.getKey(), entry.getValue(), Collections.emptyList(), Collections.emptyList()));
+                }
             }
-            return Collections.singletonList(new SchemaMetaData(material.getDefaultSchemaName(), tableMetaData));
+            return Collections.singleton(new SchemaMetaData(material.getDefaultSchemaName(), tableMetaData));
         }
-        HiveMetaStoreClient storeClient = null;
-        try {
-            // TODO Support set hive.metastore uris when register storage unit.
-            HiveConf hiveConf = new HiveConf();
-            hiveConf.set(HIVE_METASTORE_URIS, hiveMetastoreUris);
-            storeClient = new HiveMetaStoreClient(hiveConf);
-            return Collections.singletonList(new SchemaMetaData(material.getDefaultSchemaName(), getTableMetaData(storeClient.getAllTables(material.getDefaultSchemaName()), storeClient, material)));
-        } catch (final TException ignored) {
-            throw new SQLException();
-        } finally {
-            if (null != storeClient) {
-                storeClient.close();
-            }
+        for (String each : material.getActualTableNames()) {
+            TableMetaDataLoader.load(material.getDataSource(), each, material.getStorageType()).ifPresent(tableMetaData::add);
         }
+        return Collections.singletonList(new SchemaMetaData(material.getDefaultSchemaName(), tableMetaData));
     }
     
-    private Collection<TableMetaData> getTableMetaData(final Collection<String> tables, final HiveMetaStoreClient storeClient, final MetaDataLoaderMaterial material) throws TException {
-        Collection<TableMetaData> result = new LinkedList<>();
-        for (String each : tables) {
-            GetTableRequest req = new GetTableRequest(material.getDefaultSchemaName(), each);
-            result.add(new TableMetaData(each, getColumnMetaData(storeClient.getTable(req)), Collections.emptyList(), Collections.emptyList()));
+    /**
+     * For apache/hive 4.0.1, `org.apache.hive.jdbc.HiveConnection` does not implement {@link java.sql.Connection#getCatalog}.
+     *
+     * @param connection connection
+     * @param tables     tables
+     * @return a map of table name to its column metadata
+     * @throws SQLException SQL exception
+     */
+    @SuppressWarnings("SqlSourceToSinkFlow")
+    private Map<String, Collection<ColumnMetaData>> loadColumnMetaDataMap(final Connection connection, final Collection<String> tables) throws SQLException {
+        Map<String, Collection<ColumnMetaData>> result = new HashMap<>();
+        try (PreparedStatement preparedStatement = connection.prepareStatement(getTableMetaDataSQL(tables))) {
+            preparedStatement.setString(1, "default");
+            try (ResultSet resultSet = preparedStatement.executeQuery()) {
+                while (resultSet.next()) {
+                    String tableName = resultSet.getString("TABLE_NAME");
+                    ColumnMetaData columnMetaData = loadColumnMetaData(resultSet);
+                    if (!result.containsKey(tableName)) {
+                        result.put(tableName, new LinkedList<>());
+                    }
+                    result.get(tableName).add(columnMetaData);
+                }
+            }
         }
         return result;
     }
     
-    private Collection<ColumnMetaData> getColumnMetaData(final Table table) {
-        Collection<ColumnMetaData> result = new LinkedList<>();
-        for (FieldSchema each : table.getSd().getCols()) {
-            result.add(new ColumnMetaData(each.getName(), DataTypeRegistry.getDataType(getDatabaseType(), each.getType()).orElse(Types.VARCHAR), false, false, false, false, false, false));
+    private String getTableMetaDataSQL(final Collection<String> tables) {
+        if (tables.isEmpty()) {
+            return "SELECT TABLE_CATALOG,\n"
+                    + "       TABLE_NAME,\n"
+                    + "       COLUMN_NAME,\n"
+                    + "       DATA_TYPE,\n"
+                    + "       ORDINAL_POSITION,\n"
+                    + "       IS_NULLABLE\n"
+                    + "FROM INFORMATION_SCHEMA.COLUMNS\n"
+                    + "WHERE TABLE_CATALOG = ?\n"
+                    + "ORDER BY ORDINAL_POSITION";
         }
-        return result;
+        String tableNames = tables.stream().map(each -> String.format("'%s'", each).toUpperCase()).collect(Collectors.joining(","));
+        return String.format("SELECT TABLE_CATALOG,\n"
+                + "       TABLE_NAME,\n"
+                + "       COLUMN_NAME,\n"
+                + "       DATA_TYPE,\n"
+                + "       ORDINAL_POSITION,\n"
+                + "       IS_NULLABLE\n"
+                + "FROM INFORMATION_SCHEMA.COLUMNS\n"
+                + "WHERE TABLE_CATALOG = ?\n"
+                + "  AND UPPER(TABLE_NAME) IN (%s)\n"
+                + "ORDER BY ORDINAL_POSITION", tableNames);
+    }
+    
+    private ColumnMetaData loadColumnMetaData(final ResultSet resultSet) throws SQLException {
+        String columnName = resultSet.getString("COLUMN_NAME");
+        String dataType = resultSet.getString("DATA_TYPE");
+        boolean isNullable = "YES".equals(resultSet.getString("IS_NULLABLE"));
+        return new ColumnMetaData(columnName, DataTypeRegistry.getDataType(getDatabaseType(), dataType).orElse(Types.OTHER), Boolean.FALSE, Boolean.FALSE, false, Boolean.TRUE, false, isNullable);
     }
     
     @Override
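
For context, the query path the rewritten loader relies on can be reproduced with plain hive-jdbc. A minimal sketch, assuming a local HiveServer2 at `jdbc:hive2://localhost:10000/default` and a hypothetical `t_order` table (both are illustrative assumptions, not part of this commit), with `org.apache.hive:hive-jdbc` or the thin driver on the classpath:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public final class InformationSchemaProbe {
    
    public static void main(final String[] args) throws SQLException {
        try (Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:10000/default")) {
            // The loader only takes the INFORMATION_SCHEMA path when that database exists on this HiveServer2.
            boolean informationSchemaExists;
            try (Statement statement = connection.createStatement();
                 ResultSet resultSet = statement.executeQuery("SHOW DATABASES LIKE 'INFORMATION_SCHEMA'")) {
                informationSchemaExists = resultSet.next();
            }
            if (!informationSchemaExists) {
                return;
            }
            // Same shape as getTableMetaDataSQL(...) above; TABLE_CATALOG is bound to `default`
            // because apache/hive 4.0.1's HiveConnection does not implement Connection#getCatalog.
            String sql = "SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE, ORDINAL_POSITION, IS_NULLABLE "
                    + "FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_CATALOG = ? AND UPPER(TABLE_NAME) IN ('T_ORDER') "
                    + "ORDER BY ORDINAL_POSITION";
            try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
                preparedStatement.setString(1, "default");
                try (ResultSet resultSet = preparedStatement.executeQuery()) {
                    while (resultSet.next()) {
                        System.out.printf("%s.%s %s nullable=%s%n", resultSet.getString("TABLE_NAME"),
                                resultSet.getString("COLUMN_NAME"), resultSet.getString("DATA_TYPE"), resultSet.getString("IS_NULLABLE"));
                    }
                }
            }
        }
    }
}
```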
diff --git a/infra/database/type/presto/src/main/java/org/apache/shardingsphere/infra/database/presto/metadata/data/loader/PrestoMetaDataLoader.java b/infra/database/type/presto/src/main/java/org/apache/shardingsphere/infra/database/presto/metadata/data/loader/PrestoMetaDataLoader.java
index aaf529dd22c..390a79aa9ec 100644
--- a/infra/database/type/presto/src/main/java/org/apache/shardingsphere/infra/database/presto/metadata/data/loader/PrestoMetaDataLoader.java
+++ b/infra/database/type/presto/src/main/java/org/apache/shardingsphere/infra/database/presto/metadata/data/loader/PrestoMetaDataLoader.java
@@ -88,7 +88,7 @@ public final class PrestoMetaDataLoader implements DialectMetaDataLoader {
                     + "WHERE TABLE_CATALOG = ?\n"
                     + "ORDER BY ORDINAL_POSITION";
         }
-        String collect = tables.stream().map(each -> String.format("'%s'", each).toUpperCase()).collect(Collectors.joining(","));
+        String tableNames = tables.stream().map(each -> String.format("'%s'", each).toUpperCase()).collect(Collectors.joining(","));
         return String.format("SELECT TABLE_CATALOG,\n"
                 + "       TABLE_NAME,\n"
                 + "       COLUMN_NAME,\n"
@@ -98,7 +98,7 @@ public final class PrestoMetaDataLoader implements DialectMetaDataLoader {
                 + "FROM INFORMATION_SCHEMA.COLUMNS\n"
                 + "WHERE TABLE_CATALOG = ?\n"
                 + "  AND UPPER(TABLE_NAME) IN (%s)\n"
-                + "ORDER BY ORDINAL_POSITION", collect);
+                + "ORDER BY ORDINAL_POSITION", tableNames);
     }
     
     private ColumnMetaData loadColumnMetaData(final ResultSet resultSet) throws SQLException {
diff --git a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hadoop/hadoop-common/3.3.6/reflect-config.json b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hadoop/hadoop-common/3.3.6/reflect-config.json
deleted file mode 100644
index 2920c4f5163..00000000000
--- a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hadoop/hadoop-common/3.3.6/reflect-config.json
+++ /dev/null
@@ -1,8 +0,0 @@
-[
-{
-  "condition":{"typeReachable":"org.apache.hadoop.security.UserGroupInformation"},
-  "name":"org.apache.hadoop.security.UserGroupInformation$UgiMetrics",
-  "allDeclaredFields": true,
-  "allDeclaredMethods": true
-}
-]
diff --git a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hadoop/hadoop-common/3.3.6/resource-config.json b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hadoop/hadoop-common/3.3.6/resource-config.json
index be64333cd64..a8e2a9038aa 100644
--- a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hadoop/hadoop-common/3.3.6/resource-config.json
+++ b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hadoop/hadoop-common/3.3.6/resource-config.json
@@ -6,6 +6,9 @@
   }, {
     "condition":{"typeReachable":"org.apache.hadoop.conf.Configuration"},
     "pattern":"\\Qcore-default.xml\\E"
+  }, {
+    "condition":{"typeReachable":"org.apache.hadoop.conf.Configuration"},
+    "pattern":"\\Qcore-site.xml\\E"
   }]},
   "bundles":[]
 }
diff --git a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hive/hive-jdbc/4.0.1/reflect-config.json b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hive/hive-jdbc/4.0.1/reflect-config.json
deleted file mode 100644
index 3dd25918d61..00000000000
--- a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.hive/hive-jdbc/4.0.1/reflect-config.json
+++ /dev/null
@@ -1,11 +0,0 @@
-[
-{
-  "condition":{"typeReachable":"org.apache.hadoop.hive.metastore.HiveMetaStoreClient"},
-  "name":"org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl",
-  "methods":[{"name":"<init>","parameterTypes":["org.apache.hadoop.conf.Configuration"] }]
-},
-{
-  "condition":{"typeReachable":"org.apache.hadoop.hive.metastore.conf.MetastoreConf"},
-  "name":"org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl"
-}
-]
diff --git a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/generated-reachability-metadata/reachability-metadata.json b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/generated-reachability-metadata/reachability-metadata.json
index 69d72e93711..471ad68bb9f 100644
--- a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/generated-reachability-metadata/reachability-metadata.json
+++ b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/generated-reachability-metadata/reachability-metadata.json
@@ -1,11 +1,5 @@
 {
   "reflection": [
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "type": "com.sun.security.auth.NTUserPrincipal"
-    },
     {
       "condition": {
         "typeReached": "org.apache.shardingsphere.infra.util.yaml.YamlEngine"
@@ -339,13 +333,6 @@
       },
       "type": "java.lang.Object"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "type": "java.lang.Object",
-      "allDeclaredFields": true
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.infra.expr.groovy.GroovyInlineExpressionParser"
@@ -1853,10 +1840,16 @@
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.cdc.update.DropStreamingExecutor"
+        "typeReached": 
"org.apache.shardingsphere.data.pipeline.cdc.distsql.handler.update.DropStreamingExecutor"
       },
       "type": "org.apache.shardingsphere.data.pipeline.cdc.api.CDCJobAPI"
     },
+    {
+      "condition": {
+        "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
+      },
+      "type": 
"org.apache.shardingsphere.data.pipeline.cdc.distsql.handler.update.DropStreamingExecutor"
+    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.distsql.parser.core.featured.DistSQLParserEngine"
@@ -1937,85 +1930,85 @@
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.cdc.update.DropStreamingExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.transmission.update.AlterTransmissionRuleExecutor"
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
+        "typeReached": 
"org.apache.shardingsphere.data.pipeline.cdc.distsql.handler.update.DropStreamingExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.CheckMigrationJobExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.api.MigrationJobAPI"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.CommitMigrationExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.CheckMigrationJobExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.DropMigrationCheckExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.CommitMigrationExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.MigrateTableExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.DropMigrationCheckExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.RegisterMigrationSourceStorageUnitExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.MigrateTableExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.RollbackMigrationExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.RegisterMigrationSourceStorageUnitExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.StartMigrationCheckExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.RollbackMigrationExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.StartMigrationExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.StartMigrationCheckExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.StopMigrationCheckExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.StartMigrationExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.StopMigrationExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.StopMigrationCheckExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.migration.update.UnregisterMigrationSourceStorageUnitExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.StopMigrationExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query.MySQLComQueryPacketExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.transmission.update.AlterTransmissionRuleExecutor"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.handler.update.UnregisterMigrationSourceStorageUnitExecutor"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.distsql.parser.core.featured.DistSQLParserEngine"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLLexer",
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLLexer",
       "methods": [
         {
           "name": "<init>",
@@ -2029,25 +2022,25 @@
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.distsql.parser.engine.api.DistSQLStatementParserEngine"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLLexer"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLLexer"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.backend.handler.ProxySQLComQueryParser"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLLexer"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLLexer"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.postgresql.command.query.extended.parse.PostgreSQLComParseExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLLexer"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLLexer"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.distsql.parser.core.featured.DistSQLParserEngine"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLParser",
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLParser",
       "methods": [
         {
           "name": "<init>",
@@ -2061,31 +2054,25 @@
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.distsql.parser.engine.api.DistSQLStatementParserEngine"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLParser"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLParser"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.backend.handler.ProxySQLComQueryParser"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLParser"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLParser"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.frontend.postgresql.command.query.extended.parse.PostgreSQLComParseExecutor"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.core.MigrationDistSQLParser"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.core.MigrationDistSQLParser"
     },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.backend.handler.ProxySQLComQueryParser"
       },
-      "type": 
"org.apache.shardingsphere.data.pipeline.migration.distsql.parser.facade.MigrationDistSQLParserFacade"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.cdc.update.DropStreamingExecutor"
-      },
-      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.api.MigrationJobAPI"
+      "type": 
"org.apache.shardingsphere.data.pipeline.scenario.migration.distsql.parser.facade.MigrationDistSQLParserFacade"
     },
     {
       "condition": {
@@ -12303,12 +12290,6 @@
       },
       "glob": "META-INF/services/java.nio.charset.spi.CharsetProvider"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "META-INF/services/java.nio.charset.spi.CharsetProvider"
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.proxy.backend.postgresql.handler.admin.executor.variable.charset.PostgreSQLCharacterSets"
@@ -12327,12 +12308,6 @@
       },
       "glob": "META-INF/services/javax.xml.datatype.DatatypeFactory"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "META-INF/services/javax.xml.parsers.DocumentBuilderFactory"
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.mode.repository.standalone.jdbc.sql.JDBCRepositorySQLLoader"
@@ -12345,12 +12320,6 @@
       },
       "glob": "META-INF/services/javax.xml.stream.XMLOutputFactory"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "META-INF/services/javax.xml.transform.TransformerFactory"
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.transaction.base.seata.at.SeataATShardingSphereTransactionManager"
@@ -12425,7 +12394,7 @@
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.data.pipeline.distsql.handler.cdc.update.DropStreamingExecutor"
+        "typeReached": 
"org.apache.shardingsphere.data.pipeline.cdc.distsql.handler.update.DropStreamingExecutor"
       },
       "glob": 
"META-INF/services/org.apache.shardingsphere.data.pipeline.core.job.api.TransmissionJobAPI"
     },
@@ -12849,18 +12818,6 @@
       },
       "glob": "container-license-acceptance.txt"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "core-default.xml"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "core-site.xml"
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.infra.autogen.version.ShardingSphereVersion"
@@ -12873,30 +12830,6 @@
       },
       "glob": "docker-java.properties"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "hive-default.xml"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "hive-site.xml"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "hivemetastore-site.xml"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "hiveserver2-site.xml"
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.mode.repository.cluster.etcd.EtcdRepository"
@@ -12915,30 +12848,12 @@
       },
       "glob": "lib/sqlparser/druid.jar"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "mapred-default.xml"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "mapred-site.xml"
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.driver.jdbc.core.datasource.ShardingSphereDataSource"
       },
       "glob": "mozilla/public-suffix-list.txt"
     },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "org/apache/hadoop/hive/conf/HiveConf.class"
-    },
     {
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.driver.jdbc.core.datasource.ShardingSphereDataSource"
@@ -16099,7 +16014,7 @@
       "condition": {
         "typeReached": 
"org.apache.shardingsphere.infra.url.classpath.ClassPathURLLoader"
       },
-      "glob": "test-native/yaml/jdbc/databases/hive/standalone-hms.yaml"
+      "glob": "test-native/yaml/jdbc/databases/hive/system-schemas.yaml"
     },
     {
       "condition": {
@@ -16220,68 +16135,7 @@
         "typeReached": 
"org.apache.shardingsphere.mode.repository.cluster.etcd.EtcdRepository"
       },
       "glob": "vertx-default-jul-logging.properties"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "yarn-default.xml"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "glob": "yarn-site.xml"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "module": "java.xml",
-      "glob": "com/sun/org/apache/xml/internal/serializer/Encodings.properties"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "module": "java.xml",
-      "glob": 
"com/sun/org/apache/xml/internal/serializer/XMLEntities.properties"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "module": "java.xml",
-      "glob": 
"com/sun/org/apache/xml/internal/serializer/XMLEntities_zh.properties"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "module": "java.xml",
-      "glob": 
"com/sun/org/apache/xml/internal/serializer/XMLEntities_zh_CN.properties"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "module": "java.xml",
-      "glob": 
"com/sun/org/apache/xml/internal/serializer/XMLEntities_zh_Hans.properties"
-    },
-    {
-      "condition": {
-        "typeReached": 
"org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader"
-      },
-      "module": "java.xml",
-      "glob": 
"com/sun/org/apache/xml/internal/serializer/XMLEntities_zh_Hans_CN.properties"
     }
   ],
-  "bundles": [
-    {
-      "name": "com.sun.org.apache.xml.internal.serializer.XMLEntities",
-      "locales": [
-        "zh-CN"
-      ]
-    }
-  ]
+  "bundles": []
 }
\ No newline at end of file
diff --git a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/shardingsphere-infra-reachability-metadata/reachability-metadata.json b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/shardingsphere-infra-reachability-metadata/reachability-metadata.json
index 91705942896..5cff32a7d00 100644
--- a/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/shardingsphere-infra-reachability-metadata/reachability-metadata.json
+++ b/infra/reachability-metadata/src/main/resources/META-INF/native-image/org.apache.shardingsphere/shardingsphere-infra-reachability-metadata/reachability-metadata.json
@@ -524,18 +524,6 @@
         }
       ]
     },
-    {
-      "condition": {
-        "typeReached": "javax.security.auth.login.Configuration"
-      },
-      "type": "sun.security.provider.ConfigFile",
-      "methods": [
-        {
-          "name": "<init>",
-          "parameterTypes": []
-        }
-      ]
-    },
     {
       "condition": {
         "typeReached": "sun.security.provider.SecureRandom"
@@ -552,15 +540,15 @@
     },
     {
       "condition": {
-        "typeReached": "org.apache.hadoop.security.UserGroupInformation"
-      },
-      "type": "com.sun.security.auth.NTUserPrincipal"
-    },
-    {
-      "condition": {
-        "typeReached": "org.apache.hadoop.security.UserGroupInformation"
+        "typeReached": "javax.security.auth.login.Configuration"
       },
-      "type": "com.sun.security.auth.UnixPrincipal"
+      "type": "sun.security.provider.ConfigFile",
+      "methods": [
+        {
+          "name": "<init>",
+          "parameterTypes": []
+        }
+      ]
     }
   ],
   "resources": [
diff --git a/pom.xml b/pom.xml
index abb63dd99bd..0be7ac68252 100644
--- a/pom.xml
+++ b/pom.xml
@@ -134,7 +134,6 @@
         <clickhouse-jdbc.version>0.6.3</clickhouse-jdbc.version>
         <hive-jdbc.version>4.0.1</hive-jdbc.version>
         <hive-server2-jdbc-driver-thin.version>1.7.0</hive-server2-jdbc-driver-thin.version>
-        <hadoop.version>3.3.6</hadoop.version>
         <presto.version>0.292</presto.version>
         <jaybird.version>5.0.6.java8</jaybird.version>
         
diff --git a/test/native/native-image-filter/extra-filter.json b/test/native/native-image-filter/extra-filter.json
index d8356416a2d..957a86eab34 100644
--- a/test/native/native-image-filter/extra-filter.json
+++ b/test/native/native-image-filter/extra-filter.json
@@ -6,7 +6,6 @@
     {"excludeClasses": "com.**"},
     {"includeClasses": "com.oracle.svm.core.**"},
     {"includeClasses": "com.sun.management.**"},
-    {"includeClasses": "com.sun.security.auth.**"},
     {"excludeClasses": "ch.qos.logback.**"},
     {"excludeClasses": "groovy.**"},
     {"excludeClasses": "io.**"},
diff --git a/test/native/pom.xml b/test/native/pom.xml
index 4a5cf9c58e8..53d0fb29e64 100644
--- a/test/native/pom.xml
+++ b/test/native/pom.xml
@@ -189,18 +189,6 @@
                 </exclusion>
             </exclusions>
         </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop.version}</version>
-            <scope>test</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>*</groupId>
-                    <artifactId>*</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
         <dependency>
             <groupId>org.firebirdsql.jdbc</groupId>
             <artifactId>jaybird</artifactId>
diff --git a/test/native/src/test/java/org/apache/shardingsphere/test/natived/jdbc/databases/hive/StandaloneMetastoreTest.java b/test/native/src/test/java/org/apache/shardingsphere/test/natived/jdbc/databases/hive/SystemSchemasTest.java
similarity index 79%
rename from test/native/src/test/java/org/apache/shardingsphere/test/natived/jdbc/databases/hive/StandaloneMetastoreTest.java
rename to test/native/src/test/java/org/apache/shardingsphere/test/natived/jdbc/databases/hive/SystemSchemasTest.java
index c2ee3c1add9..53a10b6cf37 100644
--- a/test/native/src/test/java/org/apache/shardingsphere/test/natived/jdbc/databases/hive/StandaloneMetastoreTest.java
+++ b/test/native/src/test/java/org/apache/shardingsphere/test/natived/jdbc/databases/hive/SystemSchemasTest.java
@@ -19,7 +19,6 @@ package org.apache.shardingsphere.test.natived.jdbc.databases.hive;
 
 import com.zaxxer.hikari.HikariConfig;
 import com.zaxxer.hikari.HikariDataSource;
-import org.apache.curator.test.InstanceSpec;
 import org.apache.shardingsphere.test.natived.commons.TestShardingService;
 import org.apache.shardingsphere.test.natived.commons.util.ResourceUtil;
 import org.awaitility.Awaitility;
@@ -28,13 +27,14 @@ import org.junit.jupiter.api.AutoClose;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.condition.EnabledInNativeImage;
-import org.testcontainers.containers.FixedHostPortGenericContainer;
+import org.testcontainers.containers.Container.ExecResult;
 import org.testcontainers.containers.GenericContainer;
 import org.testcontainers.containers.Network;
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;
 
 import javax.sql.DataSource;
+import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
@@ -46,40 +46,33 @@ import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
 
-@SuppressWarnings({"resource", "deprecation", "SqlNoDataSourceInspection"})
+@SuppressWarnings({"SqlDialectInspection", "SqlNoDataSourceInspection", "resource"})
 @EnabledInNativeImage
 @Testcontainers
-class StandaloneMetastoreTest {
-    
-    private final int randomPort = InstanceSpec.getRandomPort();
+class SystemSchemasTest {
     
     @AutoClose
     private final Network network = Network.newNetwork();
     
-    /**
-     * TODO {@link org.apache.shardingsphere.infra.database.hive.metadata.data.loader.HiveMetaDataLoader} needs to
-     *  actively connect to HMS, which leads to the use of {@link FixedHostPortGenericContainer} .
-     *  This is not a reasonable behavior and requires further changes.
-     */
     @Container
     @AutoClose
-    private final GenericContainer<?> hmsContainer = new FixedHostPortGenericContainer<>("apache/hive:4.0.1")
-            .withEnv("SERVICE_NAME", "metastore")
-            .withEnv("METASTORE_PORT", String.valueOf(randomPort))
+    private final GenericContainer<?> postgres = new GenericContainer<>("postgres:17.5-bookworm")
+            .withEnv("POSTGRES_PASSWORD", "example")
             .withNetwork(network)
-            .withNetworkAliases("metastore")
-            .withFixedExposedPort(randomPort, randomPort);
+            .withNetworkAliases("some-postgres");
     
     @Container
     @AutoClose
-    private final GenericContainer<?> hs2Container = new GenericContainer<>("apache/hive:4.0.1")
+    private final GenericContainer<?> hs2 = new GenericContainer<>("ghcr.io/linghengqian/hive:4.0.1-all-in-one")
             .withEnv("SERVICE_NAME", "hiveserver2")
-            .withEnv("IS_RESUME", "true")
-            .withEnv("SERVICE_OPTS", "-Dhive.metastore.uris=thrift://metastore:" + randomPort)
+            .withEnv("DB_DRIVER", "postgres")
+            .withEnv("SERVICE_OPTS", "-Djavax.jdo.option.ConnectionDriverName=org.postgresql.Driver" + " "
+                    + "-Djavax.jdo.option.ConnectionURL=jdbc:postgresql://some-postgres:5432/postgres" + " "
+                    + "-Djavax.jdo.option.ConnectionUserName=postgres" + " "
+                    + "-Djavax.jdo.option.ConnectionPassword=example")
             .withNetwork(network)
             .withExposedPorts(10000)
-            .dependsOn(hmsContainer)
-            .withStartupTimeout(Duration.ofMinutes(2L));
+            .dependsOn(postgres);
     
     private final String systemPropKeyPrefix = "fixture.test-native.yaml.database.hive.hms.";
     
@@ -105,8 +98,20 @@ class StandaloneMetastoreTest {
     }
     
     @Test
-    void assertShardingInLocalTransactions() throws SQLException {
-        jdbcUrlPrefix = "jdbc:hive2://localhost:" + hs2Container.getMappedPort(10000) + "/";
+    void assertShardingInLocalTransactions() throws SQLException, IOException, InterruptedException {
+        ExecResult initResult = hs2.execInContainer(
+                "/opt/hive/bin/schematool", "-initSchema",
+                "-dbType", "hive",
+                "-metaDbType", "postgres",
+                "-url", "jdbc:hive2://localhost:10000/default");
+        assertThat(initResult.getStdout(), is("Initializing the schema to: 4.0.0\n"
+                + "Metastore connection URL:\t jdbc:hive2://localhost:10000/default\n"
+                + "Metastore connection Driver :\t org.apache.hive.jdbc.HiveDriver\n"
+                + "Metastore connection User:\t APP\n"
+                + "Starting metastore schema initialization to 4.0.0\n"
+                + "Initialization script hive-schema-4.0.0.hive.sql\n"
+                + "Initialization script completed\n"));
+        jdbcUrlPrefix = "jdbc:hive2://localhost:" + hs2.getMappedPort(10000) + "/";
         logicDataSource = createDataSource();
         testShardingService = new TestShardingService(logicDataSource);
         initEnvironment();
@@ -135,7 +140,7 @@ class StandaloneMetastoreTest {
         Stream.of("demo_ds_0", "demo_ds_1", "demo_ds_2").parallel().forEach(this::initTable);
         HikariConfig config = new HikariConfig();
         config.setDriverClassName("org.apache.shardingsphere.driver.ShardingSphereDriver");
-        config.setJdbcUrl("jdbc:shardingsphere:classpath:test-native/yaml/jdbc/databases/hive/standalone-hms.yaml?placeholder-type=system_props");
+        config.setJdbcUrl("jdbc:shardingsphere:classpath:test-native/yaml/jdbc/databases/hive/system-schemas.yaml?placeholder-type=system_props");
         System.setProperty(systemPropKeyPrefix + "ds0.jdbc-url", jdbcUrlPrefix + "demo_ds_0");
         System.setProperty(systemPropKeyPrefix + "ds1.jdbc-url", jdbcUrlPrefix + "demo_ds_1");
         System.setProperty(systemPropKeyPrefix + "ds2.jdbc-url", jdbcUrlPrefix + "demo_ds_2");
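
Condensed from the test above, the datasource wiring resolves the per-datasource JDBC URLs from system properties before the ShardingSphere driver reads the classpath YAML. A trimmed sketch follows; the property prefix, YAML path, and driver class name are taken verbatim from the test, while the helper class itself is only illustrative:

```java
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public final class SystemSchemasDataSourceSketch {
    
    /**
     * Build a ShardingSphere datasource whose storage units point at three HiveServer2 databases.
     * The placeholder-type=system_props suffix makes the YAML resolve the URLs set below.
     */
    public static HikariDataSource createDataSource(final String jdbcUrlPrefix) {
        String prefix = "fixture.test-native.yaml.database.hive.hms.";
        System.setProperty(prefix + "ds0.jdbc-url", jdbcUrlPrefix + "demo_ds_0");
        System.setProperty(prefix + "ds1.jdbc-url", jdbcUrlPrefix + "demo_ds_1");
        System.setProperty(prefix + "ds2.jdbc-url", jdbcUrlPrefix + "demo_ds_2");
        HikariConfig config = new HikariConfig();
        config.setDriverClassName("org.apache.shardingsphere.driver.ShardingSphereDriver");
        config.setJdbcUrl("jdbc:shardingsphere:classpath:test-native/yaml/jdbc/databases/hive/system-schemas.yaml?placeholder-type=system_props");
        return new HikariDataSource(config);
    }
}
```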
diff --git a/test/native/src/test/resources/META-INF/native-image/shardingsphere-test-native-test-metadata/reachability-metadata.json b/test/native/src/test/resources/META-INF/native-image/shardingsphere-test-native-test-metadata/reachability-metadata.json
index 23a01cf29f4..81688a2ba78 100644
--- a/test/native/src/test/resources/META-INF/native-image/shardingsphere-test-native-test-metadata/reachability-metadata.json
+++ b/test/native/src/test/resources/META-INF/native-image/shardingsphere-test-native-test-metadata/reachability-metadata.json
@@ -206,9 +206,9 @@
     },
     {
       "condition": {
-        "typeReached": "org.apache.shardingsphere.test.natived.jdbc.databases.hive.StandaloneMetastoreTest"
+        "typeReached": "org.apache.shardingsphere.test.natived.jdbc.databases.hive.SystemSchemasTest"
       },
-      "type": "org.apache.shardingsphere.test.natived.jdbc.databases.hive.StandaloneMetastoreTest",
+      "type": "org.apache.shardingsphere.test.natived.jdbc.databases.hive.SystemSchemasTest",
       "allDeclaredFields": true,
       "allDeclaredConstructors": true,
       "allDeclaredMethods": true,
@@ -278,37 +278,37 @@
   "resources": [
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.test.natived.jdbc.databases.PrestoTest"
+        "typeReached": 
"org.apache.shardingsphere.infra.url.classpath.ClassPathURLLoader"
       },
       "glob": "test-native/properties/**/*.properties"
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.test.natived.jdbc.transactions.base.SeataTest"
+        "typeReached": 
"org.apache.shardingsphere.infra.url.classpath.ClassPathURLLoader"
       },
       "glob": "test-native/sh/**/*.sh"
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.test.natived.jdbc.databases.SQLServerTest"
+        "typeReached": 
"org.apache.shardingsphere.infra.url.classpath.ClassPathURLLoader"
       },
       "glob": "test-native/yaml/**/*.yaml"
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.test.natived.jdbc.transactions.base.SeataTest"
+        "typeReached": 
"org.apache.shardingsphere.infra.url.classpath.ClassPathURLLoader"
       },
       "glob": "test-native/sql/**/*.sql"
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.test.natived.jdbc.transactions.base.SeataTest"
+        "typeReached": 
"org.apache.shardingsphere.infra.url.classpath.ClassPathURLLoader"
       },
       "glob": "*.conf"
     },
     {
       "condition": {
-        "typeReached": 
"org.apache.shardingsphere.test.natived.jdbc.databases.SQLServerTest"
+        "typeReached": 
"org.apache.shardingsphere.infra.url.classpath.ClassPathURLLoader"
       },
       "glob": "container-license-acceptance.txt"
     },
diff --git a/test/native/src/test/resources/test-native/yaml/jdbc/databases/hive/standalone-hms.yaml b/test/native/src/test/resources/test-native/yaml/jdbc/databases/hive/system-schemas.yaml
similarity index 100%
rename from test/native/src/test/resources/test-native/yaml/jdbc/databases/hive/standalone-hms.yaml
rename to test/native/src/test/resources/test-native/yaml/jdbc/databases/hive/system-schemas.yaml
