This is an automated email from the ASF dual-hosted git repository.
zhangbutao pushed a commit to branch branch-4.1
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/branch-4.1 by this push:
new 5ec47759d5d HIVE-29083: Iceberg: Unable to create table using HadoopCatalog (#5959) (#5970)
5ec47759d5d is described below
commit 5ec47759d5db7b3a0570d4697225d8ca8054bdca
Author: Butao Zhang <[email protected]>
AuthorDate: Sun Jul 13 22:22:23 2025 +0800
HIVE-29083: Iceberg: Unable to create table using HadoopCatalog (#5959) (#5970)
Co-authored-by: Denys Kuzmenko <[email protected]>
---
iceberg/checkstyle/checkstyle.xml | 1 +
.../main/java/org/apache/iceberg/mr/Catalogs.java | 14 +
.../iceberg/mr/hive/HiveIcebergMetaHook.java | 18 +-
.../apache/iceberg/mr/hive/IcebergTableUtil.java | 18 +
.../hive/TestHiveIcebergStorageHandlerNoScan.java | 17 +-
.../queries/positive/hadoop_catalog_create_table.q | 83 ++++
.../llap/hadoop_catalog_create_table.q.out | 450 +++++++++++++++++++++
.../test/resources/testconfiguration.properties | 2 +
8 files changed, 593 insertions(+), 10 deletions(-)
diff --git a/iceberg/checkstyle/checkstyle.xml b/iceberg/checkstyle/checkstyle.xml
index 84d205d44bf..89106a54a9d 100644
--- a/iceberg/checkstyle/checkstyle.xml
+++ b/iceberg/checkstyle/checkstyle.xml
@@ -135,6 +135,7 @@
org.apache.iceberg.SortDirection.*,
org.apache.iceberg.TableProperties.*,
org.apache.iceberg.SnapshotSummary.*,
+ org.apache.iceberg.mr.InputFormatConfig.*,
org.apache.iceberg.types.Type.*,
org.apache.iceberg.types.Types.NestedField.*,
org.apache.parquet.schema.OriginalType.*,
diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java
index 5a219b58a08..f65ed35af5c 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java
@@ -206,6 +206,20 @@ public static boolean hiveCatalog(Configuration conf, Properties props) {
     return getCatalogProperties(conf, catalogName).get(CatalogProperties.CATALOG_IMPL) == null;
   }
+  public static boolean hadoopCatalog(Configuration conf, Properties props) {
+    String catalogName = props.getProperty(InputFormatConfig.CATALOG_NAME);
+    String catalogType = getCatalogType(conf, catalogName);
+    if (catalogType != null) {
+      return CatalogUtil.ICEBERG_CATALOG_TYPE_HADOOP.equalsIgnoreCase(catalogType);
+    }
+    catalogType = getCatalogType(conf, ICEBERG_DEFAULT_CATALOG_NAME);
+    if (catalogType != null) {
+      return CatalogUtil.ICEBERG_CATALOG_TYPE_HADOOP.equalsIgnoreCase(catalogType);
+    }
+    return CatalogUtil.ICEBERG_CATALOG_HADOOP.equals(
+        getCatalogProperties(conf, catalogName).get(CatalogProperties.CATALOG_IMPL));
+  }
+
/**
* Register a table with the configured catalog if it does not exist.
* @param conf a Hadoop conf
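For context, a minimal sketch (not part of this commit) of how the new Catalogs.hadoopCatalog(...) check can be exercised with the same iceberg.catalog.<name>.type / .warehouse properties that the new q-test sets; the class name HadoopCatalogCheck and the printed expectations are illustrative assumptions only:

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.iceberg.mr.Catalogs;
    import org.apache.iceberg.mr.InputFormatConfig;

    public class HadoopCatalogCheck {
      public static void main(String[] args) {
        // Same catalog definition as hadoop_catalog_create_table.q:
        // iceberg.catalog.<name>.type / iceberg.catalog.<name>.warehouse
        Configuration conf = new Configuration();
        conf.set("iceberg.catalog.ice01.type", "hadoop");
        conf.set("iceberg.catalog.ice01.warehouse", "/tmp/iceberg");

        Properties props = new Properties();
        props.setProperty(InputFormatConfig.CATALOG_NAME, "ice01");

        // The type-based branch of hadoopCatalog() should match here,
        // while hiveCatalog() should not.
        System.out.println(Catalogs.hadoopCatalog(conf, props)); // expected: true
        System.out.println(Catalogs.hiveCatalog(conf, props));   // expected: false
      }
    }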
diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
index a06c294ddbf..842ab3a2ef9 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
@@ -242,16 +242,18 @@ public void preCreateTable(CreateTableRequest request) {
// If not using HiveCatalog check for existing table
try {
-
       this.icebergTable = IcebergTableUtil.getTable(conf, catalogProperties, true);
+      if (Catalogs.hadoopCatalog(conf, catalogProperties) && hmsTable.getSd() != null &&
+          hmsTable.getSd().getLocation() == null) {
+        hmsTable.getSd().setLocation(icebergTable.location());
+      }
       Preconditions.checkArgument(catalogProperties.getProperty(InputFormatConfig.TABLE_SCHEMA) == null,
           "Iceberg table already created - can not use provided schema");
       Preconditions.checkArgument(catalogProperties.getProperty(InputFormatConfig.PARTITION_SPEC) == null,
           "Iceberg table already created - can not use provided partition specification");
LOG.info("Iceberg table already exists {}", icebergTable);
-
return;
} catch (NoSuchTableException nte) {
// If the table does not exist we will create it below
@@ -815,10 +817,16 @@ private void assertFileFormat(String format) {
}
   private void setCommonHmsTablePropertiesForIceberg(org.apache.hadoop.hive.metastore.api.Table hmsTable) {
-    // If the table is not managed by Hive catalog then the location should be set
+    // If the table is not managed by Hive or Hadoop catalog, then the location should be set
     if (!Catalogs.hiveCatalog(conf, catalogProperties)) {
-      Preconditions.checkArgument(hmsTable.getSd() != null && hmsTable.getSd().getLocation() != null,
-          "Table location not set");
+      String location = (hmsTable.getSd() != null) ? hmsTable.getSd().getLocation() : null;
+      if (location == null && Catalogs.hadoopCatalog(conf, catalogProperties)) {
+        location = IcebergTableUtil.defaultWarehouseLocation(
+            TableIdentifier.of(hmsTable.getDbName(), hmsTable.getTableName()),
+            conf, catalogProperties);
+        hmsTable.getSd().setLocation(location);
+      }
+      Preconditions.checkArgument(location != null, "Table location not set");
}
Map<String, String> hmsParams = hmsTable.getParameters();
diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java
index 54b283527a1..b0a1cc919db 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java
@@ -95,6 +95,9 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.iceberg.mr.InputFormatConfig.CATALOG_NAME;
+import static org.apache.iceberg.mr.InputFormatConfig.CATALOG_WAREHOUSE_TEMPLATE;
+
public class IcebergTableUtil {
public static final int SPEC_IDX = 1;
@@ -562,4 +565,19 @@ public static ExecutorService newDeleteThreadPool(String completeName, int numTh
return thread;
});
}
+
+  public static String defaultWarehouseLocation(TableIdentifier tableIdentifier,
+      Configuration conf, Properties catalogProperties) {
+    StringBuilder sb = new StringBuilder();
+    String warehouseLocation = conf.get(String.format(
+        CATALOG_WAREHOUSE_TEMPLATE, catalogProperties.getProperty(CATALOG_NAME))
+    );
+    sb.append(warehouseLocation).append('/');
+    for (String level : tableIdentifier.namespace().levels()) {
+      sb.append(level).append('/');
+    }
+    sb.append(tableIdentifier.name());
+    return sb.toString();
+  }
+
}
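As a sanity check (again not part of the commit), a sketch of what the new IcebergTableUtil.defaultWarehouseLocation(...) is expected to return for the catalog and table used in the q-test below; the class name DefaultLocationSketch and the expected path are assumptions derived from the method body above:

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.iceberg.catalog.TableIdentifier;
    import org.apache.iceberg.mr.InputFormatConfig;
    import org.apache.iceberg.mr.hive.IcebergTableUtil;

    public class DefaultLocationSketch {
      public static void main(String[] args) {
        // CATALOG_WAREHOUSE_TEMPLATE resolves to iceberg.catalog.<name>.warehouse,
        // which the q-test sets to /tmp/iceberg for catalog "ice01".
        Configuration conf = new Configuration();
        conf.set("iceberg.catalog.ice01.warehouse", "/tmp/iceberg");

        Properties props = new Properties();
        props.setProperty(InputFormatConfig.CATALOG_NAME, "ice01");

        String location = IcebergTableUtil.defaultWarehouseLocation(
            TableIdentifier.of("default", "orders"), conf, props);
        System.out.println(location); // expected: /tmp/iceberg/default/orders
      }
    }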
diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
index 818ee1e6014..c3282f9bddd 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
@@ -81,6 +81,7 @@
import org.apache.iceberg.types.Types;
import org.apache.parquet.hadoop.ParquetOutputFormat;
import org.apache.thrift.TException;
+import org.assertj.core.api.AbstractThrowableAssert;
import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.AfterClass;
@@ -684,7 +685,7 @@ public void testCreateTableError() {
if (!testTables.locationForCreateTableSQL(identifier).isEmpty()) {
// Only test this if the location is required
- Assertions.assertThatThrownBy(
+      AbstractThrowableAssert<?, ? extends Throwable> assertThatThrownBy = Assertions.assertThatCode(
() ->
shell.executeStatement(
"CREATE EXTERNAL TABLE withShell2 " +
@@ -697,10 +698,16 @@ public void testCreateTableError() {
InputFormatConfig.CATALOG_NAME +
"'='" +
testTables.catalogName() +
- "')"))
- .isInstanceOf(IllegalArgumentException.class)
- .hasMessageStartingWith("Failed to execute Hive query")
- .hasMessageContaining("Table location not set");
+ "')"));
+ if (testTableType != TestTables.TestTableType.HADOOP_CATALOG) {
+ assertThatThrownBy
+ .isInstanceOf(IllegalArgumentException.class)
+ .hasMessageStartingWith("Failed to execute Hive query")
+ .hasMessageContaining("Table location not set");
+ } else {
+ assertThatThrownBy
+ .doesNotThrowAnyException();
+ }
}
}
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/hadoop_catalog_create_table.q b/iceberg/iceberg-handler/src/test/queries/positive/hadoop_catalog_create_table.q
new file mode 100644
index 00000000000..f5bf1c08566
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/queries/positive/hadoop_catalog_create_table.q
@@ -0,0 +1,83 @@
+drop table if exists orders;
+
+set iceberg.catalog.ice01.type=hadoop;
+set iceberg.catalog.ice01.warehouse=/tmp/iceberg;
+
+dfs -mkdir -p /tmp/iceberg/.tmp;
+dfs -rmr /tmp/iceberg/*;
+
+-- use HadoopCatalog
+
+CREATE EXTERNAL TABLE orders (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='ice01');
+
+INSERT INTO orders VALUES
+(0, 48, 5, timestamp('2000-06-04 19:55:46.129'), 'EU', 'DE'),
+(1, 12, 6, timestamp('2007-06-24 19:23:22.829'), 'US', 'TX'),
+(2, 76, 4, timestamp('2018-02-19 23:43:51.995'), 'EU', 'DE'),
+(3, 91, 5, timestamp('2000-07-15 09:09:11.587'), 'US', 'NJ'),
+(4, 18, 6, timestamp('2007-12-02 22:30:39.302'), 'EU', 'ES'),
+(5, 71, 5, timestamp('2010-02-08 20:31:23.430'), 'EU', 'DE'),
+(6, 78, 3, timestamp('2016-02-22 20:37:37.025'), 'EU', 'FR'),
+(7, 88, 0, timestamp('2020-03-26 18:47:40.611'), 'EU', 'FR'),
+(8, 87, 4, timestamp('2003-02-20 00:48:09.139'), 'EU', 'ES'),
+(9, 60, 6, timestamp('2012-08-28 01:35:54.283'), 'EU', 'IT'),
+(10, 24, 5, timestamp('2015-03-28 18:57:50.069'), 'US', 'NY'),
+(11, 42, 2, timestamp('2012-06-27 01:13:32.350'), 'EU', 'UK'),
+(12, 37, 4, timestamp('2020-08-09 01:18:50.153'), 'US', 'NY'),
+(13, 52, 1, timestamp('2019-09-04 01:46:19.558'), 'EU', 'UK'),
+(14, 96, 3, timestamp('2019-03-05 22:00:03.020'), 'US', 'NJ'),
+(15, 18, 3, timestamp('2001-09-11 00:14:12.687'), 'EU', 'FR'),
+(16, 46, 0, timestamp('2013-08-31 02:16:17.878'), 'EU', 'UK'),
+(17, 26, 5, timestamp('2001-02-01 20:05:32.317'), 'EU', 'FR'),
+(18, 68, 5, timestamp('2009-12-29 08:44:08.048'), 'EU', 'ES'),
+(19, 54, 6, timestamp('2015-08-15 01:59:22.177'), 'EU', 'HU'),
+(20, 10, 0, timestamp('2018-05-06 12:56:12.789'), 'US', 'CA');
+
+--check row count
+select count(*) from orders;
+describe formatted orders;
+
+--delete rows
+delete from orders where itemid = 6;
+delete from orders where itemid = 5;
+
+--check for updated row count
+select count(*) from orders;
+describe formatted orders;
+
+-- drop table metadata
+
+DROP TABLE orders;
+
+CREATE EXTERNAL TABLE orders (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='ice01');
+
+--check row count
+select count(*) from orders;
+describe formatted orders;
+
+-- use location-based HadoopTable
+
+CREATE EXTERNAL TABLE orders2 (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+LOCATION '/tmp/iceberg/orders2'
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='location_based_table');
+
+INSERT INTO orders2 SELECT * FROM orders;
+
+--check row count
+select count(*) from orders2;
+describe formatted orders2;
+
+--delete rows
+delete from orders2 where itemid = 3;
+
+--check for updated row count
+select count(*) from orders2;
+describe formatted orders2;
diff --git a/iceberg/iceberg-handler/src/test/results/positive/llap/hadoop_catalog_create_table.q.out b/iceberg/iceberg-handler/src/test/results/positive/llap/hadoop_catalog_create_table.q.out
new file mode 100644
index 00000000000..c6a7243ec96
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/results/positive/llap/hadoop_catalog_create_table.q.out
@@ -0,0 +1,450 @@
+PREHOOK: query: drop table if exists orders
+PREHOOK: type: DROPTABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: drop table if exists orders
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: database:default
+#### A masked pattern was here ####
+PREHOOK: query: CREATE EXTERNAL TABLE orders (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='ice01')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orders
+POSTHOOK: query: CREATE EXTERNAL TABLE orders (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='ice01')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orders
+PREHOOK: query: INSERT INTO orders VALUES
+(0, 48, 5, timestamp('2000-06-04 19:55:46.129'), 'EU', 'DE'),
+(1, 12, 6, timestamp('2007-06-24 19:23:22.829'), 'US', 'TX'),
+(2, 76, 4, timestamp('2018-02-19 23:43:51.995'), 'EU', 'DE'),
+(3, 91, 5, timestamp('2000-07-15 09:09:11.587'), 'US', 'NJ'),
+(4, 18, 6, timestamp('2007-12-02 22:30:39.302'), 'EU', 'ES'),
+(5, 71, 5, timestamp('2010-02-08 20:31:23.430'), 'EU', 'DE'),
+(6, 78, 3, timestamp('2016-02-22 20:37:37.025'), 'EU', 'FR'),
+(7, 88, 0, timestamp('2020-03-26 18:47:40.611'), 'EU', 'FR'),
+(8, 87, 4, timestamp('2003-02-20 00:48:09.139'), 'EU', 'ES'),
+(9, 60, 6, timestamp('2012-08-28 01:35:54.283'), 'EU', 'IT'),
+(10, 24, 5, timestamp('2015-03-28 18:57:50.069'), 'US', 'NY'),
+(11, 42, 2, timestamp('2012-06-27 01:13:32.350'), 'EU', 'UK'),
+(12, 37, 4, timestamp('2020-08-09 01:18:50.153'), 'US', 'NY'),
+(13, 52, 1, timestamp('2019-09-04 01:46:19.558'), 'EU', 'UK'),
+(14, 96, 3, timestamp('2019-03-05 22:00:03.020'), 'US', 'NJ'),
+(15, 18, 3, timestamp('2001-09-11 00:14:12.687'), 'EU', 'FR'),
+(16, 46, 0, timestamp('2013-08-31 02:16:17.878'), 'EU', 'UK'),
+(17, 26, 5, timestamp('2001-02-01 20:05:32.317'), 'EU', 'FR'),
+(18, 68, 5, timestamp('2009-12-29 08:44:08.048'), 'EU', 'ES'),
+(19, 54, 6, timestamp('2015-08-15 01:59:22.177'), 'EU', 'HU'),
+(20, 10, 0, timestamp('2018-05-06 12:56:12.789'), 'US', 'CA')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orders
+POSTHOOK: query: INSERT INTO orders VALUES
+(0, 48, 5, timestamp('2000-06-04 19:55:46.129'), 'EU', 'DE'),
+(1, 12, 6, timestamp('2007-06-24 19:23:22.829'), 'US', 'TX'),
+(2, 76, 4, timestamp('2018-02-19 23:43:51.995'), 'EU', 'DE'),
+(3, 91, 5, timestamp('2000-07-15 09:09:11.587'), 'US', 'NJ'),
+(4, 18, 6, timestamp('2007-12-02 22:30:39.302'), 'EU', 'ES'),
+(5, 71, 5, timestamp('2010-02-08 20:31:23.430'), 'EU', 'DE'),
+(6, 78, 3, timestamp('2016-02-22 20:37:37.025'), 'EU', 'FR'),
+(7, 88, 0, timestamp('2020-03-26 18:47:40.611'), 'EU', 'FR'),
+(8, 87, 4, timestamp('2003-02-20 00:48:09.139'), 'EU', 'ES'),
+(9, 60, 6, timestamp('2012-08-28 01:35:54.283'), 'EU', 'IT'),
+(10, 24, 5, timestamp('2015-03-28 18:57:50.069'), 'US', 'NY'),
+(11, 42, 2, timestamp('2012-06-27 01:13:32.350'), 'EU', 'UK'),
+(12, 37, 4, timestamp('2020-08-09 01:18:50.153'), 'US', 'NY'),
+(13, 52, 1, timestamp('2019-09-04 01:46:19.558'), 'EU', 'UK'),
+(14, 96, 3, timestamp('2019-03-05 22:00:03.020'), 'US', 'NJ'),
+(15, 18, 3, timestamp('2001-09-11 00:14:12.687'), 'EU', 'FR'),
+(16, 46, 0, timestamp('2013-08-31 02:16:17.878'), 'EU', 'UK'),
+(17, 26, 5, timestamp('2001-02-01 20:05:32.317'), 'EU', 'FR'),
+(18, 68, 5, timestamp('2009-12-29 08:44:08.048'), 'EU', 'ES'),
+(19, 54, 6, timestamp('2015-08-15 01:59:22.177'), 'EU', 'HU'),
+(20, 10, 0, timestamp('2018-05-06 12:56:12.789'), 'US', 'CA')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orders
+PREHOOK: query: select count(*) from orders
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from orders
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders
+#### A masked pattern was here ####
+21
+PREHOOK: query: describe formatted orders
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orders
+POSTHOOK: query: describe formatted orders
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orders
+# col_name data_type comment
+orderid int
+quantity int
+itemid int
+tradets timestamp
+p1 string
+p2 string
+
+# Partition Transform Information
+# col_name transform_type
+p1 IDENTITY
+p2 IDENTITY
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"itemid\":\"true\",\"orderid\":\"true\",\"p1\":\"true\",\"p2\":\"true\",\"quantity\":\"true\",\"tradets\":\"true\"}}
+ EXTERNAL TRUE
+ bucketing_version 2
+ format-version 2
+ iceberg.catalog ice01
+ iceberg.orc.files.only true
+ numFiles 10
+ numPartitions 10
+ numRows 21
+ rawDataSize 0
+	storage_handler	org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+ table_type ICEBERG
+ totalSize #Masked#
+#### A masked pattern was here ####
+ write.delete.mode merge-on-read
+ write.merge.mode merge-on-read
+ write.update.mode merge-on-read
+
+# Storage Information
+SerDe Library: org.apache.iceberg.mr.hive.HiveIcebergSerDe
+InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed: No
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+ write.format.default ORC
+PREHOOK: query: delete from orders where itemid = 6
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders
+PREHOOK: Output: default@orders
+POSTHOOK: query: delete from orders where itemid = 6
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders
+POSTHOOK: Output: default@orders
+PREHOOK: query: delete from orders where itemid = 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders
+PREHOOK: Output: default@orders
+POSTHOOK: query: delete from orders where itemid = 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders
+POSTHOOK: Output: default@orders
+PREHOOK: query: select count(*) from orders
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from orders
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders
+#### A masked pattern was here ####
+21
+PREHOOK: query: describe formatted orders
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orders
+POSTHOOK: query: describe formatted orders
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orders
+# col_name data_type comment
+orderid int
+quantity int
+itemid int
+tradets timestamp
+p1 string
+p2 string
+
+# Partition Transform Information
+# col_name transform_type
+p1 IDENTITY
+p2 IDENTITY
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+ EXTERNAL TRUE
+ bucketing_version 2
+ format-version 2
+ iceberg.catalog ice01
+ iceberg.orc.files.only true
+ numFiles 10
+ numPartitions 10
+ numRows 21
+ rawDataSize 0
+	storage_handler	org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+ table_type ICEBERG
+ totalSize #Masked#
+#### A masked pattern was here ####
+ write.delete.mode merge-on-read
+ write.merge.mode merge-on-read
+ write.update.mode merge-on-read
+
+# Storage Information
+SerDe Library: org.apache.iceberg.mr.hive.HiveIcebergSerDe
+InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed: No
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+ write.format.default ORC
+PREHOOK: query: DROP TABLE orders
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@orders
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orders
+POSTHOOK: query: DROP TABLE orders
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@orders
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orders
+PREHOOK: query: CREATE EXTERNAL TABLE orders (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='ice01')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orders
+POSTHOOK: query: CREATE EXTERNAL TABLE orders (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='ice01')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orders
+PREHOOK: query: select count(*) from orders
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from orders
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders
+#### A masked pattern was here ####
+21
+PREHOOK: query: describe formatted orders
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orders
+POSTHOOK: query: describe formatted orders
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orders
+# col_name data_type comment
+orderid int
+quantity int
+itemid int
+tradets timestamp
+p1 string
+p2 string
+
+# Partition Transform Information
+# col_name transform_type
+p1 IDENTITY
+p2 IDENTITY
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"itemid\":\"true\",\"orderid\":\"true\",\"p1\":\"true\",\"p2\":\"true\",\"quantity\":\"true\",\"tradets\":\"true\"}}
+ EXTERNAL TRUE
+ bucketing_version 2
+ format-version 2
+ iceberg.catalog ice01
+ numFiles 10
+ numPartitions 10
+ numRows 21
+ rawDataSize 0
+	storage_handler	org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+ table_type ICEBERG
+ totalSize #Masked#
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.iceberg.mr.hive.HiveIcebergSerDe
+InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed: No
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+ write.format.default ORC
+PREHOOK: query: CREATE EXTERNAL TABLE orders2 (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+#### A masked pattern was here ####
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='location_based_table')
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orders2
+POSTHOOK: query: CREATE EXTERNAL TABLE orders2 (orderid INT, quantity INT, itemid INT, tradets TIMESTAMP)
+ PARTITIONED BY (p1 STRING, p2 STRING)
+STORED BY ICEBERG STORED AS ORC
+#### A masked pattern was here ####
+TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='location_based_table')
+POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orders2
+PREHOOK: query: INSERT INTO orders2 SELECT * FROM orders
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders
+PREHOOK: Output: default@orders2
+POSTHOOK: query: INSERT INTO orders2 SELECT * FROM orders
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders
+POSTHOOK: Output: default@orders2
+PREHOOK: query: select count(*) from orders2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders2
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from orders2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders2
+#### A masked pattern was here ####
+21
+PREHOOK: query: describe formatted orders2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orders2
+POSTHOOK: query: describe formatted orders2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orders2
+# col_name data_type comment
+orderid int
+quantity int
+itemid int
+tradets timestamp
+p1 string
+p2 string
+
+# Partition Transform Information
+# col_name transform_type
+p1 IDENTITY
+p2 IDENTITY
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"itemid\":\"true\",\"orderid\":\"true\",\"p1\":\"true\",\"p2\":\"true\",\"quantity\":\"true\",\"tradets\":\"true\"}}
+ EXTERNAL TRUE
+ bucketing_version 2
+ format-version 2
+ iceberg.catalog location_based_table
+ iceberg.orc.files.only true
+ numFiles 10
+ numPartitions 10
+ numRows 21
+ rawDataSize 0
+	storage_handler	org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+ table_type ICEBERG
+ totalSize #Masked#
+#### A masked pattern was here ####
+ write.delete.mode merge-on-read
+ write.merge.mode merge-on-read
+ write.update.mode merge-on-read
+
+# Storage Information
+SerDe Library: org.apache.iceberg.mr.hive.HiveIcebergSerDe
+InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed: No
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+ write.format.default ORC
+PREHOOK: query: delete from orders2 where itemid = 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders2
+#### A masked pattern was here ####
+POSTHOOK: query: delete from orders2 where itemid = 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders2
+#### A masked pattern was here ####
+PREHOOK: query: select count(*) from orders2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orders2
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from orders2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orders2
+#### A masked pattern was here ####
+21
+PREHOOK: query: describe formatted orders2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orders2
+POSTHOOK: query: describe formatted orders2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orders2
+# col_name data_type comment
+orderid int
+quantity int
+itemid int
+tradets timestamp
+p1 string
+p2 string
+
+# Partition Transform Information
+# col_name transform_type
+p1 IDENTITY
+p2 IDENTITY
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"itemid\":\"true\",\"orderid\":\"true\",\"p1\":\"true\",\"p2\":\"true\",\"quantity\":\"true\",\"tradets\":\"true\"}}
+ EXTERNAL TRUE
+ bucketing_version 2
+ format-version 2
+ iceberg.catalog location_based_table
+ iceberg.orc.files.only true
+ numFiles 10
+ numRows 21
+ rawDataSize 0
+	storage_handler	org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+ table_type ICEBERG
+ totalSize #Masked#
+#### A masked pattern was here ####
+ write.delete.mode merge-on-read
+ write.merge.mode merge-on-read
+ write.update.mode merge-on-read
+
+# Storage Information
+SerDe Library: org.apache.iceberg.mr.hive.HiveIcebergSerDe
+InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed: No
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+ write.format.default ORC
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 4300ed89918..3f5b804bba3 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -415,6 +415,7 @@ erasurecoding.only.query.files=\
erasure_simple.q
iceberg.llap.query.files=\
+ hadoop_catalog_create_table.q,\
iceberg_bucket_map_join_1.q,\
iceberg_bucket_map_join_2.q,\
iceberg_bucket_map_join_3.q,\
@@ -455,6 +456,7 @@ iceberg.llap.query.compactor.files=\
iceberg_minor_compaction_unpartitioned.q
iceberg.llap.only.query.files=\
+ hadoop_catalog_create_table.q,\
iceberg_bucket_map_join_1.q,\
iceberg_bucket_map_join_2.q,\
iceberg_bucket_map_join_3.q,\