This is an automated email from the ASF dual-hosted git repository.

difin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new fda96f8a0db HIVE-29220: Iceberg: Enable write operations with REST 
Catalog HMS Client (#6094)
fda96f8a0db is described below

commit fda96f8a0db51207bc3ea8f95a9cd19bc5d94260
Author: Dmitriy Fingerman <[email protected]>
AuthorDate: Fri Sep 26 21:08:28 2025 -0400

    HIVE-29220: Iceberg: Enable write operations with REST Catalog HMS Client 
(#6094)
    
    Co-authored-by: Dmitriy Fingerman <[email protected]>
---
 .../org/apache/iceberg/hive/MetastoreUtil.java     |  3 +
 .../test/queries/positive/iceberg_rest_catalog.q   | 21 +++++-
 .../positive/llap/iceberg_rest_catalog.q.out       | 79 ++++++++++++++++++++++
 3 files changed, 101 insertions(+), 2 deletions(-)

diff --git 
a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/MetastoreUtil.java
 
b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/MetastoreUtil.java
index 1744b9b135c..9c3f10ffea9 100644
--- 
a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/MetastoreUtil.java
+++ 
b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/MetastoreUtil.java
@@ -19,6 +19,7 @@
 
 package org.apache.iceberg.hive;
 
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -32,6 +33,7 @@
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
@@ -157,6 +159,7 @@ private static StorageDescriptor 
getHiveStorageDescriptor(org.apache.iceberg.Tab
     result.setSerdeInfo(getHiveSerdeInfo());
     result.setLocation(table.location());
     result.setParameters(Maps.newHashMap());
+    result.setSkewedInfo(new SkewedInfo(Collections.emptyList(), 
Collections.emptyList(), Collections.emptyMap()));
     return result;
   }
 
diff --git 
a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q 
b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q
index b87d9e0af67..27f23122240 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rest_catalog.q
@@ -3,6 +3,8 @@
 --! qt:replace:/(\s+neededVirtualColumns:\s)(.*)/$1#Masked#/
 -- Mask random uuid
 --! qt:replace:/(\s+'uuid'=')\S+('\s*)/$1#Masked#$2/
+-- Mask random uuid
+--! qt:replace:/(\s+uuid\s+)\S+(\s*)/$1#Masked#$2/
 -- Mask a random snapshot id
 --! qt:replace:/(\s+current-snapshot-id\s+)\S+(\s*)/$1#Masked#/
 -- Mask added file size
@@ -20,6 +22,7 @@
 -- Mask iceberg version
 --! 
qt:replace:/(\S\"iceberg-version\\\":\\\")(\w+\s\w+\s\d+\.\d+\.\d+\s\(\w+\s\w+\))(\\\")/$1#Masked#$3/
 
+set hive.stats.autogather=false;
 set metastore.client.impl=org.apache.iceberg.hive.client.HiveRESTCatalogClient;
 set metastore.catalog.default=ice01;
 set iceberg.catalog.ice01.type=rest;
@@ -30,7 +33,10 @@ set iceberg.catalog.ice01.type=rest;
 create database ice_rest;
 use ice_rest;
 
---! Creating table without catalog name in table properties
+-----------------------------------------------------------------------------
+--! Creating a table without a catalog name in table properties
+-----------------------------------------------------------------------------
+
 create table ice_orc1 (
     first_name string, 
     last_name string,
@@ -40,7 +46,10 @@ create table ice_orc1 (
 partitioned by (company_id bigint)
 stored by iceberg stored as orc;
 
---! Creating table with a valid catalog name in table properties
+-----------------------------------------------------------------------------
+--! Creating a table with a valid catalog name in table properties
+-----------------------------------------------------------------------------
+
 create table ice_orc2 (
     first_name string, 
     last_name string,
@@ -54,6 +63,14 @@ TBLPROPERTIES('format-version'='2', 
'iceberg.catalog'='ice01');
 --! Output should contain: 'type' = 'rest'
 show create table ice_orc2;
 
+insert into ice_orc2 partition (company_id=100) 
+VALUES ('fn1','ln1', 1, 10), ('fn2','ln2', 2, 20), ('fn3','ln3', 3, 30);
+
+describe formatted ice_orc2;
+select * from ice_orc2;
+
+-----------------------------------------------------------------------------
+
 show tables;
 drop table ice_orc1;
 drop table ice_orc2;
diff --git 
a/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out
 
b/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out
index 2622a47ae1c..a814fe8f149 100644
--- 
a/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out
+++ 
b/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_rest_catalog.q.out
@@ -99,6 +99,85 @@ TBLPROPERTIES (
   'write.format.default'='orc', 
   'write.merge.mode'='merge-on-read', 
   'write.update.mode'='merge-on-read')
+PREHOOK: query: insert into ice_orc2 partition (company_id=100) 
+VALUES ('fn1','ln1', 1, 10), ('fn2','ln2', 2, 20), ('fn3','ln3', 3, 30)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: ice_rest@ice_orc2@company_id=100
+POSTHOOK: query: insert into ice_orc2 partition (company_id=100) 
+VALUES ('fn1','ln1', 1, 10), ('fn2','ln2', 2, 20), ('fn3','ln3', 3, 30)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: ice_rest@ice_orc2@company_id=100
+PREHOOK: query: describe formatted ice_orc2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: ice_rest@ice_orc2
+POSTHOOK: query: describe formatted ice_orc2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: ice_rest@ice_orc2
+# col_name             data_type               comment             
+first_name             string                                      
+last_name              string                                      
+dept_id                bigint                                      
+team_id                bigint                                      
+company_id             bigint                                      
+                
+# Partition Transform Information               
+# col_name             transform_type           
+company_id             IDENTITY                 
+                
+# Detailed Table Information            
+Database:              ice_rest                 
+#### A masked pattern was here ####
+Retention:             0                        
+#### A masked pattern was here ####
+Table Type:            EXTERNAL_TABLE           
+Table Parameters:               
+       bucketing_version       2                   
+       current-schema          
{\"type\":\"struct\",\"schema-id\":0,\"fields\":[{\"id\":1,\"name\":\"first_name\",\"required\":false,\"type\":\"string\"},{\"id\":2,\"name\":\"last_name\",\"required\":false,\"type\":\"string\"},{\"id\":3,\"name\":\"dept_id\",\"required\":false,\"type\":\"long\"},{\"id\":4,\"name\":\"team_id\",\"required\":false,\"type\":\"long\"},{\"id\":5,\"name\":\"company_id\",\"required\":false,\"type\":\"long\"}]}
+       current-snapshot-id     #Masked#
+       current-snapshot-summary        
{\"added-data-files\":\"1\",\"added-records\":\"3\",\"added-files-size\":\"#Masked#\",\"changed-partition-count\":\"1\",\"total-records\":\"3\",\"total-files-size\":\"#Masked#\",\"total-data-files\":\"1\",\"total-delete-files\":\"0\",\"total-position-deletes\":\"0\",\"total-equality-deletes\":\"0\",\"iceberg-version\":\"#Masked#\"}
+       current-snapshot-timestamp-ms   #Masked#       
+       default-partition-spec  
{\"spec-id\":0,\"fields\":[{\"name\":\"company_id\",\"transform\":\"identity\",\"source-id\":5,\"field-id\":1000}]}
+       format-version          2                   
+       iceberg.catalog         ice01               
+       iceberg.orc.files.only  true                
+#### A masked pattern was here ####
+       name                    ice_rest.ice_orc2   
+       numFiles                1                   
+       numRows                 3                   
+       parquet.compression     zstd                
+       serialization.format    1                   
+       snapshot-count          1                   
+       storage_handler         
org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+       table_type              ICEBERG             
+       totalSize               #Masked#
+       type                    rest                
+       uuid                    #Masked#
+       write.delete.mode       merge-on-read       
+       write.format.default    orc                 
+       write.merge.mode        merge-on-read       
+       write.update.mode       merge-on-read       
+                
+# Storage Information           
+SerDe Library:         org.apache.iceberg.mr.hive.HiveIcebergSerDe      
+InputFormat:           org.apache.iceberg.mr.hive.HiveIcebergInputFormat       
 
+OutputFormat:          org.apache.iceberg.mr.hive.HiveIcebergOutputFormat      
 
+Compressed:            No                       
+Sort Columns:          []                       
+Storage Desc Params:            
+       serialization.format    1                   
+PREHOOK: query: select * from ice_orc2
+PREHOOK: type: QUERY
+PREHOOK: Input: ice_rest@ice_orc2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from ice_orc2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: ice_rest@ice_orc2
+#### A masked pattern was here ####
+fn1    ln1     1       10      100
+fn2    ln2     2       20      100
+fn3    ln3     3       30      100
 PREHOOK: query: show tables
 PREHOOK: type: SHOWTABLES
 PREHOOK: Input: database:ice_rest

Reply via email to