tanishq-chugh commented on code in PR #6367:
URL: https://github.com/apache/hive/pull/6367#discussion_r3162898891
##########
iceberg/iceberg-handler/src/test/results/positive/iceberg_defaults_in_desc_table.q.out:
##########
@@ -0,0 +1,238 @@
+PREHOOK: query: CREATE TABLE ice_parq (
+ id INT)
+STORED BY ICEBERG stored as parquet
+TBLPROPERTIES ('format-version'='3')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ice_parq
+POSTHOOK: query: CREATE TABLE ice_parq (
+ id INT)
+STORED BY ICEBERG stored as parquet
+TBLPROPERTIES ('format-version'='3')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ice_parq
+PREHOOK: query: INSERT INTO ice_parq (id) VALUES (1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice_parq
+POSTHOOK: query: INSERT INTO ice_parq (id) VALUES (1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice_parq
+PREHOOK: query: ALTER TABLE ice_parq ADD COLUMNS (point STRUCT<x:INT, y:INT>
DEFAULT '{"x":100,"y":99}',
+ name STRING DEFAULT 'unknown',
+ age INT DEFAULT 25)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@ice_parq
+PREHOOK: Output: default@ice_parq
+POSTHOOK: query: ALTER TABLE ice_parq ADD COLUMNS (point STRUCT<x:INT, y:INT>
DEFAULT '{"x":100,"y":99}',
+ name STRING DEFAULT 'unknown',
+ age INT DEFAULT 25)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@ice_parq
+POSTHOOK: Output: default@ice_parq
+PREHOOK: query: INSERT INTO ice_parq (id) VALUES (2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice_parq
+POSTHOOK: query: INSERT INTO ice_parq (id) VALUES (2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice_parq
+PREHOOK: query: SELECT * FROM ice_parq ORDER BY id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice_parq
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT * FROM ice_parq ORDER BY id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice_parq
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1 NULL unknown 25
+2 {"x":100,"y":99} unknown 25
+PREHOOK: query: DESCRIBE FORMATTED ice_parq
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@ice_parq
+POSTHOOK: query: DESCRIBE FORMATTED ice_parq
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@ice_parq
+# col_name data_type comment
+id int
+point struct<x:int,y:int>
+name string
+age int
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+ EXTERNAL TRUE
+ bucketing_version 2
+ current-schema
{\"type\":\"struct\",\"schema-id\":1,\"fields\":[{\"id\":1,\"name\":\"id\",\"required\":false,\"type\":\"int\"},{\"id\":2,\"name\":\"point\",\"required\":false,\"type\":{\"type\":\"struct\",\"fields\":[{\"id\":3,\"name\":\"x\",\"required\":false,\"type\":\"int\",\"write-default\":100},{\"id\":4,\"name\":\"y\",\"required\":false,\"type\":\"int\",\"write-default\":99}]}},{\"id\":5,\"name\":\"name\",\"required\":false,\"type\":\"string\",\"initial-default\":\"unknown\",\"write-default\":\"unknown\"},{\"id\":6,\"name\":\"age\",\"required\":false,\"type\":\"int\",\"initial-default\":25,\"write-default\":25}]}
+ current-snapshot-id #Masked#
+ current-snapshot-summary
{\"added-data-files\":\"1\",\"added-records\":\"1\",\"added-files-size\":\"#Masked#\",\"changed-partition-count\":\"1\",\"total-records\":\"2\",\"total-files-size\":\"#Masked#\",\"total-data-files\":\"2\",\"total-delete-files\":\"0\",\"total-position-deletes\":\"0\",\"total-equality-deletes\":\"0\",\"iceberg-version\":\"Apache
Iceberg 1.10.1 (commit ccb8bc435062171e64bc8b7e5f56e6aed9c5b934)\"}
+ current-snapshot-timestamp-ms #Masked#
+ format-version 3
+ iceberg.orc.files.only false
+ metadata_location hdfs://### HDFS PATH ###
+ numFiles 2
+ numRows 2
+ parquet.compression zstd
+ previous_metadata_location hdfs://### HDFS PATH ###
+ rawDataSize 0
+ serialization.format 1
+ snapshot-count 2
+ storage_handler
org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+ table_type ICEBERG
+ totalSize #Masked#
+#### A masked pattern was here ####
+ uuid #Masked#
+ write.delete.mode merge-on-read
+ write.format.default parquet
+ write.merge.mode merge-on-read
+ write.metadata.delete-after-commit.enabled true
+ write.update.mode merge-on-read
+
+# Storage Information
+SerDe Library: org.apache.iceberg.mr.hive.HiveIcebergSerDe
+InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed: No
+Sort Columns: []
+
+# Constraints
+
+# Default Constraints
+Table: default.ice_parq
+Constraint Name: #### A masked pattern was here ####
+Column Name:age Initial Default Value:25 Write Default Value:25
+
+Constraint Name: #### A masked pattern was here ####
+Column Name:point Initial Default Value: Write Default Value:'x:100,y:99'
+
+Constraint Name: #### A masked pattern was here ####
+Column Name:name Initial Default Value:'unknown' Write Default
Value:'unknown'
+
+PREHOOK: query: CREATE TABLE ice_orc (
+ id INT)
+STORED BY ICEBERG stored as orc
+TBLPROPERTIES ('format-version'='3')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ice_orc
+POSTHOOK: query: CREATE TABLE ice_orc (
+ id INT)
+STORED BY ICEBERG stored as orc
+TBLPROPERTIES ('format-version'='3')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ice_orc
+PREHOOK: query: INSERT INTO ice_orc (id) VALUES (1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice_orc
+POSTHOOK: query: INSERT INTO ice_orc (id) VALUES (1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice_orc
+PREHOOK: query: ALTER TABLE ice_orc ADD COLUMNS (point STRUCT<x:INT, y:INT>
DEFAULT '{"x":100,"y":99}',
+ name STRING DEFAULT 'unknown',
+ age INT DEFAULT 25)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@ice_orc
+PREHOOK: Output: default@ice_orc
+POSTHOOK: query: ALTER TABLE ice_orc ADD COLUMNS (point STRUCT<x:INT, y:INT>
DEFAULT '{"x":100,"y":99}',
+ name STRING DEFAULT 'unknown',
+ age INT DEFAULT 25)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@ice_orc
+POSTHOOK: Output: default@ice_orc
+PREHOOK: query: INSERT INTO ice_orc (id) VALUES (2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice_orc
+POSTHOOK: query: INSERT INTO ice_orc (id) VALUES (2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice_orc
+PREHOOK: query: SELECT * FROM ice_orc ORDER BY id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice_orc
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT * FROM ice_orc ORDER BY id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice_orc
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1 NULL NULL NULL
+2 {"x":100,"y":99} unknown 25
+PREHOOK: query: DESCRIBE FORMATTED ice_orc
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@ice_orc
+POSTHOOK: query: DESCRIBE FORMATTED ice_orc
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@ice_orc
+# col_name data_type comment
+id int
+point struct<x:int,y:int>
+name string
+age int
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+ EXTERNAL TRUE
+ bucketing_version 2
+ current-schema
{\"type\":\"struct\",\"schema-id\":1,\"fields\":[{\"id\":1,\"name\":\"id\",\"required\":false,\"type\":\"int\"},{\"id\":2,\"name\":\"point\",\"required\":false,\"type\":{\"type\":\"struct\",\"fields\":[{\"id\":3,\"name\":\"x\",\"required\":false,\"type\":\"int\",\"write-default\":100},{\"id\":4,\"name\":\"y\",\"required\":false,\"type\":\"int\",\"write-default\":99}]}},{\"id\":5,\"name\":\"name\",\"required\":false,\"type\":\"string\",\"write-default\":\"unknown\"},{\"id\":6,\"name\":\"age\",\"required\":false,\"type\":\"int\",\"write-default\":25}]}
+ current-snapshot-id #Masked#
+ current-snapshot-summary
{\"added-data-files\":\"1\",\"added-records\":\"1\",\"added-files-size\":\"#Masked#\",\"changed-partition-count\":\"1\",\"total-records\":\"2\",\"total-files-size\":\"#Masked#\",\"total-data-files\":\"2\",\"total-delete-files\":\"0\",\"total-position-deletes\":\"0\",\"total-equality-deletes\":\"0\",\"iceberg-version\":\"Apache
Iceberg 1.10.1 (commit ccb8bc435062171e64bc8b7e5f56e6aed9c5b934)\"}
+ current-snapshot-timestamp-ms #Masked#
+ format-version 3
+ iceberg.orc.files.only true
+ metadata_location hdfs://### HDFS PATH ###
+ numFiles 2
+ numRows 2
+ parquet.compression zstd
+ previous_metadata_location hdfs://### HDFS PATH ###
+ rawDataSize 0
+ serialization.format 1
+ snapshot-count 2
+ storage_handler
org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+ table_type ICEBERG
+ totalSize #Masked#
+#### A masked pattern was here ####
+ uuid #Masked#
+ write.delete.mode merge-on-read
+ write.format.default orc
+ write.merge.mode merge-on-read
+ write.metadata.delete-after-commit.enabled true
+ write.update.mode merge-on-read
+
+# Storage Information
+SerDe Library: org.apache.iceberg.mr.hive.HiveIcebergSerDe
+InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed: No
+Sort Columns: []
+
+# Constraints
+
+# Default Constraints
+Table: default.ice_orc
+Constraint Name: #### A masked pattern was here ####
+Column Name:point Initial Default Value: Write Default Value:'x:100,y:99'
Review Comment:
Addressed this in commit
[69ba9ee](https://github.com/apache/hive/pull/6367/commits/69ba9eefb6b6cf5b76f4c1f9ed1576b8cc2f4030).
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]