This is an automated email from the ASF dual-hosted git repository.

pvary pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 6c0b86e  HIVE-26064: For Iceberg external table do not set external.table.purge=true by default (Peter Vary reviewed by Marton Bod) (#3132)
6c0b86e is described below

commit 6c0b86ef0cfc67c5acb3468408e1d46fa6ef8024
Author: pvary <pv...@cloudera.com>
AuthorDate: Wed Mar 30 14:05:55 2022 +0200

    HIVE-26064: For Iceberg external table do not set external.table.purge=true by default (Peter Vary reviewed by Marton Bod) (#3132)
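
In practice, a newly created Iceberg external table no longer gets external.table.purge=true automatically, so whether DROP TABLE also removes the data files falls back to hive.external.table.purge.default. A minimal HiveQL sketch of opting in explicitly (table and column names are illustrative):

    -- The purge flag must now be requested explicitly at create time (or via ALTER TABLE).
    CREATE EXTERNAL TABLE purge_demo (id INT, value STRING)
    STORED BY ICEBERG
    TBLPROPERTIES ('external.table.purge'='true');

    -- With the flag set, dropping the table also deletes its data and metadata files.
    DROP TABLE purge_demo;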
---
 .../iceberg/mr/hive/HiveIcebergMetaHook.java       |  3 +-
 .../hive/TestHiveIcebergStorageHandlerNoScan.java  | 79 +++++++++++++++++++--
 .../mr/hive/TestHiveIcebergTruncateTable.java      | 22 ++----
 .../truncate_iceberg_table_external_purge_false.q  |  8 ---
 .../test/queries/positive/truncate_iceberg_table.q |  9 +++
 ...uncate_iceberg_table_external_purge_false.q.out | 29 --------
 .../alter_multi_part_table_to_iceberg.q.out        |  3 -
 .../positive/alter_part_table_to_iceberg.q.out     |  3 -
 .../results/positive/alter_table_to_iceberg.q.out  |  3 -
 .../results/positive/create_iceberg_table.q.out    |  1 -
 ...create_iceberg_table_stored_as_fileformat.q.out |  5 --
 .../create_iceberg_table_stored_by_iceberg.q.out   |  1 -
 ...le_stored_by_iceberg_with_serdeproperties.q.out |  1 -
 .../results/positive/describe_iceberg_table.q.out  |  4 --
 .../positive/show_create_iceberg_table.q.out       |  4 --
 .../results/positive/truncate_iceberg_table.q.out  | 82 ++++++++++++++++++++++
 .../table/misc/truncate/TruncateTableAnalyzer.java | 18 +++--
 17 files changed, 180 insertions(+), 95 deletions(-)

diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
index cb036dd..cb72480 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
@@ -96,8 +96,7 @@ import org.slf4j.LoggerFactory;
 public class HiveIcebergMetaHook implements HiveMetaHook {
   private static final Logger LOG = LoggerFactory.getLogger(HiveIcebergMetaHook.class);
   public static final Map<String, String> COMMON_HMS_PROPERTIES = ImmutableMap.of(
-      BaseMetastoreTableOperations.TABLE_TYPE_PROP, BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE.toUpperCase(),
-      InputFormatConfig.EXTERNAL_TABLE_PURGE, "TRUE"
+      BaseMetastoreTableOperations.TABLE_TYPE_PROP, BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE.toUpperCase()
   );
   private static final Set<String> PARAMETERS_TO_REMOVE = ImmutableSet
       .of(InputFormatConfig.TABLE_SCHEMA, Catalogs.LOCATION, Catalogs.NAME, InputFormatConfig.PARTITION_SPEC);
diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
index 8c45921..cabea6d 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
@@ -34,6 +34,7 @@ import java.util.stream.Collectors;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
@@ -408,6 +409,7 @@ public class TestHiveIcebergStorageHandlerNoScan {
         "'" + InputFormatConfig.PARTITION_SPEC + "'='" +
         PartitionSpecParser.toJson(PartitionSpec.unpartitioned()) + "', " +
         "'dummy'='test', " +
+        "'" + InputFormatConfig.EXTERNAL_TABLE_PURGE + "'='TRUE', " +
         "'" + InputFormatConfig.CATALOG_NAME + "'='" + testTables.catalogName() + "')");
 
     // Check the Iceberg table data
@@ -465,7 +467,7 @@ public class TestHiveIcebergStorageHandlerNoScan {
         " last_name STRING COMMENT 'This is last name')" +
         " STORED BY ICEBERG " +
         testTables.locationForCreateTableSQL(identifier) +
-        testTables.propertiesForCreateTableSQL(ImmutableMap.of());
+        testTables.propertiesForCreateTableSQL(ImmutableMap.of(InputFormatConfig.EXTERNAL_TABLE_PURGE, "TRUE"));
     shell.executeStatement(createSql);
 
     Table icebergTable = testTables.loadTable(identifier);
@@ -784,7 +786,8 @@ public class TestHiveIcebergStorageHandlerNoScan {
       shell.executeStatement("CREATE EXTERNAL TABLE not_supported_types (not_supported " + notSupportedType + ") " +
               "STORED BY ICEBERG " +
               testTables.locationForCreateTableSQL(identifier) +
-              testTables.propertiesForCreateTableSQL(ImmutableMap.of()));
+              testTables.propertiesForCreateTableSQL(
+                  ImmutableMap.of(InputFormatConfig.EXTERNAL_TABLE_PURGE, "TRUE")));
 
       org.apache.iceberg.Table icebergTable = testTables.loadTable(identifier);
       Assert.assertEquals(notSupportedTypes.get(notSupportedType), icebergTable.schema().columns().get(0).type());
@@ -936,9 +939,8 @@ public class TestHiveIcebergStorageHandlerNoScan {
     Assert.assertEquals(expectedIcebergProperties, icebergTable.properties());
 
     if (Catalogs.hiveCatalog(shell.getHiveConf(), tableProperties)) {
-      Assert.assertEquals(10, hmsParams.size());
+      Assert.assertEquals(9, hmsParams.size());
       Assert.assertEquals("initial_val", hmsParams.get("custom_property"));
-      Assert.assertEquals("TRUE", hmsParams.get(InputFormatConfig.EXTERNAL_TABLE_PURGE));
       Assert.assertEquals("TRUE", hmsParams.get("EXTERNAL"));
       Assert.assertEquals("true", hmsParams.get(TableProperties.ENGINE_HIVE_ENABLED));
       Assert.assertEquals(HiveIcebergStorageHandler.class.getName(),
@@ -951,7 +953,7 @@ public class TestHiveIcebergStorageHandlerNoScan {
       Assert.assertNotNull(hmsParams.get(hive_metastoreConstants.DDL_TIME));
       Assert.assertNotNull(hmsParams.get(serdeConstants.SERIALIZATION_FORMAT));
     } else {
-      Assert.assertEquals(7, hmsParams.size());
+      Assert.assertEquals(6, hmsParams.size());
       Assert.assertNull(hmsParams.get(TableProperties.ENGINE_HIVE_ENABLED));
     }
 
@@ -974,7 +976,7 @@ public class TestHiveIcebergStorageHandlerNoScan {
         .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
 
     if (Catalogs.hiveCatalog(shell.getHiveConf(), tableProperties)) {
-      Assert.assertEquals(13, hmsParams.size()); // 2 newly-added properties + previous_metadata_location prop
+      Assert.assertEquals(12, hmsParams.size()); // 2 newly-added properties + previous_metadata_location prop
       Assert.assertEquals("true", hmsParams.get("new_prop_1"));
       Assert.assertEquals("false", hmsParams.get("new_prop_2"));
       Assert.assertEquals("new_val", hmsParams.get("custom_property"));
@@ -984,7 +986,7 @@ public class TestHiveIcebergStorageHandlerNoScan {
       Assert.assertEquals(hmsParams.get(BaseMetastoreTableOperations.PREVIOUS_METADATA_LOCATION_PROP), prevSnapshot);
       Assert.assertEquals(hmsParams.get(BaseMetastoreTableOperations.METADATA_LOCATION_PROP), newSnapshot);
     } else {
-      Assert.assertEquals(7, hmsParams.size());
+      Assert.assertEquals(6, hmsParams.size());
     }
 
     // Remove some Iceberg props and see if they're removed from HMS table props as well
@@ -1057,6 +1059,69 @@ public class TestHiveIcebergStorageHandlerNoScan {
   }
 
   @Test
+  public void testDropTableWithPurgeFalse() throws IOException, TException, InterruptedException {
+    TableIdentifier identifier = TableIdentifier.of("default", "customers");
+
+    shell.executeStatement("CREATE EXTERNAL TABLE customers (t_int INT, t_string STRING) STORED BY ICEBERG " +
+        testTables.locationForCreateTableSQL(identifier) +
+        testTables.propertiesForCreateTableSQL(ImmutableMap.of(InputFormatConfig.EXTERNAL_TABLE_PURGE, "FALSE")));
+
+    String purge = shell.metastore().getTable(identifier).getParameters().get(InputFormatConfig.EXTERNAL_TABLE_PURGE);
+    Assert.assertEquals("FALSE", purge);
+    org.apache.iceberg.Table icebergTable = testTables.loadTable(identifier);
+    Path tableLocation = new Path(icebergTable.location());
+    shell.executeStatement("DROP TABLE customers");
+
+    // Check if the files are kept
+    FileSystem fs = Util.getFs(tableLocation, shell.getHiveConf());
+    Assert.assertEquals(1, fs.listStatus(tableLocation).length);
+    Assert.assertTrue(fs.listStatus(new Path(tableLocation, "metadata")).length > 0);
+  }
+
+  @Test
+  public void testDropTableWithPurgeTrue() throws IOException, TException, InterruptedException {
+    TableIdentifier identifier = TableIdentifier.of("default", "customers");
+
+    shell.executeStatement("CREATE EXTERNAL TABLE customers (t_int INT, t_string STRING) STORED BY ICEBERG " +
+        testTables.locationForCreateTableSQL(identifier) +
+        testTables.propertiesForCreateTableSQL(ImmutableMap.of(InputFormatConfig.EXTERNAL_TABLE_PURGE, "TRUE")));
+
+    String purge = shell.metastore().getTable(identifier).getParameters().get(InputFormatConfig.EXTERNAL_TABLE_PURGE);
+    Assert.assertEquals("TRUE", purge);
+    org.apache.iceberg.Table icebergTable = testTables.loadTable(identifier);
+    Path tableLocation = new Path(icebergTable.location());
+    shell.executeStatement("DROP TABLE customers");
+
+    // Check if the files are removed
+    FileSystem fs = Util.getFs(tableLocation, shell.getHiveConf());
+    Assert.assertFalse(fs.exists(tableLocation));
+  }
+
+  @Test
+  public void testDropTableWithoutPurge() throws IOException, TException, InterruptedException {
+    TableIdentifier identifier = TableIdentifier.of("default", "customers");
+
+    shell.executeStatement("CREATE EXTERNAL TABLE customers (t_int INT, t_string STRING) STORED BY ICEBERG " +
+        testTables.locationForCreateTableSQL(identifier) +
+        testTables.propertiesForCreateTableSQL(ImmutableMap.of()));
+
+    String purge = shell.metastore().getTable(identifier).getParameters().get(InputFormatConfig.EXTERNAL_TABLE_PURGE);
+    Assert.assertNull(purge);
+    org.apache.iceberg.Table icebergTable = testTables.loadTable(identifier);
+    Path tableLocation = new Path(icebergTable.location());
+    shell.executeStatement("DROP TABLE customers");
+
+    FileSystem fs = Util.getFs(tableLocation, shell.getHiveConf());
+    // This comes from the default Hive behavior based on hive.external.table.purge.default
+    if (HiveConf.getBoolVar(shell.getHiveConf(), HiveConf.ConfVars.HIVE_EXTERNALTABLE_PURGE_DEFAULT)) {
+      Assert.assertFalse(fs.exists(tableLocation));
+    } else {
+      Assert.assertEquals(1, fs.listStatus(tableLocation).length);
+      Assert.assertTrue(fs.listStatus(new Path(tableLocation, "metadata")).length > 0);
+    }
+  }
+
+  @Test
   public void testDropHiveTableWithoutUnderlyingTable() throws IOException {
     Assume.assumeFalse("Not relevant for HiveCatalog",
             testTableType.equals(TestTables.TestTableType.HIVE_CATALOG));
diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTruncateTable.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTruncateTable.java
index 0526e64..e63ab78 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTruncateTable.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTruncateTable.java
@@ -103,27 +103,15 @@ public class TestHiveIcebergTruncateTable extends HiveIcebergStorageHandlerWithE
 
   @Test
   public void testTruncateTableExternalPurgeFalse() throws IOException, TException, InterruptedException {
-    // Create an Iceberg table with some records in it and set the 'external.table.purge' table property
-    // to false and try to run a truncate table command on it. The command should fail with a SemanticException.
-    // Then check if the data is not deleted from the table and also the statistics are not changed.
+    // Create an Iceberg table with some records and set the 'external.table.purge' table parameter to false.
+    // Then execute a truncate table command which should run without any error, even without force.
+    // Then check if the data is deleted from the table and the statistics are reset.
     String databaseName = "default";
     String tableName = "customers";
-    TableIdentifier identifier = TableIdentifier.of(databaseName, tableName);
     Table icebergTable = testTables.createTable(shell, tableName, HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA,
         fileFormat, HiveIcebergStorageHandlerTestUtils.CUSTOMER_RECORDS);
-    shell.executeStatement("ALTER TABLE " + identifier + " SET TBLPROPERTIES('external.table.purge'='false')");
-    shell.executeStatement("ANALYZE TABLE " + identifier + " COMPUTE STATISTICS");
-
-    AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class,
-        "Cannot truncate non-managed table", () -> {
-          shell.executeStatement("TRUNCATE " + identifier);
-        });
-
-    List<Object[]> rows = shell.executeStatement("SELECT * FROM " + identifier);
-    HiveIcebergTestUtils.validateData(HiveIcebergStorageHandlerTestUtils.CUSTOMER_RECORDS,
-        HiveIcebergTestUtils.valueForRow(HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA, rows), 0);
-    icebergTable = testTables.loadTable(TableIdentifier.of(databaseName, tableName));
-    validateBasicStats(icebergTable, databaseName, tableName);
+    testTruncateTable(databaseName, tableName, icebergTable, HiveIcebergStorageHandlerTestUtils.CUSTOMER_RECORDS,
+        HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA, false, false);
   }
 
   @Test
diff --git a/iceberg/iceberg-handler/src/test/queries/negative/truncate_iceberg_table_external_purge_false.q b/iceberg/iceberg-handler/src/test/queries/negative/truncate_iceberg_table_external_purge_false.q
deleted file mode 100644
index 72e93f0..0000000
--- a/iceberg/iceberg-handler/src/test/queries/negative/truncate_iceberg_table_external_purge_false.q
+++ /dev/null
@@ -1,8 +0,0 @@
-set hive.vectorized.execution.enabled=false;
-
-drop table if exists test_truncate_neg1;
-create external table test_truncate_neg1 (id int, value string) stored by iceberg stored as orc;
-alter table test_truncate_neg1 set tblproperties('external.table.purge'='false');
-insert into test_truncate_neg1 values (1, 'one'),(2,'two'),(3,'three'),(4,'four'),(5,'five');
-
-truncate test_truncate_neg1;
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/truncate_iceberg_table.q b/iceberg/iceberg-handler/src/test/queries/positive/truncate_iceberg_table.q
index a6a0e4e..a52e791 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/truncate_iceberg_table.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/truncate_iceberg_table.q
@@ -31,4 +31,13 @@ select count(*) from test_truncate;
 select * from test_truncate;
 describe formatted test_truncate;
 
+insert into test_truncate values (1, 'one'),(2,'two'),(3,'three'),(4,'four'),(5,'five');
+alter table test_truncate set tblproperties('external.table.purge'='false');
+
+truncate test_truncate;
+
+select count(*) from test_truncate;
+select * from test_truncate;
+describe formatted test_truncate;
+
 drop table if exists test_truncate;
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/results/negative/truncate_iceberg_table_external_purge_false.q.out b/iceberg/iceberg-handler/src/test/results/negative/truncate_iceberg_table_external_purge_false.q.out
deleted file mode 100644
index a25dc6b..0000000
--- a/iceberg/iceberg-handler/src/test/results/negative/truncate_iceberg_table_external_purge_false.q.out
+++ /dev/null
@@ -1,29 +0,0 @@
-PREHOOK: query: drop table if exists test_truncate_neg1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table if exists test_truncate_neg1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create external table test_truncate_neg1 (id int, value string) stored by iceberg stored as orc
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@test_truncate_neg1
-POSTHOOK: query: create external table test_truncate_neg1 (id int, value string) stored by iceberg stored as orc
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@test_truncate_neg1
-PREHOOK: query: alter table test_truncate_neg1 set tblproperties('external.table.purge'='false')
-PREHOOK: type: ALTERTABLE_PROPERTIES
-PREHOOK: Input: default@test_truncate_neg1
-PREHOOK: Output: default@test_truncate_neg1
-POSTHOOK: query: alter table test_truncate_neg1 set tblproperties('external.table.purge'='false')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
-POSTHOOK: Input: default@test_truncate_neg1
-POSTHOOK: Output: default@test_truncate_neg1
-PREHOOK: query: insert into test_truncate_neg1 values (1, 'one'),(2,'two'),(3,'three'),(4,'four'),(5,'five')
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@test_truncate_neg1
-POSTHOOK: query: insert into test_truncate_neg1 values (1, 'one'),(2,'two'),(3,'three'),(4,'four'),(5,'five')
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@test_truncate_neg1
-FAILED: SemanticException [Error 10146]: Cannot truncate non-managed table test_truncate_neg1.
diff --git a/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out b/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out
index d6004e4..b5c290e 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out
@@ -186,7 +186,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                7                   
@@ -435,7 +434,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                7                   
@@ -684,7 +682,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                7                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out b/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out
index 27e2585..0205daa 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out
@@ -144,7 +144,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                4                   
@@ -342,7 +341,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                4                   
@@ -540,7 +538,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                4                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out b/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out
index 7748049..4ee448b 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out
@@ -99,7 +99,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                1                   
@@ -249,7 +248,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                1                   
@@ -399,7 +397,6 @@ Table Parameters:
        MIGRATED_TO_ICEBERG     true                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
 #### A masked pattern was here ####
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                1                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table.q.out b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table.q.out
index 3ef381f..722f7d5 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table.q.out
@@ -29,7 +29,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_as_fileformat.q.out b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_as_fileformat.q.out
index 8dd4842..4530cd5 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_as_fileformat.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_as_fileformat.q.out
@@ -33,7 +33,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
@@ -96,7 +95,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
@@ -159,7 +157,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
@@ -222,7 +219,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
@@ -282,7 +278,6 @@ Table Parameters:
        bucketing_version       2                   
        dummy                   dummy_value         
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg.q.out b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg.q.out
index 3ef381f..722f7d5 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg.q.out
@@ -29,7 +29,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg_with_serdeproperties.q.out b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg_with_serdeproperties.q.out
index a386b88..a993b15 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg_with_serdeproperties.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/create_iceberg_table_stored_by_iceberg_with_serdeproperties.q.out
@@ -29,7 +29,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/describe_iceberg_table.q.out b/iceberg/iceberg-handler/src/test/results/positive/describe_iceberg_table.q.out
index 3567116..fd363d4 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/describe_iceberg_table.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/describe_iceberg_table.q.out
@@ -69,7 +69,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
@@ -124,7 +123,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
@@ -180,7 +178,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
@@ -224,7 +221,6 @@ Table Parameters:
        EXTERNAL                TRUE                
        bucketing_version       2                   
        engine.hive.enabled     true                
-       external.table.purge    TRUE                
        metadata_location       hdfs://### HDFS PATH ###
        numFiles                0                   
        numRows                 0                   
diff --git a/iceberg/iceberg-handler/src/test/results/positive/show_create_iceberg_table.q.out b/iceberg/iceberg-handler/src/test/results/positive/show_create_iceberg_table.q.out
index a3640d7..55330cb 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/show_create_iceberg_table.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/show_create_iceberg_table.q.out
@@ -31,7 +31,6 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
   'engine.hive.enabled'='true', 
-  'external.table.purge'='TRUE', 
   'metadata_location'='hdfs://### HDFS PATH ###', 
   'serialization.format'='1', 
   'table_type'='ICEBERG', 
@@ -80,7 +79,6 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
   'engine.hive.enabled'='true', 
-  'external.table.purge'='TRUE', 
   'metadata_location'='hdfs://### HDFS PATH ###', 
   'serialization.format'='1', 
   'table_type'='ICEBERG', 
@@ -130,7 +128,6 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
   'engine.hive.enabled'='true', 
-  'external.table.purge'='TRUE', 
   'metadata_location'='hdfs://### HDFS PATH ###', 
   'serialization.format'='1', 
   'table_type'='ICEBERG', 
@@ -168,7 +165,6 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
   'engine.hive.enabled'='true', 
-  'external.table.purge'='TRUE', 
   'metadata_location'='hdfs://### HDFS PATH ###', 
   'serialization.format'='1', 
   'table_type'='ICEBERG', 
diff --git a/iceberg/iceberg-handler/src/test/results/positive/truncate_iceberg_table.q.out b/iceberg/iceberg-handler/src/test/results/positive/truncate_iceberg_table.q.out
index 42925ee..ce85148 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/truncate_iceberg_table.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/truncate_iceberg_table.q.out
@@ -307,6 +307,88 @@ Compressed:                No
 Num Buckets:           0                        
 Bucket Columns:        []                       
 Sort Columns:          []                       
+PREHOOK: query: insert into test_truncate values (1, 'one'),(2,'two'),(3,'three'),(4,'four'),(5,'five')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_truncate
+POSTHOOK: query: insert into test_truncate values (1, 'one'),(2,'two'),(3,'three'),(4,'four'),(5,'five')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_truncate
+PREHOOK: query: alter table test_truncate set tblproperties('external.table.purge'='false')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@test_truncate
+PREHOOK: Output: default@test_truncate
+POSTHOOK: query: alter table test_truncate set tblproperties('external.table.purge'='false')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@test_truncate
+POSTHOOK: Output: default@test_truncate
+PREHOOK: query: truncate test_truncate
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@test_truncate
+POSTHOOK: query: truncate test_truncate
+POSTHOOK: type: TRUNCATETABLE
+POSTHOOK: Output: default@test_truncate
+PREHOOK: query: select count(*) from test_truncate
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_truncate
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select count(*) from test_truncate
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_truncate
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0
+PREHOOK: query: select * from test_truncate
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_truncate
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from test_truncate
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_truncate
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+PREHOOK: query: describe formatted test_truncate
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@test_truncate
+POSTHOOK: query: describe formatted test_truncate
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@test_truncate
+# col_name             data_type               comment             
+id                     int                                         
+value                  string                                      
+                
+# Detailed Table Information            
+Database:              default                  
+#### A masked pattern was here ####
+Retention:             0                        
+#### A masked pattern was here ####
+Table Type:            EXTERNAL_TABLE           
+Table Parameters:               
+       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       EXTERNAL                TRUE                
+       bucketing_version       2                   
+       engine.hive.enabled     true                
+       external.table.purge    false               
+#### A masked pattern was here ####
+       metadata_location       hdfs://### HDFS PATH ###
+       numFiles                0                   
+       numRows                 0                   
+       previous_metadata_location      hdfs://### HDFS PATH ###
+       rawDataSize             0                   
+       serialization.format    1                   
+       storage_handler         org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
+       table_type              ICEBERG             
+       totalSize               #Masked#                   
+#### A masked pattern was here ####
+       write.format.default    orc                 
+                
+# Storage Information           
+SerDe Library:         org.apache.iceberg.mr.hive.HiveIcebergSerDe      
+InputFormat:           org.apache.iceberg.mr.hive.HiveIcebergInputFormat
+OutputFormat:          org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
+Compressed:            No                       
+Num Buckets:           0                        
+Bucket Columns:        []                       
+Sort Columns:          []                       
 PREHOOK: query: drop table if exists test_truncate
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@test_truncate
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/truncate/TruncateTableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/truncate/TruncateTableAnalyzer.java
index 47d39d4..40b00e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/truncate/TruncateTableAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/truncate/TruncateTableAnalyzer.java
@@ -95,6 +95,17 @@ public class TruncateTableAnalyzer extends AbstractBaseAlterTableAnalyzer {
 
   private void checkTruncateEligibility(ASTNode ast, ASTNode root, String tableName, Table table)
       throws SemanticException {
+    validateUnsupportedPartitionClause(table, root.getChildCount() > 1);
+
+    if (table.isNonNative()) {
+      if (table.getStorageHandler() == null || !table.getStorageHandler().supportsTruncateOnNonNativeTables()) {
+        throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_NATIVE_TABLE.format(tableName)); //TODO
+      } else {
+        // If the storage handler supports truncate, then we do not need to check anything else
+        return;
+      }
+    }
+
     boolean isForce = ast.getFirstChildWithType(HiveParser.TOK_FORCE) != null;
     if (!isForce &&
         table.getTableType() != TableType.MANAGED_TABLE &&
@@ -102,13 +113,6 @@ public class TruncateTableAnalyzer extends AbstractBaseAlterTableAnalyzer {
       throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_MANAGED_TABLE.format(tableName));
     }
 
-    validateUnsupportedPartitionClause(table, root.getChildCount() > 1);
-
-    if (table.isNonNative()
-        && (table.getStorageHandler() == null || !table.getStorageHandler().supportsTruncateOnNonNativeTables())) {
-      throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_NATIVE_TABLE.format(tableName)); //TODO
-    }
-
     if (!table.isPartitioned() && root.getChildCount() > 1) {
       throw new SemanticException(ErrorMsg.PARTSPEC_FOR_NON_PARTITIONED_TABLE.format(tableName));
     }
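
The analyzer change above also means TRUNCATE no longer requires the table to be managed or purge-enabled when the storage handler supports truncation, as exercised by the new truncate_iceberg_table.q case. A short HiveQL sketch of that flow (table name and sample rows are illustrative):

    CREATE EXTERNAL TABLE trunc_demo (id INT, value STRING) STORED BY ICEBERG;
    INSERT INTO trunc_demo VALUES (1, 'one'), (2, 'two');
    ALTER TABLE trunc_demo SET TBLPROPERTIES ('external.table.purge'='false');

    -- No FORCE keyword is needed; the truncate is delegated to the Iceberg storage handler.
    TRUNCATE trunc_demo;

    -- Expected to return 0 after the truncate.
    SELECT COUNT(*) FROM trunc_demo;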
