This is an automated email from the ASF dual-hosted git repository.

dengzh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new ba652d87562 HIVE-28848: Remove DFS_URI auth from ALTER_PARTITION if there is no change in partition location (#5766)
ba652d87562 is described below

commit ba652d875629c3d5216cfab238b34ef318a14efc
Author: rtrivedi12 <[email protected]>
AuthorDate: Thu Jul 10 18:35:37 2025 -0500

    HIVE-28848: Remove DFS_URI auth from ALTER_PARTITION if there is no change in partition location (#5766)
---
 .../metastore/events/AlterPartitionEvent.java      |  33 ++++++-
 .../metastore/TestHiveMetaStoreAuthorizer.java     | 108 ++++++++++++++++++++-
 .../apache/hadoop/hive/metastore/HMSHandler.java   |   3 +-
 3 files changed, 136 insertions(+), 8 deletions(-)
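
In short, the patch makes the DFS_URI output object conditional: ALTER_PARTITION now requests DFS_URI authorization only when the partition is actually being relocated. A minimal sketch of the guard, assuming the old and new locations have already been resolved (the helper name below is illustrative, not a method in the patch):

    import org.apache.commons.lang3.StringUtils;

    // Illustrative guard: request DFS_URI auth only for a real relocation.
    static boolean needsDfsUriAuth(String oldUri, String newUri) {
      // Skip when the new location is empty, or old and new locations match.
      return StringUtils.isNotEmpty(newUri) && !StringUtils.equalsIgnoreCase(oldUri, newUri);
    }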

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterPartitionEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterPartitionEvent.java
index a2ffb0ba9af..561cf604e33 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterPartitionEvent.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterPartitionEvent.java
@@ -20,12 +20,15 @@
 package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
 import org.slf4j.Logger;
@@ -76,10 +79,17 @@ private List<HivePrivilegeObject> getOutputHObjs() {
     ret.add(getHivePrivilegeObject(event.getTable()));
 
     Partition newPartition = event.getNewPartition();
-    String    newUri       = (newPartition != null) ? getSdLocation(newPartition.getSd()) : "";
+    String oldUri = "";
 
-    if (StringUtils.isNotEmpty(newUri)) {
-        ret.add(getHivePrivilegeObjectDfsUri(newUri));
+    if (event.getOldPartVals() != null && event.getOldPartVals().size() > 0) {
+      oldUri = this.getOldPartSdLocation(event);
+    }
+
+    String newUri = (newPartition != null) ? getSdLocation(newPartition.getSd()) : "";
+
+    // Skip DFS_URI auth if new partition loc is empty or old and new partition loc are same
+    if (StringUtils.isNotEmpty(newUri) && !StringUtils.equalsIgnoreCase(oldUri, newUri)) {
+      ret.add(getHivePrivilegeObjectDfsUri(newUri));
     }
 
     COMMAND_STR = buildCommandString(COMMAND_STR, event.getTableName(), newPartition);
@@ -89,6 +99,19 @@ private List<HivePrivilegeObject> getOutputHObjs() {
     return ret;
   }
 
+  private String getOldPartSdLocation(PreAlterPartitionEvent event) {
+    Table table = new Table(event.getTable());
+    try {
+      org.apache.hadoop.hive.ql.metadata.Partition partition = Hive.get().getPartition(table,
+          Warehouse.makeSpecFromValues(event.getTable().getPartitionKeys(), event.getOldPartVals()));
+
+      return partition.getLocation();
+
+    } catch (HiveException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   private String buildCommandString(String cmdStr, String tbl, Partition partition ) {
     String ret = cmdStr;
 
@@ -100,4 +123,4 @@ private String buildCommandString(String cmdStr, String tbl, Partition partition
 
     return ret;
   }
-}
+}
\ No newline at end of file
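
The new getOldPartSdLocation() helper resolves the stored location for the old partition values before the comparison above. The lookup it performs boils down to the following, assuming `event` is the PreAlterPartitionEvent in scope:

    // Build a partition spec from the table's partition keys and the old
    // values, then fetch the stored partition and read its location.
    Map<String, String> spec = Warehouse.makeSpecFromValues(
        event.getTable().getPartitionKeys(), event.getOldPartVals());
    String oldUri = Hive.get().getPartition(new Table(event.getTable()), spec).getLocation();

Note that the helper wraps HiveException in a RuntimeException, so a failed metastore lookup fails the pre-event rather than silently skipping the DFS_URI check.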
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java
index 07e287baca0..52cc1d10726 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
@@ -45,12 +46,13 @@
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.TableFilterContext;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.TException;
+import org.junit.Before;
 import org.junit.FixMethodOrder;
+import org.junit.Test;
 import org.junit.runners.MethodSorters;
-import org.junit.Before;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
-import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -66,6 +68,7 @@
 import static org.junit.Assert.fail;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.verify;
 
 /*
@@ -87,6 +90,7 @@ public class TestHiveMetaStoreAuthorizer {
   private static final String metaConfVal = "";
 
   private static final String TEST_DATA_DIR = new File("file:///testdata").getPath();
+  private static final List<String> PARTCOL_SCHEMA = Lists.newArrayList("yyyy", "mm", "dd");
   private RawStore rawStore;
   private Configuration conf;
   private HMSHandler hmsHandler;
@@ -696,6 +700,106 @@ public void testU_DropDataConnector_authorizedUser() {
     }
   }
 
+
+  /**
+   * Captures and returns the privilege objects for Alter Partition
+   */
+  private Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> getHivePrivilegeObjectsForAlterPartition()
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    @SuppressWarnings("unchecked")
+    Class<List<HivePrivilegeObject>> class_listPrivObjects = (Class) List.class;
+    ArgumentCaptor<List<HivePrivilegeObject>> inputsCapturer = ArgumentCaptor
+        .forClass(class_listPrivObjects);
+    ArgumentCaptor<List<HivePrivilegeObject>> outputsCapturer = ArgumentCaptor
+        .forClass(class_listPrivObjects);
+
+    verify(mockHiveAuthorizer).checkPrivileges(eq(HiveOperationType.ALTERPARTITION_FILEFORMAT),
+        inputsCapturer.capture(), outputsCapturer.capture(),
+        any(HiveAuthzContext.class));
+
+    return new ImmutablePair<>(inputsCapturer.getValue(), outputsCapturer.getValue());
+  }
+  @Test
+  public void testV_AlterPartition_DFSUriPrivObject() {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      List<List<String>> testValues = createTable4PartColsParts();
+      List<Partition> oldParts = hmsHandler.get_partitions(dbName, tblName, (short) -1);
+      Partition oldPart = oldParts.get(3);
+      Partition newPart = makeTestChangesOnPartition(oldPart);
+
+      hmsHandler.rename_partition(dbName, tblName,oldPart.getValues(),newPart);
+
+      Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectsForAlterPartition();
+      List<HivePrivilegeObject> outputs = io.getRight();
+
+      List<HivePrivilegeObject> tableOutputs = outputs.stream()
+          .filter(o -> o.getType() == HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI)
+          .collect(Collectors.toList());
+
+      assertEquals("Should have one DFS_URI privilege object", 1, tableOutputs.size());
+      HivePrivilegeObject DFSUriObj = tableOutputs.get(0);
+
+      assertEquals("DFS_URI should be same as new partition location",
+          oldPart.getSd().getLocation()+ "/hh=01", DFSUriObj.getObjectName());
+    } catch (Exception e) {
+      fail("testV_AlterPartition_DFSUriPrivObject() failed with " + e);
+    }
+  }
+
+  protected Table createPartitionedTestTable(String dbName, String tableName,
+      List<String> partCols, boolean setPartitionLevelPrivilages)
+      throws Exception {
+
+    Database db = new DatabaseBuilder()
+        .setName(dbName)
+        .build(conf);
+    hmsHandler.create_database(db);
+
+    TableBuilder builder = new TableBuilder()
+        .setDbName(dbName)
+        .setTableName(tableName)
+        .addCol("id", "int")
+        .addCol("name", "string");
+
+    partCols.forEach(col -> builder.addPartCol(col, "string"));
+    Table table = builder.build(conf);
+
+    hmsHandler.create_table(table);
+    return table;
+  }
+
+  protected List<List<String>> createTable4PartColsParts() throws
+      Exception {
+    Table table = createPartitionedTestTable(dbName, tblName, PARTCOL_SCHEMA, false);
+    List<List<String>> testValues = Lists.newArrayList(
+        Lists.newArrayList("1999", "01", "02"),
+        Lists.newArrayList("2009", "02", "10"),
+        Lists.newArrayList("2017", "10", "26"),
+        Lists.newArrayList("2017", "11", "27"));
+
+    for (List<String> vals : testValues) {
+      addPartition(table, vals);
+    }
+
+    return testValues;
+  }
+
+  protected void addPartition(Table table, List<String> values)
+      throws TException {
+    PartitionBuilder partitionBuilder = new PartitionBuilder().inTable(table);
+    values.forEach(val -> partitionBuilder.addValue(val));
+    hmsHandler.add_partition(partitionBuilder.build(conf));
+  }
+
+  protected static Partition makeTestChangesOnPartition(Partition partition) {
+    Partition newPart = new Partition(partition);
+    newPart.getParameters().put("hmsTestParam001", "testValue001");
+    newPart.getSd().setLocation(partition.getSd().getLocation() + "/hh=01");
+    newPart.setValues(Lists.newArrayList("2018", "11", "27"));
+    return newPart;
+  }
+
   @Test
   public void testUnAuthorizedCause() {
     UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser));
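
The new test exercises the rename path where the location does change and expects exactly one DFS_URI object. A complementary check for the unchanged-location path could look roughly like this (a hypothetical sketch, not part of this patch):

    // Hypothetical: alter a partition without touching its location and
    // expect no DFS_URI privilege object among the outputs.
    Partition samePart = new Partition(oldPart);
    samePart.getParameters().put("hmsTestParam002", "testValue002");
    hmsHandler.rename_partition(dbName, tblName, oldPart.getValues(), samePart);

    List<HivePrivilegeObject> dfsUris = getHivePrivilegeObjectsForAlterPartition().getRight().stream()
        .filter(o -> o.getType() == HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI)
        .collect(Collectors.toList());
    assertEquals("No DFS_URI auth expected when location is unchanged", 0, dfsUris.size());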
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
index 75266aa6b1b..46d6470ed0e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
@@ -6103,7 +6103,8 @@ private void alter_partitions_with_environment_context(String catName, String db
         if (tmpPart.getSd() != null && tmpPart.getSd().getCols() != null && tmpPart.getSd().getCols().isEmpty()) {
           tmpPart.getSd().setCols(table.getSd().getCols());
         }
-        firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, null, tmpPart, this));
+        // old part values are same as new partition values here
+        firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, tmpPart.getValues(), tmpPart, this));
       }
       oldParts = alterHandler.alterPartitions(getMS(), wh, catName, db_name, tbl_name, new_parts,
           environmentContext, writeIdList, writeId, this);
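
Net effect of the HMSHandler change: this code path previously passed null for the old partition values, so AlterPartitionEvent could never compare locations and always demanded DFS_URI authorization. Passing tmpPart.getValues() lets the authorizer resolve the stored location, and since the values are unchanged in this path, a metadata-only alter now skips the DFS_URI check:

    // Before: old values unknown, DFS_URI auth always requested.
    //   firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, null, tmpPart, this));
    // After: old values supplied, auth skipped when the location is unchanged.
    //   firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, tmpPart.getValues(), tmpPart, this));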
