This is an automated email from the ASF dual-hosted git repository.
zabetak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 2b4a07eeea5 HIVE-29012: NPE in ExplainTask when protologging posthook is enabled (#5872)
2b4a07eeea5 is described below
commit 2b4a07eeea52e192220c6a79b0308cd0720ad3be
Author: Raghav Aggarwal <[email protected]>
AuthorDate: Mon Jul 14 01:39:44 2025 +0530
HIVE-29012: NPE in ExplainTask when protologging posthook is enabled (#5872)
---
.../ql/hooks/HiveHookEventProtoPartialBuilder.java | 3 +-
.../TestHiveHookEventProtoPartialBuilder.java | 53 +++++++++++++++++++---
2 files changed, 49 insertions(+), 7 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveHookEventProtoPartialBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveHookEventProtoPartialBuilder.java
index 9b9b4e108cb..d55d71c4fa7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveHookEventProtoPartialBuilder.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveHookEventProtoPartialBuilder.java
@@ -24,6 +24,7 @@
import javax.annotation.Nullable;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook.OtherInfoType;
@@ -80,7 +81,7 @@ private void addQueryObj(ExplainWork explainWork) {
}
private JSONObject getExplainJSON(ExplainWork explainWork) throws Exception {
- ExplainTask explain = (ExplainTask) TaskFactory.get(explainWork, null);
+ ExplainTask explain = (ExplainTask) TaskFactory.get(explainWork, new HiveConf());
return explain.getJSONPlan(null, explainWork, stageIdRearrange);
}
}
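
The crux of the fix: TaskFactory.get() was previously handed a null HiveConf, and per the commit title that null presumably ends up dereferenced inside ExplainTask while it renders the JSON plan. A minimal sketch of the corrected call path, using only the API already visible in the hunk above (the wrapper name buildExplainJson and its class are hypothetical):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.exec.ExplainTask;
    import org.apache.hadoop.hive.ql.exec.TaskFactory;
    import org.apache.hadoop.hive.ql.plan.ExplainWork;
    import org.json.JSONObject;

    public final class ExplainJsonSketch {
        // Hypothetical wrapper mirroring getExplainJSON above: supplying a
        // freshly constructed HiveConf instead of null gives the task a
        // non-null configuration to read while building the plan JSON.
        static JSONObject buildExplainJson(ExplainWork work, String stageIdRearrange) throws Exception {
            ExplainTask explain = (ExplainTask) TaskFactory.get(work, new HiveConf());
            return explain.getJSONPlan(null, work, stageIdRearrange);
        }
    }
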
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveHookEventProtoPartialBuilder.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveHookEventProtoPartialBuilder.java
index c6ab40796b7..5ada5bb4516 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveHookEventProtoPartialBuilder.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveHookEventProtoPartialBuilder.java
@@ -21,13 +21,24 @@
package org.apache.hadoop.hive.ql.hooks;
import static java.util.Collections.singletonList;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
-
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.hooks.proto.HiveHookEvents;
+import org.apache.hadoop.hive.ql.parse.ExplainConfiguration;
+import org.apache.hadoop.hive.ql.plan.ExplainWork;
+import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.json.JSONObject;
import org.junit.Test;
@@ -45,10 +56,21 @@ public void testEquality() {
json.put("key2", "value2");
json.put("key3", "value3");
HiveHookEvents.HiveHookEventProto event1 = buildWithOtherInfo(json);
- HiveHookEvents.HiveHookEventProto event2 = buildIn2Steps(json);
+ HiveHookEvents.HiveHookEventProto event2 = buildIn2Steps(json, null);
assertArrayEquals(event1.toByteArray(), event2.toByteArray());
}
+ @Test
+ public void testOtherInfoQueryPlan() {
+ HiveHookEvents.HiveHookEventProto event = buildIn2Steps(new JSONObject(), createExplainWork());
+ Set<String> expectedOutput = Set.of("CONF", "QUERY");
+ Set<String> actualOutput =
+ event.getOtherInfoList().stream()
+ .map(HiveHookEvents.MapFieldEntry::getKey)
+ .collect(Collectors.toSet());
+ assertEquals(expectedOutput, actualOutput);
+ }
+
private HiveHookEvents.HiveHookEventProto buildWithOtherInfo(JSONObject json) {
return HiveHookEvents.HiveHookEventProto
.newBuilder()
@@ -65,7 +87,7 @@ private HiveHookEvents.HiveHookEventProto buildWithOtherInfo(JSONObject json) {
.build();
}
- private HiveHookEvents.HiveHookEventProto buildIn2Steps(JSONObject json) {
+ private HiveHookEvents.HiveHookEventProto buildIn2Steps(JSONObject json, ExplainWork work) {
HiveHookEvents.HiveHookEventProto.Builder builder = HiveHookEvents.HiveHookEventProto
.newBuilder()
.setEventType(HiveProtoLoggingHook.EventType.QUERY_SUBMITTED.name())
@@ -77,6 +99,25 @@ private HiveHookEvents.HiveHookEventProto buildIn2Steps(JSONObject json) {
.setExecutionMode(TEZ);
Map<HiveProtoLoggingHook.OtherInfoType, JSONObject> otherInfo = new HashMap<>();
otherInfo.put(HiveProtoLoggingHook.OtherInfoType.CONF, json);
- return new HiveHookEventProtoPartialBuilder(builder, null, otherInfo, null, null).build();
+ return new HiveHookEventProtoPartialBuilder(
+ builder, work, otherInfo, null, HiveConf.ConfVars.HIVE_STAGE_ID_REARRANGE.defaultStrVal)
+ .build();
+ }
+
+ private static ExplainWork createExplainWork() {
+ CompilationOpContext cCtx = new CompilationOpContext();
+ TableScanOperator scanOp = new TableScanOperator(cCtx);
+
+ FetchWork taskWork = new FetchWork(new Path("mock"), new TableDesc());
+ taskWork.setSource(scanOp);
+
+ FetchTask task = new FetchTask();
+ task.setWork(taskWork);
+
+ ExplainWork work = new ExplainWork();
+ work.setConfig(new ExplainConfiguration());
+ work.setRootTasks(List.of(task));
+
+ return work;
}
-}
\ No newline at end of file
+}
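
For context on reproducing the original failure (not part of this diff): to my understanding, the "protologging posthook" in the title means registering HiveProtoLoggingHook as a post-execution hook, along these lines (class name ProtoHookSetupSketch is hypothetical):

    import org.apache.hadoop.hive.conf.HiveConf;

    public final class ProtoHookSetupSketch {
        public static void main(String[] args) {
            // Assumed setup: listing the proto logging hook under
            // hive.exec.post.hooks runs it after each query; this is the
            // configuration under which the reported NPE surfaced.
            HiveConf conf = new HiveConf();
            conf.set("hive.exec.post.hooks",
                "org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook");
            System.out.println(conf.get("hive.exec.post.hooks"));
        }
    }
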