This is an automated email from the ASF dual-hosted git repository.

tchoi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 6d6c86b4d8d HIVE-29501: [hiveACIDRepl] For incremental replication newly created table data count is not updated in metricCollector (#6355)
6d6c86b4d8d is described below

commit 6d6c86b4d8dad2266ea038674d06e6d09dd25c65
Author: Shivam Kumar <[email protected]>
AuthorDate: Sat Mar 14 13:19:13 2026 +0530

    HIVE-29501: [hiveACIDRepl] For incremental replication newly created table data count is not updated in metricCollector (#6355)
---
 ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java     | 4 ++++
 ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplStateLogWork.java | 3 +++
 .../hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java    | 1 +
 3 files changed, 8 insertions(+)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
index d6bb2eb7949..3b741851945 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
@@ -954,6 +954,7 @@ private Long incrementalDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot, Hive
       replLogger.startLog();
       Map<String, Long> metricMap = new HashMap<>();
       metricMap.put(ReplUtils.MetricName.EVENTS.name(), estimatedNumEvents);
+      metricMap.put(ReplUtils.MetricName.TABLES.name(), 0L);
       int size = tablesForBootstrap.size();
       if (db != null && db.getParameters()!=null &&
         Boolean.parseBoolean(db.getParameters().get(REPL_RESUME_STARTED_AFTER_FAILOVER))) {
@@ -1296,6 +1297,9 @@ private void dumpEvent(NotificationEvent ev, Path evRoot, Path dumpRoot, Path cm
     if (context.isDmdCreated()) {
       eventsDumpMetadata.incrementEventsDumpedCount();
       work.getMetricCollector().reportStageProgress(getName(), ReplUtils.MetricName.EVENTS.name(), 1);
+      if (eventHandler.dumpType() == DumpType.EVENT_CREATE_TABLE) {
+        work.getMetricCollector().reportStageProgress(getName(), ReplUtils.MetricName.TABLES.name(), 1);
+      }
     }
     work.getReplLogger().eventLog(String.valueOf(ev.getEventId()), eventHandler.dumpType().toString());
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplStateLogWork.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplStateLogWork.java
index 7c573c4b5e5..5a31f88a44c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplStateLogWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplStateLogWork.java
@@ -151,6 +151,9 @@ public void replStateLog() throws SemanticException {
     case EVENT:
       replLogger.eventLog(eventId, eventType);
       metricCollector.reportStageProgress("REPL_LOAD", ReplUtils.MetricName.EVENTS.name(), 1);
+      if ("EVENT_CREATE_TABLE".equals(eventType)) {
+        metricCollector.reportStageProgress("REPL_LOAD", ReplUtils.MetricName.TABLES.name(), 1);
+      }
       break;
     case END:
       replLogger.endLog(lastReplId);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
index 3d43ebab95a..9e370b49f90 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
@@ -99,6 +99,7 @@ public IncrementalLoadTasksBuilder(String dbName, String loadPath, IncrementalLo
     this.metricCollector = metricCollector;
     Map<String, Long> metricMap = new HashMap<>();
    metricMap.put(ReplUtils.MetricName.EVENTS.name(), (long) iterator.getTotalEventsCount());
+    metricMap.put(ReplUtils.MetricName.TABLES.name(), 0L);
     this.shouldFailover = shouldFailover;
     if (shouldFailover) {
       Database db = null;

Reply via email to