yihua commented on a change in pull request #3741:
URL: https://github.com/apache/hudi/pull/3741#discussion_r728307580



##########
File path: hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/HoodieCompactor.java
##########
@@ -18,39 +18,277 @@
 
 package org.apache.hudi.table.action.compact;
 
+import org.apache.hudi.avro.model.HoodieCompactionOperation;
 import org.apache.hudi.avro.model.HoodieCompactionPlan;
+import org.apache.hudi.client.AbstractHoodieWriteClient;
+import org.apache.hudi.client.WriteStatus;
+import org.apache.hudi.common.data.HoodieAccumulator;
+import org.apache.hudi.common.data.HoodieData;
 import org.apache.hudi.common.engine.HoodieEngineContext;
+import org.apache.hudi.common.engine.TaskContextSupplier;
+import org.apache.hudi.common.fs.FSUtils;
+import org.apache.hudi.common.model.CompactionOperation;
+import org.apache.hudi.common.model.HoodieBaseFile;
 import org.apache.hudi.common.model.HoodieFileGroupId;
+import org.apache.hudi.common.model.HoodieLogFile;
 import org.apache.hudi.common.model.HoodieRecordPayload;
+import org.apache.hudi.common.model.HoodieTableType;
+import org.apache.hudi.common.model.HoodieWriteStat.RuntimeStats;
+import org.apache.hudi.common.table.HoodieTableMetaClient;
+import org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner;
+import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
+import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.common.table.timeline.HoodieTimeline;
+import org.apache.hudi.common.table.view.TableFileSystemView.SliceView;
+import org.apache.hudi.common.util.CollectionUtils;
+import org.apache.hudi.common.util.CompactionUtils;
+import org.apache.hudi.common.util.Option;
+import org.apache.hudi.common.util.ValidationUtils;
+import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.config.HoodieWriteConfig;
+import org.apache.hudi.io.IOUtils;
+import org.apache.hudi.table.HoodieCopyOnWriteTableOperation;
 import org.apache.hudi.table.HoodieTable;
+import org.apache.hudi.table.action.compact.strategy.CompactionStrategy;
+
+import org.apache.avro.Schema;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
 import java.io.IOException;
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
 import java.util.Set;
+import java.util.stream.StreamSupport;
+
+import static java.util.stream.Collectors.toList;
 
 /**
  * A HoodieCompactor runs compaction on a hoodie table.
  */
-public interface HoodieCompactor<T extends HoodieRecordPayload, I, K, O> extends Serializable {
+public abstract class HoodieCompactor<T extends HoodieRecordPayload, I, K, O> implements Serializable {
+
+  private static final Logger LOG = LogManager.getLogger(HoodieCompactor.class);
 
   /**
-   * Generate a new compaction plan for scheduling.
+   * @param config Write config.
+   * @return the reader schema for {@link HoodieMergedLogRecordScanner}.
+   */
+  public abstract Schema getReaderSchema(HoodieWriteConfig config);
+
+  /**
+   * Updates the reader schema for actual compaction operations.
    *
-   * @param context HoodieEngineContext
-   * @param hoodieTable Hoodie Table
-   * @param config Hoodie Write Configuration
-   * @param compactionCommitTime scheduled compaction commit time
-   * @param fgIdsInPendingCompactions partition-fileId pairs for which compaction is pending
-   * @return Compaction Plan
-   * @throws IOException when encountering errors
+   * @param config     Write config.
+   * @param metaClient {@link HoodieTableMetaClient} instance to use.
    */
-  HoodieCompactionPlan generateCompactionPlan(HoodieEngineContext context, HoodieTable<T, I, K, O> hoodieTable, HoodieWriteConfig config,
-                                              String compactionCommitTime, Set<HoodieFileGroupId> fgIdsInPendingCompactions) throws IOException;
+  public abstract void updateReaderSchema(HoodieWriteConfig config, HoodieTableMetaClient metaClient);
+
+  /**
+   * Handles the compaction timeline based on the compaction instant.
+   *
+   * @param table                     {@link HoodieTable} instance to use.
+   * @param pendingCompactionTimeline pending compaction timeline.
+   * @param compactionInstantTime     compaction instant time.
+   * @param writeClient               Write client.
+   */
+  public abstract void handleCompactionTimeline(
+      HoodieTable table, HoodieTimeline pendingCompactionTimeline,

Review comment:
       Fixed.
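
       For readers following the new abstraction: below is a minimal sketch of how an engine-specific subclass might implement the two schema hooks shown in this hunk. The class name and the `HoodieAvroUtils.addMetadataFields` call are illustrative assumptions rather than code from this PR, and the sketch stays abstract because the remaining abstract methods are truncated above.

```java
import org.apache.avro.Schema;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.table.action.compact.HoodieCompactor;

// Hypothetical subclass for illustration only; not the implementation in this PR.
// It is left abstract so it compiles without the abstract methods not shown here.
public abstract class ExampleEngineCompactor<T extends HoodieRecordPayload, I, K, O>
    extends HoodieCompactor<T, I, K, O> {

  @Override
  public Schema getReaderSchema(HoodieWriteConfig config) {
    // Parse the writer schema from the write config and append Hudi's metadata
    // fields so HoodieMergedLogRecordScanner can read delta log records.
    return HoodieAvroUtils.addMetadataFields(
        new Schema.Parser().parse(config.getSchema()));
  }

  @Override
  public void updateReaderSchema(HoodieWriteConfig config, HoodieTableMetaClient metaClient) {
    // No-op in this sketch; an engine-specific implementation could refresh
    // the schema in the write config from the table's latest commit metadata.
  }
}
```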



