nsivabalan commented on code in PR #17936:
URL: https://github.com/apache/hudi/pull/17936#discussion_r2842891494


##########
hudi-spark-datasource/hudi-spark3.4.x/src/main/scala/org/apache/spark/sql/hudi/analysis/Spark34HoodiePruneFileSourcePartitions.scala:
##########
@@ -15,30 +15,26 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.hudi.analysis
-
-import org.apache.hudi.{HoodieBaseRelation, HoodieFileIndex}
 import org.apache.hudi.SparkAdapterSupport.sparkAdapter
-
+import org.apache.hudi.{HoodieBaseRelation, HoodieFileIndex}
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.catalog.CatalogStatistics
 import org.apache.spark.sql.catalyst.expressions.{And, AttributeReference, 
AttributeSet, Expression, ExpressionSet, NamedExpression, PredicateHelper, 
SubqueryExpression}
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
-import org.apache.spark.sql.catalyst.plans.logical.{Filter, LeafNode, 
LogicalPlan, Project}
 import 
org.apache.spark.sql.catalyst.plans.logical.statsEstimation.FilterEstimation
+import org.apache.spark.sql.catalyst.plans.logical.{Filter, LeafNode, 
LogicalPlan, Project}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, 
LogicalRelation}
-import 
org.apache.spark.sql.hudi.analysis.Spark3HoodiePruneFileSourcePartitions.{exprUtils,
 getPartitionFiltersAndDataFilters, rebuildPhysicalOperation, 
HoodieRelationMatcher}
 import org.apache.spark.sql.sources.BaseRelation
 import org.apache.spark.sql.types.StructType
 
 /**
  * Prune the partitions of Hudi table based relations by the means of pushing 
down the
  * partition filters
  *
- * NOTE: [[HoodiePruneFileSourcePartitions]] is a replica in kind to Spark's 
[[PruneFileSourcePartitions]]
+ * NOTE: [[Spark34HoodiePruneFileSourcePartitions]] is a replica in kind to 
Spark's [[PruneFileSourcePartitions]]
  */
-case class Spark3HoodiePruneFileSourcePartitions(spark: SparkSession) extends 
Rule[LogicalPlan] {
+case class Spark34HoodiePruneFileSourcePartitions(spark: SparkSession) extends 
Rule[LogicalPlan] {

Review Comment:
   It looks like there is no difference between the 3.4 and 3.5 versions. Can we use a shared class with a name like
   
   `SparkGreaterThan34HoodiePruneFileSourcePartitions`
   
   to avoid the code duplication?
   



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to