This is an automated email from the ASF dual-hosted git repository.

liujiayi771 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new 67b7c3e790 [CORE] Consolidate duplicate logDebug statements in OffloadSingleNodeRules
67b7c3e790 is described below

commit 67b7c3e7906e7885b3d51f665b0875213f28a908
Author: Joey <[email protected]>
AuthorDate: Sat Jan 24 09:10:21 2026 +0800

    [CORE] Consolidate duplicate logDebug statements in OffloadSingleNodeRules
---
 .../columnar/offload/OffloadSingleNodeRules.scala  | 36 ++++++----------------
 1 file changed, 10 insertions(+), 26 deletions(-)

diff --git 
a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/offload/OffloadSingleNodeRules.scala
 
b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/offload/OffloadSingleNodeRules.scala
index df32527b78..e149ea8f5b 100644
--- 
a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/offload/OffloadSingleNodeRules.scala
+++ 
b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/offload/OffloadSingleNodeRules.scala
@@ -59,11 +59,10 @@ case class OffloadJoin() extends OffloadSingleNode with 
LogLevelUtil {
       logDebug(s"Columnar Processing for ${plan.getClass} is under row guard.")
       return plan
     }
-    plan match {
+    val result = plan match {
       case plan: ShuffledHashJoinExec =>
         val left = plan.left
         val right = plan.right
-        logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
         BackendsApiManager.getSparkPlanExecApiInstance
           .genShuffledHashJoinExecTransformer(
             plan.leftKeys,
@@ -77,7 +76,6 @@ case class OffloadJoin() extends OffloadSingleNode with 
LogLevelUtil {
       case plan: SortMergeJoinExec =>
         val left = plan.left
         val right = plan.right
-        logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
         BackendsApiManager.getSparkPlanExecApiInstance
           .genSortMergeJoinExecTransformer(
             plan.leftKeys,
@@ -117,6 +115,10 @@ case class OffloadJoin() extends OffloadSingleNode with 
LogLevelUtil {
             plan.condition)
       case other => other
     }
+    if (result.ne(plan)) {
+      logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
+    }
+    result
   }
 }
 
@@ -193,47 +195,35 @@ object OffloadOthers {
       if (FallbackTags.nonEmpty(plan)) {
         return plan
       }
-      plan match {
+      val result = plan match {
         case plan: BatchScanExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           ScanTransformerFactory.createBatchScanTransformer(plan)
         case plan: FileSourceScanExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           ScanTransformerFactory.createFileSourceScanTransformer(plan)
         case plan if HiveTableScanExecTransformer.isHiveTableScan(plan) =>
           // TODO: Add DynamicPartitionPruningHiveScanSuite.scala
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           HiveTableScanExecTransformer(plan)
         case plan: CoalesceExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           ColumnarCoalesceExec(plan.numPartitions, plan.child)
         case plan: FilterExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           BackendsApiManager.getSparkPlanExecApiInstance
             .genFilterExecTransformer(plan.condition, plan.child)
         case plan: ProjectExec =>
           val columnarChild = plan.child
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           ProjectExecTransformer(plan.projectList, columnarChild)
         case plan: HashAggregateExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           HashAggregateExecBaseTransformer.from(plan)
         case plan: SortAggregateExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           HashAggregateExecBaseTransformer.from(plan)
         case plan: ObjectHashAggregateExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           HashAggregateExecBaseTransformer.from(plan)
         case plan: UnionExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           ColumnarUnionExec.from(plan)
         case plan: ExpandExec =>
           val child = plan.child
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           ExpandExecTransformer(plan.projections, plan.output, child)
         case plan: WriteFilesExec =>
           val child = plan.child
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val writeTransformer = WriteFilesExecTransformer(
             child,
             plan.fileFormat,
@@ -250,10 +240,8 @@ object OffloadOthers {
             plan.staticPartitions)
         case plan: SortExec =>
           val child = plan.child
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           SortExecTransformer(plan.sortOrder, plan.global, child, 
plan.testSpillFrequency)
         case plan: TakeOrderedAndProjectExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val child = plan.child
           val (limit, offset) = 
SparkShimLoader.getSparkShims.getLimitAndOffsetFromTopK(plan)
           TakeOrderedAndProjectExecTransformer(
@@ -280,17 +268,14 @@ object OffloadOthers {
             windowGroupLimitExecShim.child
           )
         case plan: GlobalLimitExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val child = plan.child
           val (limit, offset) =
             
SparkShimLoader.getSparkShims.getLimitAndOffsetFromGlobalLimit(plan)
           LimitExecTransformer(child, offset, limit)
         case plan: LocalLimitExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val child = plan.child
           LimitExecTransformer(child, 0L, plan.limit)
         case plan: GenerateExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val child = plan.child
           
BackendsApiManager.getSparkPlanExecApiInstance.genGenerateTransformer(
             plan.generator,
@@ -299,11 +284,9 @@ object OffloadOthers {
             plan.generatorOutput,
             child)
         case plan: BatchEvalPythonExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val child = plan.child
           EvalPythonExecTransformer(plan.udfs, plan.resultAttrs, child)
         case plan: ArrowEvalPythonExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val child = plan.child
           // For ArrowEvalPythonExec, CH supports it through 
EvalPythonExecTransformer while
           // Velox backend uses ColumnarArrowEvalPythonExec.
@@ -320,10 +303,8 @@ object OffloadOthers {
               plan.evalType)
           }
         case plan: RangeExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           ColumnarRangeBaseExec.from(plan)
         case plan: SampleExec =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           val child = plan.child
           
BackendsApiManager.getSparkPlanExecApiInstance.genSampleExecTransformer(
             plan.lowerBound,
@@ -332,13 +313,16 @@ object OffloadOthers {
             plan.seed,
             child)
         case plan: RDDScanExec if 
RDDScanTransformer.isSupportRDDScanExec(plan) =>
-          logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
           RDDScanTransformer.getRDDScanTransform(plan)
         case p if !p.isInstanceOf[GlutenPlan] =>
           logDebug(s"Transformation for ${p.getClass} is currently not 
supported.")
           p
         case other => other
       }
+      if (result.ne(plan)) {
+        logDebug(s"Columnar Processing for ${plan.getClass} is currently 
supported.")
+      }
+      result
     }
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to