rui-mo commented on code in PR #9107:
URL: https://github.com/apache/incubator-gluten/pull/9107#discussion_r2458622131


##########
gluten-substrait/src/main/scala/org/apache/gluten/expression/ExpressionConverter.scala:
##########
@@ -144,6 +144,66 @@ object ExpressionConverter extends SQLConfHelper with 
Logging {
     DecimalArithmeticExpressionTransformer(substraitName, leftChild, 
rightChild, resultType, b)
   }
 
+  private def replaceStaticInvokeWithExpressionTransformer(
+      i: StaticInvoke,
+      attributeSeq: Seq[Attribute],
+      expressionsMap: Map[Class[_], String]): ExpressionTransformer = {
+    def validateAndTransform(
+        exprName: String,
+        childTransformers: => Seq[ExpressionTransformer]): 
ExpressionTransformer = {
+      if (!BackendsApiManager.getValidatorApiInstance.doExprValidate(exprName, 
i)) {
+        throw new GlutenNotSupportException(
+          s"Not supported to map current ${i.getClass} call on function: 
${i.functionName}.")
+      }
+      GenericExpressionTransformer(exprName, childTransformers, i)
+    }
+
+    i.functionName match {
+      case "encode" | "decode" if i.objectName.endsWith("UrlCodec") =>
+        validateAndTransform(
+          "url_" + i.functionName,
+          Seq(replaceWithExpressionTransformer0(i.arguments.head, 
attributeSeq, expressionsMap))
+        )
+
+      case "isLuhnNumber" =>
+        validateAndTransform(
+          ExpressionNames.LUHN_CHECK,
+          Seq(replaceWithExpressionTransformer0(i.arguments.head, 
attributeSeq, expressionsMap))
+        )
+
+      case "encode" | "decode" if i.objectName.endsWith("Base64") =>
+        if 
(!BackendsApiManager.getValidatorApiInstance.doExprValidate(ExpressionNames.BASE64,
 i)) {
+          throw new GlutenNotSupportException(
+            s"Not supported to map current ${i.getClass} call on function: 
${i.functionName}.")
+        }
+        
BackendsApiManager.getSparkPlanExecApiInstance.genBase64StaticInvokeTransformer(
+          ExpressionNames.BASE64,
+          replaceWithExpressionTransformer0(i.arguments.head, attributeSeq, 
expressionsMap),
+          i
+        )
+
+      case fn
+          if i.objectName.endsWith("CharVarcharCodegenUtils") && Set(
+            "varcharTypeWriteSideCheck",
+            "charTypeWriteSideCheck",
+            "readSidePadding").contains(fn) =>
+        val exprName = fn match {
+          case "varcharTypeWriteSideCheck" => 
ExpressionNames.VARCHAR_TYPE_WRITE_SIDE_CHECK
+          case "charTypeWriteSideCheck" => 
ExpressionNames.CHAR_TYPE_WRITE_SIDE_CHECK
+          case "readSidePadding" => ExpressionNames.READ_SIDE_PADDING

Review Comment:
   Typically, Gluten's ExpressionNames.scala defines the Spark function names, 
while the C++ map provides the corresponding function names for a specific 
backend. For instance, the Spark function `add` might be represented as `plus` 
in certain backends. To maintain consistency, Gluten's Scala side keeps 
Spark's original naming (e.g., `add`), and it is each backend's responsibility 
to map it to the appropriate equivalent name. This is just a minor 
suggestion—please feel free to decide whether you'd like to make this change.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to