This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 98b74d2ad [KYUUBI #5399] [AUTHZ] Cleanup Spark 3.0 specific implementation
98b74d2ad is described below

commit 98b74d2ad0cf5ff4b84424bb45c8092ced381aec
Author: Bowen Liang <liangbo...@gf.com.cn>
AuthorDate: Thu Oct 12 08:44:02 2023 +0800

    [KYUUBI #5399] [AUTHZ] Cleanup Spark 3.0 specific implementation
    
    ### _Why are the changes needed?_
    
    This is a cleanup follow-up to #5362, which removed the Spark 3.0 tests for the Authz plugin.
    Remove `isSparkV31OrGreater` and the Spark 3.0-specific implementations in the Authz plugin.
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    
    No.
    
    Closes #5399 from bowenliang123/authz-31greater.
    
    Closes #5399
    
    db4369b13 [Bowen Liang] import
    cc38b1c1e [Bowen Liang] fix MASK_SHOW_FIRST_4
    cf3ef4e1c [Bowen Liang] remove isSparkV31OrGreater
    
    Authored-by: Bowen Liang <liangbo...@gf.com.cn>
    Signed-off-by: Bowen Liang <liangbo...@gf.com.cn>
---
 .../kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPlugin.scala   | 6 +-----
 .../org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala      | 3 +--
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPlugin.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPlugin.scala
index 9abb9cd28..d3059ef2d 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPlugin.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPlugin.scala
@@ -26,7 +26,6 @@ import org.apache.ranger.plugin.service.RangerBasePlugin
 import org.slf4j.LoggerFactory
 
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException
-import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 import org.apache.kyuubi.plugin.spark.authz.util.RangerConfigProvider
 
 object SparkRangerAdminPlugin extends RangerBasePlugin("spark", "sparkSql")
@@ -109,11 +108,8 @@ object SparkRangerAdminPlugin extends RangerBasePlugin("spark", "sparkSql")
       } else if (result.getMaskTypeDef != null) {
         result.getMaskTypeDef.getName match {
           case "MASK" => regexp_replace(col)
-          case "MASK_SHOW_FIRST_4" if isSparkV31OrGreater =>
-            regexp_replace(col, hasLen = true)
           case "MASK_SHOW_FIRST_4" =>
-            val right = regexp_replace(s"substr($col, 5)")
-            s"concat(substr($col, 0, 4), $right)"
+            regexp_replace(col, hasLen = true)
           case "MASK_SHOW_LAST_4" =>
             val left = regexp_replace(s"left($col, length($col) - 4)")
             s"concat($left, right($col, 4))"
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
index 4f7cbb9ef..e95ff91ed 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
@@ -61,7 +61,7 @@ private[authz] object AuthZUtils {
 
   def hasResolvedPermanentView(plan: LogicalPlan): Boolean = {
     plan match {
-      case view: View if view.resolved && isSparkV31OrGreater =>
+      case view: View if view.resolved =>
         !getField[Boolean](view, "isTempView")
       case _ =>
         false
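With the `isSparkV31OrGreater` guard gone, `hasResolvedPermanentView` still reads the view's `isTempView` flag reflectively via `getField`, as the hunk above shows. A rough sketch of that style of reflective field access follows; the `getField` signature and the local `View` case class here are illustrative assumptions, not the actual AuthZUtils helper or Spark's `View` plan node.

```scala
// Hypothetical sketch of reflective field access similar in spirit to
// AuthZUtils.getField; the real helper's signature and error handling may differ.
object GetFieldSketch {
  def getField[T](obj: AnyRef, fieldName: String): T = {
    val field = obj.getClass.getDeclaredField(fieldName)
    field.setAccessible(true)
    field.get(obj).asInstanceOf[T]
  }

  // Stand-in for a resolved view node carrying an isTempView flag.
  final case class View(name: String, isTempView: Boolean)

  def main(args: Array[String]): Unit = {
    val v = View("v_perm", isTempView = false)
    // The primitive Boolean field is returned boxed and unboxes on the cast.
    val isTemp = getField[Boolean](v, "isTempView")
    println(s"isTempView = $isTemp")
  }
}
```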
@@ -84,7 +84,6 @@ private[authz] object AuthZUtils {
   }
 
   lazy val SPARK_RUNTIME_VERSION: SemanticVersion = SemanticVersion(SPARK_VERSION)
-  lazy val isSparkV31OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.1"
   lazy val isSparkV32OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.2"
   lazy val isSparkV33OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.3"
 

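For reference, the remaining `isSparkV32OrGreater` and `isSparkV33OrGreater` flags gate features on a major.minor comparison of the parsed `SPARK_VERSION`. A rough, self-contained sketch of that kind of gating is below; the `Version` case class and `parse` helper are illustrative stand-ins, not Kyuubi's `SemanticVersion`.

```scala
// Hypothetical sketch of major.minor runtime-version gating, in the spirit of
// AuthZUtils' SPARK_RUNTIME_VERSION checks; Kyuubi's SemanticVersion is not reproduced here.
object VersionGateSketch {
  final case class Version(major: Int, minor: Int) extends Ordered[Version] {
    def compare(that: Version): Int =
      if (major != that.major) major.compare(that.major) else minor.compare(that.minor)
  }

  // Parse only the leading "major.minor" of a version string, e.g. "3.3.1" -> Version(3, 3).
  def parse(v: String): Version = {
    val parts = v.split('.').take(2).map(_.takeWhile(_.isDigit).toInt)
    Version(parts(0), parts(1))
  }

  def main(args: Array[String]): Unit = {
    val runtime = parse("3.3.1") // stand-in for org.apache.spark.SPARK_VERSION
    val isSparkV32OrGreater = runtime >= parse("3.2")
    val isSparkV33OrGreater = runtime >= parse("3.3")
    println(s"Spark >= 3.2: $isSparkV32OrGreater, >= 3.3: $isSparkV33OrGreater")
  }
}
```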