This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch branch-1.9
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/branch-1.9 by this push:
     new d1c07147a [KYUUBI #6541] [AUTHZ] Fix 
DataSourceV2RelationTableExtractor can't get the 'database' attribute if it's a 
Paimon plan.
d1c07147a is described below

commit d1c07147ab1b906af1da98a4845c8774d05f4cf4
Author: caoyu <cao...@jifenn.com>
AuthorDate: Sun Jul 28 23:25:04 2024 +0800

    [KYUUBI #6541] [AUTHZ] Fix DataSourceV2RelationTableExtractor can't get the 
'database' attribute if it's a Paimon plan.
    
    # :mag: Description
    ## Issue References 🔗
    
    This pull request fixes #6541
    
    ## Describe Your Solution 🔧
    Fix an issue where DataSourceV2RelationTableExtractor#table could not fetch
    the 'database' attribute, causing Ranger checks to fail when using the
    Paimon catalog.
    If the 'database' attribute is not resolved, use
    DataSourceV2RelationTableExtractor#identifier to complete it.
    
    ## Types of changes :bookmark:
    
    - [x] Bugfix (non-breaking change which fixes an issue)
    - [ ] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing 
functionality to change)
    
    ## Test Plan 🧪
    
    #### Behavior Without This Pull Request :coffin:
    
    #### Behavior With This Pull Request :tada:
    
    #### Related Unit Tests
    
    ---
    
    # Checklist 📝
    
    - [ ] This patch was not authored or co-authored using [Generative 
Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6544 from promising-forever/issues/6541.
    
    Closes #6541
    
    6549f8528 [caoyu] Fix test failure, paimon-spark run on Scala 2.12.
    c1a09214a [caoyu] Optimising the 'database' capture logic
    69fb0bc7e [caoyu] PolicyJsonFileGenerator#genPolicies add paimonNamespace
    c89c70bad [caoyu] [KYUUBI #6541] [AUTHZ] Fix 
DataSourceV2RelationTableExtractor#table can't get the 'database' attribute if 
it's a Paimon plan.
    77f121b0d [caoyu] [KYUUBI #6541] [AUTHZ] Fix 
DataSourceV2RelationTableExtractor#table can't get the 'database' attribute if 
it's a Paimon plan.
    9cfb5847b [caoyu] [KYUUBI #6541] [AUTHZ] Fix 
DataSourceV2RelationTableExtractor#table can't get the 'database' attribute if 
it's a Paimon plan.
    
    Authored-by: caoyu <cao...@jifenn.com>
    Signed-off-by: Bowen Liang <liangbo...@gf.com.cn>
    (cherry picked from commit d9d2109070c5a4dbedf6fe0fd7e08b2c0bb6d751)
    Signed-off-by: Bowen Liang <liangbo...@gf.com.cn>
---
 .../plugin/spark/authz/serde/tableExtractors.scala | 10 +++++-
 .../spark/authz/gen/PolicyJsonFileGenerator.scala  |  2 +-
 .../src/test/resources/sparkSql_hive_jenkins.json  |  2 +-
 .../plugin/spark/authz/RangerTestResources.scala   |  1 +
 .../PaimonCatalogRangerSparkExtensionSuite.scala   | 40 ++++++++++++++++++++++
 5 files changed, 52 insertions(+), 3 deletions(-)

diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
index 28c6fd870..03506036d 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
@@ -199,7 +199,15 @@ class DataSourceV2RelationTableExtractor extends 
TableExtractor {
         lookupExtractor[TableTableExtractor].apply(spark, v2Relation.table)
           .map { table =>
             val maybeOwner = TableExtractor.getOwner(v2Relation)
-            table.copy(catalog = maybeCatalog, owner = maybeOwner)
+            val maybeDatabase: Option[String] = table.database match {
+              case Some(x) => Some(x)
+              case None =>
+                val maybeIdentifier = invokeAs[Option[AnyRef]](v2Relation, 
"identifier")
+                maybeIdentifier.flatMap { id =>
+                  lookupExtractor[IdentifierTableExtractor].apply(spark, id)
+                }.flatMap(table => table.database)
+            }
+            table.copy(catalog = maybeCatalog, database = maybeDatabase, owner 
= maybeOwner)
           }
       case _ => None
     }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
index afc7a5fde..d06a67a65 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
@@ -173,7 +173,7 @@ class PolicyJsonFileGenerator extends AnyFunSuite {
     name = "all - database, udf",
     description = "Policy for all - database, udf",
     resources = Map(
-      databaseRes(defaultDb, sparkCatalog, icebergNamespace, namespace1),
+      databaseRes(defaultDb, sparkCatalog, icebergNamespace, namespace1, 
paimonNamespace),
       allTableRes,
       allColumnRes),
     policyItems = List(
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/resources/sparkSql_hive_jenkins.json
 
b/extensions/spark/kyuubi-spark-authz/src/test/resources/sparkSql_hive_jenkins.json
index 76d8c788a..840d4a491 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/resources/sparkSql_hive_jenkins.json
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/resources/sparkSql_hive_jenkins.json
@@ -229,7 +229,7 @@
     "isAuditEnabled" : true,
     "resources" : {
       "database" : {
-        "values" : [ "default", "spark_catalog", "iceberg_ns", "ns1" ],
+        "values" : [ "default", "spark_catalog", "iceberg_ns", "ns1", 
"paimon_ns" ],
         "isExcludes" : false,
         "isRecursive" : false
       },
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala
index 4f870d504..c2ed9aa0b 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala
@@ -42,6 +42,7 @@ object RangerTestNamespace {
   val sparkCatalog = "spark_catalog"
   val icebergNamespace = "iceberg_ns"
   val hudiNamespace = "hudi_ns"
+  val paimonNamespace = "paimon_ns"
   val deltaNamespace = "delta_ns"
   val namespace1 = "ns1"
   val namespace2 = "ns2"
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
index 7584a6367..81fe228d2 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
@@ -16,6 +16,8 @@
  */
 package org.apache.kyuubi.plugin.spark.authz.ranger
 
+import scala.util.Properties
+
 import org.scalatest.Outcome
 
 import org.apache.kyuubi.Utils
@@ -32,6 +34,7 @@ class PaimonCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   override protected val catalogImpl: String = "hive"
   private def isSupportedVersion = true
 
+  val scalaVersion: String = Properties.versionString
   val catalogV2 = "paimon_catalog"
   val namespace1 = "paimon_ns"
   val table1 = "table1"
@@ -81,4 +84,41 @@ class PaimonCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       doAs(admin, createTable)
     }
   }
+
+  test("[KYUUBI #6541] INSERT/SELECT TABLE") {
+    val tName = "t_paimon"
+
+    /**
+     * paimon-spark run on Scala 2.12.
+     */
+    if (scalaVersion.startsWith("version 2.12")) {
+      withCleanTmpResources(Seq((s"$catalogV2.$namespace1.$tName", "table"))) {
+
+        doAs(bob, sql(createTableSql(namespace1, tName)))
+
+        interceptEndsWith[AccessControlException] {
+          doAs(someone, sql(s"INSERT INTO $catalogV2.$namespace1.$tName VALUES 
(1, 'name_1')"))
+        }(s"does not have [update] privilege on [$namespace1/$tName]")
+        doAs(bob, sql(s"INSERT INTO $catalogV2.$namespace1.$tName VALUES (1, 
'name_1')"))
+        doAs(bob, sql(s"INSERT INTO $catalogV2.$namespace1.$tName VALUES (1, 
'name_2')"))
+
+        interceptEndsWith[AccessControlException] {
+          doAs(someone, sql(s"SELECT id FROM 
$catalogV2.$namespace1.$tName").show())
+        }(s"does not have [select] privilege on [$namespace1/$tName/id]")
+        doAs(bob, sql(s"SELECT name FROM 
$catalogV2.$namespace1.$tName").show())
+      }
+    }
+
+  }
+
+  def createTableSql(namespace: String, table: String): String =
+    s"""
+       |CREATE TABLE IF NOT EXISTS $catalogV2.$namespace.$table
+       |(id int, name string)
+       |USING paimon
+       |OPTIONS (
+       | 'primary-key' = 'id'
+       |)
+       |""".stripMargin
+
 }

Reply via email to