This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 06df5e5dc3 [KYUUBI #6979] Support check paimon system producers
06df5e5dc3 is described below
commit 06df5e5dc3f143a241fea7c5c48ef8e29b69fd67
Author: davidyuan <[email protected]>
AuthorDate: Wed Mar 26 14:13:59 2025 +0800
[KYUUBI #6979] Support check paimon system producers
### Why are the changes needed?
Currently, the Ranger access check is missing the Paimon system procedure commands; we need to
support these commands:
1. create_tag
2. delete_tag
3. rollback
#6979
PS: There is a caveat about Paimon: Paimon's SparkCatalog requires the session's
current catalog to be the Paimon catalog; using the default spark_catalog will
throw an exception. Maybe we should add this hint to the documentation,
such as
If you want procedure checks to work with Paimon, you need to run the SQL `use
$paimon_catalog` to ensure the session's current catalog is the Paimon catalog.
PS: paimon-spark-3.3:0.8.2 has some compatibility issues; we suggest upgrading
the Paimon version.
### How was this patch tested?
Added test cases for the following procedures:
1. create_tag
2. delete_tag
3. rollback
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #6980 from davidyuan1223/paimon_producers.
Closes #6979
90f367c6a [davidyuan] update
c0503cb5f [davidyuan] Merge remote-tracking branch
'origin/paimon_producers' into paimon_producers
993d1dcb8 [davidyuan] Merge branch 'master' into paimon_producers
f68edef41 [davidyuan] producers
58224191b [davidyuan] Merge branch 'master' into paimon_producers
57aac600b [davidyuan] update
cbcdd8dbf [davidyuan] producers
Authored-by: davidyuan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.../src/main/resources/table_command_spec.json | 16 ++++++
.../plugin/spark/authz/gen/PaimonCommands.scala | 9 +++-
.../PaimonCatalogRangerSparkExtensionSuite.scala | 57 ++++++++++++++++++++++
3 files changed, 81 insertions(+), 1 deletion(-)
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index 2c7f2d4727..ba018c6420 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -2589,6 +2589,22 @@
"isInput" : false,
"comment" : "Delta"
} ]
+}, {
+ "classname" :
"org.apache.paimon.spark.catalyst.plans.logical.PaimonCallCommand",
+ "tableDescs" : [ {
+ "fieldName" : "args",
+ "fieldExtractor" : "ExpressionSeqTableExtractor",
+ "columnDesc" : null,
+ "actionTypeDesc" : null,
+ "tableTypeDesc" : null,
+ "catalogDesc" : null,
+ "isInput" : false,
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Paimon"
+ } ],
+ "opType" : "ALTERTABLE_PROPERTIES",
+ "queryDescs" : [ ],
+ "uriDescs" : [ ]
}, {
"classname" :
"org.apache.paimon.spark.commands.DeleteFromPaimonTableCommand",
"tableDescs" : [ {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/PaimonCommands.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/PaimonCommands.scala
index 4d3a111591..c16c13e040 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/PaimonCommands.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/PaimonCommands.scala
@@ -17,6 +17,7 @@
package org.apache.kyuubi.plugin.spark.authz.gen
+import org.apache.kyuubi.plugin.spark.authz.OperationType
import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
import org.apache.kyuubi.plugin.spark.authz.serde._
@@ -56,8 +57,14 @@ object PaimonCommands extends CommandSpecs[TableCommandSpec]
{
TableCommandSpec(cmd, Seq(targetTableDesc, sourceTableDesc))
}
+ val PaimonCallCommand = {
+ val cmd =
"org.apache.paimon.spark.catalyst.plans.logical.PaimonCallCommand"
+ val td = TableDesc("args", classOf[ExpressionSeqTableExtractor], comment =
"Paimon")
+ TableCommandSpec(cmd, Seq(td), opType =
OperationType.ALTERTABLE_PROPERTIES)
+ }
override def specs: Seq[TableCommandSpec] = Seq(
UpdatePaimonTable,
DeleteFromPaimonTable,
- MergeIntoPaimonTable)
+ MergeIntoPaimonTable,
+ PaimonCallCommand)
}
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
index 12478cccc9..efaf28df8e 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
@@ -568,6 +568,63 @@ class PaimonCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
}
}
+ test("Producers") {
+ if (isSparkV34OrGreater) {
+ withCleanTmpResources(Seq(
+ (s"$catalogV2.$namespace1.$table1", "table"))) {
+ try {
+ doAs(admin, sql(createTableSql(namespace1, table1)))
+ doAs(admin, sql(s"INSERT INTO $catalogV2.$namespace1.$table1 VALUES
(1, 'a'), (2, 'b')"))
+
+ var currentCatalogName =
+ doAs(admin,
spark.sessionState.catalogManager.currentCatalog.name())
+
+ doAs(admin, sql(s"use $catalogV2"))
+ currentCatalogName = doAs(admin,
spark.sessionState.catalogManager.currentCatalog.name())
+ assert(currentCatalogName.equals(catalogV2))
+
+ // Create Tag
+ val createTagSql = s"Call sys.create_tag(table =>" +
+ s"'$catalogV2.$namespace1.$table1', tag => 'test_tag', snapshot =>
1)"
+ interceptEndsWith[AccessControlException] {
+ doAs(table1OnlyUserForNs, sql(createTagSql))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
+ interceptEndsWith[AccessControlException] {
+ doAs(someone, sql(createTagSql))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
+ doAs(admin, sql(createTagSql))
+
+ // Delete Tag
+ val deleteTagSql = s"Call sys.delete_tag(table =>" +
+ s"'$catalogV2.$namespace1.$table1', tag => 'test_tag')"
+ interceptEndsWith[AccessControlException] {
+ doAs(table1OnlyUserForNs, sql(deleteTagSql))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
+ interceptEndsWith[AccessControlException] {
+ doAs(someone, sql(deleteTagSql))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
+ doAs(admin, sql(deleteTagSql))
+
+ // Rollback
+ doAs(admin, sql(s"INSERT INTO $catalogV2.$namespace1.$table1 VALUES
(3, 'a'), (4, 'b')"))
+ doAs(admin, sql(s"INSERT INTO $catalogV2.$namespace1.$table1 VALUES
(5, 'a'), (6, 'b')"))
+ val rollbackTagSql = s"Call sys.rollback(table =>" +
+ s"'$catalogV2.$namespace1.$table1', version => '2')"
+ interceptEndsWith[AccessControlException] {
+ doAs(table1OnlyUserForNs, sql(rollbackTagSql))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
+ interceptEndsWith[AccessControlException] {
+ doAs(someone, sql(rollbackTagSql))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
+ doAs(admin, sql(rollbackTagSql))
+
+ } finally {
+ doAs(admin, sql(s"use spark_catalog"))
+ }
+ }
+ }
+ }
+
def createTableSql(namespace: String, table: String): String =
s"""
|CREATE TABLE IF NOT EXISTS $catalogV2.$namespace.$table