yaooqinn commented on a change in pull request #26080: [SPARK-29425][SQL] The ownership of a database should be respected
URL: https://github.com/apache/spark/pull/26080#discussion_r348364913
 
 

 ##########
 File path: sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
 ##########
 @@ -170,6 +171,34 @@ class VersionsSuite extends SparkFunSuite with Logging {
       client.createDatabase(tempDB, ignoreIfExists = true)
     }
 
 +    test(s"$version: create/get/alter database should pick right user name as owner") {
+      if (version != "0.12") {
+        val currentUser = UserGroupInformation.getCurrentUser.getUserName
+        val ownerName = "SPARK_29425"
+        val db1 = "SPARK_29425_1"
+        val db2 = "SPARK_29425_2"
+        val ownerProps = Map("ownerName" -> ownerName)
+
+        // create database with owner
 +        val dbWithOwner = CatalogDatabase(db1, "desc", Utils.createTempDir().toURI, ownerProps)
+        client.createDatabase(dbWithOwner, ignoreIfExists = true)
+        val getDbWithOwner = client.getDatabase(db1)
+        assert(getDbWithOwner.properties("ownerName") === ownerName)
+        // alter database without owner
+        client.alterDatabase(getDbWithOwner.copy(properties = Map()))
 +        assert(client.getDatabase(getDbWithOwner.name).properties("ownerName") === currentUser)
 
 Review comment:
   I can add a test for the current behavior in `HiveDDLSuite`, because if we make ownerName/ownerType private, the CREATE DATABASE / ALTER DATABASE commands will be forced to use Spark's default user, since there is no ownership-switch syntax.
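   A rough sketch of what such a `HiveDDLSuite`-style test could look like, assuming the owner is still surfaced through the `ownerName` database property (as in the `VersionsSuite` snippet above) and that `Utils.getCurrentUserName()` reflects Spark's default user; the suite name, database name, and property key here are illustrative assumptions, not confirmed API:

```scala
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.util.Utils

// Hypothetical suite name; in practice this would live inside HiveDDLSuite.
class DatabaseOwnershipSketchSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {

  test("create/alter database falls back to Spark's default user as owner") {
    val defaultUser = Utils.getCurrentUserName()
    withDatabase("spark_29425_ddl") {
      sql("CREATE DATABASE spark_29425_ddl")
      val created = spark.sessionState.catalog.getDatabaseMetadata("spark_29425_ddl")
      // Without an ownership-switch syntax, CREATE DATABASE should record the default user
      // (assumption: the owner is exposed via the "ownerName" property).
      assert(created.properties.get("ownerName").contains(defaultUser))

      // ALTER DATABASE can only touch DBPROPERTIES, so the owner should stay the default user.
      sql("ALTER DATABASE spark_29425_ddl SET DBPROPERTIES ('k' = 'v')")
      val altered = spark.sessionState.catalog.getDatabaseMetadata("spark_29425_ddl")
      assert(altered.properties.get("ownerName").contains(defaultUser))
    }
  }
}
```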
