Github user yhuai commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12121#discussion_r58300927
  
    --- Diff: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala 
---
    @@ -159,6 +193,187 @@ class DDLSuite extends QueryTest with 
SharedSQLContext {
         }
       }
     
    +  // TODO: test drop database in restrict mode
    +
    +  test("alter table: rename") {
    +    val catalog = sqlContext.sessionState.catalog
    +    val tableIdent1 = TableIdentifier("tab1", Some("dbx"))
    +    val tableIdent2 = TableIdentifier("tab2", Some("dbx"))
    +    createDatabase(catalog, "dbx")
    +    createTable(catalog, tableIdent1)
    +    assert(catalog.listTables("dbx") == Seq(tableIdent1))
    +    sql("ALTER TABLE dbx.tab1 RENAME TO dbx.tab2")
    +    assert(catalog.listTables("dbx") == Seq(tableIdent2))
    +    catalog.setCurrentDatabase("dbx")
    +    // rename without explicitly specifying database
    +    sql("ALTER TABLE tab2 RENAME TO tab1")
    +    assert(catalog.listTables("dbx") == Seq(tableIdent1))
    +    // table to rename does not exist
    +    intercept[AnalysisException] {
    +      sql("ALTER TABLE dbx.does_not_exist RENAME TO dbx.tab2")
    +    }
    +    // destination database is different
    +    intercept[AnalysisException] {
    +      sql("ALTER TABLE dbx.tab1 RENAME TO dby.tab2")
    +    }
    +  }
    +
    +  test("alter table: set location") {
    +    val catalog = sqlContext.sessionState.catalog
    +    val tableIdent = TableIdentifier("tab1", Some("dbx"))
    +    val partSpec = Map("a" -> "1")
    +    createDatabase(catalog, "dbx")
    +    createTable(catalog, tableIdent)
    +    createTablePartition(catalog, partSpec, tableIdent)
    +    assert(catalog.getTable(tableIdent).storage.locationUri.isEmpty)
    +    assert(catalog.getPartition(tableIdent, 
partSpec).storage.locationUri.isEmpty)
    +    // set table location
    +    sql("ALTER TABLE dbx.tab1 SET LOCATION '/path/to/your/lovely/heart'")
    +    assert(catalog.getTable(tableIdent).storage.locationUri ===
    +      Some("/path/to/your/lovely/heart"))
    +    // set table partition location
    +    sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION 
'/path/to/part/ways'")
    +    assert(catalog.getPartition(tableIdent, partSpec).storage.locationUri 
===
    +      Some("/path/to/part/ways"))
    +    // set table location without explicitly specifying database
    +    catalog.setCurrentDatabase("dbx")
    +    sql("ALTER TABLE tab1 SET LOCATION '/swanky/steak/place'")
    +    assert(catalog.getTable(tableIdent).storage.locationUri === 
Some("/swanky/steak/place"))
    +    // set table partition location
    +    sql("ALTER TABLE tab1 PARTITION (a='1') SET LOCATION 'vienna'")
    +    assert(catalog.getPartition(tableIdent, partSpec).storage.locationUri 
=== Some("vienna"))
    +    // table to alter does not exist
    +    intercept[AnalysisException] {
    +      sql("ALTER TABLE dbx.does_not_exist SET LOCATION '/mister/spark'")
    +    }
    +  }
    +
    +  test("alter table: set properties") {
    +    val catalog = sqlContext.sessionState.catalog
    +    val tableIdent = TableIdentifier("tab1", Some("dbx"))
    +    createDatabase(catalog, "dbx")
    +    createTable(catalog, tableIdent)
    +    assert(catalog.getTable(tableIdent).properties.isEmpty)
    +    // set table properties
    +    sql("ALTER TABLE dbx.tab1 SET TBLPROPERTIES ('andrew' = 'or14', 'kor' 
= 'bel')")
    +    assert(catalog.getTable(tableIdent).properties == Map("andrew" -> 
"or14", "kor" -> "bel"))
    +    // set table properties without explicitly specifying database
    +    catalog.setCurrentDatabase("dbx")
    +    sql("ALTER TABLE tab1 SET TBLPROPERTIES ('kor' = 'belle', 'kar' = 
'bol')")
    +    assert(catalog.getTable(tableIdent).properties ==
    +      Map("andrew" -> "or14", "kor" -> "belle", "kar" -> "bol"))
    +    // table to alter does not exist
    +    intercept[AnalysisException] {
    +      sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 
'loser')")
    +    }
    +    // throw exception for datasource tables
    +    catalog.alterTable(catalog.getTable(tableIdent).copy(
    +      properties = Map("spark.sql.sources.provider" -> "csv")))
    +    val e = intercept[AnalysisException] {
    +      sql("ALTER TABLE tab1 SET TBLPROPERTIES ('sora' = 'bol')")
    +    }
    +    assert(e.getMessage.contains("datasource"))
    +  }
    +
    +  test("alter table: unset properties") {
    +    val catalog = sqlContext.sessionState.catalog
    +    val tableIdent = TableIdentifier("tab1", Some("dbx"))
    +    createDatabase(catalog, "dbx")
    +    createTable(catalog, tableIdent)
    +    // unset table properties
    +    sql("ALTER TABLE dbx.tab1 SET TBLPROPERTIES ('j' = 'am', 'p' = 'an', 
'c' = 'lan')")
    +    sql("ALTER TABLE dbx.tab1 UNSET TBLPROPERTIES ('j')")
    +    assert(catalog.getTable(tableIdent).properties == Map("p" -> "an", "c" 
-> "lan"))
    +    // unset table properties without explicitly specifying database
    +    catalog.setCurrentDatabase("dbx")
    +    sql("ALTER TABLE tab1 UNSET TBLPROPERTIES ('p')")
    +    assert(catalog.getTable(tableIdent).properties == Map("c" -> "lan"))
    +    // table to alter does not exist
    +    intercept[AnalysisException] {
    +      sql("ALTER TABLE does_not_exist UNSET TBLPROPERTIES ('c' = 'lan')")
    +    }
    +    // property to unset does not exist
    +    val e = intercept[AnalysisException] {
    +      sql("ALTER TABLE tab1 UNSET TBLPROPERTIES ('c', 'xyz')")
    +    }
    +    assert(e.getMessage.contains("xyz"))
    +    // property to unset does not exist, but "IF EXISTS" is specified
    +    sql("ALTER TABLE tab1 UNSET TBLPROPERTIES IF EXISTS ('c', 'xyz')")
    +    assert(catalog.getTable(tableIdent).properties.isEmpty)
    +    // throw exception for datasource tables
    +    catalog.alterTable(catalog.getTable(tableIdent).copy(
    +      properties = Map("spark.sql.sources.provider" -> "csv")))
    +    val e1 = intercept[AnalysisException] {
    +      sql("ALTER TABLE tab1 UNSET TBLPROPERTIES ('sora')")
    +    }
    +    assert(e1.getMessage.contains("datasource"))
    +  }
    +
    +  test("alter table: set serde") {
    +    val catalog = sqlContext.sessionState.catalog
    +    val tableIdent = TableIdentifier("tab1", Some("dbx"))
    +    createDatabase(catalog, "dbx")
    +    createTable(catalog, tableIdent)
    +    assert(catalog.getTable(tableIdent).storage.serde.isEmpty)
    +    assert(catalog.getTable(tableIdent).storage.serdeProperties.isEmpty)
    +    // set table serde
    +    sql("ALTER TABLE dbx.tab1 SET SERDE 'org.apache.jadoop'")
    +    assert(catalog.getTable(tableIdent).storage.serde == 
Some("org.apache.jadoop"))
    +    assert(catalog.getTable(tableIdent).storage.serdeProperties.isEmpty)
    +    // set table serde and properties
    +    sql("ALTER TABLE dbx.tab1 SET SERDE 'org.apache.madoop' " +
    +      "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
    +    assert(catalog.getTable(tableIdent).storage.serde == 
Some("org.apache.madoop"))
    +    assert(catalog.getTable(tableIdent).storage.serdeProperties ==
    +      Map("k" -> "v", "kay" -> "vee"))
    +    // set serde properties only
    +    sql("ALTER TABLE dbx.tab1 SET SERDEPROPERTIES ('k' = 'vvv')")
    --- End diff --
    
    Also have a test for data source tables?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to