Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16592#discussion_r96166442
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala ---
    @@ -102,6 +76,198 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
           tracksPartitionsInCatalog = true)
       }
     
    +  test("desc table for parquet data source table using in-memory catalog") 
{
    +    val tabName = "tab1"
    +    withTable(tabName) {
    +      sql(s"CREATE TABLE $tabName(a int comment 'test') USING parquet ")
    +
    +      checkAnswer(
    +        sql(s"DESC $tabName").select("col_name", "data_type", "comment"),
    +        Row("a", "int", "test")
    +      )
    +    }
    +  }
    +
    +  test("select/insert into the managed table") {
    +    val tabName = "tbl"
    +    withTable(tabName) {
    +      sql(s"CREATE TABLE $tabName (i INT, j STRING)")
    +      val catalogTable =
    +        spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
    +      assert(catalogTable.tableType == CatalogTableType.MANAGED)
    +
    +      var message = intercept[AnalysisException] {
    +        sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
    +      }.getMessage
    +      assert(message.contains("Hive support is required to insert into the 
following tables"))
    +      message = intercept[AnalysisException] {
    +        sql(s"SELECT * FROM $tabName")
    +      }.getMessage
    +      assert(message.contains("Hive support is required to select over the 
following tables"))
    +    }
    +  }
    +
    +  test("select/insert into external table") {
    +    withTempDir { tempDir =>
    +      val tabName = "tbl"
    +      withTable(tabName) {
    +        sql(
    +          s"""
    +             |CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
    +             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    +             |LOCATION '$tempDir'
    +           """.stripMargin)
    +        val catalogTable =
    +          spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
    +        assert(catalogTable.tableType == CatalogTableType.EXTERNAL)
    +
    +        var message = intercept[AnalysisException] {
    +          sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
    +        }.getMessage
    +        assert(message.contains("Hive support is required to insert into 
the following tables"))
    +        message = intercept[AnalysisException] {
    +          sql(s"SELECT * FROM $tabName")
    +        }.getMessage
    +        assert(message.contains("Hive support is required to select over 
the following tables"))
    +      }
    +    }
    +  }
    +
    +  test("Create Hive Table As Select") {
    +    import testImplicits._
    +    withTable("t", "t1") {
    +      var e = intercept[AnalysisException] {
    +        sql("CREATE TABLE t SELECT 1 as a, 1 as b")
    +      }.getMessage
    +      assert(e.contains("Hive support is required to use CREATE Hive TABLE 
AS SELECT"))
    +
    +      spark.range(1).select('id as 'a, 'id as 'b).write.saveAsTable("t1")
    +      e = intercept[AnalysisException] {
    +        sql("CREATE TABLE t SELECT a, b from t1")
    +      }.getMessage
    +      assert(e.contains("Hive support is required to use CREATE Hive TABLE 
AS SELECT"))
    +    }
    +  }
    +
    +  test("alter table: set location (datasource table)") {
    +    testSetLocation(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: set properties (datasource table)") {
    +    testSetProperties(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: unset properties (datasource table)") {
    +    testUnsetProperties(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: set serde (datasource table)") {
    +    testSetSerde(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: set serde partition (datasource table)") {
    +    testSetSerdePartition(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: change column (datasource table)") {
    +    testChangeColumn(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: add partition (datasource table)") {
    +    testAddPartitions(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: drop partition (datasource table)") {
    +    testDropPartitions(isDatasourceTable = true)
    +  }
    +
    +  test("alter table: rename partition (datasource table)") {
    +    testRenamePartitions(isDatasourceTable = true)
    +  }
    +
    +  test("drop table - data source table") {
    +    testDropTable(isDatasourceTable = true)
    +  }
    --- End diff --
    
    The above 10 test cases currently run with `InMemoryCatalog` only. The reason is that `HiveExternalCatalog` does not allow users to change the table provider from `hive` to anything else. We can fix this in future PRs.
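    
    For context, here is a minimal sketch (not part of this diff) of how a session ends up backed by `InMemoryCatalog` rather than `HiveExternalCatalog`: the choice is driven by the `spark.sql.catalogImplementation` setting, which `enableHiveSupport()` switches to `hive`.
    
    ```scala
    // Illustrative only: a session built without Hive support is backed by
    // the in-memory catalog, which is what these DDLSuite tests run against.
    import org.apache.spark.sql.SparkSession
    
    val spark = SparkSession.builder()
      .master("local[1]")
      // "in-memory" is also the default when Hive support is not enabled;
      // enableHiveSupport() would set this to "hive" (HiveExternalCatalog).
      .config("spark.sql.catalogImplementation", "in-memory")
      .getOrCreate()
    ```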

