Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16592#discussion_r96166347
  
    --- Diff: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala 
---
    @@ -102,6 +76,198 @@ class DDLSuite extends QueryTest with SharedSQLContext 
with BeforeAndAfterEach {
           tracksPartitionsInCatalog = true)
       }
     
    +  test("desc table for parquet data source table using in-memory catalog") 
{
    +    val tabName = "tab1"
    +    withTable(tabName) {
    +      sql(s"CREATE TABLE $tabName(a int comment 'test') USING parquet ")
    +
    +      checkAnswer(
    +        sql(s"DESC $tabName").select("col_name", "data_type", "comment"),
    +        Row("a", "int", "test")
    +      )
    +    }
    +  }
    +
    +  test("select/insert into the managed table") {
    +    val tabName = "tbl"
    +    withTable(tabName) {
    +      sql(s"CREATE TABLE $tabName (i INT, j STRING)")
    +      val catalogTable =
    +        
spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, 
Some("default")))
    +      assert(catalogTable.tableType == CatalogTableType.MANAGED)
    +
    +      var message = intercept[AnalysisException] {
    +        sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
    +      }.getMessage
    +      assert(message.contains("Hive support is required to insert into the 
following tables"))
    +      message = intercept[AnalysisException] {
    +        sql(s"SELECT * FROM $tabName")
    +      }.getMessage
    +      assert(message.contains("Hive support is required to select over the 
following tables"))
    +    }
    +  }
    +
    +  test("select/insert into external table") {
    +    withTempDir { tempDir =>
    +      val tabName = "tbl"
    +      withTable(tabName) {
    +        sql(
    +          s"""
    +             |CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
    +             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    +             |LOCATION '$tempDir'
    +           """.stripMargin)
    +        val catalogTable =
    +          
spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, 
Some("default")))
    +        assert(catalogTable.tableType == CatalogTableType.EXTERNAL)
    +
    +        var message = intercept[AnalysisException] {
    +          sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
    +        }.getMessage
    +        assert(message.contains("Hive support is required to insert into 
the following tables"))
    +        message = intercept[AnalysisException] {
    +          sql(s"SELECT * FROM $tabName")
    +        }.getMessage
    +        assert(message.contains("Hive support is required to select over 
the following tables"))
    +      }
    +    }
    +  }
    +
    +  test("Create Hive Table As Select") {
    +    import testImplicits._
    +    withTable("t", "t1") {
    +      var e = intercept[AnalysisException] {
    +        sql("CREATE TABLE t SELECT 1 as a, 1 as b")
    +      }.getMessage
    +      assert(e.contains("Hive support is required to use CREATE Hive TABLE 
AS SELECT"))
    +
    +      spark.range(1).select('id as 'a, 'id as 'b).write.saveAsTable("t1")
    +      e = intercept[AnalysisException] {
    +        sql("CREATE TABLE t SELECT a, b from t1")
    +      }.getMessage
    +      assert(e.contains("Hive support is required to use CREATE Hive TABLE 
AS SELECT"))
    +    }
    +  }
    --- End diff --
    
    The above four cases are copied from the existing ones in DDLSuite. These 
test cases only make sense for InMemoryCatalog. 


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to