Github user jiangxb1987 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20177#discussion_r162746808
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala ---
    @@ -31,9 +31,9 @@ case class AnalyzeTableCommand(
     
       override def run(sparkSession: SparkSession): Seq[Row] = {
         val sessionState = sparkSession.sessionState
    -    val db = tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase)
    -    val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db))
    -    val tableMeta = sessionState.catalog.getTableMetadata(tableIdentWithDB)
    +    val db = tableIdent.database
    +    val tableIdentWithDB = TableIdentifier(tableIdent.table, db)
    +    val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB)
    --- End diff --
    
    Wouldn't this fail if we have a table whose tableIdent omits the current database?
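    
    For reference, a small self-contained sketch of the two lookup paths being compared. The `ToyCatalog` class, its data, and its behavior are hypothetical stand-ins, not the real `SessionCatalog` API; it only models the assumption that `getTempViewOrPermanentTableMetadata` checks temp views for an unqualified name and otherwise falls back to the current database.
    
    // Hypothetical, simplified model of the two lookup paths; not the real
    // SessionCatalog API. Names and behavior here are assumptions for illustration.
    case class TableIdentifier(table: String, database: Option[String] = None)
    case class CatalogTable(identifier: TableIdentifier)
    
    class ToyCatalog(currentDb: String, tempViews: Set[String], tables: Set[(String, String)]) {
    
      // Old path: the caller fills in the current database before the lookup.
      def getTableMetadata(ident: TableIdentifier): CatalogTable = {
        val db = ident.database.getOrElse(currentDb)
        require(tables.contains((db, ident.table)), s"table $db.${ident.table} not found")
        CatalogTable(TableIdentifier(ident.table, Some(db)))
      }
    
      // New path (as modeled here): an unqualified name is first checked against
      // temp views, and otherwise resolved against the current database.
      def getTempViewOrPermanentTableMetadata(ident: TableIdentifier): CatalogTable =
        ident.database match {
          case None if tempViews.contains(ident.table) =>
            CatalogTable(TableIdentifier(ident.table, None))
          case _ => getTableMetadata(ident)
        }
    }
    
    object ResolutionSketch extends App {
      val catalog = new ToyCatalog("sales", Set("tmp_view"), Set(("sales", "orders")))
      // Under this model an unqualified permanent table still resolves via the current
      // database; the question above is whether the real API guarantees the same.
      println(catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("orders")))
      println(catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("tmp_view")))
    }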


---
