Repository: spark Updated Branches: refs/heads/branch-2.0 920162a1e -> d63428af6
[SPARK-16368][SQL] Fix Strange Errors When Creating View With Unmatched Column Num #### What changes were proposed in this pull request? When creating a view, a common user error is that the number of columns produced by the `SELECT` clause does not match the number of column names specified by `CREATE VIEW`. For example, given that Table `t1` only has 3 columns ```SQL create view v1(col2, col4, col3, col5) as select * from t1 ``` Currently, Spark SQL reports the following error: ``` requirement failed java.lang.IllegalArgumentException: requirement failed at scala.Predef$.require(Predef.scala:212) at org.apache.spark.sql.execution.command.CreateViewCommand.run(views.scala:90) ``` This error message is very confusing. This PR is to detect the error and issue a meaningful error message. #### How was this patch tested? Added test cases Author: gatorsmile <gatorsm...@gmail.com> Closes #14047 from gatorsmile/viewMismatchedColumns. (cherry picked from commit ab05db0b48f395543cd7d91e2ad9dd760516868b) Signed-off-by: Reynold Xin <r...@databricks.com> Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d63428af Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d63428af Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d63428af Branch: refs/heads/branch-2.0 Commit: d63428af6d1c7c0a0533567a0a7ccb5817a65de3 Parents: 920162a Author: gatorsmile <gatorsm...@gmail.com> Authored: Thu Jul 7 00:07:25 2016 -0700 Committer: Reynold Xin <r...@databricks.com> Committed: Thu Jul 7 00:07:31 2016 -0700 ---------------------------------------------------------------------- .../spark/sql/execution/command/views.scala | 6 ++++- .../spark/sql/execution/command/DDLSuite.scala | 23 ++++++++++++++++++++ .../spark/sql/hive/execution/HiveDDLSuite.scala | 23 ++++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) ---------------------------------------------------------------------- 
http://git-wip-us.apache.org/repos/asf/spark/blob/d63428af/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala index 088f684..6533d79 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala @@ -88,7 +88,11 @@ case class CreateViewCommand( qe.assertAnalyzed() val analyzedPlan = qe.analyzed - require(tableDesc.schema == Nil || tableDesc.schema.length == analyzedPlan.output.length) + if (tableDesc.schema != Nil && tableDesc.schema.length != analyzedPlan.output.length) { + throw new AnalysisException(s"The number of columns produced by the SELECT clause " + + s"(num: `${analyzedPlan.output.length}`) does not match the number of column names " + + s"specified by CREATE VIEW (num: `${tableDesc.schema.length}`).") + } val sessionState = sparkSession.sessionState if (isTemporary) { http://git-wip-us.apache.org/repos/asf/spark/blob/d63428af/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index 0ee8d17..7d1f1d1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -1314,6 +1314,29 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { } } + test("create temporary view with mismatched schema") { + withTable("tab1") { + spark.range(10).write.saveAsTable("tab1") + withView("view1") { + val e = 
intercept[AnalysisException] { + sql("CREATE TEMPORARY VIEW view1 (col1, col3) AS SELECT * FROM tab1") + }.getMessage + assert(e.contains("the SELECT clause (num: `1`) does not match") + && e.contains("CREATE VIEW (num: `2`)")) + } + } + } + + test("create temporary view with specified schema") { + withView("view1") { + sql("CREATE TEMPORARY VIEW view1 (col1, col2) AS SELECT 1, 2") + checkAnswer( + sql("SELECT * FROM view1"), + Row(1, 2) :: Nil + ) + } + } + test("truncate table - external table, temporary table, view (not allowed)") { import testImplicits._ val path = Utils.createTempDir().getAbsolutePath http://git-wip-us.apache.org/repos/asf/spark/blob/d63428af/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala ---------------------------------------------------------------------- diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala index 9d3c4cd..93e50f4 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala @@ -391,6 +391,29 @@ class HiveDDLSuite } } + test("create view with mismatched schema") { + withTable("tab1") { + spark.range(10).write.saveAsTable("tab1") + withView("view1") { + val e = intercept[AnalysisException] { + sql("CREATE VIEW view1 (col1, col3) AS SELECT * FROM tab1") + }.getMessage + assert(e.contains("the SELECT clause (num: `1`) does not match") + && e.contains("CREATE VIEW (num: `2`)")) + } + } + } + + test("create view with specified schema") { + withView("view1") { + sql("CREATE VIEW view1 (col1, col2) AS SELECT 1, 2") + checkAnswer( + sql("SELECT * FROM view1"), + Row(1, 2) :: Nil + ) + } + } + test("desc table for Hive table") { withTable("tab1") { val tabName = "tab1" --------------------------------------------------------------------- To unsubscribe, e-mail: 
commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org