This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new 16e5035  [SPARK-34302][FOLLOWUP][SQL][TESTS] Update jdbc.v2.*IntegrationSuite
16e5035 is described below

commit 16e50356ee3dc4401c5cc9115411fe10128d4327
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Mon Jun 28 23:01:54 2021 -0700

    [SPARK-34302][FOLLOWUP][SQL][TESTS] Update jdbc.v2.*IntegrationSuite

    ### What changes were proposed in this pull request?

    This PR aims to update the JDBC v2 integration suites by adding `catalogName`.

    ### Why are the changes needed?

    To recover the integration test suite.

    ### Does this PR introduce _any_ user-facing change?

    No.

    ### How was this patch tested?

    Pass the GitHub Action.

    Closes #33124 from dongjoon-hyun/SPARK-34302.

    Authored-by: Dongjoon Hyun <dh...@apple.com>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala  | 3 ++-
 .../org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala     | 3 ++-
 .../scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala     | 3 ++-
 .../scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala    | 3 ++-
 .../scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala  | 3 ++-
 5 files changed, 10 insertions(+), 5 deletions(-)
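Before the per-file diffs, here is a minimal, standalone Scala sketch of the pattern this commit applies in each suite: interpolating a per-suite `catalogName` into the expected ALTER COLUMN error message. The trait, object, and method names in the sketch are illustrative assumptions only, not the actual `V2JDBCTest` API.

```scala
// Minimal sketch only; names below (CatalogNamedSuite, expectedUpdateError, MySQLExample)
// are hypothetical and do not exist in Spark. It illustrates how a per-suite catalog
// name can be folded into the message each integration test asserts on.
trait CatalogNamedSuite {
  // Stand-in for the catalog name a concrete suite registers, e.g. "mysql".
  def catalogName: String

  // Builds the analyzer error text expected when a column type change is unsupported.
  def expectedUpdateError(fromType: String, toType: String): String =
    s"Cannot update $catalogName.alt_table field ID: $fromType cannot be cast to $toType"
}

object MySQLExample extends CatalogNamedSuite {
  override def catalogName: String = "mysql"
}

object Demo extends App {
  // Prints: Cannot update mysql.alt_table field ID: string cannot be cast to int
  println(MySQLExample.expectedUpdateError("string", "int"))
}
```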
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
index 012bdae..cb0dd1e 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
@@ -75,7 +75,8 @@ class DB2IntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
     val msg1 = intercept[AnalysisException] {
       sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE VARCHAR(10)")
     }.getMessage
-    assert(msg1.contains("Cannot update alt_table field ID: double cannot be cast to varchar"))
+    assert(msg1.contains(
+      s"Cannot update $catalogName.alt_table field ID: double cannot be cast to varchar"))
   }
 
   override def testCreateTableWithProperty(tbl: String): Unit = {
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala
index 083fa3c..b9f5b77 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala
@@ -77,7 +77,8 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBC
     val msg1 = intercept[AnalysisException] {
       sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE INTEGER")
     }.getMessage
-    assert(msg1.contains("Cannot update alt_table field ID: string cannot be cast to int"))
+    assert(msg1.contains(
+      s"Cannot update $catalogName.alt_table field ID: string cannot be cast to int"))
   }
 
   override def testUpdateColumnNullability(tbl: String): Unit = {
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
index 9a9bda5..db626df 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
@@ -79,7 +79,8 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
     val msg1 = intercept[AnalysisException] {
       sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE INTEGER")
     }.getMessage
-    assert(msg1.contains("Cannot update alt_table field ID: string cannot be cast to int"))
+    assert(msg1.contains(
+      s"Cannot update $catalogName.alt_table field ID: string cannot be cast to int"))
   }
 
   override def testRenameColumn(tbl: String): Unit = {
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
index d77b216..45d793a 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
@@ -86,6 +86,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest
     val msg1 = intercept[AnalysisException] {
       sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE INTEGER")
     }.getMessage
-    assert(msg1.contains("Cannot update alt_table field ID: string cannot be cast to int"))
+    assert(msg1.contains(
+      s"Cannot update $catalogName.alt_table field ID: string cannot be cast to int"))
   }
 }
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala
index 386a7ad..932ddb9 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala
@@ -64,7 +64,8 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTes
     val msg = intercept[AnalysisException] {
       sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE INTEGER")
     }.getMessage
-    assert(msg.contains("Cannot update alt_table field ID: string cannot be cast to int"))
+    assert(msg.contains(
+      s"Cannot update $catalogName.alt_table field ID: string cannot be cast to int"))
   }
 
   override def testCreateTableWithProperty(tbl: String): Unit = {

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org