This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 38c779d6827 [SPARK-37599][SQL][TESTS] Unify v1 and v2 ALTER TABLE .. SET LOCATION tests 38c779d6827 is described below commit 38c779d6827b1b6110f2f97d555df26a1ddea673 Author: panbingkun <pbk1...@gmail.com> AuthorDate: Sun Aug 14 19:25:03 2022 +0300 [SPARK-37599][SQL][TESTS] Unify v1 and v2 ALTER TABLE .. SET LOCATION tests ### What changes were proposed in this pull request? - Move parser tests from DDLParserSuite to AlterTableSetLocationParserSuite. - Port DS v1 tests from DDLSuite and other test suites to v1.AlterTableSetLocationSuite. - Add a test for DSv2 ALTER TABLE .. SET LOCATION to v2.AlterTableSetLocationSuite. ### Why are the changes needed? To improve test coverage. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? By running new test suites: > $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *AlterTableSetLocationSuite" > $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *AlterTableSetLocationParserSuite" Closes #37376 from panbingkun/SPARK-37599. 
Authored-by: panbingkun <pbk1...@gmail.com> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../spark/sql/catalyst/parser/DDLParserSuite.scala | 17 --- .../spark/sql/connector/AlterTableTests.scala | 28 ----- .../command/AlterTableSetLocationParserSuite.scala | 46 +++++++ .../command/AlterTableSetLocationSuiteBase.scala | 38 ++++++ .../spark/sql/execution/command/DDLSuite.scala | 68 ----------- .../command/v1/AlterTableSetLocationSuite.scala | 135 +++++++++++++++++++++ .../command/v2/AlterTableSetLocationSuite.scala | 66 ++++++++++ .../spark/sql/hive/execution/HiveDDLSuite.scala | 4 - .../command/AlterTableSetLocationSuite.scala | 35 ++++++ 9 files changed, 320 insertions(+), 117 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala index 1eb7b011d97..25bacc3631e 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala @@ -900,23 +900,6 @@ class DDLParserSuite extends AnalysisTest { ))) } - test("alter table: set location") { - val hint = Some("Please use ALTER VIEW instead.") - comparePlans( - parsePlan("ALTER TABLE a.b.c SET LOCATION 'new location'"), - SetTableLocation( - UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... SET LOCATION ...", hint), - None, - "new location")) - - comparePlans( - parsePlan("ALTER TABLE a.b.c PARTITION(ds='2017-06-10') SET LOCATION 'new location'"), - SetTableLocation( - UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... 
SET LOCATION ...", hint), - Some(Map("ds" -> "2017-06-10")), - "new location")) - } - test("alter table: rename column") { comparePlans( parsePlan("ALTER TABLE table_name RENAME COLUMN a.b.c TO d"), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala index c43c5636974..ee707acf5a0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala @@ -1160,34 +1160,6 @@ trait AlterTableTests extends SharedSparkSession { } } - test("AlterTable: set location") { - val t = s"${catalogAndNamespace}table_name" - withTable(t) { - sql(s"CREATE TABLE $t (id int) USING $v2Format") - sql(s"ALTER TABLE $t SET LOCATION 's3://bucket/path'") - - val tableName = fullTableName(t) - val table = getTableMetadata(tableName) - - assert(table.name === tableName) - assert(table.properties === - withDefaultOwnership(Map("provider" -> v2Format, "location" -> "s3://bucket/path")).asJava) - } - } - - test("AlterTable: set partition location") { - val t = s"${catalogAndNamespace}table_name" - withTable(t) { - sql(s"CREATE TABLE $t (id int) USING $v2Format") - - val exc = intercept[AnalysisException] { - sql(s"ALTER TABLE $t PARTITION(ds='2017-06-10') SET LOCATION 's3://bucket/path'") - } - assert(exc.getMessage.contains( - "ALTER TABLE SET LOCATION does not support partition for v2 tables")) - } - } - test("AlterTable: set table property") { val t = s"${catalogAndNamespace}table_name" withTable(t) { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetLocationParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetLocationParserSuite.scala new file mode 100644 index 00000000000..375d4229e96 --- /dev/null +++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetLocationParserSuite.scala @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command + +import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable} +import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan +import org.apache.spark.sql.catalyst.plans.logical.SetTableLocation +import org.apache.spark.sql.test.SharedSparkSession + +class AlterTableSetLocationParserSuite extends AnalysisTest with SharedSparkSession { + + private val HINT = Some("Please use ALTER VIEW instead.") + + test("alter table: set location") { + val sql1 = "ALTER TABLE a.b.c SET LOCATION 'new location'" + val parsed1 = parsePlan(sql1) + val expected1 = SetTableLocation( + UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... SET LOCATION ...", HINT), + None, + "new location") + comparePlans(parsed1, expected1) + + val sql2 = "ALTER TABLE a.b.c PARTITION(ds='2017-06-10') SET LOCATION 'new location'" + val parsed2 = parsePlan(sql2) + val expected2 = SetTableLocation( + UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... 
SET LOCATION ...", HINT), + Some(Map("ds" -> "2017-06-10")), + "new location") + comparePlans(parsed2, expected2) + } +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetLocationSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetLocationSuiteBase.scala new file mode 100644 index 00000000000..dbc583154d4 --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetLocationSuiteBase.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command + +import org.apache.spark.sql.QueryTest + +/** + * This base suite contains unified tests for the `ALTER TABLE .. SET LOCATION` + * command that check V1 and V2 table catalogs. 
The tests that cannot run for all supported + * catalogs are located in more specific test suites: + * + * - V2 table catalog tests: + * `org.apache.spark.sql.execution.command.v2.AlterTableSetLocationSuite` + * - V1 table catalog tests: + * `org.apache.spark.sql.execution.command.v1.AlterTableSetLocationSuiteBase` + * - V1 In-Memory catalog: + * `org.apache.spark.sql.execution.command.v1.AlterTableSetLocationSuite` + * - V1 Hive External catalog: + * `org.apache.spark.sql.hive.execution.command.AlterTableSetLocationSuite` + */ +trait AlterTableSetLocationSuiteBase extends QueryTest with DDLCommandTestUtils { + override val command = "ALTER TABLE .. SET LOCATION" +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index c4a787cb891..c405bf046b3 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -313,10 +313,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase { protected val reversedProperties = Seq(PROP_OWNER) - test("alter table: set location (datasource table)") { - testSetLocation(isDatasourceTable = true) - } - test("alter table: set properties (datasource table)") { testSetProperties(isDatasourceTable = true) } @@ -1029,70 +1025,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase { assert(getProps == Map("x" -> "y")) } - protected def testSetLocation(isDatasourceTable: Boolean): Unit = { - if (!isUsingHiveMetastore) { - assert(isDatasourceTable, "InMemoryCatalog only supports data source tables") - } - val catalog = spark.sessionState.catalog - val tableIdent = TableIdentifier("tab1", Some("dbx")) - val partSpec = Map("a" -> "1", "b" -> "2") - createDatabase(catalog, "dbx") - createTable(catalog, tableIdent, isDatasourceTable) - createTablePartition(catalog, partSpec, tableIdent) - 
assert(catalog.getTableMetadata(tableIdent).storage.locationUri.isDefined) - assert(normalizeSerdeProp(catalog.getTableMetadata(tableIdent).storage.properties).isEmpty) - assert(catalog.getPartition(tableIdent, partSpec).storage.locationUri.isDefined) - assert( - normalizeSerdeProp(catalog.getPartition(tableIdent, partSpec).storage.properties).isEmpty) - - // Verify that the location is set to the expected string - def verifyLocation(expected: URI, spec: Option[TablePartitionSpec] = None): Unit = { - val storageFormat = spec - .map { s => catalog.getPartition(tableIdent, s).storage } - .getOrElse { catalog.getTableMetadata(tableIdent).storage } - // TODO(gatorsmile): fix the bug in alter table set location. - // if (isUsingHiveMetastore) { - // assert(storageFormat.properties.get("path") === expected) - // } - assert(storageFormat.locationUri === - Some(makeQualifiedPath(CatalogUtils.URIToString(expected)))) - } - // set table location - sql("ALTER TABLE dbx.tab1 SET LOCATION '/path/to/your/lovely/heart'") - verifyLocation(new URI("/path/to/your/lovely/heart")) - // set table partition location - sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='2') SET LOCATION '/path/to/part/ways'") - verifyLocation(new URI("/path/to/part/ways"), Some(partSpec)) - // set location for partition spec in the upper case - withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { - sql("ALTER TABLE dbx.tab1 PARTITION (A='1', B='2') SET LOCATION '/path/to/part/ways2'") - verifyLocation(new URI("/path/to/part/ways2"), Some(partSpec)) - } - withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { - val errMsg = intercept[AnalysisException] { - sql("ALTER TABLE dbx.tab1 PARTITION (A='1', B='2') SET LOCATION '/path/to/part/ways3'") - }.getMessage - assert(errMsg.contains("not a valid partition column")) - } - // set table location without explicitly specifying database - catalog.setCurrentDatabase("dbx") - sql("ALTER TABLE tab1 SET LOCATION '/swanky/steak/place'") - verifyLocation(new 
URI("/swanky/steak/place")) - // set table partition location without explicitly specifying database - sql("ALTER TABLE tab1 PARTITION (a='1', b='2') SET LOCATION 'vienna'") - val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("tab1")) - val viennaPartPath = new Path(new Path(table. location), "vienna") - verifyLocation(CatalogUtils.stringToURI(viennaPartPath.toString), Some(partSpec)) - // table to alter does not exist - intercept[AnalysisException] { - sql("ALTER TABLE dbx.does_not_exist SET LOCATION '/mister/spark'") - } - // partition to alter does not exist - intercept[AnalysisException] { - sql("ALTER TABLE dbx.tab1 PARTITION (b='2') SET LOCATION '/mister/spark'") - } - } - protected def testChangeColumn(isDatasourceTable: Boolean): Unit = { if (!isUsingHiveMetastore) { assert(isDatasourceTable, "InMemoryCatalog only supports data source tables") diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetLocationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetLocationSuite.scala new file mode 100644 index 00000000000..a8af349a89b --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetLocationSuite.scala @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.v1 + +import java.net.URI + +import org.apache.hadoop.fs.Path + +import org.apache.spark.sql.AnalysisException +import org.apache.spark.sql.catalyst.TableIdentifier +import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec +import org.apache.spark.sql.catalyst.catalog.CatalogUtils +import org.apache.spark.sql.execution.command +import org.apache.spark.sql.internal.SQLConf + +/** + * This base suite contains unified tests for the `ALTER TABLE .. SET LOCATION` + * command that check V1 table catalogs. The tests that cannot run for all V1 catalogs + * are located in more specific test suites: + * + * - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.AlterTableSetLocationSuite` + * - V1 Hive External catalog: + * `org.apache.spark.sql.hive.execution.command.AlterTableSetLocationSuite` + */ +trait AlterTableSetLocationSuiteBase extends command.AlterTableSetLocationSuiteBase { + + private lazy val sessionCatalog = spark.sessionState.catalog + + protected def buildCreateTableSQL(t: String): String + + private def normalizeSerdeProp(props: Map[String, String]): Map[String, String] = { + props.filterNot(p => Seq("serialization.format", "path").contains(p._1)) + } + + // Verify that the location is set to the expected string + private def checkLocation( + tableIdent: TableIdentifier, + expected: URI, + spec: Option[TablePartitionSpec] = None): Unit = { + val storageFormat = spec + .map { s => sessionCatalog.getPartition(tableIdent, s).storage } + .getOrElse { sessionCatalog.getTableMetadata(tableIdent).storage } + assert(storageFormat.locationUri === + Some(makeQualifiedPath(CatalogUtils.URIToString(expected)))) + } + + test("alter table set location") { + withNamespaceAndTable("ns", "tbl") { t => + sql(buildCreateTableSQL(t)) + sql(s"INSERT INTO $t PARTITION (a = '1', b = '2') 
SELECT 1, 'abc'") + + val tableIdent = TableIdentifier("tbl", Some("ns")) + val partSpec = Map("a" -> "1", "b" -> "2") + + val catalogTable = sessionCatalog.getTableMetadata(tableIdent) + assert(catalogTable.storage.locationUri.isDefined) + assert(normalizeSerdeProp(catalogTable.storage.properties).isEmpty) + + val catalogTablePartition = sessionCatalog.getPartition(tableIdent, partSpec) + assert(catalogTablePartition.storage.locationUri.isDefined) + assert(normalizeSerdeProp(catalogTablePartition.storage.properties).isEmpty) + + // set table location + sql(s"ALTER TABLE $t SET LOCATION '/path/to/your/lovely/heart'") + checkLocation(tableIdent, new URI("/path/to/your/lovely/heart")) + + // set table partition location + sql(s"ALTER TABLE $t PARTITION (a='1', b='2') SET LOCATION '/path/to/part/ways'") + checkLocation(tableIdent, new URI("/path/to/part/ways"), Some(partSpec)) + + // set location for partition spec in the upper case + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { + sql(s"ALTER TABLE $t PARTITION (A='1', B='2') SET LOCATION '/path/to/part/ways2'") + checkLocation(tableIdent, new URI("/path/to/part/ways2"), Some(partSpec)) + } + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { + val e = intercept[AnalysisException] { + sql(s"ALTER TABLE $t PARTITION (A='1', B='2') SET LOCATION '/path/to/part/ways3'") + }.getMessage + assert(e.contains("not a valid partition column")) + } + + sessionCatalog.setCurrentDatabase("ns") + // set table location without explicitly specifying database + sql("ALTER TABLE tbl SET LOCATION '/swanky/steak/place'") + checkLocation(tableIdent, new URI("/swanky/steak/place")) + // set table partition location without explicitly specifying database + sql("ALTER TABLE tbl PARTITION (a='1', b='2') SET LOCATION 'vienna'") + val table = sessionCatalog.getTableMetadata(TableIdentifier("tbl")) + val viennaPartPath = new Path(new Path(table.location), "vienna") + checkLocation(tableIdent, 
CatalogUtils.stringToURI(viennaPartPath.toString), Some(partSpec)) + } + } + + test("table to alter set location does not exist") { + val e = intercept[AnalysisException] { + sql("ALTER TABLE ns.does_not_exist SET LOCATION '/mister/spark'") + } + assert(e.getMessage.contains("Table not found: ns.does_not_exist")) + } + + test("partition to alter set location does not exist") { + withNamespaceAndTable("ns", "tbl") { t => + sql(buildCreateTableSQL(t)) + + sql(s"INSERT INTO $t PARTITION (a = '1', b = '2') SELECT 1, 'abc'") + val e = intercept[AnalysisException] { + sql(s"ALTER TABLE $t PARTITION (b='2') SET LOCATION '/mister/spark'") + } + assert(e.getMessage == "Partition spec is invalid. The spec (b) must match the partition " + + "spec (a, b) defined in table '`spark_catalog`.`ns`.`tbl`'") + } + } +} + +class AlterTableSetLocationSuite extends AlterTableSetLocationSuiteBase with CommandSuiteBase { + + override def buildCreateTableSQL(t: String): String = + s"CREATE TABLE $t (col1 int, col2 string, a int, b int) $defaultUsing PARTITIONED BY (a, b)" +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetLocationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetLocationSuite.scala new file mode 100644 index 00000000000..babd3bb3714 --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetLocationSuite.scala @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.v2 + +import scala.collection.JavaConverters._ + +import org.apache.spark.sql.AnalysisException +import org.apache.spark.sql.connector.catalog.{Identifier, Table} +import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.CatalogHelper +import org.apache.spark.sql.connector.catalog.CatalogV2Util.withDefaultOwnership +import org.apache.spark.sql.execution.command + +/** + * The class contains tests for the `ALTER TABLE .. SET LOCATION` command to + * check V2 table catalogs. + */ +class AlterTableSetLocationSuite + extends command.AlterTableSetLocationSuiteBase with CommandSuiteBase { + + private def getTableMetadata(tableName: String): Table = { + val nameParts = spark.sessionState.sqlParser.parseMultipartIdentifier(tableName) + val v2Catalog = spark.sessionState.catalogManager.catalog(nameParts.head).asTableCatalog + val namespace = nameParts.drop(1).init.toArray + v2Catalog.loadTable(Identifier.of(namespace, nameParts.last)) + } + + test("alter table set location") { + withNamespaceAndTable("ns", "tbl") { t => + sql(s"CREATE TABLE $t (id int) USING foo") + sql(s"ALTER TABLE $t SET LOCATION 's3://bucket/path'") + + val table = getTableMetadata(t) + + assert(table.name === t) + assert(table.properties === withDefaultOwnership( + Map("provider" -> "foo", "location" -> "s3://bucket/path")).asJava) + } + } + + test("alter table set partition location") { + withNamespaceAndTable("ns", "tbl") { t => + sql(s"CREATE TABLE $t (id int) USING foo") + + val e = intercept[AnalysisException] { + 
sql(s"ALTER TABLE $t PARTITION(ds='2017-06-10') SET LOCATION 's3://bucket/path'") + } + assert(e.getMessage.contains( + "ALTER TABLE SET LOCATION does not support partition for v2 tables")) + } + } +} diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala index 87773d502ae..363db9badaf 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala @@ -135,10 +135,6 @@ class HiveCatalogedDDLSuite extends DDLSuite with TestHiveSingleton with BeforeA ) } - test("alter table: set location") { - testSetLocation(isDatasourceTable = false) - } - test("alter table: set properties") { testSetProperties(isDatasourceTable = false) } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableSetLocationSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableSetLocationSuite.scala new file mode 100644 index 00000000000..c6ca65ac2bb --- /dev/null +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableSetLocationSuite.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.hive.execution.command + +import org.apache.spark.sql.execution.command.v1 + +/** + * The class contains tests for the `ALTER TABLE .. SET LOCATION` command to check + * V1 Hive external table catalog. + */ +class AlterTableSetLocationSuite extends v1.AlterTableSetLocationSuiteBase with CommandSuiteBase { + + override def buildCreateTableSQL(t: String): String = { + s"""CREATE TABLE $t (col1 int, col2 string, a int, b int) + |PARTITIONED BY (a, b) + |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' + |STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' + |OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'""".stripMargin + } +} --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org