Kejian-Li commented on a change in pull request #3947: URL: https://github.com/apache/carbondata/pull/3947#discussion_r494964752
########## File path: integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala ########## @@ -170,17 +146,19 @@ class TestInsertAndOtherCommandConcurrent extends QueryTest with BeforeAndAfterA } test("alter rename table should fail if insert overwrite is in progress") { - val future = runSqlAsync("insert overwrite table orders select * from orders_overwrite") + sql("drop table if exists other_orders") + val future = AsyncExecutorUtils.runSqlAsync("insert overwrite table orders select * from orders_overwrite") val ex = intercept[ConcurrentOperationException] { - sql("alter table orders rename to other") + sql("alter table orders rename to other_orders") Review comment: done ########## File path: integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala ########## @@ -92,54 +77,46 @@ class TestInsertAndOtherCommandConcurrent extends QueryTest with BeforeAndAfterA private def createTable(tableName: String, schema: StructType): Unit = { val schemaString = schema.fields.map(x => x.name + " " + x.dataType.typeName).mkString(", ") sql(s"CREATE TABLE $tableName ($schemaString) stored as carbondata tblproperties" + - s"('sort_scope'='local_sort','sort_columns'='o_country,o_name,o_phonetype,o_serialname," + - s"o_comment')") - } - - override def afterAll { - executorService.shutdownNow() - dropTable() + s"('sort_scope'='local_sort','sort_columns'='o_country,o_name,o_phonetype,o_serialname," + Review comment: done ########## File path: integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala ########## @@ -68,8 +53,8 @@ class TestInsertAndOtherCommandConcurrent extends QueryTest with BeforeAndAfterA .mode(SaveMode.Overwrite) .save() - sql(s"insert into orders select * from temp_table") - sql(s"insert into orders_overwrite select * from temp_table") + sql(s"insert into orders select * from temp_table") // load_0 success + sql(s"insert into orders_overwrite select * from temp_table") // load_0 success Review comment: done ########## File path: integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala ########## @@ -68,8 +53,8 @@ class TestInsertAndOtherCommandConcurrent extends QueryTest with BeforeAndAfterA .mode(SaveMode.Overwrite) .save() - sql(s"insert into orders select * from temp_table") - sql(s"insert into orders_overwrite select * from temp_table") + sql(s"insert into orders select * from temp_table") // load_0 success Review comment: done ########## File path: integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala ########## @@ -18,21 +18,18 @@ package org.apache.carbondata.spark.testsuite.iud import java.io.File -import org.apache.spark.sql.hive.CarbonRelation -import org.apache.spark.sql.test.SparkTestQueryExecutor -import org.apache.spark.sql.test.util.QueryTest -import org.apache.spark.sql.{CarbonEnv, Row, SaveMode} -import org.scalatest.BeforeAndAfterAll - import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException import org.apache.carbondata.core.constants.CarbonCommonConstants -import org.apache.carbondata.core.index.Segment import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, CarbonFileFilter} import org.apache.carbondata.core.datastore.impl.FileFactory import org.apache.carbondata.core.mutate.CarbonUpdateUtil -import org.apache.carbondata.core.util.CarbonProperties -import org.apache.carbondata.core.util.CarbonUtil import org.apache.carbondata.core.util.path.CarbonTablePath +import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil} +import org.apache.spark.sql.hive.CarbonRelation Review comment: done ---------------------------------------------------------------- This is an automated message from the Apache Git Service. 
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org