[CARBONDATA-2089]SQL exception is masked due to assert(false) inside try catch and exception block always asserting true
Correct all SDV testcase to use intercept exception This closes #1871 Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/3dff273b Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/3dff273b Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/3dff273b Branch: refs/heads/fgdatamap Commit: 3dff273b4f1308fa76a91f6f22bb40eb2d2d9553 Parents: b2139ca Author: Raghunandan S <carbondatacontributi...@gmail.com> Authored: Sat Jan 27 20:49:47 2018 +0530 Committer: Jacky Li <jacky.li...@qq.com> Committed: Wed Jan 31 19:28:09 2018 +0800 ---------------------------------------------------------------------- .../sdv/generated/AlterTableTestCase.scala | 250 ++++++--------- .../sdv/generated/BatchSortLoad1TestCase.scala | 39 +-- .../sdv/generated/BatchSortLoad2TestCase.scala | 32 +- .../sdv/generated/BatchSortQueryTestCase.scala | 290 +++-------------- .../sdv/generated/BucketingTestCase.scala | 12 +- .../sdv/generated/ColumndictTestCase.scala | 60 +--- .../sdv/generated/DataLoadingIUDTestCase.scala | 318 ++++++++----------- .../sdv/generated/DataLoadingTestCase.scala | 7 +- .../sdv/generated/InvertedindexTestCase.scala | 14 +- .../sdv/generated/OffheapQuery1TestCase.scala | 287 +++-------------- .../sdv/generated/OffheapQuery2TestCase.scala | 286 +++-------------- .../sdv/generated/OffheapSort1TestCase.scala | 10 +- .../sdv/generated/OffheapSort2TestCase.scala | 10 +- .../sdv/generated/PartitionTestCase.scala | 71 ++--- .../sdv/generated/SinglepassTestCase.scala | 76 ++--- 15 files changed, 423 insertions(+), 1339 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/carbondata/blob/3dff273b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala ---------------------------------------------------------------------- diff --git 
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala index b1a0f34..8899f5c 100644 --- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala +++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala @@ -120,141 +120,107 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll { //Check alter table when the altered name is already present in the database test("RenameTable_001_08", Include) { - try { - sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx',1""").collect - sql(s"""create table test2 (name string, id int) stored by 'carbondata'""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx',1""").collect + sql(s"""create table test2 (name string, id int) stored by 'carbondata'""").collect sql(s"""alter table test1 RENAME TO test2""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect - sql(s"""drop table if exists test2""").collect + + sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test2""").collect } //Check alter table when the altered name is given multiple times test("RenameTable_001_09", Include) { - try { - sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx',1""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx',1""").collect sql(s"""alter table test1 RENAME TO test2 
test3""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check delete column for dimension column test("DeleteCol_001_01", Include) { - try { - sql(s"""create table test1 (name string, id int) stored by 'carbondata' TBLPROPERTIES('DICTIONARY_INCLUDE'='id') """).collect - sql(s"""insert into test1 select 'xx',1""").collect - sql(s"""alter table test1 drop columns (name)""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, id int) stored by 'carbondata' TBLPROPERTIES('DICTIONARY_INCLUDE'='id') """).collect + sql(s"""insert into test1 select 'xx',1""").collect + sql(s"""alter table test1 drop columns (name)""").collect sql(s"""select name from test1""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check delete column for measure column test("DeleteCol_001_02", Include) { - try { - sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx',1""").collect - sql(s"""alter table test1 drop columns (id)""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx',1""").collect + sql(s"""alter table test1 drop columns (id)""").collect sql(s"""select id from test1""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check delete column for measure and dimension column test("DeleteCol_001_03", Include) { - try { - sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect - sql(s"""alter table test1 
drop columns (id,name)""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect + sql(s"""alter table test1 drop columns (id,name)""").collect sql(s"""select id,name from test1""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check delete column for multiple column test("DeleteCol_001_04", Include) { - try { - sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata' TBLPROPERTIES('DICTIONARY_INCLUDE'='id')""").collect - sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect - sql(s"""alter table test1 drop columns (name, upd_time)""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata' TBLPROPERTIES('DICTIONARY_INCLUDE'='id')""").collect + sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect + sql(s"""alter table test1 drop columns (name, upd_time)""").collect sql(s"""select name, upd_time from test1""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check delete column for all columns test("DeleteCol_001_05", Include) { - try { - sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect - sql(s"""alter table test1 drop columns (name, upd_time, country,id)""").collect - assert(false) - } catch { - case _ => assert(true) - } - sql(s"""drop table if exists test1""").collect + sql(s"""create table test1 (name string, country string, 
upd_time timestamp, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect + sql(s"""alter table test1 drop columns (name, upd_time, country,id)""").collect + sql(s"""drop table if exists test1""").collect } //Check delete column for include dictionary column test("DeleteCol_001_06", Include) { - try { - sql(s"""create table test1 (name string, id int) stored by 'carbondata' TBLPROPERTIES('DICTIONARY_INCLUDE'='id')""").collect - sql(s"""insert into test1 select 'xx',1""").collect - sql(s"""alter table test1 drop columns (id)""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, id int) stored by 'carbondata' TBLPROPERTIES('DICTIONARY_INCLUDE'='id')""").collect + sql(s"""insert into test1 select 'xx',1""").collect + sql(s"""alter table test1 drop columns (id)""").collect sql(s"""select id from test1""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check delete column for timestamp column test("DeleteCol_001_08", Include) { - try { - sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect - sql(s"""alter table test1 drop columns (upd_time)""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect + sql(s"""alter table test1 drop columns (upd_time)""").collect sql(s"""select upd_time from test1""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } @@ -272,17 +238,13 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll 
{ //Check the drop of added column will remove the column from table test("DeleteCol_001_09_2", Include) { - try { + intercept[Exception] { sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect sql(s"""alter table test1 add columns (name2 string)""").collect sql(s"""insert into test1 select 'xx','yy',current_timestamp,1,'abc'""").collect sql(s"""alter table test1 drop columns (name2)""").collect sql(s"""select count(id) from test1 where name2 = 'abc'""").collect - - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists test1""").collect } @@ -451,16 +413,13 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll { //Check add column with option default value is given for an existing column test("AddColumn_001_14", Include) { - try { + intercept[Exception] { sql(s"""drop table if exists test1""").collect sql(s"""create table test1 (name string) stored by 'carbondata'""").collect sql(s"""insert into test1 select 'xx'""").collect sql(s"""ALTER TABLE test1 ADD COLUMNS (Id int) TBLPROPERTIES('DICTIONARY_INCLUDE'='id','default.value.name'='yy')""").collect - assert(false) - sql(s"""drop table if exists test1""").collect - } catch { - case _ => assert(true) } + sql(s"""drop table if exists test1""").collect } @@ -489,17 +448,14 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll { //check drop table after table rename using old name test("DropTable_001_02", Include) { - try { + intercept[Exception] { sql(s"""drop table if exists test1""").collect - sql(s"""create table test1 (name string, price decimal(3,2)) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx',1.2""").collect - sql(s"""alter table test1 rename to test2""").collect + sql(s"""create table test1 (name string, price decimal(3,2)) stored by 'carbondata'""").collect + sql(s"""insert into test1 
select 'xx',1.2""").collect + sql(s"""alter table test1 rename to test2""").collect sql(s"""drop table test1""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test2""").collect + sql(s"""drop table if exists test2""").collect } @@ -734,15 +690,12 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll { //Check show segments on old table After altering the Table name. test("Showsegme_001_01", Include) { - try { - sql(s"""create table test1 (country string, id int) stored by 'carbondata'""").collect - sql(s"""alter table test1 rename to test2""").collect + intercept[Exception] { + sql(s"""create table test1 (country string, id int) stored by 'carbondata'""").collect + sql(s"""alter table test1 rename to test2""").collect sql(s"""show segments for table test1""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test2""").collect + sql(s"""drop table if exists test2""").collect } @@ -828,65 +781,53 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll { //Check vertical compaction when all segments are created before drop column, check dropped column is not used in the compation test("Compaction_001_06", Include) { - try { - sql(s"""drop table if exists test1""").collect - sql(s"""drop table if exists test2""").collect - sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx','china',1""").collect - sql(s"""insert into test1 select 'xe','china',2""").collect - sql(s"""insert into test1 select 'xe','china',3""").collect - sql(s"""alter table test1 drop columns (country)""").collect - sql(s"""alter table test1 compact 'minor'""").collect + intercept[Exception] { + sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test2""").collect + sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect + 
sql(s"""insert into test1 select 'xx','china',1""").collect + sql(s"""insert into test1 select 'xe','china',2""").collect + sql(s"""insert into test1 select 'xe','china',3""").collect + sql(s"""alter table test1 drop columns (country)""").collect + sql(s"""alter table test1 compact 'minor'""").collect sql(s"""select country from test1 where country='china'""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check vertical compaction when some of the segments are created before drop column, check dropped column is not used in the compation test("Compaction_001_07", Include) { - try { - sql(s"""drop table if exists test1""").collect - sql(s"""drop table if exists test2""").collect - sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx','china',1""").collect - sql(s"""insert into test1 select 'xe','china',2""").collect - sql(s"""alter table test1 drop columns (country)""").collect - sql(s"""insert into test1 select 'xe',3""").collect - sql(s"""alter table test1 compact 'minor'""").collect + intercept[Exception] { + sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test2""").collect + sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx','china',1""").collect + sql(s"""insert into test1 select 'xe','china',2""").collect + sql(s"""alter table test1 drop columns (country)""").collect + sql(s"""insert into test1 select 'xe',3""").collect + sql(s"""alter table test1 compact 'minor'""").collect sql(s"""select country from test1 where country='china'""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } //Check vertical compaction for multiple drop 
column, check dropped column is not used in the compation test("Compaction_001_08", Include) { - try { - sql(s"""drop table if exists test1""").collect - sql(s"""drop table if exists test2""").collect - sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx','china',1""").collect - sql(s"""alter table test1 drop columns (country)""").collect - sql(s"""insert into test1 select 'xe',3""").collect - sql(s"""alter table test1 drop columns (id)""").collect - sql(s"""insert into test1 select 'xe'""").collect - sql(s"""alter table test1 compact 'minor'""").collect + intercept[Exception] { + sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test2""").collect + sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect + sql(s"""insert into test1 select 'xx','china',1""").collect + sql(s"""alter table test1 drop columns (country)""").collect + sql(s"""insert into test1 select 'xe',3""").collect + sql(s"""alter table test1 drop columns (id)""").collect + sql(s"""insert into test1 select 'xe'""").collect + sql(s"""alter table test1 compact 'minor'""").collect sql(s"""select country from test1 where id=1""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test1""").collect + sql(s"""drop table if exists test1""").collect } @@ -989,17 +930,14 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll { //Check delete segment is not allowed on old table name when table name is altered test("DeleteSeg_001_01", Include) { - try { - sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect - sql(s"""insert into test1 select 'xx',1""").collect - sql(s"""insert into test1 select 'xx',12""").collect - sql(s"""alter table test1 rename to test2""").collect + intercept[Exception] { + sql(s"""create table test1 (name string, id int) stored by 
'carbondata'""").collect + sql(s"""insert into test1 select 'xx',1""").collect + sql(s"""insert into test1 select 'xx',12""").collect + sql(s"""alter table test1 rename to test2""").collect sql(s"""delete from table test1 where segment.id in (0)""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table if exists test2""").collect + sql(s"""drop table if exists test2""").collect } http://git-wip-us.apache.org/repos/asf/carbondata/blob/3dff273b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala index 9eb5dec..d301218 100644 --- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala +++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala @@ -68,27 +68,21 @@ class BatchSortLoad1TestCase extends QueryTest with BeforeAndAfterAll { //To load data after setting sort scope and sort size in carbon property file without folder path in load test("Batch_sort_Loading_001-01-01-01_001-TC_004", Include) { - try { - sql(s"""CREATE TABLE uniqdata13 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect + intercept[Exception] { + sql(s"""CREATE TABLE uniqdata13 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, 
BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect sql(s"""LOAD DATA into table uniqdata13 OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table uniqdata13""").collect + sql(s"""drop table uniqdata13""").collect } //To load data after setting sort scope and sort size in carbon property file without table_name in load test("Batch_sort_Loading_001-01-01-01_001-TC_005", Include) { - try { - sql(s"""CREATE TABLE uniqdata14 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect + intercept[Exception] { + sql(s"""CREATE TABLE uniqdata14 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table uniqdata14""").collect + sql(s"""drop table uniqdata14""").collect } @@ -232,14 +226,11 @@ class 
BatchSortLoad1TestCase extends QueryTest with BeforeAndAfterAll { //To load data after setting sort scope and sort size in carbon property file with ALL_DICTIONARY_PATH test("Batch_sort_Loading_001-01-01-01_001-TC_019", Include) { sql(s"""drop table if exists t3""").collect - try { + intercept[Exception] { sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('ALL_DICTIONARY_PATH'='resourcesPath/Data/batchsort/data.dictionary')""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table t3""").collect + sql(s"""drop table t3""").collect } @@ -260,22 +251,16 @@ class BatchSortLoad1TestCase extends QueryTest with BeforeAndAfterAll { //To check sort_scope option with a wrong value test("Batch_sort_Loading_001-01-01-01_001-TC_023", Include) { - try { - sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='ABCXYZ')""").collect - assert(false) - } catch { - case _ => assert(true) + intercept[Exception] { + sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='ABCXYZ')""").collect } } //To check sort_scope option with null value test("Batch_sort_Loading_001-01-01-01_001-TC_024", Include) { - try { - sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME 
String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='null')""").collect - assert(false) - } catch { - case _ => assert(true) + intercept[Exception] { + sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='null')""").collect } } http://git-wip-us.apache.org/repos/asf/carbondata/blob/3dff273b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala index 5fa6594..d3ff6aa 100644 --- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala +++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala @@ -69,27 +69,21 @@ class BatchSortLoad2TestCase extends QueryTest with BeforeAndAfterAll { //To load data after setting only sort scope in carbon property file without folder path in load test("Batch_sort_Loading_001-01-01-01_001-TC_030", Include) { - try { - sql(s"""CREATE TABLE uniqdata13 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 
bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect + intercept[Exception] { + sql(s"""CREATE TABLE uniqdata13 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect sql(s"""LOAD DATA into table uniqdata13 OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table uniqdata13""").collect + sql(s"""drop table uniqdata13""").collect } //To load data after setting only sort scope in carbon property file without table_name in load test("Batch_sort_Loading_001-01-01-01_001-TC_031", Include) { - try { - sql(s"""CREATE TABLE uniqdata14 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect + intercept[Exception] { + sql(s"""CREATE TABLE uniqdata14 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table OPTIONS('DELIMITER'=',' , 
'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table uniqdata14""").collect + sql(s"""drop table uniqdata14""").collect } @@ -255,12 +249,9 @@ class BatchSortLoad2TestCase extends QueryTest with BeforeAndAfterAll { //To check sort_scope option with a wrong value test("Batch_sort_Loading_001-01-01-01_001-TC_049", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='ABCXYZ',‘SINGLE_PASS’=‘true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table uniqdata20a""").collect } @@ -268,14 +259,11 @@ class BatchSortLoad2TestCase extends QueryTest with BeforeAndAfterAll { //To check sort_scope option with null value test("Batch_sort_Loading_001-01-01-01_001-TC_050", Include) { - try { - sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 
'carbondata'""").collect + intercept[Exception] { + sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='null',‘SINGLE_PASS’=‘true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table uniqdata20a""").collect + sql(s"""drop table uniqdata20a""").collect } val prop = CarbonProperties.getInstance() http://git-wip-us.apache.org/repos/asf/carbondata/blob/3dff273b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala index cdebf51..11b060a 100644 --- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala +++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala @@ -44,15 +44,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check select query with 
limit as string test("Batch_sort_Querying_001-01-01-01_001-TC_002", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 limit """"").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -110,57 +104,33 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check where clause with OR and no operand test("Batch_sort_Querying_001-01-01-01_001-TC_009", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id > 1 OR """).collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check OR clause with LHS and RHS having no arguments test("Batch_sort_Querying_001-01-01-01_001-TC_010", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where OR """).collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check OR clause with LHS having no arguments test("Batch_sort_Querying_001-01-01-01_001-TC_011", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where OR cust_id > "1"""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check incorrect query test("Batch_sort_Querying_001-01-01-01_001-TC_013", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id > 0 OR name """).collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -229,15 +199,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check select count and distinct query execution test("Batch_sort_Querying_001-01-01-01_001-TC_021", Include) { - try { - + intercept[Exception] { sql(s"""select count(cust_id),distinct(cust_name) from uniqdataquery1""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -279,15 +243,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check query execution with IN operator without paranthesis 
test("Batch_sort_Querying_001-01-01-01_001-TC_027", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id IN 9000,9005""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -302,15 +260,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check query execution with IN operator with out specifying any field. test("Batch_sort_Querying_001-01-01-01_001-TC_029", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where IN(1,2)""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -352,15 +304,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check AND with using booleans in invalid syntax test("Batch_sort_Querying_001-01-01-01_001-TC_034", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where AND true""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -384,15 +330,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check AND using 0 and 1 treated as boolean values test("Batch_sort_Querying_001-01-01-01_001-TC_037", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where true aNd 0""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -416,29 +356,17 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check '='operator without Passing any value test("Batch_sort_Querying_001-01-01-01_001-TC_040", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id=""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check '='operator without Passing columnname and value. 
test("Batch_sort_Querying_001-01-01-01_001-TC_041", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where =""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -453,15 +381,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check '!='operator by keeping space between them test("Batch_sort_Querying_001-01-01-01_001-TC_043", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id ! = 9001""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -476,29 +398,17 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check '!='operator without providing any value test("Batch_sort_Querying_001-01-01-01_001-TC_045", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id != """).collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check '!='operator without providing any column name test("Batch_sort_Querying_001-01-01-01_001-TC_046", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where != false""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -540,43 +450,25 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check 'NOT' operator in nested way test("Batch_sort_Querying_001-01-01-01_001-TC_051", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id NOT (NOT(true))""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check 'NOT' operator with parenthesis. test("Batch_sort_Querying_001-01-01-01_001-TC_052", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id NOT ()""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check 'NOT' operator without condition. 
test("Batch_sort_Querying_001-01-01-01_001-TC_053", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id NOT""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -591,29 +483,17 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check '>' operator without specifying column test("Batch_sort_Querying_001-01-01-01_001-TC_055", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where > 20""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check '>' operator without specifying value test("Batch_sort_Querying_001-01-01-01_001-TC_056", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id > """).collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -646,15 +526,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check '<' operator without specifying column test("Batch_sort_Querying_001-01-01-01_001-TC_060", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where < 5""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -678,29 +552,17 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check '<=' operator without specifying column test("Batch_sort_Querying_001-01-01-01_001-TC_063", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where <= 2""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check '<=' operator without providing value test("Batch_sort_Querying_001-01-01-01_001-TC_064", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id <= """).collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -715,29 +577,17 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check '<=' operator adding space 
between'<' and '=' test("Batch_sort_Querying_001-01-01-01_001-TC_066", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id < = 9002""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check 'BETWEEN' operator without providing range test("Batch_sort_Querying_001-01-01-01_001-TC_067", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where age between""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -797,29 +647,17 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check 'IS NULL' without providing column test("Batch_sort_Querying_001-01-01-01_001-TC_074", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where Is NulL""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check 'IS NOT NULL' without providing column test("Batch_sort_Querying_001-01-01-01_001-TC_075", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where IS NOT NULL""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -852,29 +690,17 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check Limit clause with where condition and no argument test("Batch_sort_Querying_001-01-01-01_001-TC_079", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id=10987 limit""").collect - - assert(false) - } catch { - case _ => assert(true) } - } //To check Limit clause with where condition and decimal argument test("Batch_sort_Querying_001-01-01-01_001-TC_080", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id=10987 limit 0.0""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -927,15 +753,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check Full join 
test("Batch_sort_Querying_001-01-01-01_001-TC_086", Include) { - try { - + intercept[Exception] { sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 FULL JOIN uniqdataquery11 where CUST_ID""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -1022,15 +842,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check SORT using 'AND' on multiple column test("Batch_sort_Querying_001-01-01-01_001-TC_097", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_name desc and cust_id asc""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -1054,15 +868,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check average aggregate function with no arguments test("Batch_sort_Querying_001-01-01-01_001-TC_100", Include) { - try { - + intercept[Exception] { sql(s"""select cust_id,avg() from uniqdataquery1 group by cust_id""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -1077,15 +885,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check nested average aggregate function test("Batch_sort_Querying_001-01-01-01_001-TC_102", Include) { - try { - + intercept[Exception] { sql(s"""select cust_id,avg(count(cust_id)) from uniqdataquery1 group by cust_id""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -1172,15 +974,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check Order by without column name test("Batch_sort_Querying_001-01-01-01_001-TC_112", Include) { - try { - + intercept[Exception] { sql(s"""select * from uniqdataquery1 order by ASC""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -1222,15 +1018,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check Using window without partition test("Batch_sort_Querying_001-01-01-01_001-TC_117", 
Include) { - try { - + intercept[Exception] { sql(s"""select cust_name, sum(bigint_column1) OVER w from uniqdataquery1 WINDOW w""").collect - - assert(false) - } catch { - case _ => assert(true) } - } @@ -1245,15 +1035,9 @@ class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll { //To check Using ROLLUP without group by clause test("Batch_sort_Querying_001-01-01-01_001-TC_119", Include) { - try { - + intercept[Exception] { sql(s"""select cust_name from uniqdataquery1 with ROLLUP""").collect - - assert(false) - } catch { - case _ => assert(true) } - sql(s"""drop table uniqdataquery1""").collect + sql(s"""drop table uniqdataquery1""").collect } - } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/carbondata/blob/3dff273b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala index 78f8945..501b089 100644 --- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala +++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala @@ -40,28 +40,20 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll { } test("test exception if bucketcolumns be measure column") { - try { + intercept[Exception] { sql("DROP TABLE IF EXISTS bucket_table") sql("CREATE TABLE bucket_table (ID Int, date Timestamp, country String, name String, phonetype String," + "serialname String, salary Int) STORED BY 'carbondata' TBLPROPERTIES " + "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='ID')") - assert(false) - } - catch { - case _ => assert(true) } 
} test("test exception if bucketcolumns be complex data type column") { - try { + intercept[Exception] { sql("DROP TABLE IF EXISTS bucket_table") sql("CREATE TABLE bucket_table (Id int, number double, name string, " + "gamePoint array<double>, mac struct<num:double>) STORED BY 'carbondata' TBLPROPERTIES" + "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='gamePoint')") - assert(false) - } - catch { - case _ => assert(true) } } http://git-wip-us.apache.org/repos/asf/carbondata/blob/3dff273b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala index f702254..c8e8f1b 100644 --- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala +++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala @@ -55,12 +55,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Load using external columndict for CSV having incomplete/wrong data/no data/null data test("Columndict-TC004", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/data.csv' into table t3 options('COLUMNDICT'='country:$resourcesPath/Data/columndict/inValidData.csv', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -197,12 +194,9 @@ class ColumndictTestCase extends 
QueryTest with BeforeAndAfterAll { //Load using external columndict for table with measure and tableproperties(DICTIONARY_EXCLUDE, DICTIONARY_INCLUDE, BLOCKSIZE) test("Columndict-TC020", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata' TBLPROPERTIES ('TABLE_BLOCKSIZE'= '256 MB','DICTIONARY_EXCLUDE'='country')""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/data.csv' into table t3 options('COLUMNDICT'='country:'resourcesPath/Data/columndict/country.csv', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -210,12 +204,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Columndict parameter name validation ignore("Columndict-TC021", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata' TBLPROPERTIES ('TABLE_BLOCKSIZE'= '256 MB','DICTIONARY_EXCLUDE'='country')""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/data.csv' into table t3 options('COLUMNDICT'='countries:$resourcesPath/Data/columndict/country.csv', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -223,12 +214,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Columndict parameter value validation test("Columndict-TC022", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/data.csv' into table t3 
options('COLUMNDICT'='salary:$resourcesPath/Data/columndict/country.csv', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -236,12 +224,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for data validation in csv(empty/null/wrong data) for all_dictionary_path ignore("Columndict-TC023", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('ALL_DICTIONARY_PATH'='$resourcesPath/Data/columndict/inValidData.dictionary', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -249,12 +234,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for data validation in csv(empty/null/wrong data) for columndict test("Columndict-TC024", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('COLUMNDICT'='country:'resourcesPath/Data/columndict/inValidData.csv', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -262,12 +244,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for validation of external all_dictionary_path folder with incorrect path test("Columndict-TC025", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, 
serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('ALL_DICTIONARY_PATH'=''resourcesPath/Data/*.dictionary', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -275,12 +254,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for validation of external all_dictionary_path folder with correct path test("Columndict-TC026", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('ALL_DICTIONARY_PATH'='$resourcesPath/Data/columndict/*.dictionary', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -288,12 +264,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for validation of external columndict folder with correct path test("Columndict-TC027", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('COLUMNDICT'='country:'resourcesPath/Data/columndict/*.csv', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -301,12 +274,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for validation of external all_dictionary_path file( missing /wrong path / wrong name) 
test("Columndict-TC028", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('ALL_DICTIONARY_PATH'=''resourcesPath/Data/columndict/wrongName.dictionary', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -314,12 +284,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for validation of external columndict file( missing /wrong path / wrong name) test("Columndict-TC029", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('COLUMNDICT'='country:'resourcesPath/Data/columndict/wrongName.csv', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect } @@ -335,12 +302,9 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll { //Check for different dictionary file extensions for columndict test("Columndict-TC031", Include) { - try { + intercept[Exception] { sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/inValidData.csv' into table t3 options('COLUMNDICT'='country:$resourcesPath/Data/columndict/country.txt', 'SINGLE_PASS'='true')""").collect - assert(false) - } catch { - case _ => assert(true) } sql(s"""drop table if exists t3""").collect }