Github user kunal642 commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1856#discussion_r174059512
  
    --- Diff: 
integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
 ---
    @@ -354,21 +390,137 @@ class TestTimeSeriesCreateTable extends QueryTest 
with BeforeAndAfterAll {
         checkExistence(sql("DESC FORMATTED mainTable_agg1"), true, 
"maintable_age_sum")
       }
     
    +  test("test timeseries create table 32: should support if not exists, 
create when same table not exists") {
    +    sql("DROP DATAMAP IF EXISTS agg1_year ON TABLE mainTable")
    +    sql(
    +      s"""
    +         |CREATE DATAMAP if not exists agg1_year ON TABLE mainTable
    +         |USING '$timeSeries'
    +         |DMPROPERTIES (
    +         |   'event_time'='dataTime',
    +         |   'YEAR_GRANULARITY'='1')
    +         |AS SELECT dataTime, SUM(age) FROM mainTable
    +         |GROUP BY dataTime
    +        """.stripMargin)
    +    checkExistence(sql("SHOW DATAMAP ON TABLE mainTable"), true, 
"agg1_year")
    +    checkExistence(sql("DESC FORMATTED mainTable_agg1_year"), true, 
"maintable_age_sum")
    +  }
    +
       test("test timeseries create table 20: don't support 'create datamap if 
exists'") {
         val e: Exception = intercept[AnalysisException] {
           sql(
             s"""CREATE DATAMAP IF EXISTS agg2 ON TABLE mainTable
    -          | USING '$timeSeries'
    -          | DMPROPERTIES (
    -          |   'EVENT_TIME'='dataTime',
    -          |   'MONTH_GRANULARITY'='1')
    -          | AS SELECT dataTime, SUM(age) FROM mainTable
    -          | GROUP BY dataTime
    +           | USING '$timeSeries'
    +           | DMPROPERTIES (
    +           |   'EVENT_TIME'='dataTime',
    +           |   'MONTH_GRANULARITY'='1')
    +           | AS SELECT dataTime, SUM(age) FROM mainTable
    +           | GROUP BY dataTime
             """.stripMargin)
         }
         assert(e.getMessage.contains("identifier matching regex"))
       }
     
    +  test("test timeseries create table 26: test different data type") {
    +    sql("drop table if exists dataTable")
    +    sql(
    +      s"""
    +         | CREATE TABLE dataTable(
    +         | shortField SHORT,
    +         | booleanField BOOLEAN,
    +         | intField INT,
    +         | bigintField LONG,
    +         | doubleField DOUBLE,
    +         | stringField STRING,
    +         | decimalField DECIMAL(18,2),
    +         | charField CHAR(5),
    +         | floatField FLOAT,
    +         | dataTime timestamp
    +         | )
    +         | STORED BY 'carbondata'
    +       """.stripMargin)
    +
    +
    +    sql(
    +      s"""CREATE DATAMAP agg0_hour ON TABLE dataTable
    +         | USING '$timeSeries'
    +         | DMPROPERTIES (
    +         |   'event_time'='dataTime',
    +         |   'HOUR_GRANULARITY'='1')
    +         | AS SELECT
    +         |   dataTime,
    +         |   SUM(intField),
    +         |   shortField,
    +         |   booleanField,
    +         |   intField,
    +         |   bigintField,
    +         |   doubleField,
    +         |   stringField,
    +         |   decimalField,
    +         |   charField,
    +         |   floatField
    +         | FROM dataTable
    +         | GROUP BY
    +         |   dataTime,
    +         |   shortField,
    +         |   booleanField,
    +         |   intField,
    +         |   bigintField,
    +         |   doubleField,
    +         |   stringField,
    +         |   decimalField,
    +         |   charField,
    +         |   floatField
    +        """.stripMargin)
    +    checkExistence(sql("SHOW DATAMAP ON TABLE dataTable"), true, 
"datatable_agg0_hour")
    +    sql("DROP TABLE IF EXISTS dataTable")
    +  }
    +
    +  test("test timeseries create table 27: test data map name") {
    --- End diff --
    
    This is the same as "test timeseries create table 32". Please remove it so 
we avoid adding duplicate test cases.


---

Reply via email to