This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 384a0c7aa7 [fix](testcases) Fix some unstable testcases. (#19956)
384a0c7aa7 is described below

commit 384a0c7aa7a48e72d443c8d8f3f99caee2d1e3f9
Author: zclllyybb <[email protected]>
AuthorDate: Wed May 24 09:52:02 2023 +0800

    [fix](testcases) Fix some unstable testcases. (#19956)
    
    The test_string_concat_extremely_long_string case exceeds the limits of our test environment. Move it to p2 so that it is only run in the SelectDB test environment.
    Because we need to stay consistent with MySQL and avoid overflow, q67 must keep its current behavior for now; once Nereids and DecimalV3 are fully applied it will be fixed automatically, so it is temporarily excluded in p1.
    In the parallel test run, even though all query stats were cleaned, cases running in parallel can still affect the counters, so query_stats_test now uses its own dedicated table instead of sharing baseall.
    test_query_sys_tables did not handle some unstable situations (unordered results and a possibly pre-existing user); fix it.
    Temporarily disable the unstable analyze_test case for p0.
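
    A minimal sketch of the stabilization pattern applied here, written in the style of the regression-test Groovy DSL used in this repository (the suite, table, and column names below are illustrative placeholders, not part of this patch):

        // Sketch only: a suite-private table plus ordered output keeps the .out
        // comparison stable even when other suites run in parallel.
        suite("query_stats_isolation_demo") {
            def tbName = "query_stats_isolation_demo_tbl"   // unique to this suite
            sql "DROP TABLE IF EXISTS ${tbName}"
            sql """
                CREATE TABLE ${tbName} (
                    `k1` int NULL
                ) DUPLICATE KEY(`k1`)
                DISTRIBUTED BY HASH(`k1`) BUCKETS 1
                properties("replication_num" = "1")
                """
            sql "INSERT INTO ${tbName} VALUES (1), (2)"
            // order by makes the qt_ result deterministic regardless of scan order
            qt_sql "select k1 from ${tbName} order by k1"
        }
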
---
 .../nereids_p0/system/test_query_sys_tables.out    |  12 +--
 .../data/query_p0/stats/query_stats_test.out       |   4 +-
 .../pipeline/p0/conf/regression-conf.groovy        |   2 +-
 .../pipeline/p1/conf/regression-conf.groovy        |   2 +
 .../nereids_p0/system/test_query_sys_tables.groovy |  16 ++--
 .../suites/query_p0/stats/query_stats_test.groovy  |  40 +++++---
 .../query_p0/system/test_query_sys_tables.groovy   |  10 +-
 ...test_string_concat_extremely_long_string.groovy |   0
 regression-test/suites/tpcds_sf1_p2/load.groovy    | 102 +++++++++++++++++++++
 9 files changed, 156 insertions(+), 32 deletions(-)

diff --git a/regression-test/data/nereids_p0/system/test_query_sys_tables.out b/regression-test/data/nereids_p0/system/test_query_sys_tables.out
index 5e6e5e527d..766a8e29cd 100644
--- a/regression-test/data/nereids_p0/system/test_query_sys_tables.out
+++ b/regression-test/data/nereids_p0/system/test_query_sys_tables.out
@@ -20,14 +20,14 @@ internal    ccc     3       int     int(11) 10
 internal       ddd     4       smallint        smallint(6)     5
 
 -- !schemata --
-internal       test_query_sys_db_1     \N
-internal       test_query_sys_db_2     \N
-internal       test_query_sys_db_3     \N
+internal       test_query_sys_db_4     \N
+internal       test_query_sys_db_5     \N
+internal       test_query_sys_db_6     \N
 
 -- !tables --
-internal       test_query_sys_tb_1     BASE TABLE      0       \N      \N
-internal       test_query_sys_tb_2     BASE TABLE      0       \N      \N
-internal       test_query_sys_tb_3     BASE TABLE      0       \N      \N
+internal       test_query_sys_tb_4     BASE TABLE      0       \N      \N
+internal       test_query_sys_tb_5     BASE TABLE      0       \N      \N
+internal       test_query_sys_tb_6     BASE TABLE      0       \N      \N
 
 -- !session_variables --
 wait_timeout   30000
diff --git a/regression-test/data/query_p0/stats/query_stats_test.out b/regression-test/data/query_p0/stats/query_stats_test.out
index 350aa6d608..06d6727253 100644
--- a/regression-test/data/query_p0/stats/query_stats_test.out
+++ b/regression-test/data/query_p0/stats/query_stats_test.out
@@ -32,10 +32,10 @@ k12 0       0
 k13    0       0
 
 -- !sql --
-baseall_stat   2
+stats_table    2
 
 -- !sql --
-baseall_stat   k0      1       1
+stats_table    k0      1       1
        k1      1       0
        k2      1       1
        k3      0       0
diff --git a/regression-test/pipeline/p0/conf/regression-conf.groovy b/regression-test/pipeline/p0/conf/regression-conf.groovy
index e86b61a237..dfd0058092 100644
--- a/regression-test/pipeline/p0/conf/regression-conf.groovy
+++ b/regression-test/pipeline/p0/conf/regression-conf.groovy
@@ -48,7 +48,7 @@ testDirectories = ""
 // this groups will not be executed
 excludeGroups = ""
 // this suites will not be executed
-excludeSuites = "test_broker_load,test_spark_load"
+excludeSuites = "test_broker_load,test_spark_load,analyze_test"
 // this directories will not be executed
 excludeDirectories = ""
 
diff --git a/regression-test/pipeline/p1/conf/regression-conf.groovy b/regression-test/pipeline/p1/conf/regression-conf.groovy
index 253e5eee5e..e1452d81a4 100644
--- a/regression-test/pipeline/p1/conf/regression-conf.groovy
+++ b/regression-test/pipeline/p1/conf/regression-conf.groovy
@@ -40,6 +40,8 @@ dataPath = "${DORIS_HOME}/regression-test/data"
 testGroups = ""
 // empty suite will test all suite
 testSuites = ""
+// this suites will not be executed
+excludeSuites = "q67_ignore_temporarily"
 
 cacheDataPath="/data/regression/"
 
diff --git a/regression-test/suites/nereids_p0/system/test_query_sys_tables.groovy b/regression-test/suites/nereids_p0/system/test_query_sys_tables.groovy
index ea9c570134..7b96e1dc32 100644
--- a/regression-test/suites/nereids_p0/system/test_query_sys_tables.groovy
+++ b/regression-test/suites/nereids_p0/system/test_query_sys_tables.groovy
@@ -18,12 +18,12 @@
 suite("test_query_sys_tables", "query,p0") {
     sql "SET enable_nereids_planner=true"
     sql "SET enable_fallback_to_original_planner=false"
-    def dbName1 = "test_query_sys_db_1"
-    def dbName2 = "test_query_sys_db_2"
-    def dbName3 = "test_query_sys_db_3"
-    def tbName1 = "test_query_sys_tb_1"
-    def tbName2 = "test_query_sys_tb_2"
-    def tbName3 = "test_query_sys_tb_3"
+    def dbName1 = "test_query_sys_db_4"
+    def dbName2 = "test_query_sys_db_5"
+    def dbName3 = "test_query_sys_db_6"
+    def tbName1 = "test_query_sys_tb_4"
+    def tbName2 = "test_query_sys_tb_5"
+    def tbName3 = "test_query_sys_tb_6"
     sql("drop database IF EXISTS ${dbName1}")
     sql("drop database IF EXISTS ${dbName2}")
     sql("drop database IF EXISTS ${dbName3}")
@@ -103,7 +103,7 @@ suite("test_query_sys_tables", "query,p0") {
     sql("CREATE DATABASE IF NOT EXISTS ${dbName3}")
 
     sql("use information_schema")
-    qt_schemata("select CATALOG_NAME, SCHEMA_NAME, SQL_PATH from schemata 
where SCHEMA_NAME = '${dbName1}' or SCHEMA_NAME = '${dbName2}' or SCHEMA_NAME = 
'${dbName3}'");
+    qt_schemata("select CATALOG_NAME, SCHEMA_NAME, SQL_PATH from schemata 
where SCHEMA_NAME = '${dbName1}' or SCHEMA_NAME = '${dbName2}' or SCHEMA_NAME = 
'${dbName3}' order by SCHEMA_NAME");
 
     // test statistics
     // have no impl
@@ -162,7 +162,7 @@ suite("test_query_sys_tables", "query,p0") {
     """
 
     sql("use information_schema")
-    qt_tables("select TABLE_CATALOG, TABLE_NAME, TABLE_TYPE, AVG_ROW_LENGTH, 
MAX_DATA_LENGTH, INDEX_LENGTH from tables where TABLE_SCHEMA = '${dbName1}' or 
TABLE_SCHEMA = '${dbName2}' or TABLE_SCHEMA = '${dbName3}'");
+    qt_tables("select TABLE_CATALOG, TABLE_NAME, TABLE_TYPE, AVG_ROW_LENGTH, 
MAX_DATA_LENGTH, INDEX_LENGTH from tables where TABLE_SCHEMA = '${dbName1}' or 
TABLE_SCHEMA = '${dbName2}' or TABLE_SCHEMA = '${dbName3}' order by 
TABLE_NAME");
 
     // test variables
     // session_variables
diff --git a/regression-test/suites/query_p0/stats/query_stats_test.groovy b/regression-test/suites/query_p0/stats/query_stats_test.groovy
index a9faa0bee0..79656b13ac 100644
--- a/regression-test/suites/query_p0/stats/query_stats_test.groovy
+++ b/regression-test/suites/query_p0/stats/query_stats_test.groovy
@@ -16,25 +16,41 @@
 // under the License.
 
 suite("query_stats_test") {
-    sql "use test_query_db"
-    sql "create table baseall_stat like baseall"
-    sql "insert into baseall_stat select * from baseall"
-
+    def tbName = "stats_table"
+    sql """ DROP TABLE IF EXISTS ${tbName} """
+    sql """
+        CREATE TABLE IF NOT EXISTS ${tbName} (
+            `k0` boolean null comment "",
+            `k1` tinyint(4) null comment "",
+            `k2` smallint(6) null comment "",
+            `k3` int(11) null comment "",
+            `k4` bigint(20) null comment "",
+            `k5` decimal(9, 3) null comment "",
+            `k6` char(5) null comment "",
+            `k10` date null comment "",
+            `k11` datetime null comment "",
+            `k7` varchar(20) null comment "",
+            `k8` double max null comment "",
+            `k9` float sum null comment "",
+            `k12` string replace null comment "",
+            `k13` largeint(40) replace null comment ""
+        ) engine=olap
+        DISTRIBUTED BY HASH(`k1`) BUCKETS 1 properties("replication_num" = "1")
+        """
     sql "admin set frontend config (\"enable_query_hit_stats\"=\"true\");"
     sql "clean all query stats"
 
     explain {
-        sql("select k1 from baseall_stat where k1 = 1")
+        sql("select k1 from ${tbName} where k1 = 1")
     }
 
-    qt_sql "show query stats from baseall_stat"
+    qt_sql "show query stats from ${tbName}"
 
-    sql "select k1 from baseall_stat where k0 = 1"
-    sql "select k4 from baseall_stat where k2 = 1991"
+    sql "select k1 from ${tbName} where k0 = 1"
+    sql "select k4 from ${tbName} where k2 = 1991"
 
-    qt_sql "show query stats from baseall_stat"
-    qt_sql "show query stats from baseall_stat all"
-    qt_sql "show query stats from baseall_stat all verbose"
+    qt_sql "show query stats from ${tbName}"
+    qt_sql "show query stats from ${tbName} all"
+    qt_sql "show query stats from ${tbName} all verbose"
     sql "admin set frontend config (\"enable_query_hit_stats\"=\"false\");"
-    sql "drop table baseall_stat"
 }
diff --git a/regression-test/suites/query_p0/system/test_query_sys_tables.groovy b/regression-test/suites/query_p0/system/test_query_sys_tables.groovy
index 190c2c7475..7c35837398 100644
--- a/regression-test/suites/query_p0/system/test_query_sys_tables.groovy
+++ b/regression-test/suites/query_p0/system/test_query_sys_tables.groovy
@@ -101,7 +101,7 @@ suite("test_query_sys_tables", "query,p0") {
     sql("CREATE DATABASE IF NOT EXISTS ${dbName3}")
 
     sql("use information_schema")
-    qt_schemata("select CATALOG_NAME, SCHEMA_NAME, SQL_PATH from schemata 
where SCHEMA_NAME = '${dbName1}' or SCHEMA_NAME = '${dbName2}' or SCHEMA_NAME = 
'${dbName3}'");
+    qt_schemata("select CATALOG_NAME, SCHEMA_NAME, SQL_PATH from schemata 
where SCHEMA_NAME = '${dbName1}' or SCHEMA_NAME = '${dbName2}' or SCHEMA_NAME = 
'${dbName3}' order by SCHEMA_NAME");
 
     // test statistics
     // have no impl
@@ -160,7 +160,7 @@ suite("test_query_sys_tables", "query,p0") {
     """
 
     sql("use information_schema")
-    qt_tables("select TABLE_CATALOG, TABLE_NAME, TABLE_TYPE, AVG_ROW_LENGTH, 
MAX_DATA_LENGTH, INDEX_LENGTH from tables where TABLE_SCHEMA = '${dbName1}' or 
TABLE_SCHEMA = '${dbName2}' or TABLE_SCHEMA = '${dbName3}'");
+    qt_tables("select TABLE_CATALOG, TABLE_NAME, TABLE_TYPE, AVG_ROW_LENGTH, 
MAX_DATA_LENGTH, INDEX_LENGTH from tables where TABLE_SCHEMA = '${dbName1}' or 
TABLE_SCHEMA = '${dbName2}' or TABLE_SCHEMA = '${dbName3}' order by 
TABLE_NAME");
 
     // test variables
     // session_variables
@@ -174,7 +174,11 @@ suite("test_query_sys_tables", "query,p0") {
     qt_global_variables("select VARIABLE_NAME, VARIABLE_VALUE from 
global_variables where VARIABLE_NAME = 'wait_timeout'")
 
     // test user_privileges
-    sql("CREATE USER 'test_sys_tables'")
+    try {
+        sql("CREATE USER 'test_sys_tables'")
+    } catch (Exception e) {
+        assertTrue(e.getMessage().contains("already exist"), e.getMessage())
+    }
     sql("GRANT SELECT_PRIV ON *.*.* TO 'test_sys_tables'")
     sql("use information_schema")
     qt_user_privileges """
diff --git a/regression-test/suites/tpcds_sf1_p1/functions_test/test_string_concat_extremely_long_string.groovy b/regression-test/suites/tpcds_sf1_p2/functions_test/test_string_concat_extremely_long_string.groovy
similarity index 100%
rename from regression-test/suites/tpcds_sf1_p1/functions_test/test_string_concat_extremely_long_string.groovy
rename to regression-test/suites/tpcds_sf1_p2/functions_test/test_string_concat_extremely_long_string.groovy
diff --git a/regression-test/suites/tpcds_sf1_p2/load.groovy b/regression-test/suites/tpcds_sf1_p2/load.groovy
new file mode 100644
index 0000000000..1b527dcc81
--- /dev/null
+++ b/regression-test/suites/tpcds_sf1_p2/load.groovy
@@ -0,0 +1,102 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// The cases are copied from https://github.com/trinodb/trino/tree/master
+// /testing/trino-product-tests/src/main/resources/sql-tests/testcases/tpcds
+// and modified by Doris.
+suite("load") {
+    def tables=["store", "store_returns", "customer", "date_dim", "web_sales",
+                "catalog_sales", "store_sales", "item", "web_returns", 
"catalog_returns",
+                "catalog_page", "web_site", "customer_address", 
"customer_demographics",
+                "ship_mode", "promotion", "inventory", "time_dim", 
"income_band",
+                "call_center", "reason", "household_demographics", 
"warehouse", "web_page"]
+    def columnsMap = [
+        "item": """tmp_item_sk, tmp_item_id, tmp_rec_start_date, 
tmp_rec_end_date, tmp_item_desc,
+                tmp_current_price, tmp_wholesale_cost, tmp_brand_id, 
tmp_brand, tmp_class_id, tmp_class,
+                tmp_category_id, tmp_category, tmp_manufact_id, tmp_manufact, 
tmp_size, tmp_formulation,
+                tmp_color, tmp_units, tmp_container, tmp_manager_id, 
tmp_product_name,
+                i_item_sk=tmp_item_sk, i_item_id=tmp_item_id, 
i_rec_start_date=tmp_rec_start_date,
+                i_rec_end_date=tmp_rec_end_date, i_item_desc=tmp_item_desc, 
i_current_price=tmp_current_price,
+                i_wholesale_cost=tmp_wholesale_cost, i_brand_id=tmp_brand_id, 
i_brand=tmp_brand,
+                i_class_id=tmp_class_id, i_class=tmp_class, 
i_category_id=tmp_category_id,
+                i_category=nullif(tmp_category, ''), 
i_manufact_id=tmp_manufact_id, i_manufact=tmp_manufact,
+                i_size=tmp_size, i_formulation=tmp_formulation, 
i_color=tmp_color, i_units=tmp_units,
+                i_container=tmp_container, i_manager_id=tmp_manager_id, 
i_product_name=tmp_product_name""",
+
+        "customer_address": """tmp_address_sk, tmp_address_id, 
tmp_street_number, tmp_street_name, tmp_street_type, tmp_suite_number,
+                            tmp_city, tmp_county, tmp_state, tmp_zip, 
tmp_country, tmp_gmt_offset, tmp_location_type,
+                            ca_address_sk=tmp_address_sk, 
ca_address_id=tmp_address_id, ca_street_number=tmp_street_number,
+                            ca_street_name=tmp_street_name, 
ca_street_type=tmp_street_type, ca_suite_number=tmp_suite_number, 
ca_city=tmp_city,
+                            ca_county=nullif(tmp_county, ''), 
ca_state=tmp_state, ca_zip=tmp_zip, ca_country=tmp_country,
+                            ca_gmt_offset=tmp_gmt_offset, 
ca_location_type=tmp_location_type""",
+    ]
+
+    def specialTables = ["item", "customer_address"]
+
+    for (String table in tables) {
+        sql """ DROP TABLE IF EXISTS $table """
+    }
+
+    for (String table in tables) {
+        sql new File("""${context.file.parent}/ddl/${table}.sql""").text
+    }
+
+    sql "set exec_mem_limit=8G;"
+
+    for (String tableName in tables) {
+        streamLoad {
+            // you can skip db declaration, because a default db has already been
+            // specified in ${DORIS_HOME}/conf/regression-conf.groovy
+            // db 'regression_test'
+            table tableName
+
+            // default label is UUID:
+            // set 'label' UUID.randomUUID().toString()
+
+            // default column_separator is specified in doris fe config, usually '\t'.
+            // this line changes it to '|'
+            set 'column_separator', '|'
+            set 'compress_type', 'GZ'
+
+            if (specialTables.contains(tableName)) {
+                set "columns", columnsMap[tableName]
+            }
+
+
+            // relates to ${DORIS_HOME}/regression-test/data/demo/streamload_input.csv.
+            // also, you can stream load an http stream, e.g. http://xxx/some.csv
+            file """${getS3Url()}/regression/tpcds/sf1/${tableName}.dat.gz"""
+
+            time 10000 // limit inflight 10s
+
+            // the stream load action will check the result, including the Success status and NumberTotalRows == NumberLoadedRows
+
+            // if a check callback is declared, the default check condition is ignored,
+            // so you must check all conditions yourself
+            check { result, exception, startTime, endTime ->
+                if (exception != null) {
+                    throw exception
+                }
+                log.info("Stream load result: ${result}".toString())
+                def json = parseJson(result)
+                assertEquals("success", json.Status.toLowerCase())
+                assertEquals(json.NumberTotalRows, json.NumberLoadedRows)
+                assertTrue(json.NumberLoadedRows > 0 && json.LoadBytes > 0)
+            }
+        }
+    }
+}

