This is an automated email from the ASF dual-hosted git repository.

liaoxin pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new 682cec9b9a5 [test](stream load) add cases to ensure the correctness of the document (#43016) (#44385)
682cec9b9a5 is described below

commit 682cec9b9a50a93f9b7d912a1863c05a0e9e4a97
Author: hui lai <lai...@selectdb.com>
AuthorDate: Fri Dec 6 11:04:51 2024 +0800

    [test](stream load) add cases to ensure the correctness of the document (#43016) (#44385)
    
    pick #43016
---
 .../import/import-way/stream-load-manual.md.out    | 154 +++++++
 .../import/import-way/streamload_example.csv       |  10 +
 .../import/import-way/streamload_example.json      |  12 +
 .../data-operate/import/import-way/test_array.csv  |  10 +
 .../data-operate/import/import-way/test_bitmap.csv |  10 +
 .../import/import-way/test_default.csv             |   1 +
 .../import/import-way/test_enclose_and_escape.csv  |   1 +
 .../data-operate/import/import-way/test_hll.csv    |  10 +
 .../data-operate/import/import-way/test_map.json   |  12 +
 .../import/import-way/test_merge_type.csv          |   2 +
 .../data-operate/import/import-way/test_seq.csv    |   1 +
 .../import/import-way/stream-load-manual.md.groovy | 492 +++++++++++++++++++++
 12 files changed, 715 insertions(+)

diff --git a/regression-test/data/doc/data-operate/import/import-way/stream-load-manual.md.out b/regression-test/data/doc/data-operate/import/import-way/stream-load-manual.md.out
new file mode 100644
index 00000000000..c8793ff69a3
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/stream-load-manual.md.out
@@ -0,0 +1,154 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !sql1 --
+1      Emily   25
+2      Benjamin        35
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql2 --
+1      Emily   25
+2      Benjamin        35
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql3 --
+1      Emily   25
+2      Benjamin        35
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql4 --
+1      Emily   25
+2      Benjamin        35
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql5 --
+2      Benjamin        35
+4      Alexander       60
+6      William 69
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql6 --
+2      Benjamin        35
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql7 --
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql7 --
+2      Benjamin        35
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql8 --
+2      Benjamin        35
+3      Olivia  28
+4      Alexander       60
+5      Ava     17
+6      William 69
+7      Sophia  32
+8      James   64
+9      Emma    37
+10     Liam    64
+
+-- !sql9 --
+张三     30      上海市,黄浦区,'大沽路
+
+-- !sql10 --
+1      avc     \N
+
+-- !sql11 --
+1      Emily   [1, 2, 3, 4]
+2      Benjamin        [22, 45, 90, 12]
+3      Olivia  [23, 16, 19, 16]
+4      Alexander       [123, 234, 456]
+5      Ava     [12, 15, 789]
+6      William [57, 68, 97]
+7      Sophia  [46, 47, 49]
+8      James   [110, 127, 128]
+9      Emma    [19, 18, 123, 446]
+10     Liam    [89, 87, 96, 12]
+
+-- !sql12 --
+1      {"Emily":101, "age":25}
+2      {"Benjamin":102, "age":35}
+3      {"Olivia":103, "age":28}
+4      {"Alexander":104, "age":60}
+5      {"Ava":105, "age":17}
+6      {"William":106, "age":69}
+7      {"Sophia":107, "age":32}
+8      {"James":108, "age":64}
+9      {"Emma":109, "age":37}
+10     {"Liam":110, "age":64}
+
+-- !sql13 --
+1      koga    \N
+2      nijg    \N
+3      lojn    \N
+4      lofn    \N
+5      jfin    \N
+6      kon     \N
+7      nhga    \N
+8      nfubg   \N
+9      huang   \N
+10     buag    \N
+
+-- !sql14 --
+1001   koga    \N
+1002   nijg    \N
+1003   lojn    \N
+1004   lofn    \N
+1005   jfin    \N
+1006   kon     \N
+1007   nhga    \N
+1008   nfubg   \N
+1009   huang   \N
+1010   buag    \N
+
diff --git a/regression-test/data/doc/data-operate/import/import-way/streamload_example.csv b/regression-test/data/doc/data-operate/import/import-way/streamload_example.csv
new file mode 100644
index 00000000000..9e401297ab2
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/streamload_example.csv
@@ -0,0 +1,10 @@
+1,Emily,25
+2,Benjamin,35
+3,Olivia,28
+4,Alexander,60
+5,Ava,17
+6,William,69
+7,Sophia,32
+8,James,64
+9,Emma,37
+10,Liam,64
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/streamload_example.json b/regression-test/data/doc/data-operate/import/import-way/streamload_example.json
new file mode 100644
index 00000000000..58d70e8880a
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/streamload_example.json
@@ -0,0 +1,12 @@
+[
+    {"userid":1,"username":"Emily","userage":25},
+    {"userid":2,"username":"Benjamin","userage":35},
+    {"userid":3,"username":"Olivia","userage":28},
+    {"userid":4,"username":"Alexander","userage":60},
+    {"userid":5,"username":"Ava","userage":17},
+    {"userid":6,"username":"William","userage":69},
+    {"userid":7,"username":"Sophia","userage":32},
+    {"userid":8,"username":"James","userage":64},
+    {"userid":9,"username":"Emma","userage":37},
+    {"userid":10,"username":"Liam","userage":64}
+    ]
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_array.csv b/regression-test/data/doc/data-operate/import/import-way/test_array.csv
new file mode 100644
index 00000000000..21b07f8382c
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_array.csv
@@ -0,0 +1,10 @@
+1|Emily|[1,2,3,4]
+2|Benjamin|[22,45,90,12]
+3|Olivia|[23,16,19,16]
+4|Alexander|[123,234,456]
+5|Ava|[12,15,789]
+6|William|[57,68,97]
+7|Sophia|[46,47,49]
+8|James|[110,127,128]
+9|Emma|[19,18,123,446]
+10|Liam|[89,87,96,12]
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_bitmap.csv b/regression-test/data/doc/data-operate/import/import-way/test_bitmap.csv
new file mode 100644
index 00000000000..2ce391c91cd
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_bitmap.csv
@@ -0,0 +1,10 @@
+1|koga|17723
+2|nijg|146285
+3|lojn|347890
+4|lofn|489871
+5|jfin|545679
+6|kon|676724
+7|nhga|767689
+8|nfubg|879878
+9|huang|969798
+10|buag|97997
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_default.csv b/regression-test/data/doc/data-operate/import/import-way/test_default.csv
new file mode 100644
index 00000000000..2e617350a8d
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_default.csv
@@ -0,0 +1 @@
+1,avc,2024
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_enclose_and_escape.csv b/regression-test/data/doc/data-operate/import/import-way/test_enclose_and_escape.csv
new file mode 100644
index 00000000000..55d3a5f52e6
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_enclose_and_escape.csv
@@ -0,0 +1 @@
+张三,30,'上海市,黄浦区,\'大沽路'
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_hll.csv b/regression-test/data/doc/data-operate/import/import-way/test_hll.csv
new file mode 100644
index 00000000000..d2cb9122771
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_hll.csv
@@ -0,0 +1,10 @@
+1001|koga
+1002|nijg
+1003|lojn
+1004|lofn
+1005|jfin
+1006|kon
+1007|nhga
+1008|nfubg
+1009|huang
+1010|buag
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_map.json b/regression-test/data/doc/data-operate/import/import-way/test_map.json
new file mode 100644
index 00000000000..ca3f9b89bbb
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_map.json
@@ -0,0 +1,12 @@
+[
+    {"user_id":1,"namemap":{"Emily":101,"age":25}},
+    {"user_id":2,"namemap":{"Benjamin":102,"age":35}},
+    {"user_id":3,"namemap":{"Olivia":103,"age":28}},
+    {"user_id":4,"namemap":{"Alexander":104,"age":60}},
+    {"user_id":5,"namemap":{"Ava":105,"age":17}},
+    {"user_id":6,"namemap":{"William":106,"age":69}},
+    {"user_id":7,"namemap":{"Sophia":107,"age":32}},
+    {"user_id":8,"namemap":{"James":108,"age":64}},
+    {"user_id":9,"namemap":{"Emma":109,"age":37}},
+    {"user_id":10,"namemap":{"Liam":110,"age":64}}
+    ]
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_merge_type.csv b/regression-test/data/doc/data-operate/import/import-way/test_merge_type.csv
new file mode 100644
index 00000000000..3e46af3bcff
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_merge_type.csv
@@ -0,0 +1,2 @@
+1,Emily,25
+2,Benjamin,35
\ No newline at end of file
diff --git a/regression-test/data/doc/data-operate/import/import-way/test_seq.csv b/regression-test/data/doc/data-operate/import/import-way/test_seq.csv
new file mode 100644
index 00000000000..a037137aaf3
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/test_seq.csv
@@ -0,0 +1 @@
+1,Emily,26
\ No newline at end of file
diff --git a/regression-test/suites/doc/data-operate/import/import-way/stream-load-manual.md.groovy b/regression-test/suites/doc/data-operate/import/import-way/stream-load-manual.md.groovy
new file mode 100644
index 00000000000..f7967045250
--- /dev/null
+++ b/regression-test/suites/doc/data-operate/import/import-way/stream-load-manual.md.groovy
@@ -0,0 +1,492 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_stream_load_doc_case", "p0") {
+    def tableName = "test_stream_load_doc_case"
+
+    // case 1: csv format
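+    // The framework's `set` calls correspond to stream load HTTP headers; this
+    // case is roughly the documented curl form (host/port are placeholders):
+    //   curl --location-trusted -u user:passwd -H "column_separator:," \
+    //     -H "columns:user_id,name,age" -T streamload_example.csv \
+    //     http://<fe_host>:<fe_http_port>/api/<db>/test_stream_load_doc_case/_stream_load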
+    sql """ DROP TABLE IF EXISTS ${tableName} """
+    sql """
+        CREATE TABLE ${tableName}(
+            user_id            BIGINT       NOT NULL COMMENT "user id",
+            name               VARCHAR(20)           COMMENT "user name",
+            age                INT                   COMMENT "user age"
+        )
+        UNIQUE KEY(user_id)
+        DISTRIBUTED BY HASH(user_id) BUCKETS 10
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "function_column.sequence_col" = 'age'
+        );
+    """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql1 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 2: json format
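+    // `jsonpaths` maps the JSON keys userid/username/userage onto the columns
+    // listed in `columns`, and `strip_outer_array` tells Doris the payload is
+    // one outer JSON array whose elements are individual rows.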
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+        set 'format', 'json'
+        set 'jsonpaths', '[\"$.userid\", \"$.username\", \"$.userage\"]'
+        set 'strip_outer_array', 'true'
+
+        file 'streamload_example.json'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql2 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 3: timeout
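+    // Stream load takes a `timeout` header in seconds (e.g. -H "timeout:100");
+    // this case leaves it unset, so `time 10000` below only bounds the
+    // client-side wait while the server applies its default load timeout.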
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql3 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 4: max_filter_ratio
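+    // `max_filter_ratio` caps the fraction of rows that may be filtered out
+    // (e.g. for type errors) before the load fails; 0.4 tolerates up to 40%
+    // bad rows, and this clean file loads all 10 rows.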
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+        set 'max_filter_ratio', '0.4'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql4 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 5: where
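+    // The `where` header filters rows server-side after column mapping; rows
+    // failing age>=35 are counted as unselected rather than as errors, leaving
+    // the six rows seen in sql5.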
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+        set 'where', 'age>=35'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql5 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 6: csv_with_names
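+    // With format csv_with_names the first line of the file is consumed as a
+    // header row, so record "1,Emily,25" is skipped and sql6 starts at user 2.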
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+        set 'format', 'csv_with_names'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql6 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 7: merge type
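+    // merge_type=delete treats every row in the batch as a delete key: after
+    // loading all 10 rows, replaying rows 1-2 from test_merge_type.csv removes
+    // them (first sql7 starts at user 3). The second pass below uses
+    // merge_type=merge with delete='user_id=1' to delete row 1 and upsert row 2.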
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+        set 'merge_type', 'delete'
+
+        file 'test_merge_type.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql7 "SELECT * FROM ${tableName} order by user_id"
+
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+        set 'merge_type', 'merge'
+        set 'delete', 'user_id=1'
+
+        file 'test_merge_type.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql7 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 8: function_column.sequence_col
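+    // On this unique-key table, function_column.sequence_col=age makes the
+    // sequence value decide which version wins: the delete row in test_seq.csv
+    // carries age 26 >= the stored 25, so user 1 is deleted and sql8 starts at 2.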
+    sql """ truncate table ${tableName} """
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+
+        file 'streamload_example.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    streamLoad {
+        table "${tableName}"
+
+        set 'column_separator', ','
+        set 'columns', 'user_id,name,age'
+        set 'merge_type', 'delete'
+        set 'function_column.sequence_col', 'age'
+
+        file 'test_seq.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql8 "SELECT * FROM ${tableName} order by user_id"
+
+    // case 9: enclose escape
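+    // `enclose` and `escape` let the column separator appear inside a field:
+    // the quoted address keeps its embedded commas and \' unescapes to a
+    // literal quote, as sql9 shows.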
+    def tableName1 = "test_stream_load_doc_case1"
+    sql """ DROP TABLE IF EXISTS ${tableName1} """
+    sql """
+        CREATE TABLE ${tableName1}(
+            username           VARCHAR(20)      NOT NULL,
+            age                INT,
+            address            VARCHAR(50)
+        )
+        DUPLICATE KEY(username)
+        DISTRIBUTED BY HASH(username) BUCKETS 10
+        PROPERTIES ("replication_allocation" = "tag.location.default: 1");
+    """
+    streamLoad {
+        table "${tableName1}"
+
+        set 'column_separator', ','
+        set 'enclose', '\''
+        set 'escape', '\\'
+
+        file 'test_enclose_and_escape.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql9 "SELECT * FROM ${tableName1} order by username"
+
+    // case 10: DEFAULT CURRENT_TIMESTAMP
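+    // The `columns` header also accepts derived expressions, so
+    // create_time=CURRENT_TIMESTAMP() is meant to fill the column at load time
+    // rather than relying on the DEFAULT clause.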
+    def tableName2 = "test_stream_load_doc_case2"
+    sql """ DROP TABLE IF EXISTS ${tableName2} """
+    sql """
+        CREATE TABLE ${tableName2}(
+            `id` bigint(30) NOT NULL,
+            `order_code` varchar(30) DEFAULT NULL COMMENT '',
+            `create_time` datetimev2(3) DEFAULT CURRENT_TIMESTAMP
+        )
+        DUPLICATE KEY(id)
+        DISTRIBUTED BY HASH(id) BUCKETS 10
+        PROPERTIES ("replication_allocation" = "tag.location.default: 1");
+    """
+    streamLoad {
+        table "${tableName2}"
+
+        set 'column_separator', ','
+        set 'columns', 'id, order_code, create_time=CURRENT_TIMESTAMP()'
+
+        file 'test_default.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql10 "SELECT * FROM ${tableName2} order by id"
+
+    // case 11: array
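+    // ARRAY<INT> values appear in the CSV as bracketed literals like [1,2,3,4];
+    // with '|' as the column separator the embedded commas are not field
+    // delimiters, so no enclose character is needed.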
+    def tableName3 = "test_stream_load_doc_case3"
+    sql """ DROP TABLE IF EXISTS ${tableName3} """
+    sql """
+        CREATE TABLE ${tableName3}(
+            typ_id     BIGINT          NOT NULL COMMENT "ID",
+            name       VARCHAR(20)     NULL     COMMENT "name",
+            arr        ARRAY<int(10)>  NULL     COMMENT "array"
+        )
+        DUPLICATE KEY(typ_id)
+        DISTRIBUTED BY HASH(typ_id) BUCKETS 10
+        PROPERTIES ("replication_allocation" = "tag.location.default: 1");
+    """
+    streamLoad {
+        table "${tableName3}"
+
+        set 'column_separator', '|'
+
+        file 'test_array.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql11 "SELECT * FROM ${tableName3} order by typ_id"
+
+    // case 12: map
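+    // MAP<STRING, INT> columns load directly from nested JSON objects: each
+    // "namemap" object becomes one map value, as in sql12.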
+    def tableName4 = "test_stream_load_doc_case4"
+    sql """ DROP TABLE IF EXISTS ${tableName4} """
+    sql """
+        CREATE TABLE ${tableName4}(
+            user_id            BIGINT       NOT NULL COMMENT "ID",
+            namemap            Map<STRING, INT>  NULL     COMMENT "name map"
+        )
+        DUPLICATE KEY(user_id)
+        DISTRIBUTED BY HASH(user_id) BUCKETS 10
+        PROPERTIES ("replication_allocation" = "tag.location.default: 1");
+    """
+    streamLoad {
+        table "${tableName4}"
+
+        set 'format', 'json'
+        set 'strip_outer_array', 'true'
+
+        file 'test_map.json'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql12 "SELECT * FROM ${tableName4} order by user_id"
+
+    // case 13: bitmap
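+    // arr=to_bitmap(arr) converts the raw integer into the BITMAP_UNION
+    // aggregate column; a plain SELECT renders BITMAP values as \N (see sql13),
+    // so bitmap_count()/bitmap_to_string() would be used to inspect them.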
+    def tableName5 = "test_stream_load_doc_case5"
+    sql """ DROP TABLE IF EXISTS ${tableName5} """
+    sql """
+        CREATE TABLE ${tableName5}(
+            typ_id     BIGINT                NULL   COMMENT "ID",
+            hou        VARCHAR(10)           NULL   COMMENT "one",
+            arr        BITMAP  BITMAP_UNION  NOT NULL   COMMENT "two"
+        )
+        AGGREGATE KEY(typ_id,hou)
+        DISTRIBUTED BY HASH(typ_id,hou) BUCKETS 10
+        PROPERTIES ("replication_allocation" = "tag.location.default: 1");
+    """
+    streamLoad {
+        table "${tableName5}"
+
+        set 'columns', 'typ_id,hou,arr,arr=to_bitmap(arr)'
+        set 'column_separator', '|'
+
+        file 'test_bitmap.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql13 "SELECT * FROM ${tableName5} order by typ_id"
+
+    // case 14: hll
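+    // Same pattern for HLL: pv=hll_hash(typ_id) feeds the hll_union column,
+    // and SELECT shows HLL values as \N (query via hll_cardinality() instead).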
+    def tableName6 = "test_stream_load_doc_case6"
+    sql """ DROP TABLE IF EXISTS ${tableName6} """
+    sql """
+        CREATE TABLE ${tableName6}(
+            typ_id           BIGINT          NULL   COMMENT "ID",
+            typ_name         VARCHAR(10)     NULL   COMMENT "NAME",
+            pv               hll hll_union   NOT NULL   COMMENT "hll"
+        )
+        AGGREGATE KEY(typ_id,typ_name)
+        DISTRIBUTED BY HASH(typ_id) BUCKETS 10
+        PROPERTIES ("replication_allocation" = "tag.location.default: 1");
+    """
+    streamLoad {
+        table "${tableName6}"
+
+        set 'columns', 'typ_id,typ_name,pv=hll_hash(typ_id)'
+        set 'column_separator', '|'
+
+        file 'test_hll.csv'
+        time 10000 // limit inflight 10s
+
+        check { result, exception, startTime, endTime ->
+            if (exception != null) {
+                throw exception
+            }
+            log.info("Stream load result: ${result}".toString())
+            def json = parseJson(result)
+            assertEquals("success", json.Status.toLowerCase())
+        }
+    }
+    qt_sql14 "SELECT * FROM ${tableName6} order by typ_id"
+}
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
