This is an automated email from the ASF dual-hosted git repository.
yiguolei pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.1 by this push:
new 903f1d5ca2c [fix](partial update) Support partial update when the date
default value is 'current_date'. This PR is an extension of PR #32926. (#33394)
(#33625)
903f1d5ca2c is described below
commit 903f1d5ca2cbe95a5c0e180dd164af2af86f0ca9
Author: abmdocrt <[email protected]>
AuthorDate: Sun Apr 14 09:56:42 2024 +0800
[fix](partial update) Support partial update when the date default value is
'current_date'. This PR is an extension of PR #32926. (#33394) (#33625)
---
be/src/olap/rowset/segment_v2/segment_writer.cpp | 24 ++++++++-
.../rowset/segment_v2/vertical_segment_writer.cpp | 24 ++++++++-
.../partial_update/test_partial_update.out | 24 +++++++++
.../partial_update/test_partial_update.groovy | 57 +++++++++++++++++++++-
4 files changed, 126 insertions(+), 3 deletions(-)
diff --git a/be/src/olap/rowset/segment_v2/segment_writer.cpp
b/be/src/olap/rowset/segment_v2/segment_writer.cpp
index 6228e781f79..725fa18b4a0 100644
--- a/be/src/olap/rowset/segment_v2/segment_writer.cpp
+++ b/be/src/olap/rowset/segment_v2/segment_writer.cpp
@@ -695,7 +695,29 @@ Status
SegmentWriter::fill_missing_columns(vectorized::MutableColumns& mutable_f
for (auto i = 0; i < cids_missing.size(); ++i) {
const auto& column = _tablet_schema->column(cids_missing[i]);
if (column.has_default_value()) {
- auto default_value =
_tablet_schema->column(cids_missing[i]).default_value();
+ std::string default_value;
+ if (UNLIKELY(_tablet_schema->column(cids_missing[i]).type() ==
+ FieldType::OLAP_FIELD_TYPE_DATETIMEV2 &&
+
to_lower(_tablet_schema->column(cids_missing[i]).default_value())
+
.find(to_lower("CURRENT_TIMESTAMP")) !=
+ std::string::npos)) {
+ DateV2Value<DateTimeV2ValueType> dtv;
+
dtv.from_unixtime(_opts.rowset_ctx->partial_update_info->timestamp_ms / 1000,
+
_opts.rowset_ctx->partial_update_info->timezone);
+ default_value = dtv.debug_string();
+ } else if (UNLIKELY(
+
_tablet_schema->column(cids_missing[i]).type() ==
+ FieldType::OLAP_FIELD_TYPE_DATEV2 &&
+
to_lower(_tablet_schema->column(cids_missing[i]).default_value())
+
.find(to_lower("CURRENT_DATE")) !=
+ std::string::npos)) {
+ DateV2Value<DateV2ValueType> dv;
+
dv.from_unixtime(_opts.rowset_ctx->partial_update_info->timestamp_ms / 1000,
+
_opts.rowset_ctx->partial_update_info->timezone);
+ default_value = dv.debug_string();
+ } else {
+ default_value =
_tablet_schema->column(cids_missing[i]).default_value();
+ }
vectorized::ReadBuffer
rb(const_cast<char*>(default_value.c_str()),
default_value.size());
RETURN_IF_ERROR(old_value_block.get_by_position(i).type->from_string(
diff --git a/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp
b/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp
index 4b48dd959a6..1e348821475 100644
--- a/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp
+++ b/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp
@@ -625,7 +625,29 @@ Status VerticalSegmentWriter::_fill_missing_columns(
for (auto i = 0; i < missing_cids.size(); ++i) {
const auto& column = _tablet_schema->column(missing_cids[i]);
if (column.has_default_value()) {
- auto default_value =
_tablet_schema->column(missing_cids[i]).default_value();
+ std::string default_value;
+ if (UNLIKELY(_tablet_schema->column(missing_cids[i]).type() ==
+ FieldType::OLAP_FIELD_TYPE_DATETIMEV2 &&
+
to_lower(_tablet_schema->column(missing_cids[i]).default_value())
+
.find(to_lower("CURRENT_TIMESTAMP")) !=
+ std::string::npos)) {
+ DateV2Value<DateTimeV2ValueType> dtv;
+
dtv.from_unixtime(_opts.rowset_ctx->partial_update_info->timestamp_ms / 1000,
+
_opts.rowset_ctx->partial_update_info->timezone);
+ default_value = dtv.debug_string();
+ } else if (UNLIKELY(
+
_tablet_schema->column(missing_cids[i]).type() ==
+ FieldType::OLAP_FIELD_TYPE_DATEV2 &&
+
to_lower(_tablet_schema->column(missing_cids[i]).default_value())
+
.find(to_lower("CURRENT_DATE")) !=
+ std::string::npos)) {
+ DateV2Value<DateV2ValueType> dv;
+
dv.from_unixtime(_opts.rowset_ctx->partial_update_info->timestamp_ms / 1000,
+
_opts.rowset_ctx->partial_update_info->timezone);
+ default_value = dv.debug_string();
+ } else {
+ default_value =
_tablet_schema->column(missing_cids[i]).default_value();
+ }
vectorized::ReadBuffer
rb(const_cast<char*>(default_value.c_str()),
default_value.size());
RETURN_IF_ERROR(old_value_block.get_by_position(i).type->from_string(
diff --git
a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out
b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out
index a2411896063..f827086539d 100644
---
a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out
+++
b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out
@@ -35,6 +35,18 @@
3 "stranger" 500 \N 4321
4 "foreigner" 600 \N 4321
+-- !select_timestamp --
+1
+
+-- !select_timestamp2 --
+11
+
+-- !select_date --
+1
+
+-- !select_date2 --
+2
+
-- !select_default --
1 doris 200 123 1
2 doris2 400 223 1
@@ -71,3 +83,15 @@
3 "stranger" 500 \N 4321
4 "foreigner" 600 \N 4321
+-- !select_timestamp --
+1
+
+-- !select_timestamp2 --
+11
+
+-- !select_date --
+1
+
+-- !select_date2 --
+2
+
diff --git
a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy
b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy
index 4470ec06d16..a21b3dda741 100644
---
a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy
+++
b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy
@@ -185,8 +185,63 @@ suite("test_primary_key_partial_update", "p0") {
select * from ${tableName} order by id;
"""
- // drop drop
+ // drop table
sql """ DROP TABLE IF EXISTS ${tableName} """
+
+ sql """ CREATE TABLE ${tableName} (
+ `name` VARCHAR(600) NULL,
+ `userid` INT NOT NULL,
+ `seq` BIGINT NOT NULL AUTO_INCREMENT(1),
+ `ctime` DATETIME(3) DEFAULT CURRENT_TIMESTAMP(3),
+ `rtime` DATETIME(3) NOT NULL DEFAULT
CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3),
+ `corp_name` VARCHAR(600) NOT NULL
+ ) ENGINE = OLAP UNIQUE KEY(`name`, `userid`) COMMENT
'OLAP' DISTRIBUTED BY HASH(`name`) BUCKETS 10
+ PROPERTIES ("replication_num" = "1",
+ "enable_unique_key_merge_on_write" =
"true",
+ "store_row_column" = "${use_row_store}");
"""
+
+ sql "set enable_unique_key_partial_update=true;"
+ sql "set enable_insert_strict=false;"
+
+ sql "INSERT INTO ${tableName}(`name`, `userid`, `corp_name`)
VALUES ('test1', 1234567, 'A');"
+
+ qt_select_timestamp "select count(*) from ${tableName} where
`ctime` > \"1970-01-01\""
+
+ sql "set time_zone = 'America/New_York'"
+
+ Thread.sleep(5000)
+
+ sql "INSERT INTO ${tableName}(`name`, `userid`, `corp_name`)
VALUES ('test2', 1234567, 'A');"
+
+ qt_select_timestamp2 "SELECT ABS(TIMESTAMPDIFF(HOUR, MIN(ctime),
MAX(ctime))) AS time_difference_hours FROM ${tableName};"
+
+ // drop table
+ sql """ DROP TABLE IF EXISTS ${tableName} """
+
+ sql """ SET enable_nereids_planner=true; """
+ sql """ CREATE TABLE ${tableName} (
+ `name` VARCHAR(600) NULL,
+ `userid` INT NOT NULL,
+ `seq` BIGINT NOT NULL AUTO_INCREMENT(1),
+ `ctime` DATE DEFAULT CURRENT_DATE,
+ `corp_name` VARCHAR(600) NOT NULL
+ ) ENGINE = OLAP UNIQUE KEY(`name`, `userid`) COMMENT
'OLAP' DISTRIBUTED BY HASH(`name`) BUCKETS 10
+ PROPERTIES ("replication_num" = "1",
+ "enable_unique_key_merge_on_write" =
"true",
+ "store_row_column" = "${use_row_store}");
"""
+
+ sql "set enable_unique_key_partial_update=true;"
+ sql "set enable_insert_strict=false;"
+
+ sql "INSERT INTO ${tableName}(`name`, `userid`, `corp_name`)
VALUES ('test1', 1234567, 'A');"
+
+ qt_select_date "select count(*) from ${tableName} where `ctime` >
\"1970-01-01\""
+
+ sql "set time_zone = 'America/New_York'"
+
+ sql "INSERT INTO ${tableName}(`name`, `userid`, `corp_name`)
VALUES ('test2', 1234567, 'B');"
+
+ qt_select_date2 "select count(*) from ${tableName} where `ctime` >
\"1970-01-01\""
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]