This is an automated email from the ASF dual-hosted git repository.
yiguolei pushed a change to branch 2.1_38012
in repository https://gitbox.apache.org/repos/asf/doris.git
omit 03fdb45c682 [fix](compatibility) type toSql should return lowercase
string (#38012)
add ee47ef88260 [Fix](regression) fix regression sql which has schema
change (#37941) (#38456)
add 96413e679d5 [branch-2.1](mtmv) Support read sync materialized view in
async materialized view (#38462)
add 46dfb65b6b7 [fix](auth)fix show routine load db is null (#38574)
add 1c176db0106 [bugfix](paimon)add support for 'in' and 'not in' for 2.1
#38390 (#38576)
add ef8a1918c3c [case][fix](iceberg)move rest cases from p2 to p0 and fix
iceberg version issue for 2.1 (#37898) (#38589)
add b21b906306d [Fix](outfile) FE check the hdfs URI of outfile (#38602)
add c8037ca833b [fix](regression-test) fix `test_numbers` test (#38600)
add 098ac67a75b [fix](catalog)fix db name may be null in NotificationEvent
(#38421) (#38596)
add 4c330e3fc63 [Fix](test) fix pull up literal predicate regression
(#38564)
add fa45f3b95a0 [feature](paimon/iceberg)add a docker that can directly
pull up all the relevant environments of paimon/iceberg/doris for 2.1 (#38009)
(#38588)
add 6bd93b119fa [pick](cast)Feature cast complexttype2 json (#38632)
add 184b8cbbe4f [pick](json)fix jsonb deserialize (#38630)
add 9f1e41c6232 [Cherry-pick](branch-2.1) Pick "[Enhancement](audit log)
Add print audit log session variable #38419" (#38624)
add 41fa7bc9fdc [bugfix](paimon)Fixed the reading of timestamp with time
zone type data for 2.1 (#37716) (#38592)
add 27de814b0fc [fix](compatibility) type toSql should return lowercase
string (#38012)
This update added new revisions after undoing existing revisions.
That is to say, some revisions that were in the old version of the
branch are not in the new version. This situation occurs
when a user --force pushes a change and generates a repository
containing something like this:
* -- * -- B -- O -- O -- O (03fdb45c682)
\
N -- N -- N refs/heads/2.1_38012 (27de814b0fc)
You should already have received notification emails for all of the O
revisions, and so the following emails describe only the N revisions
from the common base, B.
Any revisions marked "omit" are not gone; other references still
refer to them. Any revisions marked "discard" are gone forever.
No new revisions were added by this update.
Summary of changes:
be/src/vec/exec/scan/vfile_scanner.cpp | 15 +-
be/src/vec/functions/function_cast.h | 1 +
.../docker-compose/iceberg/iceberg.yaml.tpl | 1 +
.../docker-compose/iceberg/spark-init-iceberg.sql | 25 +-
.../docker-compose/iceberg/tools/save_docker.sh | 4 +
.../org/apache/doris/paimon/PaimonColumnValue.java | 30 +-
.../org/apache/doris/paimon/PaimonJniScanner.java | 14 +-
.../main/java/org/apache/doris/catalog/Type.java | 3 +
.../java/org/apache/doris/analysis/CastExpr.java | 4 +
.../org/apache/doris/analysis/OutFileClause.java | 8 +
.../apache/doris/analysis/ShowRoutineLoadStmt.java | 4 +-
.../datasource/hive/event/MetastoreEvent.java | 5 +-
.../datasource/iceberg/source/IcebergScanNode.java | 9 +-
.../datasource/paimon/PaimonExternalTable.java | 25 +-
.../paimon/source/PaimonPredicateConverter.java | 39 +++
.../datasource/paimon/source/PaimonScanNode.java | 23 +-
.../main/java/org/apache/doris/load/ExportMgr.java | 4 +-
.../exploration/mv/MaterializedViewUtils.java | 9 +
.../nereids/rules/expression/check/CheckCast.java | 22 ++
.../doris/nereids/trees/expressions/Cast.java | 2 +-
.../org/apache/doris/qe/MysqlConnectProcessor.java | 28 +-
.../java/org/apache/doris/qe/SessionVariable.java | 10 +
.../doris/analysis/CancelExportStmtTest.java | 26 --
.../outfile/hdfs/test_outfile_hdfs.out} | 64 +++++
.../iceberg/test_gen_iceberg_by_api.out | 10 +
.../iceberg/test_iceberg_equality_delete.out | 41 +++
.../iceberg/test_iceberg_position_delete.out} | 0
.../iceberg/test_iceberg_time_travel.out | 46 +++
.../test_iceberg_upper_case_column_name.out} | 44 ---
.../paimon/paimon_timestamp_types.out | 139 +++++++++
.../hive/test_upper_case_column_name.out | 44 ---
.../iceberg/iceberg_equality_delete.out | 109 -------
...est_external_catalog_iceberg_hadoop_catalog.out | 26 --
.../test_external_catalog_icebergv2_nereids.out | 74 -----
.../data/jsonb_p0/test_jsonb_unescaped.csv | 5 +
.../data/jsonb_p0/test_jsonb_unescaped.json | 5 +
.../jsonb_p0/test_jsonb_with_unescaped_string.out | 15 +
.../infer_predicate/pull_up_predicate_literal.out | 14 +-
.../query_p0/cast/test_complextype_to_json.out | 67 +++++
.../data/rollup_p0/test_create_mv_and_mtmv.out | 14 +
.../doris/regression/action/WaitForAction.groovy | 12 +-
.../ddl_p0/test_create_table_like_nereids.groovy | 13 +-
.../outfile/hdfs/test_outfile_hdfs.groovy | 97 +++++++
.../suites/export_p2/test_export_with_hdfs.groovy | 118 --------
.../hive/test_upper_case_column_name.groovy | 32 ---
.../iceberg/test_gen_iceberg_by_api.groovy | 192 +++++++++++++
.../iceberg/test_iceberg_equality_delete.groovy | 152 ++++++++++
.../iceberg/test_iceberg_position_delete.groovy} | 42 +--
.../iceberg/test_iceberg_time_travel.groovy | 55 ++++
.../test_iceberg_upper_case_column_name.groovy} | 75 ++---
.../paimon/paimon_timestamp_types.groovy | 224 +++++++++++++--
.../paimon/test_paimon_predict.groovy | 127 +++++++++
.../external_table_p0/tvf/test_numbers.groovy | 2 +-
.../hive/test_upper_case_column_name.groovy | 32 ---
.../iceberg/iceberg_equality_delete.groovy | 57 ----
..._external_catalog_iceberg_hadoop_catalog.groovy | 49 ----
.../test_external_catalog_icebergv2_nereids.groovy | 84 ------
.../test_jsonb_with_unescaped_string.groovy | 99 +++++++
.../query_p0/cast/test_complextype_to_json.groovy | 119 ++++++++
.../rollup_p0/test_create_mv_and_mtmv.groovy | 109 +++++++
samples/datalake/iceberg_and_paimon/README.md | 279 ++++++++++++++++++
.../data/flink-conf/flink-conf.yaml | 312 +++++++++++++++++++++
.../data/flink-conf/log4j-cli.properties | 67 +++++
.../data/flink-conf/log4j-console.properties | 70 +++++
.../data/flink-conf/log4j-session.properties | 42 +++
.../data/flink-conf/log4j.properties | 61 ++++
.../data/flink-conf/logback-console.xml | 67 +++++
.../data/flink-conf/logback-session.xml | 39 +++
.../iceberg_and_paimon/data/flink-conf/logback.xml | 58 ++++
.../iceberg_and_paimon/data/flink-conf/masters | 7 +-
.../iceberg_and_paimon/data/flink-conf/workers | 7 +-
.../iceberg_and_paimon/data/flink-conf/zoo.cfg | 36 +++
.../data/spark-conf/fairscheduler.xml.template | 31 ++
.../data/spark-conf/log4j2.properties.template | 69 +++++
.../data/spark-conf/metrics.properties.template | 210 ++++++++++++++
.../data/spark-conf/spark-defaults.conf | 43 +++
.../data/spark-conf/spark-defaults.conf.template | 27 ++
.../data/spark-conf/spark-env.sh.template | 81 ++++++
.../data/spark-conf/workers.template | 19 ++
.../data/table/customer/000000_0} | Bin
.../data/table/customer/000001_0} | Bin
.../data/table/customer/000002_0} | Bin
.../data/table/customer/000003_0} | Bin
.../datalake/iceberg_and_paimon/docker-compose.env | 9 +-
.../datalake/iceberg_and_paimon/docker-compose.yml | 173 ++++++++++++
.../iceberg_and_paimon/scripts/start_doris.sh | 60 ++++
.../datalake/iceberg_and_paimon/sql/init_doris.sql | 21 ++
.../iceberg_and_paimon/sql/init_tables.sql | 53 ++++
.../iceberg_and_paimon/sql/prepare_data.sql | 8 +
samples/datalake/iceberg_and_paimon/start_all.sh | 121 ++++++++
.../iceberg_and_paimon/start_doris_client.sh | 9 +-
.../iceberg_and_paimon/start_flink_client.sh | 8 +-
.../start_spark_iceberg_client.sh | 8 +-
.../start_spark_paimon_client.sh | 8 +-
.../datalake/iceberg_and_paimon/stop_all.sh | 8 +-
95 files changed, 3822 insertions(+), 907 deletions(-)
rename regression-test/data/{export_p2/test_export_with_hdfs.out =>
export_p0/outfile/hdfs/test_outfile_hdfs.out} (56%)
create mode 100644
regression-test/data/external_table_p0/iceberg/test_gen_iceberg_by_api.out
create mode 100644
regression-test/data/external_table_p0/iceberg/test_iceberg_equality_delete.out
rename
regression-test/data/{external_table_p2/iceberg/iceberg_position_delete.out =>
external_table_p0/iceberg/test_iceberg_position_delete.out} (100%)
create mode 100644
regression-test/data/external_table_p0/iceberg/test_iceberg_time_travel.out
copy
regression-test/data/{external_table_p2/hive/test_upper_case_column_name.out =>
external_table_p0/iceberg/test_iceberg_upper_case_column_name.out} (58%)
create mode 100644
regression-test/data/external_table_p0/paimon/paimon_timestamp_types.out
delete mode 100644
regression-test/data/external_table_p2/iceberg/iceberg_equality_delete.out
delete mode 100644
regression-test/data/external_table_p2/iceberg/test_external_catalog_iceberg_hadoop_catalog.out
delete mode 100644
regression-test/data/external_table_p2/iceberg/test_external_catalog_icebergv2_nereids.out
create mode 100644 regression-test/data/jsonb_p0/test_jsonb_unescaped.csv
create mode 100644 regression-test/data/jsonb_p0/test_jsonb_unescaped.json
create mode 100644
regression-test/data/jsonb_p0/test_jsonb_with_unescaped_string.out
create mode 100644
regression-test/data/query_p0/cast/test_complextype_to_json.out
create mode 100644 regression-test/data/rollup_p0/test_create_mv_and_mtmv.out
create mode 100644
regression-test/suites/export_p0/outfile/hdfs/test_outfile_hdfs.groovy
delete mode 100644
regression-test/suites/export_p2/test_export_with_hdfs.groovy
create mode 100644
regression-test/suites/external_table_p0/iceberg/test_gen_iceberg_by_api.groovy
create mode 100644
regression-test/suites/external_table_p0/iceberg/test_iceberg_equality_delete.groovy
rename
regression-test/suites/{external_table_p2/iceberg/iceberg_position_delete.groovy
=> external_table_p0/iceberg/test_iceberg_position_delete.groovy} (92%)
create mode 100644
regression-test/suites/external_table_p0/iceberg/test_iceberg_time_travel.groovy
copy
regression-test/suites/{external_table_p2/hive/test_upper_case_column_name.groovy
=> external_table_p0/iceberg/test_iceberg_upper_case_column_name.groovy} (50%)
create mode 100644
regression-test/suites/external_table_p0/paimon/test_paimon_predict.groovy
delete mode 100644
regression-test/suites/external_table_p2/iceberg/iceberg_equality_delete.groovy
delete mode 100644
regression-test/suites/external_table_p2/iceberg/test_external_catalog_iceberg_hadoop_catalog.groovy
delete mode 100644
regression-test/suites/external_table_p2/iceberg/test_external_catalog_icebergv2_nereids.groovy
create mode 100644
regression-test/suites/jsonb_p0/test_jsonb_with_unescaped_string.groovy
create mode 100644
regression-test/suites/query_p0/cast/test_complextype_to_json.groovy
create mode 100644
regression-test/suites/rollup_p0/test_create_mv_and_mtmv.groovy
create mode 100644 samples/datalake/iceberg_and_paimon/README.md
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/flink-conf.yaml
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/log4j-cli.properties
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/log4j-console.properties
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/log4j-session.properties
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/log4j.properties
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/logback-console.xml
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/logback-session.xml
create mode 100644
samples/datalake/iceberg_and_paimon/data/flink-conf/logback.xml
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/data/flink-conf/masters (79%)
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/data/flink-conf/workers (79%)
create mode 100644 samples/datalake/iceberg_and_paimon/data/flink-conf/zoo.cfg
create mode 100644
samples/datalake/iceberg_and_paimon/data/spark-conf/fairscheduler.xml.template
create mode 100644
samples/datalake/iceberg_and_paimon/data/spark-conf/log4j2.properties.template
create mode 100644
samples/datalake/iceberg_and_paimon/data/spark-conf/metrics.properties.template
create mode 100755
samples/datalake/iceberg_and_paimon/data/spark-conf/spark-defaults.conf
create mode 100644
samples/datalake/iceberg_and_paimon/data/spark-conf/spark-defaults.conf.template
create mode 100755
samples/datalake/iceberg_and_paimon/data/spark-conf/spark-env.sh.template
create mode 100644
samples/datalake/iceberg_and_paimon/data/spark-conf/workers.template
copy samples/datalake/{hudi/data/customer/000000_0.parquet =>
iceberg_and_paimon/data/table/customer/000000_0} (100%)
copy samples/datalake/{hudi/data/customer/000001_0.parquet =>
iceberg_and_paimon/data/table/customer/000001_0} (100%)
copy samples/datalake/{hudi/data/customer/000002_0.parquet =>
iceberg_and_paimon/data/table/customer/000002_0} (100%)
copy samples/datalake/{hudi/data/customer/000003_0.parquet =>
iceberg_and_paimon/data/table/customer/000003_0} (100%)
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/docker-compose.env (80%)
create mode 100644 samples/datalake/iceberg_and_paimon/docker-compose.yml
create mode 100644 samples/datalake/iceberg_and_paimon/scripts/start_doris.sh
create mode 100644 samples/datalake/iceberg_and_paimon/sql/init_doris.sql
create mode 100644 samples/datalake/iceberg_and_paimon/sql/init_tables.sql
create mode 100644 samples/datalake/iceberg_and_paimon/sql/prepare_data.sql
create mode 100644 samples/datalake/iceberg_and_paimon/start_all.sh
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/start_doris_client.sh (79%)
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/start_flink_client.sh (79%)
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/start_spark_iceberg_client.sh (79%)
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/start_spark_paimon_client.sh (79%)
copy docker/thirdparties/docker-compose/iceberg/tools/save_docker.sh =>
samples/datalake/iceberg_and_paimon/stop_all.sh (79%)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]