This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 0f3f3b75b6f Revert "[ci](perf) add required (#30155)" (#30165)
0f3f3b75b6f is described below
commit 0f3f3b75b6f5c8826387068f5dcc55de1b268318
Author: shuke <[email protected]>
AuthorDate: Sun Jan 21 09:04:23 2024 +0800
Revert "[ci](perf) add required (#30155)" (#30165)
This reverts commit f9c274c338afbe14f97cc16d1327e9a548e524b3.
---
.github/workflows/comment-to-trigger-teamcity.yml | 21 +++-
regression-test/pipeline/common/doris-utils.sh | 12 +-
regression-test/pipeline/common/github-utils.sh | 25 ++++
regression-test/pipeline/performance/run-tpcds.sh | 1 -
regression-test/pipeline/performance/run-tpch.sh | 1 -
regression-test/pipeline/tpch/tpch-sf100/clean.sh | 40 +++++++
.../pipeline/tpch/tpch-sf100/conf/be.conf | 85 +++++++++++++
.../pipeline/tpch/tpch-sf100/conf/external.json | 26 ++++
.../pipeline/tpch/tpch-sf100/conf/fe.conf | 78 ++++++++++++
.../pipeline/tpch/tpch-sf100/conf/odbcinst.ini | 43 +++++++
regression-test/pipeline/tpch/tpch-sf100/deploy.sh | 119 +++++++++++++++++++
.../pipeline/tpch/tpch-sf100/prepare.sh | 69 +++++++++++
regression-test/pipeline/tpch/tpch-sf100/run.sh | 131 +++++++++++++++++++++
13 files changed, 642 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/comment-to-trigger-teamcity.yml
b/.github/workflows/comment-to-trigger-teamcity.yml
index 8ae0832fc9e..0fdd80ba28f 100644
--- a/.github/workflows/comment-to-trigger-teamcity.yml
+++ b/.github/workflows/comment-to-trigger-teamcity.yml
@@ -45,6 +45,7 @@ jobs:
"${COMMENT_BODY}" == *'run p1'* ||
"${COMMENT_BODY}" == *'run external'* ||
"${COMMENT_BODY}" == *'run pipelinex_p0'* ||
+ "${COMMENT_BODY}" == *'run clickbench'* ||
"${COMMENT_BODY}" == *'run arm'* ||
"${COMMENT_BODY}" == *'run performance'* ]]; then
echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT"
@@ -62,7 +63,7 @@ jobs:
echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT"
echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT"
- reg="run
(buildall|compile|p0|p1|feut|beut|external|pipelinex_p0|arm|performance)(
[1-9]*[0-9]+)*"
+ reg="run
(buildall|compile|p0|p1|feut|beut|external|clickbench|pipelinex_p0|arm|performance)(
[1-9]*[0-9]+)*"
COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E
"${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E
"${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a
"$GITHUB_OUTPUT"
@@ -89,6 +90,11 @@ jobs:
else
echo "changed_be_ut=false" | tee -a "$GITHUB_OUTPUT"
fi
+ if file_changed_ckb; then
+ echo "changed_ckb=true" | tee -a "$GITHUB_OUTPUT"
+ else
+ echo "changed_ckb=false" | tee -a "$GITHUB_OUTPUT"
+ fi
if file_changed_regression_p0; then
echo "changed_p0=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_external=true" | tee -a "$GITHUB_OUTPUT"
@@ -114,6 +120,7 @@ jobs:
echo "INFO: failed to _get_pr_changed_files, default trigger all"
echo "changed_fe_ut=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_be_ut=true" | tee -a "$GITHUB_OUTPUT"
+ echo "changed_ckb=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_p0=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_external=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_pipelinex_p0=true" | tee -a "$GITHUB_OUTPUT"
@@ -236,6 +243,18 @@ jobs:
"arm" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+ - name: "Trigger or Skip clickbench"
+ if: ${{ fromJSON(steps.parse.outputs.comment_trigger) &&
contains(fromJSON('["clickbench", "buildall"]'),
steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
+ run: |
+ source ./regression-test/pipeline/common/teamcity-utils.sh
+ set -x
+ trigger_or_skip_build \
+ "${{ steps.changes.outputs.changed_ckb }}" \
+ "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+ "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+ "clickbench" \
+ "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+
- name: "Trigger or Skip performance"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) &&
contains(fromJSON('["performance", "buildall"]'),
steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
diff --git a/regression-test/pipeline/common/doris-utils.sh
b/regression-test/pipeline/common/doris-utils.sh
index 327f00beac1..47bcedad351 100644
--- a/regression-test/pipeline/common/doris-utils.sh
+++ b/regression-test/pipeline/common/doris-utils.sh
@@ -394,18 +394,18 @@ archive_doris_logs() {
print_doris_fe_log() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
- echo -e "\n\n\n\nWARNING: --------------------tail -n 100
${DORIS_HOME}/fe/log/fe.out--------------------"
+ echo "WARNING: --------------------tail -n 100
${DORIS_HOME}/fe/log/fe.out--------------------"
tail -n 100 "${DORIS_HOME}"/fe/log/fe.out
- echo -e "\n\n\n\nWARNING: --------------------tail -n 100
${DORIS_HOME}/fe/log/fe.log--------------------"
+ echo "WARNING: --------------------tail -n 100
${DORIS_HOME}/fe/log/fe.log--------------------"
tail -n 100 "${DORIS_HOME}"/fe/log/fe.log
- echo -e "WARNING: ----------------------------------------\n\n\n\n"
+ echo "WARNING: ----------------------------------------"
}
print_doris_be_log() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
- echo -e "\n\n\n\nWARNING: --------------------tail -n 100
${DORIS_HOME}/be/log/be.out--------------------"
+ echo "WARNING: --------------------tail -n 100
${DORIS_HOME}/be/log/be.out--------------------"
tail -n 100 "${DORIS_HOME}"/be/log/be.out
- echo -e "\n\n\n\nWARNING: --------------------tail -n 100
${DORIS_HOME}/be/log/be.INFO--------------------"
+ echo "WARNING: --------------------tail -n 100
${DORIS_HOME}/be/log/be.INFO--------------------"
tail -n 100 "${DORIS_HOME}"/be/log/be.INFO
- echo -e "WARNING: ----------------------------------------\n\n\n\n"
+ echo "WARNING: ----------------------------------------"
}
diff --git a/regression-test/pipeline/common/github-utils.sh
b/regression-test/pipeline/common/github-utils.sh
index 5a9d2ebb7b3..317667d15b2 100644
--- a/regression-test/pipeline/common/github-utils.sh
+++ b/regression-test/pipeline/common/github-utils.sh
@@ -304,6 +304,31 @@ file_changed_regression_p1() {
file_changed_regression_p0
}
+file_changed_ckb() {
+ local all_files
+ all_files=$(cat all_files)
+ if _only_modified_regression_conf; then echo "return no need" && return 1;
fi
+ if [[ -z ${all_files} ]]; then echo "return need" && return 0; fi
+ for af in ${all_files}; do
+ if [[ "${af}" == 'be'* ]] ||
+ [[ "${af}" == 'bin'* ]] ||
+ [[ "${af}" == 'conf'* ]] ||
+ [[ "${af}" == 'fe'* ]] ||
+ [[ "${af}" == 'gensrc'* ]] ||
+ [[ "${af}" == 'thirdparty'* ]] ||
+ [[ "${af}" == 'build.sh' ]] ||
+ [[ "${af}" == 'env.sh' ]] ||
+ [[ "${af}" == 'regression-test/pipeline/common/github-utils.sh' ]]
||
+ [[ "${af}" == 'regression-test/pipeline/common/doris-utils.sh' ]]
||
+ [[ "${af}" == 'regression-test/pipeline/common/oss-utils.sh' ]] ||
+ [[ "${af}" == 'tools/tpch-tools/bin/run-tpch-queries.sh' ]] ||
+ [[ "${af}" == 'regression-test/pipeline/tpch/tpch-sf100/'* ]]; then
+ echo "clickbench performance related file changed, return need" &&
return 0
+ fi
+ done
+ echo "return no need" && return 1
+}
+
file_changed_performance() {
local all_files
all_files=$(cat all_files)
diff --git a/regression-test/pipeline/performance/run-tpcds.sh
b/regression-test/pipeline/performance/run-tpcds.sh
index 0ca31c57e74..d054b85ee4d 100644
--- a/regression-test/pipeline/performance/run-tpcds.sh
+++ b/regression-test/pipeline/performance/run-tpcds.sh
@@ -125,7 +125,6 @@ exit_flag=0
echo "#### 3. run tpcds-sf${SF} query"
set_session_variable runtime_filter_mode global
bash
"${teamcity_build_checkoutDir}"/tools/tpcds-tools/bin/run-tpcds-queries.sh -s
"${SF}" | tee "${teamcity_build_checkoutDir}"/run-tpcds-queries.log
- echo
cold_run_time_threshold=${cold_run_time_threshold_master:-315000} # ms
hot_run_time_threshold=${hot_run_time_threshold_master:-190000} # ms
if [[ "${target_branch}" == "branch-2.0" ]]; then
diff --git a/regression-test/pipeline/performance/run-tpch.sh
b/regression-test/pipeline/performance/run-tpch.sh
index f1e4c8df2a2..1620c5f2922 100644
--- a/regression-test/pipeline/performance/run-tpch.sh
+++ b/regression-test/pipeline/performance/run-tpch.sh
@@ -117,7 +117,6 @@ exit_flag=0
echo "#### 3. run tpch-sf${SF} query"
set_session_variable runtime_filter_mode global
bash
"${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh -s
"${SF}" | tee "${teamcity_build_checkoutDir}"/run-tpch-queries.log
- echo
cold_run_time_threshold=${cold_run_time_threshold_master:-120000} # ms
hot_run_time_threshold=${hot_run_time_threshold_master:-42000} # ms
if [[ "${target_branch}" == "branch-2.0" ]]; then
diff --git a/regression-test/pipeline/tpch/tpch-sf100/clean.sh
b/regression-test/pipeline/tpch/tpch-sf100/clean.sh
new file mode 100644
index 00000000000..16fa490ef9c
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/clean.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Build Step: Command Line
+: <<EOF
+#!/bin/bash
+
+# Execute step even if some of the previous steps failed
+teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/clean.sh
]]; then
+ cd
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
+ bash -x clean.sh
+else
+ echo "Build Step file missing:
regression-test/pipeline/tpch/tpch-sf100/clean.sh" && exit 1
+fi
+EOF
+
+## clean.sh content ##
+
+# stop_doris
+source ../../common/doris-utils.sh
+
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+stop_doris
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf
b/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf
new file mode 100644
index 00000000000..cc23ae153bf
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf
@@ -0,0 +1,85 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+CUR_DATE=`date +%Y%m%d-%H%M%S`
+
+PPROF_TMPDIR="$DORIS_HOME/log/"
+
+JAVA_OPTS="-Xmx1024m -DlogPath=$DORIS_HOME/log/jni.log
-Xloggc:$DORIS_HOME/log/be.gc.log.$CUR_DATE
-Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true
-Dsun.java.command=DorisBE -XX:-CriticalJNINatives -DJDBC_MIN_POOL=1
-DJDBC_MAX_POOL=100 -DJDBC_MAX_IDLE_TIME=300000 -DJDBC_MAX_WAIT_TIME=5000"
+
+# For jdk 9+, this JAVA_OPTS will be used as default JVM options
+JAVA_OPTS_FOR_JDK_9="-Xmx1024m -DlogPath=$DORIS_HOME/log/jni.log
-Xlog:gc:$DORIS_HOME/log/be.gc.log.$CUR_DATE
-Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true
-Dsun.java.command=DorisBE -XX:-CriticalJNINatives -DJDBC_MIN_POOL=1
-DJDBC_MAX_POOL=100 -DJDBC_MAX_IDLE_TIME=300000 -DJDBC_MAX_WAIT_TIME=5000"
+
+# since 1.2, the JAVA_HOME need to be set to run BE process.
+# JAVA_HOME=/path/to/jdk/
+
+#
https://github.com/apache/doris/blob/master/docs/zh-CN/community/developer-guide/debug-tool.md#jemalloc-heap-profile
+# https://jemalloc.net/jemalloc.3.html
+JEMALLOC_CONF="percpu_arena:percpu,background_thread:true,metadata_thp:auto,muzzy_decay_ms:15000,dirty_decay_ms:15000,oversize_threshold:0,lg_tcache_max:20,prof:false,lg_prof_interval:32,lg_prof_sample:19,prof_gdump:false,prof_accum:false,prof_leak:false,prof_final:false"
+JEMALLOC_PROF_PRFIX=""
+
+# INFO, WARNING, ERROR, FATAL
+sys_log_level = INFO
+
+# ports for admin, web, heartbeat service
+be_port = 9060
+webserver_port = 8040
+heartbeat_service_port = 9050
+brpc_port = 8060
+arrow_flight_sql_port = -1
+
+# HTTPS configures
+enable_https = false
+# path of certificate in PEM format.
+ssl_certificate_path = "$DORIS_HOME/conf/cert.pem"
+# path of private key in PEM format.
+ssl_private_key_path = "$DORIS_HOME/conf/key.pem"
+
+
+# Choose one if there are more than one ip except loopback address.
+# Note that there should at most one ip match this list.
+# If no ip match this rule, will choose one randomly.
+# use CIDR format, e.g. 10.10.10.0/24 or IP format, e.g. 10.10.10.1
+# Default value is empty.
+# priority_networks = 10.10.10.0/24;192.168.0.0/16
+
+# data root path, separate by ';'
+# You can specify the storage type for each root path, HDD (cold data) or SSD
(hot data)
+# eg:
+# storage_root_path = /home/disk1/doris;/home/disk2/doris;/home/disk2/doris
+# storage_root_path =
/home/disk1/doris,medium:SSD;/home/disk2/doris,medium:SSD;/home/disk2/doris,medium:HDD
+# /home/disk2/doris,medium:HDD(default)
+#
+# you also can specify the properties by setting '<property>:<value>',
separate by ','
+# property 'medium' has a higher priority than the extension of path
+#
+# Default value is ${DORIS_HOME}/storage, you should create it by hand.
+# storage_root_path = ${DORIS_HOME}/storage
+
+# Default dirs to put jdbc drivers,default value is ${DORIS_HOME}/jdbc_drivers
+# jdbc_drivers_dir = ${DORIS_HOME}/jdbc_drivers
+
+# Advanced configurations
+# sys_log_dir = ${DORIS_HOME}/log
+# sys_log_roll_mode = SIZE-MB-1024
+# sys_log_roll_num = 10
+# sys_log_verbose_modules = *
+# log_buffer_level = -1
+# palo_cgroups
+
+priority_networks=172.16.0.0/24
+storage_root_path=/mnt/datadisk0/doris-storage
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/external.json
b/regression-test/pipeline/tpch/tpch-sf100/conf/external.json
new file mode 100644
index 00000000000..9461d836e67
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/external.json
@@ -0,0 +1,26 @@
+[
+ {
+ "file": "docker/thirdparties/docker-compose/mysql/mysql-5.7.env",
+ "replacements": {
+ "DOCKER_MYSQL_57_EXTERNAL_PORT": 7111
+ }
+ },
+ {
+ "file": "docker/thirdparties/docker-compose/postgresql/postgresql-14.env",
+ "replacements": {
+ "DOCKER_PG_14_EXTERNAL_PORT": 7121
+ }
+ },
+ {
+ "file": "docker/thirdparties/docker-compose/hive/gen_env.sh",
+ "replacements": {
+ "FS_PORT": 7131,
+ "HMS_PORT": 7141
+ }
+ }, {
+ "file": "docker/thirdparties/start-thirdparties-docker.sh",
+ "replacements": {
+ "CONTAINER_UID": "doris-regression-fakeid-fakecommit"
+ }
+ }
+]
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
b/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
new file mode 100644
index 00000000000..7c02d3898dc
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#####################################################################
+## The uppercase properties are read and exported by bin/start_fe.sh.
+## To see all Frontend configurations,
+## see fe/src/org/apache/doris/common/Config.java
+#####################################################################
+
+CUR_DATE=`date +%Y%m%d-%H%M%S`
+
+# the output dir of stderr and stdout
+LOG_DIR = ${DORIS_HOME}/log
+
+JAVA_OPTS="-Dsun.security.krb5.debug=true
-Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx8192m -XX:+UseMembar
-XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+PrintGCDateStamps
-XX:+PrintGCDetails -XX:+UseConcMarkSweepGC -XX:+UseParNewGC
-XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled
-XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0
-Xloggc:$DORIS_HOME/log/fe.gc.log.$CUR_DATE"
+
+# For jdk 9+, this JAVA_OPTS will be used as default JVM options
+JAVA_OPTS_FOR_JDK_9="-Dsun.security.krb5.debug=true
-Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx8192m
-XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+CMSClassUnloadingEnabled
-XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80
-XX:SoftRefLRUPolicyMSPerMB=0
-Xlog:gc*:$DORIS_HOME/log/fe.gc.log.$CUR_DATE:time"
+
+##
+## the lowercase properties are read by main program.
+##
+
+# INFO, WARN, ERROR, FATAL
+sys_log_level = INFO
+
+# NORMAL, BRIEF, ASYNC
+sys_log_mode = NORMAL
+
+# store metadata, must be created before start FE.
+# Default value is ${DORIS_HOME}/doris-meta
+# meta_dir = ${DORIS_HOME}/doris-meta
+
+# Default dirs to put jdbc drivers,default value is ${DORIS_HOME}/jdbc_drivers
+# jdbc_drivers_dir = ${DORIS_HOME}/jdbc_drivers
+
+http_port = 8030
+rpc_port = 9020
+query_port = 9030
+edit_log_port = 9010
+arrow_flight_sql_port = -1
+
+# Choose one if there are more than one ip except loopback address.
+# Note that there should at most one ip match this list.
+# If no ip match this rule, will choose one randomly.
+# use CIDR format, e.g. 10.10.10.0/24 or IP format, e.g. 10.10.10.1
+# Default value is empty.
+# priority_networks = 10.10.10.0/24;192.168.0.0/16
+
+# Advanced configurations
+# log_roll_size_mb = 1024
+# sys_log_dir = ${DORIS_HOME}/log
+# sys_log_roll_num = 10
+# sys_log_verbose_modules = org.apache.doris
+# audit_log_dir = ${DORIS_HOME}/log
+# audit_log_modules = slow_query, query
+# audit_log_roll_num = 10
+# meta_delay_toleration_second = 10
+# qe_max_connection = 1024
+# qe_query_timeout_second = 300
+# qe_slow_log_ms = 5000
+
+priority_networks=172.16.0.0/24
+meta_dir=/mnt/datadisk0/doris-meta
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/odbcinst.ini
b/regression-test/pipeline/tpch/tpch-sf100/conf/odbcinst.ini
new file mode 100644
index 00000000000..41e21f92277
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/odbcinst.ini
@@ -0,0 +1,43 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Example driver definitions, you should not use the example odbc driver
+# before you prepare env in your server
+
+# Driver from the postgresql-odbc package
+# Setup from the unixODBC package
+[PostgreSQL]
+Description = ODBC for PostgreSQL
+Driver = /usr/lib/psqlodbc.so
+Setup = /usr/lib/libodbcpsqlS.so
+FileUsage = 1
+
+
+# Driver from the mysql-connector-odbc package
+# Setup from the unixODBC package
+[MySQL ODBC 8.0 Unicode Driver]
+Description = ODBC for MySQL
+Driver = /usr/lib64/libmyodbc8w.so
+FileUsage = 1
+
+# Driver from the oracle-connector-odbc package
+# Setup from the unixODBC package
+[Oracle 19 ODBC driver]
+Description=Oracle ODBC driver for Oracle 19
+Driver=/usr/lib/libsqora.so.19.1
+
+
diff --git a/regression-test/pipeline/tpch/tpch-sf100/deploy.sh
b/regression-test/pipeline/tpch/tpch-sf100/deploy.sh
new file mode 100644
index 00000000000..64d816f0aca
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/deploy.sh
@@ -0,0 +1,119 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Build Step: Command Line
+: <<EOF
+#!/bin/bash
+
+teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/deploy.sh
]]; then
+ cd
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
+ bash -x deploy.sh
+else
+ echo "Build Step file missing:
regression-test/pipeline/tpch/tpch-sf100/deploy.sh" && exit 1
+fi
+EOF
+
+## deploy.sh content ##
+
+# download_oss_file
+source ../../common/oss-utils.sh
+# start_doris_fe, get_doris_conf_value, start_doris_be, stop_doris,
+# print_doris_fe_log, print_doris_be_log, archive_doris_logs
+source ../../common/doris-utils.sh
+
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ||
+ -z "${pull_request_id}" ||
+ -z "${commit_id}" ]]; then
+ echo "ERROR: env teamcity_build_checkoutDir or pull_request_id or
commit_id not set"
+ exit 1
+fi
+if ${DEBUG:-false}; then
+ pull_request_id="26465"
+ commit_id="a532f7113f463e144e83918a37288f2649448482"
+fi
+
+echo "#### Deploy Doris ####"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+exit_flag=0
+need_backup_doris_logs=false
+
+echo "#### 1. try to kill old doris process and remove old doris binary"
+stop_doris && rm -rf output
+
+echo "#### 2. download doris binary tar ball"
+cd "${teamcity_build_checkoutDir}" || exit 1
+if download_oss_file "${pull_request_id:-}_${commit_id:-}.tar.gz"; then
+ if ! command -v pigz >/dev/null; then sudo apt install -y pigz; fi
+ tar -I pigz -xf "${pull_request_id:-}_${commit_id:-}.tar.gz"
+ if [[ -d output && -d output/fe && -d output/be ]]; then
+ echo "INFO: be version: $(./output/be/lib/doris_be --version)"
+ rm -rf "${pull_request_id}_${commit_id}.tar.gz"
+ fi
+else
+ echo "ERROR: download compiled binary failed" && exit 1
+fi
+
+echo "#### 3. copy conf from regression-test/pipeline/tpch/tpch-sf100/conf/"
+rm -f "${DORIS_HOME}"/fe/conf/fe_custom.conf
"${DORIS_HOME}"/be/conf/be_custom.conf
+if [[ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf
]]; then
+ cp -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
"${DORIS_HOME}"/fe/conf/
+ cp -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf
"${DORIS_HOME}"/be/conf/
+else
+ echo "ERROR: doris conf file missing in
${teamcity_build_checkoutDir}/regression-test/pipeline/tpch/tpch-sf100/conf/"
+ exit 1
+fi
+
+echo "#### 4. start Doris"
+meta_dir=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf meta_dir)
+storage_root_path=$(get_doris_conf_value "${DORIS_HOME}"/be/conf/be.conf
storage_root_path)
+mkdir -p "${meta_dir}"
+mkdir -p "${storage_root_path}"
+if ! start_doris_fe; then
+ echo "ERROR: Start doris fe failed."
+ print_doris_fe_log
+ need_backup_doris_logs=true
+ exit_flag=1
+fi
+if ! start_doris_be; then
+ echo "ERROR: Start doris be failed."
+ print_doris_be_log
+ need_backup_doris_logs=true
+ exit_flag=1
+fi
+
+# wait 10s for doris to be fully started; otherwise we may encounter the error below,
+# ERROR 1105 (HY000) at line 102: errCode = 2, detailMessage = Failed to find
enough backend, please check the replication num,replication tag and storage
medium.
+sleep 10s
+
+echo "#### 5. set session variables"
+echo "TODO"
+
+echo "#### 6. check if need backup doris logs"
+if ${need_backup_doris_logs}; then
+ print_doris_fe_log
+ print_doris_be_log
+ if file_name=$(archive_doris_logs
"${pull_request_id}_${commit_id}_doris_logs.tar.gz"); then
+ upload_doris_log_to_oss "${file_name}"
+ fi
+fi
+
+exit "${exit_flag}"
diff --git a/regression-test/pipeline/tpch/tpch-sf100/prepare.sh
b/regression-test/pipeline/tpch/tpch-sf100/prepare.sh
new file mode 100644
index 00000000000..07959a03d6f
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/prepare.sh
@@ -0,0 +1,69 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Build Step: Command Line
+: <<EOF
+#!/bin/bash
+
+teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/prepare.sh
]]; then
+ cd
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
+ bash -x prepare.sh
+else
+ echo "Build Step file missing:
regression-test/pipeline/tpch/tpch-sf100/prepare.sh" && exit 1
+fi
+EOF
+
+## prepare.sh content ##
+
+echo "#### Check env"
+if [[ -z "${commit_id_from_trigger}" || -z ${commit_id:-} || -z
${pull_request_id:-} ]]; then
+ echo "ERROR: env commit_id_from_trigger or commit_id or pull_request_id
not set" && exit 1
+else
+ commit_id_from_checkout=${commit_id}
+fi
+if ${DEBUG:-false}; then commit_id_from_trigger=${commit_id}; fi
+
+echo "Prepare to run tpch sf100 test"
+
+echo "#### 1. check if need run"
+if [[ "${commit_id_from_trigger}" != "${commit_id_from_checkout}" ]]; then
+ echo -e "目前是在 clickbench 流水线 compile 完后触发本 tpch 流水线的,
+有可能 pr 在 clickbench 流水线还在跑的时候新提交了commit,
+这时候 tpch 流水线 checkout 出来的 commit 就不是触发时的传过来的 commit了,
+这种情况不需要跑"
+    echo -e "ERROR: PR(${pull_request_id}),
+    the latest commit id
+    ${commit_id_from_checkout}
+    not equal to the commit_id_from_trigger
+    ${commit_id_from_trigger}
+    commit_id_from_trigger is outdated"
+ exit 1
+fi
+
+echo "#### 2. check if depending files exist"
+if ! [[ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/deploy.sh
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/run.sh
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh &&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh
&&
+ -f
"${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh ]];
then
+ echo "ERROR: depending files missing" && exit 1
+fi
diff --git a/regression-test/pipeline/tpch/tpch-sf100/run.sh
b/regression-test/pipeline/tpch/tpch-sf100/run.sh
new file mode 100644
index 00000000000..d9252f742c1
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/run.sh
@@ -0,0 +1,131 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Build Step: Command Line
+: <<EOF
+#!/bin/bash
+
+teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/run.sh
]]; then
+ cd
"${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
+ bash -x run.sh
+else
+ echo "Build Step file missing:
regression-test/pipeline/tpch/tpch-sf100/run.sh" && exit 1
+fi
+EOF
+
+## run.sh content ##
+
+# check_tpch_table_rows, stop_doris, set_session_variable
+source ../../common/doris-utils.sh
+# create_an_issue_comment
+source ../../common/github-utils.sh
+# upload_doris_log_to_oss
+source ../../common/oss-utils.sh
+
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ||
+ -z "${pull_request_id}" ||
+ -z "${commit_id}" ]]; then
+ echo "ERROR: env teamcity_build_checkoutDir or pull_request_id or
commit_id not set"
+ exit 1
+fi
+
+echo "#### Run tpch-sf100 test on Doris ####"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+exit_flag=0
+
+check_tpch_result() {
+ log_file="$1"
+ if ! grep '^Total cold run time' "${log_file}" || ! grep '^Total hot run
time' "${log_file}"; then
+ echo "ERROR: can not find 'Total hot run time' in '${log_file}'"
+ return 1
+ else
+ cold_run_time=$(grep '^Total cold run time' "${log_file}" | awk
'{print $5}')
+ hot_run_time=$(grep '^Total hot run time' "${log_file}" | awk '{print
$5}')
+ fi
+ # 单位是毫秒
+ cold_run_time_threshold=${cold_run_time_threshold:-50000}
+ hot_run_time_threshold=${hot_run_time_threshold:-42000}
+ if [[ ${cold_run_time} -gt 50000 || ${hot_run_time} -gt 42000 ]]; then
+        echo "ERROR:
+        cold_run_time ${cold_run_time} is greater than the threshold
${cold_run_time_threshold},
+        or, hot_run_time ${hot_run_time} is greater than the threshold
${hot_run_time_threshold}"
+ return 1
+ else
+        echo "INFO:
+        cold_run_time ${cold_run_time} is less than the threshold
${cold_run_time_threshold},
+        and, hot_run_time ${hot_run_time} is less than the threshold
${hot_run_time_threshold}"
+ fi
+}
+
+(
+ set -e
+ shopt -s inherit_errexit
+
+ echo "#### 1. check if need to load data"
+ SF="100" # SCALE FACTOR
+ if ${DEBUG:-false}; then SF="1"; fi
+ db_name="tpch_sf${SF}"
+ sed -i "s|^export DB=.*$|export DB='${db_name}'|g" \
+
"${teamcity_build_checkoutDir}"/tools/tpch-tools/conf/doris-cluster.conf
+ if ! check_tpch_table_rows "${db_name}" "${SF}"; then
+ echo "ERROR: check_tpch_table_rows failed." && exit 1
+ fi
+
+ echo "#### 2. run tpch-sf${SF} query"
+ set_session_variable runtime_filter_mode global
+ sed -i "s|^SCALE_FACTOR=[0-9]\+$|SCALE_FACTOR=${SF}|g"
"${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh
+ bash
"${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh | tee
"${teamcity_build_checkoutDir}"/run-tpch-queries.log
+ if ! check_tpch_result
"${teamcity_build_checkoutDir}"/run-tpch-queries.log; then exit 1; fi
+ line_end=$(sed -n '/^Total hot run time/='
"${teamcity_build_checkoutDir}"/run-tpch-queries.log)
+ line_begin=$((line_end - 23))
+ comment_body="Tpch sf${SF} test result on commit ${commit_id:-}, data
reload: ${data_reload:-"false"}
+
+run tpch-sf${SF} query with default conf and session variables
+$(sed -n "${line_begin},${line_end}p"
"${teamcity_build_checkoutDir}"/run-tpch-queries.log)"
+
+ echo "#### 3. run tpch-sf${SF} query with runtime_filter_mode=off"
+ set_session_variable runtime_filter_mode off
+ bash
"${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh | tee
"${teamcity_build_checkoutDir}"/run-tpch-queries.log
+ if ! grep '^Total hot run time'
"${teamcity_build_checkoutDir}"/run-tpch-queries.log >/dev/null; then exit 1; fi
+ line_end=$(sed -n '/^Total hot run time/='
"${teamcity_build_checkoutDir}"/run-tpch-queries.log)
+ line_begin=$((line_end - 23))
+ comment_body="${comment_body}
+
+run tpch-sf${SF} query with default conf and set session variable
runtime_filter_mode=off
+$(sed -n "${line_begin},${line_end}p"
"${teamcity_build_checkoutDir}"/run-tpch-queries.log)"
+
+ echo "#### 4. comment result on tpch"
+ comment_body=$(echo "${comment_body}" | sed -e
':a;N;$!ba;s/\t/\\t/g;s/\n/\\n/g') # 将所有的 Tab字符替换为\t 换行符替换为\n
+ create_an_issue_comment_tpch "${pull_request_id:-}" "${comment_body}"
+
+ stop_doris
+)
+exit_flag="$?"
+
+echo "#### 5. check if need backup doris logs"
+if [[ ${exit_flag} != "0" ]]; then
+ print_doris_fe_log
+ print_doris_be_log
+ if file_name=$(archive_doris_logs
"${pull_request_id}_${commit_id}_doris_logs.tar.gz"); then
+ upload_doris_log_to_oss "${file_name}"
+ fi
+fi
+
+exit "${exit_flag}"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]