diff --git a/be/src/common/config.h b/be/src/common/config.h
index b6f8d2c2fb..493f9342aa 100644
--- a/be/src/common/config.h
+++ b/be/src/common/config.h
@@ -110,7 +110,7 @@ DECLARE_Int32(brpc_num_threads);
// If no ip match this rule, will choose one randomly.
DECLARE_String(priority_networks);
-// performance moderate or or compact, only tcmalloc compile
+// performance moderate or compact, only tcmalloc compile
DECLARE_String(memory_mode);
// process memory limit specified as number of bytes
diff --git a/regression-test/pipeline/common/doris-utils.sh b/regression-test/pipeline/common/doris-utils.sh
new file mode 100644
index 0000000000..dddc1fc5f5
--- /dev/null
+++ b/regression-test/pipeline/common/doris-utils.sh
@@ -0,0 +1,259 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+function get_doris_conf_value() {
+ local conf_file="$1"
+ local conf_key="$2"
+ if [[ -z "${conf_key}" ]]; then return 1; fi
+
+ local conf_value
+ if line="$(grep "^${conf_key}" "${conf_file}")"; then
+ conf_value="${line#*=}" #取第一个等号后面的子串为value
+ conf_value="$(echo "${conf_value}" | xargs)" #去掉前导和尾随空格
+ echo "${conf_value}"
+ return 0
+ else
+ echo "ERROR: can not find ${conf_key} in ${conf_file}"
+ return 1
+ fi
+}
+
+function set_doris_conf_value() {
+ local conf_file="$1"
+ local conf_key="$2"
+ local conf_value="$3"
+ if [[ -z "${conf_value}" ]]; then return 1; fi
+
+ local origin_conf_value
+ if origin_conf_value="$(get_conf_value "${conf_file}" "${conf_key}")"; then
+ echo "origin_conf_value is ${origin_conf_value}"
+ sed -i "/^${conf_key}/d" "${conf_file}"
+ fi
+ echo "${conf_key}=${conf_value}" | tee -a "${conf_file}"
+}
+
+# set -x
+# get_doris_conf_value "$1" "$2"
+# set_doris_conf_value "$1" "$2" "$3"
+
+function start_doris_fe() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ if ! java -version >/dev/null; then sudo apt install openjdk-8-jdk -y >/dev/null; fi
+ JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*' | sed -n '1p')"
+ export JAVA_HOME
+ "${DORIS_HOME}"/fe/bin/start_fe.sh --daemon
+
+ if ! mysql --version >/dev/null; then sudo apt install -y mysql-client; fi
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
+ cl="mysql -h127.0.0.1 -P${query_port} -uroot "
+ local i=1
+ while [[ $((i++)) -lt 60 ]]; do
+ fe_version=$(${cl} -e 'show frontends\G' 2>/dev/null | grep -i version | cut -d: -f2)
+ if [[ -n "${fe_version}" ]] && [[ "${fe_version}" != "NULL" ]]; then
+ echo "INFO: doris fe started, fe version: ${fe_version}" && return 0
+ else
+ echo "${i}/60, Wait for Frontend ready, sleep 2 seconds ..." && sleep 2
+ fi
+ done
+ if [[ ${i} -ge 60 ]]; then echo "ERROR: Start Doris Frontend Failed after 2 mins wait..." && return 1; fi
+
+}
+
+function start_doris_be() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ if ! java -version >/dev/null; then sudo apt install openjdk-8-jdk -y >/dev/null; fi
+ JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*' | sed -n '1p')"
+ export JAVA_HOME
+ sysctl -w vm.max_map_count=2000000 &&
+ ulimit -n 200000 &&
+ ulimit -c unlimited &&
+ swapoff -a &&
+ "${DORIS_HOME}"/be/bin/start_be.sh --daemon
+
+ sleep 2
+ local i=1
+ while [[ $((i++)) -lt 5 ]]; do
+ if ! pgrep -fia doris_be >/dev/null; then
+ echo "ERROR: start doris be failed." && return 1
+ else
+ sleep 2
+ fi
+ done
+ if [[ ${i} -ge 5 ]]; then
+ echo "INFO: doris be started, be version: $("${DORIS_HOME}"/be/lib/doris_be --version)"
+ fi
+}
+
+function add_doris_be_to_fe() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ if ! mysql --version >/dev/null; then sudo apt install -y mysql-client; fi
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
+ heartbeat_service_port=$(get_doris_conf_value "${DORIS_HOME}"/be/conf/be.conf heartbeat_service_port)
+ cl="mysql -h127.0.0.1 -P${query_port} -uroot "
+ if ${cl} -e "ALTER SYSTEM ADD BACKEND '127.0.0.1:${heartbeat_service_port}';"; then echo; else echo; fi
+
+ i=1
+ while [[ $((i++)) -lt 60 ]]; do
+ if be_ready_count=$(${cl} -e 'show backends\G' | grep -c 'Alive: true') &&
+ [[ ${be_ready_count} -eq 1 ]]; then
+ echo -e "INFO: add doris be success, be version: \n$(${cl} -e 'show backends\G' | grep 'Version')" && break
+ else
+ echo 'Wait for Backends ready, sleep 2 seconds ...' && sleep 2
+ fi
+ done
+ if [[ ${i} -eq 60 ]]; then echo "ERROR: Add Doris Backend Failed after 2 mins wait..." && return 1; fi
+}
+
+function stop_doris() {
+ if "${DORIS_HOME}"/fe/bin/stop_fe.sh &&
+ "${DORIS_HOME}"/be/bin/stop_be.sh; then
+ echo "INFO: normally stoped doris"
+ else
+ pgrep -fi doris | xargs kill -9
+ echo "WARNING: force stoped doris"
+ fi
+}
+
+function check_tpch_table_rows() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ db_name="$1"
+ scale_factor="$2"
+ if [[ -z "${scale_factor}" ]]; then return 1; fi
+
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
+ cl="mysql -h127.0.0.1 -P${query_port} -uroot "
+ declare -A table_rows
+ if [[ "${scale_factor}" == "100" ]]; then
+ table_rows=(['region']=5 ['nation']=25 ['supplier']=1000000 ['customer']=15000000 ['part']=20000000 ['partsupp']=80000000 ['orders']=150000000 ['lineitem']=600037902)
+ else
+ table_rows=(['region']=5 ['nation']=25 ['supplier']=10000 ['customer']=150000 ['part']=200000 ['partsupp']=800000 ['orders']=1500000 ['lineitem']=6001215)
+ fi
+ for table in ${!table_rows[*]}; do
+ rows_actual=$(${cl} -D"${db_name}" -e"SELECT count(*) FROM ${table}" | sed -n '2p')
+ rows_expect=${table_rows[${table}]}
+ if [[ ${rows_actual} -ne ${rows_expect} ]]; then
+ echo "WARNING: ${table} actual rows: ${rows_actual}, expect rows: ${rows_expect}" && return 1
+ fi
+ done
+}
+
+get_session_variable() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ usage="
+ usage:
+ get_session_variable SESSION_VARIABLE
+ return the value of the SESSION_VARIABLE
+ "
+ if [[ -z "$1" ]]; then echo "${usage}" && return 1; else sv="$1"; fi
+
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
+ cl="mysql -h127.0.0.1 -P${query_port} -uroot "
+
+ if ret=$(${cl} -e"show variables like '${sv}'\G" | grep " Value: "); then
+ echo "${ret/*Value: /}"
+ else
+ return 1
+ fi
+}
+
+set_session_variables_from_file() {
+ usage="
+ usage:
+ set_session_variables_from_file FILE
+ FILE content like '
+ session_variable_key session_variable_value
+ ...
+ '
+ "
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ if [[ -z "$1" ]]; then echo "${usage}" && return 1; else sv_file="$1"; fi
+
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
+ cl="mysql -h127.0.0.1 -P${query_port} -uroot "
+
+ ret=0
+ while read -r sv; do
+ if [[ "${sv}" == "#"* ]]; then continue; fi
+ k=$(echo "${sv}" | awk '{print $1}')
+ v=$(echo "${sv}" | awk '{print $2}' | tr '[:upper:]' '[:lower:]')
+ if ${cl} -e"set global ${k}=${v};"; then
+ if [[ "$(get_session_variable "${k}" | tr '[:upper:]' '[:lower:]')" == "${v}" ]]; then
+ echo "INFO: set global ${k}=${v};"
+ else
+ echo "ERROR: set global ${k}=${v};" && ret=1
+ fi
+ else
+ ret=1
+ fi
+ done <"${sv_file}"
+ return "${ret}"
+}
+
+set_session_variable() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ k="$1"
+ v="$2"
+ if [[ -z "${v}" ]]; then return 1; fi
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
+ cl="mysql -h127.0.0.1 -P${query_port} -uroot "
+ if ${cl} -e"set global ${k}=${v};"; then
+ if [[ "$(get_session_variable "${k}" | tr '[:upper:]' '[:lower:]')" == "${v}" ]]; then
+ echo "INFO: set global ${k}=${v};"
+ else
+ echo "ERROR: set global ${k}=${v};" && return 1
+ fi
+ else
+ return 1
+ fi
+}
+
+archive_doris_logs() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ archive_name="$1"
+ if [[ -z ${archive_name} ]]; then echo "ERROR: archive file name required" && return 1; fi
+ if tar -I pigz \
+ --directory "${DORIS_HOME}" \
+ --absolute-names \
+ -cf "${DORIS_HOME}/${archive_name}" \
+ "${DORIS_HOME}"/fe/conf \
+ "${DORIS_HOME}"/fe/log \
+ "${DORIS_HOME}"/be/conf \
+ "${DORIS_HOME}"/be/log; then
+ echo "${DORIS_HOME}/${archive_name}"
+ else
+ return 1
+ fi
+}
+
+print_doris_fe_log() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/fe/log/fe.out--------------------"
+ tail -n 100 "${DORIS_HOME}"/fe/log/fe.out
+ echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/fe/log/fe.log--------------------"
+ tail -n 100 "${DORIS_HOME}"/fe/log/fe.log
+ echo "WARNING: ----------------------------------------"
+}
+
+print_doris_be_log() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/be/log/be.out--------------------"
+ tail -n 100 "${DORIS_HOME}"/be/log/be.out
+ echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/be/log/be.INFO--------------------"
+ tail -n 100 "${DORIS_HOME}"/be/log/be.INFO
+ echo "WARNING: ----------------------------------------"
+}
diff --git a/regression-test/pipeline/common/github-utils.sh b/regression-test/pipeline/common/github-utils.sh
new file mode 100644
index 0000000000..c8c5ba213f
--- /dev/null
+++ b/regression-test/pipeline/common/github-utils.sh
@@ -0,0 +1,65 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+function create_an_issue_comment() {
+ local ISSUE_NUMBER="$1"
+ local COMMENT_BODY="$2"
+ if [[ -z "${COMMENT_BODY}" ]]; then return 1; fi
+ if [[ -z "${GITHUB_TOKEN}" ]]; then return 1; fi
+
+ local OWNER='apache'
+ local REPO='doris'
+ COMMENT_BODY=$(echo "${COMMENT_BODY}" | sed -e ':a;N;$!ba;s/\t/\\t/g;s/\n/\\n/g') # 将所有的 Tab字符替换为\t 换行符替换为\n
+ if ret=$(curl -s \
+ -X POST \
+ -H "Accept: application/vnd.github+json" \
+ -H "Authorization: Bearer ${GITHUB_TOKEN:-}" \
+ -H "X-GitHub-Api-Version: 2022-11-28" \
+ https://api.github.com/repos/"${OWNER}"/"${REPO}"/issues/"${ISSUE_NUMBER}"/comments \
+ -d "{\"body\": \"${COMMENT_BODY}\"}"); then
+ if echo "${ret}" | grep "Problems parsing JSON"; then
+ is_succ=false
+ else
+ is_succ=true
+ fi
+ else
+ is_succ=false
+ fi
+
+ if ${is_succ}; then
+ echo -e "\033[32m Create issue(${ISSUE_NUMBER}) comment SUCCESS... \033[0m" && return 0
+ else
+ echo -e "\033[31m Create issue(${ISSUE_NUMBER}) comment FAIL... \033[0m" && return 1
+ fi
+}
+
+function create_an_issue_comment_tpch() {
+ local ISSUE_NUMBER="$1"
+ local COMMENT_BODY="$2"
+ local machine='aliyun_ecs.c7a.8xlarge_32C64G'
+ COMMENT_BODY="
+
+TPC-H test result on machine: '${machine}'
+
+\`\`\`
+${COMMENT_BODY}
+\`\`\`
+
+"
+ create_an_issue_comment "${ISSUE_NUMBER}" "${COMMENT_BODY}"
+}
diff --git a/regression-test/pipeline/common/oss-utils.sh b/regression-test/pipeline/common/oss-utils.sh
new file mode 100644
index 0000000000..cc036fb573
--- /dev/null
+++ b/regression-test/pipeline/common/oss-utils.sh
@@ -0,0 +1,105 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+function install_ossutil() {
+ if command -v ossutil >/dev/null; then return 0; fi
+ if [[ -z ${OSS_accessKeyID} || -z ${OSS_accessKeySecret} ]]; then
+ echo "ERROR: env OSS_accessKeyID or OSS_accessKeySecret not set."
+ return 1
+ fi
+ curl https://gosspublic.alicdn.com/ossutil/install.sh | sudo bash
+ echo "[Credentials]
+language=EN
+endpoint=oss-cn-hongkong-internal.aliyuncs.com
+accessKeyID=${OSS_accessKeyID:-}
+accessKeySecret=${OSS_accessKeySecret:-}
+" >~/.ossutilconfig
+}
+
+function check_oss_file_exist() {
+ if [[ -z ${OSS_accessKeyID} || -z ${OSS_accessKeySecret} ]]; then
+ echo "ERROR: env OSS_accessKeyID and OSS_accessKeySecret not set"
+ return 1
+ fi
+ # Check if the file exists.
+ # file_name like ${pull_request_id}_${commit_id}.tar.gz
+ local file_name="$1"
+ OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile-release"}"
+ install_ossutil
+ if ossutil stat \
+ -i "${OSS_accessKeyID}" \
+ -k "${OSS_accessKeySecret}" \
+ "${OSS_DIR}/${file_name}"; then
+ echo "INFO: ${file_name} file exists." && return 0
+ else
+ echo "ERROR: ${file_name} file not exits." && return 1
+ fi
+}
+
+function download_oss_file() {
+ # file_name like ${pull_request_id}_${commit_id}.tar.gz
+ local file_name="$1"
+ if ! check_oss_file_exist "${file_name}"; then return 1; fi
+ OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile-release"}"
+ install_ossutil
+ if ossutil cp -f \
+ "${OSS_DIR}/${file_name}" \
+ "${file_name}"; then
+ echo "INFO: download ${file_name} success" && return 0
+ else
+ echo "ERROR: download ${file_name} fail" && return 1
+ fi
+}
+
+function upload_file_to_oss() {
+ if [[ -z ${OSS_accessKeyID} || -z ${OSS_accessKeySecret} ]]; then
+ echo "ERROR: env OSS_accessKeyID and OSS_accessKeySecret not set"
+ return 1
+ fi
+ if [[ ! -f "$1" ]] || [[ "$1" != "/"* ]]; then
+ echo "ERROR: '$1' is not an absolute path"
+ return 1
+ fi
+ # file_name like ${pull_request_id}_${commit_id}.tar.gz
+ local file_name
+ local dir_name
+ dir_name="$(dirname "${1}")"
+ file_name="$(basename "${1}")"
+ OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile-release"}"
+ OSS_URL_PREFIX="${OSS_URL_PREFIX:-"http://opensource-pipeline.oss-cn-hongkong.aliyuncs.com/compile-release"}"
+ install_ossutil
+ cd "${dir_name}" || return 1
+ if ossutil cp -f \
+ -i "${OSS_accessKeyID}" \
+ -k "${OSS_accessKeySecret}" \
+ "${file_name}" \
+ "${OSS_DIR}/${file_name}"; then
+ if ! check_oss_file_exist "${file_name}"; then return 1; fi
+ cd - || return 1
+ echo "INFO: success to upload ${file_name} to ${OSS_URL_PREFIX}/${file_name}" && return 0
+ else
+ cd - || return 1
+ echo "ERROR: upload ${file_name} fail" && return 1
+ fi
+}
+
+function upload_doris_log_to_oss() {
+ OSS_DIR="oss://opensource-pipeline/regression"
+ OSS_URL_PREFIX="http://opensource-pipeline.oss-cn-hongkong.aliyuncs.com/regression"
+ upload_file_to_oss "$1"
+}
diff --git a/regression-test/pipeline/tpch/tpch-sf100/clean.sh b/regression-test/pipeline/tpch/tpch-sf100/clean.sh
new file mode 100644
index 0000000000..16fa490ef9
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/clean.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Build Step: Command Line
+: <:', separate by ','
+# property 'medium' has a higher priority than the extension of path
+#
+# Default value is ${DORIS_HOME}/storage, you should create it by hand.
+# storage_root_path = ${DORIS_HOME}/storage
+
+# Default dirs to put jdbc drivers,default value is ${DORIS_HOME}/jdbc_drivers
+# jdbc_drivers_dir = ${DORIS_HOME}/jdbc_drivers
+
+# Advanced configurations
+# sys_log_dir = ${DORIS_HOME}/log
+# sys_log_roll_mode = SIZE-MB-1024
+# sys_log_roll_num = 10
+# sys_log_verbose_modules = *
+# log_buffer_level = -1
+# palo_cgroups
+
+priority_networks=172.16.0.0/24
+storage_root_path=/mnt/datadisk0/doris-storage
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/external.json b/regression-test/pipeline/tpch/tpch-sf100/conf/external.json
new file mode 100644
index 0000000000..9461d836e6
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/external.json
@@ -0,0 +1,26 @@
+[
+ {
+ "file": "docker/thirdparties/docker-compose/mysql/mysql-5.7.env",
+ "replacements": {
+ "DOCKER_MYSQL_57_EXTERNAL_PORT": 7111
+ }
+ },
+ {
+ "file": "docker/thirdparties/docker-compose/postgresql/postgresql-14.env",
+ "replacements": {
+ "DOCKER_PG_14_EXTERNAL_PORT": 7121
+ }
+ },
+ {
+ "file": "docker/thirdparties/docker-compose/hive/gen_env.sh",
+ "replacements": {
+ "FS_PORT": 7131,
+ "HMS_PORT": 7141
+ }
+ }, {
+ "file": "docker/thirdparties/start-thirdparties-docker.sh",
+ "replacements": {
+ "CONTAINER_UID": "doris-regression-fakeid-fakecommit"
+ }
+ }
+]
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf b/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
new file mode 100644
index 0000000000..7c02d3898d
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#####################################################################
+## The uppercase properties are read and exported by bin/start_fe.sh.
+## To see all Frontend configurations,
+## see fe/src/org/apache/doris/common/Config.java
+#####################################################################
+
+CUR_DATE=`date +%Y%m%d-%H%M%S`
+
+# the output dir of stderr and stdout
+LOG_DIR = ${DORIS_HOME}/log
+
+JAVA_OPTS="-Dsun.security.krb5.debug=true -Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx8192m -XX:+UseMembar -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+PrintGCDateStamps -XX:+PrintGCDetails -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xloggc:$DORIS_HOME/log/fe.gc.log.$CUR_DATE"
+
+# For jdk 9+, this JAVA_OPTS will be used as default JVM options
+JAVA_OPTS_FOR_JDK_9="-Dsun.security.krb5.debug=true -Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx8192m -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xlog:gc*:$DORIS_HOME/log/fe.gc.log.$CUR_DATE:time"
+
+##
+## the lowercase properties are read by main program.
+##
+
+# INFO, WARN, ERROR, FATAL
+sys_log_level = INFO
+
+# NORMAL, BRIEF, ASYNC
+sys_log_mode = NORMAL
+
+# store metadata, must be created before start FE.
+# Default value is ${DORIS_HOME}/doris-meta
+# meta_dir = ${DORIS_HOME}/doris-meta
+
+# Default dirs to put jdbc drivers,default value is ${DORIS_HOME}/jdbc_drivers
+# jdbc_drivers_dir = ${DORIS_HOME}/jdbc_drivers
+
+http_port = 8030
+rpc_port = 9020
+query_port = 9030
+edit_log_port = 9010
+arrow_flight_sql_port = -1
+
+# Choose one if there are more than one ip except loopback address.
+# Note that there should at most one ip match this list.
+# If no ip match this rule, will choose one randomly.
+# use CIDR format, e.g. 10.10.10.0/24 or IP format, e.g. 10.10.10.1
+# Default value is empty.
+# priority_networks = 10.10.10.0/24;192.168.0.0/16
+
+# Advanced configurations
+# log_roll_size_mb = 1024
+# sys_log_dir = ${DORIS_HOME}/log
+# sys_log_roll_num = 10
+# sys_log_verbose_modules = org.apache.doris
+# audit_log_dir = ${DORIS_HOME}/log
+# audit_log_modules = slow_query, query
+# audit_log_roll_num = 10
+# meta_delay_toleration_second = 10
+# qe_max_connection = 1024
+# qe_query_timeout_second = 300
+# qe_slow_log_ms = 5000
+
+priority_networks=172.16.0.0/24
+meta_dir=/mnt/datadisk0/doris-meta
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/odbcinst.ini b/regression-test/pipeline/tpch/tpch-sf100/conf/odbcinst.ini
new file mode 100644
index 0000000000..41e21f9227
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/odbcinst.ini
@@ -0,0 +1,43 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Example driver definitions, you should not use the example odbc driver
+# before you prepare env in your server
+
+# Driver from the postgresql-odbc package
+# Setup from the unixODBC package
+[PostgreSQL]
+Description = ODBC for PostgreSQL
+Driver = /usr/lib/psqlodbc.so
+Setup = /usr/lib/libodbcpsqlS.so
+FileUsage = 1
+
+
+# Driver from the mysql-connector-odbc package
+# Setup from the unixODBC package
+[MySQL ODBC 8.0 Unicode Driver]
+Description = ODBC for MySQL
+Driver = /usr/lib64/libmyodbc8w.so
+FileUsage = 1
+
+# Driver from the oracle-connector-odbc package
+# Setup from the unixODBC package
+[Oracle 19 ODBC driver]
+Description=Oracle ODBC driver for Oracle 19
+Driver=/usr/lib/libsqora.so.19.1
+
+
diff --git a/regression-test/pipeline/tpch/tpch-sf100/conf/regression-conf.groovy b/regression-test/pipeline/tpch/tpch-sf100/conf/regression-conf.groovy
new file mode 100644
index 0000000000..364a7103fe
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/conf/regression-conf.groovy
@@ -0,0 +1,111 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+/* ******* Do not commit this file unless you know what you are doing ******* */
+
+// **Note**: default db will be create if not exist
+defaultDb = "regression_test"
+
+jdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
+targetJdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
+jdbcUser = "root"
+jdbcPassword = ""
+
+feSourceThriftAddress = "127.0.0.1:9020"
+feTargetThriftAddress = "127.0.0.1:9020"
+feSyncerUser = "root"
+feSyncerPassword = ""
+
+feHttpAddress = "172.19.0.2:8131"
+feHttpUser = "root"
+feHttpPassword = ""
+
+// set DORIS_HOME by system properties
+// e.g. java -DDORIS_HOME=./
+suitePath = "${DORIS_HOME}/regression-test/suites"
+dataPath = "${DORIS_HOME}/regression-test/data"
+pluginPath = "${DORIS_HOME}/regression-test/plugins"
+realDataPath = "${DORIS_HOME}/regression-test/realdata"
+// sf1DataPath can be url like "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com" or local path like "/data"
+//sf1DataPath = "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com"
+
+// will test /.groovy
+// empty group will test all group
+testGroups = ""
+// empty suite will test all suite
+testSuites = ""
+// empty directories will test all directories
+testDirectories = ""
+
+// this groups will not be executed
+excludeGroups = ""
+// this suites will not be executed
+
+excludeSuites = "test_sql_block_rule,test_profile,test_spark_load,test_refresh_mtmv,test_bitmap_filter,test_jdbc_query_mysql"
+
+// this directories will not be executed
+excludeDirectories = "workload_manager_p1,fault_injection_p0"
+
+customConf1 = "test_custom_conf_value"
+
+// for test csv with header
+enableHdfs=false // set to true if hdfs is ready
+hdfsFs = "hdfs://127.0.0.1:9000"
+hdfsUser = "doris-test"
+hdfsPasswd = ""
+brokerName = "broker_name"
+
+// broker load test config
+enableBrokerLoad=true
+
+// jdbc connector test config
+// To enable jdbc test, you need first start mysql/pg container.
+// See `docker/thirdparties/start-thirdparties-docker.sh`
+enableJdbcTest=false
+mysql_57_port=7111
+pg_14_port=7121
+mariadb_10_port=3326
+// hive catalog test config
+// To enable jdbc test, you need first start hive container.
+// See `docker/thirdparties/start-thirdparties-docker.sh`
+enableHiveTest=false
+hms_port=7141
+hiveServerPort=10000
+
+// kafka test config
+// to enable kafka test, you need firstly to start kafka container
+// See `docker/thirdparties/start-thirdparties-docker.sh`
+enableKafkaTest=true
+kafka_port=19193
+
+// iceberg test config
+iceberg_rest_uri_port=18181
+
+enableEsTest=false
+es_6_port=19200
+es_7_port=29200
+es_8_port=39200
+
+cacheDataPath = "/data/regression/"
+
+s3Endpoint = "cos.ap-hongkong.myqcloud.com"
+s3BucketName = "doris-build-hk-1308700295"
+s3Region = "ap-hongkong"
+
+max_failure_num=50
+
+externalEnvIp="127.0.0.1"
diff --git a/regression-test/pipeline/tpch/tpch-sf100/deploy.sh b/regression-test/pipeline/tpch/tpch-sf100/deploy.sh
new file mode 100644
index 0000000000..1ff17ebfba
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/deploy.sh
@@ -0,0 +1,132 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Build Step: Command Line
+: </dev/null; then sudo apt install -y pigz; fi
+ tar -I pigz -xf "${pull_request_id:-}_${commit_id:-}.tar.gz"
+ if [[ -d output && -d output/fe && -d output/be ]]; then
+ echo "INFO: be version: $(./output/be/lib/doris_be --version)"
+ rm -rf "${pull_request_id}_${commit_id}.tar.gz"
+ fi
+else
+ echo "ERROR: download compiled binary failed" && exit 1
+fi
+
+echo "#### 3. copy conf from regression-test/pipeline/tpch/tpch-sf100/conf/"
+rm -f "${DORIS_HOME}"/fe/conf/fe_custom.conf "${DORIS_HOME}"/be/conf/be_custom.conf
+if [[ -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf &&
+ -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf ]]; then
+ cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf "${DORIS_HOME}"/fe/conf/
+ cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf "${DORIS_HOME}"/be/conf/
+else
+ echo "ERROR: doris conf file missing in ${teamcity_build_checkoutDir}/regression-test/pipeline/tpch/tpch-sf100/conf/"
+ exit 1
+fi
+
+echo "#### 4. start Doris"
+meta_dir=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf meta_dir)
+storage_root_path=$(get_doris_conf_value "${DORIS_HOME}"/be/conf/be.conf storage_root_path)
+mkdir -p "${meta_dir}"
+mkdir -p "${storage_root_path}"
+if ! start_doris_fe; then
+ echo "WARNING: Start doris fe failed at first time"
+ print_doris_fe_log
+ echo "WARNING: delete meta_dir and storage_root_path, then retry"
+ rm -rf "${meta_dir:?}/"*
+ rm -rf "${storage_root_path:?}/"*
+ if ! start_doris_fe; then
+ need_backup_doris_logs=true
+ exit_flag=1
+ fi
+fi
+if ! start_doris_be; then
+ echo "WARNING: Start doris be failed at first time"
+ print_doris_be_log
+ echo "WARNING: delete storage_root_path, then retry"
+ rm -rf "${storage_root_path:?}/"*
+ if ! start_doris_be; then
+ need_backup_doris_logs=true
+ exit_flag=1
+ fi
+fi
+if ! add_doris_be_to_fe; then
+ need_backup_doris_logs=true
+ exit_flag=1
+else
+ # wait 10s for doris totally started, otherwize may encounter the error below,
+ # ERROR 1105 (HY000) at line 102: errCode = 2, detailMessage = Failed to find enough backend, please check the replication num,replication tag and storage medium.
+ sleep 10s
+fi
+
+echo "#### 5. set session variables"
+echo "TODO"
+
+echo "#### 6. check if need backup doris logs"
+if ${need_backup_doris_logs}; then
+ print_doris_fe_log
+ print_doris_be_log
+ if archive_doris_logs "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"; then
+ upload_doris_log_to_oss "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"
+ fi
+fi
+
+exit "${exit_flag}"
diff --git a/regression-test/pipeline/tpch/tpch-sf100/prepare.sh b/regression-test/pipeline/tpch/tpch-sf100/prepare.sh
new file mode 100644
index 0000000000..5376f7c730
--- /dev/null
+++ b/regression-test/pipeline/tpch/tpch-sf100/prepare.sh
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Build Step: Command Line
+: </dev/null; then exit 1; fi
+ line_end=$(sed -n '/^Total hot run time/=' "${teamcity_build_checkoutDir}"/run-tpch-queries.log)
+ line_begin=$((line_end - 23))
+ comment_body="Tpch sf${SF} test result on commit ${commit_id:-}, data reload: ${data_reload:-"false"}
+
+run tpch-sf${SF} query with default conf and session variables
+$(sed -n "${line_begin},${line_end}p" "${teamcity_build_checkoutDir}"/run-tpch-queries.log)"
+
+ echo "#### 3. run tpch-sf${SF} query with runtime_filter_mode=off"
+ set_session_variable runtime_filter_mode off
+ bash "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh | tee "${teamcity_build_checkoutDir}"/run-tpch-queries.log
+ if ! grep '^Total hot run time' "${teamcity_build_checkoutDir}"/run-tpch-queries.log >/dev/null; then exit 1; fi
+ line_end=$(sed -n '/^Total hot run time/=' "${teamcity_build_checkoutDir}"/run-tpch-queries.log)
+ line_begin=$((line_end - 23))
+ comment_body="${comment_body}
+
+run tpch-sf${SF} query with default conf and set session variable runtime_filter_mode=off
+$(sed -n "${line_begin},${line_end}p" "${teamcity_build_checkoutDir}"/run-tpch-queries.log)"
+
+ echo "#### 4. comment result on tpch"
+    comment_body=$(echo "${comment_body}" | sed -e ':a;N;$!ba;s/\t/\\t/g;s/\n/\\n/g') # escape all tab characters as \t and newlines as \n
+ create_an_issue_comment_tpch "${pull_request_id:-}" "${comment_body}"
+
+ stop_doris
+)
+exit_flag="$?"
+
+echo "#### 5. check if need backup doris logs"
+if [[ ${exit_flag} != "0" ]]; then
+ print_doris_fe_log
+ print_doris_be_log
+ if archive_doris_logs "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"; then
+ upload_doris_log_to_oss "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"
+ fi
+fi
+
+exit "${exit_flag}"
diff --git a/tools/tpch-tools/bin/create-tpch-tables.sh b/tools/tpch-tools/bin/create-tpch-tables.sh
index 8b9635a4ce..0dd8cd81de 100755
--- a/tools/tpch-tools/bin/create-tpch-tables.sh
+++ b/tools/tpch-tools/bin/create-tpch-tables.sh
@@ -106,7 +106,7 @@ echo "SF: ${SCALE_FACTOR}"
mysql -h"${FE_HOST}" -u"${USER}" -P"${FE_QUERY_PORT}" -e "CREATE DATABASE IF NOT EXISTS ${DB}"
if [[ ${SCALE_FACTOR} -eq 1 ]]; then
- echo "Run SQLs from ${CURDIR}/../ddl/create-tpch-tables.sql"
+ echo "Run SQLs from ${CURDIR}/../ddl/create-tpch-tables-sf1.sql"
mysql -h"${FE_HOST}" -u"${USER}" -P"${FE_QUERY_PORT}" -D"${DB}" <"${CURDIR}"/../ddl/create-tpch-tables-sf1.sql
elif [[ ${SCALE_FACTOR} -eq 100 ]]; then
echo "Run SQLs from ${CURDIR}/../ddl/create-tpch-tables-sf100.sql"
diff --git a/tools/tpch-tools/bin/load-tpch-data.sh b/tools/tpch-tools/bin/load-tpch-data.sh
index 3e4e7f6bfb..c56d2ea71a 100755
--- a/tools/tpch-tools/bin/load-tpch-data.sh
+++ b/tools/tpch-tools/bin/load-tpch-data.sh
@@ -116,49 +116,49 @@ echo "DB: ${DB}"
function load_region() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: r_regionkey, r_name, r_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/region/_stream_load
}
function load_nation() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: n_nationkey, n_name, n_regionkey, n_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/nation/_stream_load
}
function load_supplier() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: s_suppkey, s_name, s_address, s_nationkey, s_phone, s_acctbal, s_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/supplier/_stream_load
}
function load_customer() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: c_custkey, c_name, c_address, c_nationkey, c_phone, c_acctbal, c_mktsegment, c_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/customer/_stream_load
}
function load_part() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: p_partkey, p_name, p_mfgr, p_brand, p_type, p_size, p_container, p_retailprice, p_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/part/_stream_load
}
function load_partsupp() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: ps_partkey, ps_suppkey, ps_availqty, ps_supplycost, ps_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/partsupp/_stream_load
}
function load_orders() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: o_orderkey, o_custkey, o_orderstatus, o_totalprice, o_orderdate, o_orderpriority, o_clerk, o_shippriority, o_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/orders/_stream_load
}
function load_lineitem() {
echo "$*"
- curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
+ curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: l_orderkey, l_partkey, l_suppkey, l_linenumber, l_quantity, l_extendedprice, l_discount, l_tax, l_returnflag,l_linestatus, l_shipdate,l_commitdate,l_receiptdate,l_shipinstruct,l_shipmode,l_comment,temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/lineitem/_stream_load
}
diff --git a/tools/tpch-tools/bin/run-tpch-queries.sh b/tools/tpch-tools/bin/run-tpch-queries.sh
index ce27cf18eb..d88062183c 100755
--- a/tools/tpch-tools/bin/run-tpch-queries.sh
+++ b/tools/tpch-tools/bin/run-tpch-queries.sh
@@ -180,5 +180,6 @@ for i in ${query_array[@]}; do
done
echo "Total cold run time: ${cold_run_sum} ms"
+# the tpch pipeline depends on this 'Total hot run time' string
echo "Total hot run time: ${best_hot_run_sum} ms"
echo 'Finish tpch queries.'