[ci](perf) add new pipeline of tpch-sf100 (#26334)

* [ci](perf) add new pipeline of tpch-sf100
Co-authored-by: stephen <hello-stephen@qq.com>
Dongyang Li, 2023-11-08 15:32:02 +08:00 (committed by GitHub)
parent a3666aa87e, commit f018b00646
16 changed files with 1169 additions and 10 deletions

View File

@ -110,7 +110,7 @@ DECLARE_Int32(brpc_num_threads);
// If no ip match this rule, will choose one randomly.
DECLARE_String(priority_networks);
// performance moderate or or compact, only tcmalloc compile
// performance moderate or compact, only tcmalloc compile
DECLARE_String(memory_mode);
// process memory limit specified as number of bytes

View File

@ -0,0 +1,259 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
function get_doris_conf_value() {
local conf_file="$1"
local conf_key="$2"
if [[ -z "${conf_key}" ]]; then return 1; fi
local conf_value
if line="$(grep "^${conf_key}" "${conf_file}")"; then
conf_value="${line#*=}" # take the substring after the first '=' as the value
conf_value="$(echo "${conf_value}" | xargs)" # strip leading and trailing whitespace
echo "${conf_value}"
return 0
else
echo "ERROR: can not find ${conf_key} in ${conf_file}"
return 1
fi
}
function set_doris_conf_value() {
local conf_file="$1"
local conf_key="$2"
local conf_value="$3"
if [[ -z "${conf_value}" ]]; then return 1; fi
local origin_conf_value
if origin_conf_value="$(get_doris_conf_value "${conf_file}" "${conf_key}")"; then
echo "origin_conf_value is ${origin_conf_value}"
sed -i "/^${conf_key}/d" "${conf_file}"
fi
echo "${conf_key}=${conf_value}" | tee -a "${conf_file}"
}
# set -x
# get_doris_conf_value "$1" "$2"
# set_doris_conf_value "$1" "$2" "$3"
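# Example with an assumed conf line 'query_port = 9030' (illustrative only):
#   get_doris_conf_value fe.conf query_port         # prints: 9030
#   set_doris_conf_value fe.conf query_port 9031    # rewrites the entry to 'query_port=9031'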
function start_doris_fe() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
if ! java -version >/dev/null; then sudo apt install openjdk-8-jdk -y >/dev/null; fi
JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*' | sed -n '1p')"
export JAVA_HOME
"${DORIS_HOME}"/fe/bin/start_fe.sh --daemon
if ! mysql --version >/dev/null; then sudo apt install -y mysql-client; fi
query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
cl="mysql -h127.0.0.1 -P${query_port} -uroot "
local i=1
while [[ $((i++)) -lt 60 ]]; do
fe_version=$(${cl} -e 'show frontends\G' 2>/dev/null | grep -i version | cut -d: -f2)
if [[ -n "${fe_version}" ]] && [[ "${fe_version}" != "NULL" ]]; then
echo "INFO: doris fe started, fe version: ${fe_version}" && return 0
else
echo "${i}/60, Wait for Frontend ready, sleep 2 seconds ..." && sleep 2
fi
done
if [[ ${i} -ge 60 ]]; then echo "ERROR: Start Doris Frontend Failed after 2 mins wait..." && return 1; fi
}
function start_doris_be() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
if ! java -version >/dev/null; then sudo apt install openjdk-8-jdk -y >/dev/null; fi
JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*' | sed -n '1p')"
export JAVA_HOME
sysctl -w vm.max_map_count=2000000 &&
ulimit -n 200000 &&
ulimit -c unlimited &&
swapoff -a &&
"${DORIS_HOME}"/be/bin/start_be.sh --daemon
sleep 2
local i=1
while [[ $((i++)) -lt 5 ]]; do
if ! pgrep -fia doris_be >/dev/null; then
echo "ERROR: start doris be failed." && return 1
else
sleep 2
fi
done
if [[ ${i} -ge 5 ]]; then
echo "INFO: doris be started, be version: $("${DORIS_HOME}"/be/lib/doris_be --version)"
fi
}
function add_doris_be_to_fe() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
if ! mysql --version >/dev/null; then sudo apt install -y mysql-client; fi
query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
heartbeat_service_port=$(get_doris_conf_value "${DORIS_HOME}"/be/conf/be.conf heartbeat_service_port)
cl="mysql -h127.0.0.1 -P${query_port} -uroot "
if ${cl} -e "ALTER SYSTEM ADD BACKEND '127.0.0.1:${heartbeat_service_port}';"; then echo; else echo; fi
i=1
while [[ $((i++)) -lt 60 ]]; do
if be_ready_count=$(${cl} -e 'show backends\G' | grep -c 'Alive: true') &&
[[ ${be_ready_count} -eq 1 ]]; then
echo -e "INFO: add doris be success, be version: \n$(${cl} -e 'show backends\G' | grep 'Version')" && break
else
echo 'Wait for Backends ready, sleep 2 seconds ...' && sleep 2
fi
done
if [[ ${i} -ge 60 ]]; then echo "ERROR: Add Doris Backend Failed after 2 mins wait..." && return 1; fi
}
function stop_doris() {
if "${DORIS_HOME}"/fe/bin/stop_fe.sh &&
"${DORIS_HOME}"/be/bin/stop_be.sh; then
echo "INFO: normally stoped doris"
else
pgrep -fi doris | xargs kill -9
echo "WARNING: force stoped doris"
fi
}
function check_tpch_table_rows() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
db_name="$1"
scale_factor="$2"
if [[ -z "${scale_factor}" ]]; then return 1; fi
query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
cl="mysql -h127.0.0.1 -P${query_port} -uroot "
declare -A table_rows
if [[ "${scale_factor}" == "100" ]]; then
table_rows=(['region']=5 ['nation']=25 ['supplier']=1000000 ['customer']=15000000 ['part']=20000000 ['partsupp']=80000000 ['orders']=150000000 ['lineitem']=600037902)
else
table_rows=(['region']=5 ['nation']=25 ['supplier']=10000 ['customer']=150000 ['part']=200000 ['partsupp']=800000 ['orders']=1500000 ['lineitem']=6001215)
fi
for table in ${!table_rows[*]}; do
rows_actual=$(${cl} -D"${db_name}" -e"SELECT count(*) FROM ${table}" | sed -n '2p')
rows_expect=${table_rows[${table}]}
if [[ ${rows_actual} -ne ${rows_expect} ]]; then
echo "WARNING: ${table} actual rows: ${rows_actual}, expect rows: ${rows_expect}" && return 1
fi
done
}
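# Hypothetical usage, checking row counts after a load (db name and scale factor are examples):
#   check_tpch_table_rows tpch_sf100 100    # returns non-zero if any table's row count mismatches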
get_session_variable() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
usage="
usage:
get_session_variable SESSION_VARIABLE
return the value of the SESSION_VARIABLE
"
if [[ -z "$1" ]]; then echo "${usage}" && return 1; else sv="$1"; fi
query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
cl="mysql -h127.0.0.1 -P${query_port} -uroot "
if ret=$(${cl} -e"show variables like '${sv}'\G" | grep " Value: "); then
echo "${ret/*Value: /}"
else
return 1
fi
}
set_session_variables_from_file() {
usage="
usage:
set_session_variables_from_file FILE
FILE content like '
session_variable_key session_variable_value
...
'
"
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
if [[ -z "$1" ]]; then echo "${usage}" && return 1; else sv_file="$1"; fi
query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
cl="mysql -h127.0.0.1 -P${query_port} -uroot "
ret=0
while read -r sv; do
if [[ "${sv}" == "#"* ]]; then continue; fi
k=$(echo "${sv}" | awk '{print $1}')
v=$(echo "${sv}" | awk '{print $2}' | tr '[:upper:]' '[:lower:]')
if ${cl} -e"set global ${k}=${v};"; then
if [[ "$(get_session_variable "${k}" | tr '[:upper:]' '[:lower:]')" == "${v}" ]]; then
echo "INFO: set global ${k}=${v};"
else
echo "ERROR: set global ${k}=${v};" && ret=1
fi
else
ret=1
fi
done <"${sv_file}"
return "${ret}"
}
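# Example variable file (keys and values are illustrative, not required settings):
#   # comment lines are skipped
#   runtime_filter_mode global
#   parallel_fragment_exec_instance_num 8
# Hypothetical usage: set_session_variables_from_file /path/to/session_variables.txt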
set_session_variable() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
k="$1"
v="$2"
if [[ -z "${v}" ]]; then return 1; fi
query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
cl="mysql -h127.0.0.1 -P${query_port} -uroot "
if ${cl} -e"set global ${k}=${v};"; then
if [[ "$(get_session_variable "${k}" | tr '[:upper:]' '[:lower:]')" == "${v}" ]]; then
echo "INFO: set global ${k}=${v};"
else
echo "ERROR: set global ${k}=${v};" && return 1
fi
else
return 1
fi
}
archive_doris_logs() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
archive_name="$1"
if [[ -z ${archive_name} ]]; then echo "ERROR: archive file name required" && return 1; fi
if tar -I pigz \
--directory "${DORIS_HOME}" \
--absolute-names \
-cf "${DORIS_HOME}/${archive_name}" \
"${DORIS_HOME}"/fe/conf \
"${DORIS_HOME}"/fe/log \
"${DORIS_HOME}"/be/conf \
"${DORIS_HOME}"/be/log; then
echo "${DORIS_HOME}/${archive_name}"
else
return 1
fi
}
print_doris_fe_log() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/fe/log/fe.out--------------------"
tail -n 100 "${DORIS_HOME}"/fe/log/fe.out
echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/fe/log/fe.log--------------------"
tail -n 100 "${DORIS_HOME}"/fe/log/fe.log
echo "WARNING: ----------------------------------------"
}
print_doris_be_log() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/be/log/be.out--------------------"
tail -n 100 "${DORIS_HOME}"/be/log/be.out
echo "WARNING: --------------------tail -n 100 ${DORIS_HOME}/be/log/be.INFO--------------------"
tail -n 100 "${DORIS_HOME}"/be/log/be.INFO
echo "WARNING: ----------------------------------------"
}

View File

@ -0,0 +1,65 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
function create_an_issue_comment() {
local ISSUE_NUMBER="$1"
local COMMENT_BODY="$2"
if [[ -z "${COMMENT_BODY}" ]]; then return 1; fi
if [[ -z "${GITHUB_TOKEN}" ]]; then return 1; fi
local OWNER='apache'
local REPO='doris'
COMMENT_BODY=$(echo "${COMMENT_BODY}" | sed -e ':a;N;$!ba;s/\t/\\t/g;s/\n/\\n/g') # escape tabs as \t and newlines as \n so the body stays valid JSON
if ret=$(curl -s \
-X POST \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GITHUB_TOKEN:-}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/"${OWNER}"/"${REPO}"/issues/"${ISSUE_NUMBER}"/comments \
-d "{\"body\": \"${COMMENT_BODY}\"}"); then
if echo "${ret}" | grep "Problems parsing JSON"; then
is_succ=false
else
is_succ=true
fi
else
is_succ=false
fi
if ${is_succ}; then
echo -e "\033[32m Create issue(${ISSUE_NUMBER}) comment SUCCESS... \033[0m" && return 0
else
echo -e "\033[31m Create issue(${ISSUE_NUMBER}) comment FAIL... \033[0m" && return 1
fi
}
function create_an_issue_comment_tpch() {
local ISSUE_NUMBER="$1"
local COMMENT_BODY="$2"
local machine='aliyun_ecs.c7a.8xlarge_32C64G'
COMMENT_BODY="
<details>
<summary>TPC-H test result on machine: '${machine}'</summary>
\`\`\`
${COMMENT_BODY}
\`\`\`
</details>
"
create_an_issue_comment "${ISSUE_NUMBER}" "${COMMENT_BODY}"
}
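# Hypothetical usage (issue number and body are examples; requires GITHUB_TOKEN in the env):
#   create_an_issue_comment_tpch "${pull_request_id}" "$(cat run-tpch-queries.log)"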

View File

@ -0,0 +1,105 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
function install_ossutil() {
if command -v ossutil >/dev/null; then return 0; fi
if [[ -z ${OSS_accessKeyID} || -z ${OSS_accessKeySecret} ]]; then
echo "ERROR: env OSS_accessKeyID or OSS_accessKeySecret not set."
return 1
fi
curl https://gosspublic.alicdn.com/ossutil/install.sh | sudo bash
echo "[Credentials]
language=EN
endpoint=oss-cn-hongkong-internal.aliyuncs.com
accessKeyID=${OSS_accessKeyID:-}
accessKeySecret=${OSS_accessKeySecret:-}
" >~/.ossutilconfig
}
function check_oss_file_exist() {
if [[ -z ${OSS_accessKeyID} || -z ${OSS_accessKeySecret} ]]; then
echo "ERROR: env OSS_accessKeyID and OSS_accessKeySecret not set"
return 1
fi
# Check if the file exists.
# file_name like ${pull_request_id}_${commit_id}.tar.gz
local file_name="$1"
OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile-release"}"
install_ossutil
if ossutil stat \
-i "${OSS_accessKeyID}" \
-k "${OSS_accessKeySecret}" \
"${OSS_DIR}/${file_name}"; then
echo "INFO: ${file_name} file exists." && return 0
else
echo "ERROR: ${file_name} file not exits." && return 1
fi
}
function download_oss_file() {
# file_name like ${pull_request_id}_${commit_id}.tar.gz
local file_name="$1"
if ! check_oss_file_exist "${file_name}"; then return 1; fi
OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile-release"}"
install_ossutil
if ossutil cp -f \
"${OSS_DIR}/${file_name}" \
"${file_name}"; then
echo "INFO: download ${file_name} success" && return 0
else
echo "ERROR: download ${file_name} fail" && return 1
fi
}
function upload_file_to_oss() {
if [[ -z ${OSS_accessKeyID} || -z ${OSS_accessKeySecret} ]]; then
echo "ERROR: env OSS_accessKeyID and OSS_accessKeySecret not set"
return 1
fi
if [[ ! -f "$1" ]] || [[ "$1" != "/"* ]]; then
echo "ERROR: '$1' is not an absolute path"
return 1
fi
# file_name like ${pull_request_id}_${commit_id}.tar.gz
local file_name
local dir_name
dir_name="$(dirname "${1}")"
file_name="$(basename "${1}")"
OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile-release"}"
OSS_URL_PREFIX="${OSS_URL_PREFIX:-"http://opensource-pipeline.oss-cn-hongkong.aliyuncs.com/compile-release"}"
install_ossutil
cd "${dir_name}" || return 1
if ossutil cp -f \
-i "${OSS_accessKeyID}" \
-k "${OSS_accessKeySecret}" \
"${file_name}" \
"${OSS_DIR}/${file_name}"; then
if ! check_oss_file_exist "${file_name}"; then return 1; fi
cd - || return 1
echo "INFO: success to upload ${file_name} to ${OSS_URL_PREFIX}/${file_name}" && return 0
else
cd - || return 1
echo "ERROR: upload ${file_name} fail" && return 1
fi
}
function upload_doris_log_to_oss() {
OSS_DIR="oss://opensource-pipeline/regression"
OSS_URL_PREFIX="http://opensource-pipeline.oss-cn-hongkong.aliyuncs.com/regression"
upload_file_to_oss "$1"
}
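# Hypothetical usage (file names are examples; requires OSS_accessKeyID/OSS_accessKeySecret in the env):
#   download_oss_file "${pull_request_id}_${commit_id}.tar.gz"
#   upload_doris_log_to_oss "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"    # must be an absolute path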

View File

@ -0,0 +1,40 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Build Step: Command Line
: <<EOF
#!/bin/bash
# Execute step even if some of the previous steps failed
teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
if [[ -f "${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/clean.sh ]]; then
cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
bash -x clean.sh
else
echo "Build Step file missing: regression-test/pipeline/tpch/tpch-sf100/clean.sh" && exit 1
fi
EOF
## clean.sh content ##
# stop_doris
source ../../common/doris-utils.sh
DORIS_HOME="${teamcity_build_checkoutDir}/output"
export DORIS_HOME
stop_doris

View File

@ -0,0 +1,85 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
CUR_DATE=`date +%Y%m%d-%H%M%S`
PPROF_TMPDIR="$DORIS_HOME/log/"
JAVA_OPTS="-Xmx1024m -DlogPath=$DORIS_HOME/log/jni.log -Xloggc:$DORIS_HOME/log/be.gc.log.$CUR_DATE -Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true -Dsun.java.command=DorisBE -XX:-CriticalJNINatives -DJDBC_MIN_POOL=1 -DJDBC_MAX_POOL=100 -DJDBC_MAX_IDLE_TIME=300000 -DJDBC_MAX_WAIT_TIME=5000"
# For jdk 9+, this JAVA_OPTS will be used as default JVM options
JAVA_OPTS_FOR_JDK_9="-Xmx1024m -DlogPath=$DORIS_HOME/log/jni.log -Xlog:gc:$DORIS_HOME/log/be.gc.log.$CUR_DATE -Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true -Dsun.java.command=DorisBE -XX:-CriticalJNINatives -DJDBC_MIN_POOL=1 -DJDBC_MAX_POOL=100 -DJDBC_MAX_IDLE_TIME=300000 -DJDBC_MAX_WAIT_TIME=5000"
# Since 1.2, JAVA_HOME needs to be set to run the BE process.
# JAVA_HOME=/path/to/jdk/
# https://github.com/apache/doris/blob/master/docs/zh-CN/community/developer-guide/debug-tool.md#jemalloc-heap-profile
# https://jemalloc.net/jemalloc.3.html
JEMALLOC_CONF="percpu_arena:percpu,background_thread:true,metadata_thp:auto,muzzy_decay_ms:15000,dirty_decay_ms:15000,oversize_threshold:0,lg_tcache_max:20,prof:false,lg_prof_interval:32,lg_prof_sample:19,prof_gdump:false,prof_accum:false,prof_leak:false,prof_final:false"
JEMALLOC_PROF_PRFIX=""
# INFO, WARNING, ERROR, FATAL
sys_log_level = INFO
# ports for admin, web, heartbeat service
be_port = 9060
webserver_port = 8040
heartbeat_service_port = 9050
brpc_port = 8060
arrow_flight_sql_port = -1
# HTTPS configures
enable_https = false
# path of certificate in PEM format.
ssl_certificate_path = "$DORIS_HOME/conf/cert.pem"
# path of private key in PEM format.
ssl_private_key_path = "$DORIS_HOME/conf/key.pem"
# Choose one if there are more than one ip except loopback address.
# Note that at most one ip should match this list.
# If no ip matches this rule, one will be chosen randomly.
# use CIDR format, e.g. 10.10.10.0/24 or IP format, e.g. 10.10.10.1
# Default value is empty.
# priority_networks = 10.10.10.0/24;192.168.0.0/16
# data root path, separate by ';'
# You can specify the storage type for each root path, HDD (cold data) or SSD (hot data)
# eg:
# storage_root_path = /home/disk1/doris;/home/disk2/doris;/home/disk2/doris
# storage_root_path = /home/disk1/doris,medium:SSD;/home/disk2/doris,medium:SSD;/home/disk2/doris,medium:HDD
# /home/disk2/doris,medium:HDD(default)
#
# you also can specify the properties by setting '<property>:<value>', separate by ','
# property 'medium' has a higher priority than the extension of path
#
# Default value is ${DORIS_HOME}/storage, you should create it by hand.
# storage_root_path = ${DORIS_HOME}/storage
# Default dir to put jdbc drivers; default value is ${DORIS_HOME}/jdbc_drivers
# jdbc_drivers_dir = ${DORIS_HOME}/jdbc_drivers
# Advanced configurations
# sys_log_dir = ${DORIS_HOME}/log
# sys_log_roll_mode = SIZE-MB-1024
# sys_log_roll_num = 10
# sys_log_verbose_modules = *
# log_buffer_level = -1
# palo_cgroups
priority_networks=172.16.0.0/24
storage_root_path=/mnt/datadisk0/doris-storage

View File

@ -0,0 +1,26 @@
[
{
"file": "docker/thirdparties/docker-compose/mysql/mysql-5.7.env",
"replacements": {
"DOCKER_MYSQL_57_EXTERNAL_PORT": 7111
}
},
{
"file": "docker/thirdparties/docker-compose/postgresql/postgresql-14.env",
"replacements": {
"DOCKER_PG_14_EXTERNAL_PORT": 7121
}
},
{
"file": "docker/thirdparties/docker-compose/hive/gen_env.sh",
"replacements": {
"FS_PORT": 7131,
"HMS_PORT": 7141
}
},
{
"file": "docker/thirdparties/start-thirdparties-docker.sh",
"replacements": {
"CONTAINER_UID": "doris-regression-fakeid-fakecommit"
}
}
]
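
The consumer of this replacement file does not appear in this diff; below is a minimal sketch of how the entries might be applied, assuming jq is available and every target file assigns values as KEY=VALUE lines (both assumptions; the file name replacements.json is also hypothetical):

# apply each {file, replacements} entry: rewrite KEY=... lines in the target file
while IFS=$'\t' read -r file key value; do
    sed -i "s|^${key}=.*|${key}=${value}|" "${file}"
done < <(jq -r '.[] | .file as $f | .replacements | to_entries[] | [$f, .key, (.value|tostring)] | @tsv' replacements.json)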

View File

@ -0,0 +1,78 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#####################################################################
## The uppercase properties are read and exported by bin/start_fe.sh.
## To see all Frontend configurations,
## see fe/src/org/apache/doris/common/Config.java
#####################################################################
CUR_DATE=`date +%Y%m%d-%H%M%S`
# the output dir of stderr and stdout
LOG_DIR = ${DORIS_HOME}/log
JAVA_OPTS="-Dsun.security.krb5.debug=true -Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx8192m -XX:+UseMembar -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+PrintGCDateStamps -XX:+PrintGCDetails -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xloggc:$DORIS_HOME/log/fe.gc.log.$CUR_DATE"
# For jdk 9+, this JAVA_OPTS will be used as default JVM options
JAVA_OPTS_FOR_JDK_9="-Dsun.security.krb5.debug=true -Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx8192m -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xlog:gc*:$DORIS_HOME/log/fe.gc.log.$CUR_DATE:time"
##
## the lowercase properties are read by main program.
##
# INFO, WARN, ERROR, FATAL
sys_log_level = INFO
# NORMAL, BRIEF, ASYNC
sys_log_mode = NORMAL
# store metadata, must be created before start FE.
# Default value is ${DORIS_HOME}/doris-meta
# meta_dir = ${DORIS_HOME}/doris-meta
# Default dir to put jdbc drivers; default value is ${DORIS_HOME}/jdbc_drivers
# jdbc_drivers_dir = ${DORIS_HOME}/jdbc_drivers
http_port = 8030
rpc_port = 9020
query_port = 9030
edit_log_port = 9010
arrow_flight_sql_port = -1
# Choose one if there are more than one ip except loopback address.
# Note that at most one ip should match this list.
# If no ip matches this rule, one will be chosen randomly.
# use CIDR format, e.g. 10.10.10.0/24 or IP format, e.g. 10.10.10.1
# Default value is empty.
# priority_networks = 10.10.10.0/24;192.168.0.0/16
# Advanced configurations
# log_roll_size_mb = 1024
# sys_log_dir = ${DORIS_HOME}/log
# sys_log_roll_num = 10
# sys_log_verbose_modules = org.apache.doris
# audit_log_dir = ${DORIS_HOME}/log
# audit_log_modules = slow_query, query
# audit_log_roll_num = 10
# meta_delay_toleration_second = 10
# qe_max_connection = 1024
# qe_query_timeout_second = 300
# qe_slow_log_ms = 5000
priority_networks=172.16.0.0/24
meta_dir=/mnt/datadisk0/doris-meta

View File

@ -0,0 +1,43 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Example driver definitions, you should not use the example odbc driver
# before you prepare env in your server
# Driver from the postgresql-odbc package
# Setup from the unixODBC package
[PostgreSQL]
Description = ODBC for PostgreSQL
Driver = /usr/lib/psqlodbc.so
Setup = /usr/lib/libodbcpsqlS.so
FileUsage = 1
# Driver from the mysql-connector-odbc package
# Setup from the unixODBC package
[MySQL ODBC 8.0 Unicode Driver]
Description = ODBC for MySQL
Driver = /usr/lib64/libmyodbc8w.so
FileUsage = 1
# Driver from the oracle-connector-odbc package
# Setup from the unixODBC package
[Oracle 19 ODBC driver]
Description=Oracle ODBC driver for Oracle 19
Driver=/usr/lib/libsqora.so.19.1

View File

@ -0,0 +1,111 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/* ******* Do not commit this file unless you know what you are doing ******* */
// **Note**: the default db will be created if it does not exist
defaultDb = "regression_test"
jdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
targetJdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
jdbcUser = "root"
jdbcPassword = ""
feSourceThriftAddress = "127.0.0.1:9020"
feTargetThriftAddress = "127.0.0.1:9020"
feSyncerUser = "root"
feSyncerPassword = ""
feHttpAddress = "172.19.0.2:8131"
feHttpUser = "root"
feHttpPassword = ""
// set DORIS_HOME by system properties
// e.g. java -DDORIS_HOME=./
suitePath = "${DORIS_HOME}/regression-test/suites"
dataPath = "${DORIS_HOME}/regression-test/data"
pluginPath = "${DORIS_HOME}/regression-test/plugins"
realDataPath = "${DORIS_HOME}/regression-test/realdata"
// sf1DataPath can be url like "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com" or local path like "/data"
//sf1DataPath = "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com"
// will test <group>/<suite>.groovy
// empty group will test all groups
testGroups = ""
// empty suite will test all suites
testSuites = ""
// empty directories will test all directories
testDirectories = ""
// these groups will not be executed
excludeGroups = ""
// these suites will not be executed
excludeSuites = "test_sql_block_rule,test_profile,test_spark_load,test_refresh_mtmv,test_bitmap_filter,test_jdbc_query_mysql"
// these directories will not be executed
excludeDirectories = "workload_manager_p1,fault_injection_p0"
customConf1 = "test_custom_conf_value"
// for test csv with header
enableHdfs=false // set to true if hdfs is ready
hdfsFs = "hdfs://127.0.0.1:9000"
hdfsUser = "doris-test"
hdfsPasswd = ""
brokerName = "broker_name"
// broker load test config
enableBrokerLoad=true
// jdbc connector test config
// To enable the jdbc test, first start the mysql/pg containers.
// See `docker/thirdparties/start-thirdparties-docker.sh`
enableJdbcTest=false
mysql_57_port=7111
pg_14_port=7121
mariadb_10_port=3326
// hive catalog test config
// To enable the hive test, first start the hive container.
// See `docker/thirdparties/start-thirdparties-docker.sh`
enableHiveTest=false
hms_port=7141
hiveServerPort=10000
// kafka test config
// To enable the kafka test, first start the kafka container.
// See `docker/thirdparties/start-thirdparties-docker.sh`
enableKafkaTest=true
kafka_port=19193
// iceberg test config
iceberg_rest_uri_port=18181
enableEsTest=false
es_6_port=19200
es_7_port=29200
es_8_port=39200
cacheDataPath = "/data/regression/"
s3Endpoint = "cos.ap-hongkong.myqcloud.com"
s3BucketName = "doris-build-hk-1308700295"
s3Region = "ap-hongkong"
max_failure_num=50
externalEnvIp="127.0.0.1"

View File

@ -0,0 +1,132 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Build Step: Command Line
: <<EOF
#!/bin/bash
teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
if [[ -f "${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/deploy.sh ]]; then
cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
bash -x deploy.sh
else
echo "Build Step file missing: regression-test/pipeline/tpch/tpch-sf100/deploy.sh" && exit 1
fi
EOF
## deploy.sh content ##
# download_oss_file
source ../../common/oss-utils.sh
# start_doris_fe, get_doris_conf_value, start_doris_be, stop_doris,
# print_doris_fe_log, print_doris_be_log, archive_doris_logs
source ../../common/doris-utils.sh
echo "#### Check env"
if [[ -z "${teamcity_build_checkoutDir}" ||
-z "${pull_request_id}" ||
-z "${commit_id}" ]]; then
echo "ERROR: env teamcity_build_checkoutDir or pull_request_id or commit_id not set"
exit 1
fi
if ${DEBUG:-false}; then
pull_request_id="26344"
commit_id="97ee15f75e88f5af6de308d948361eaa7c261602"
fi
echo "#### Deploy Doris ####"
DORIS_HOME="${teamcity_build_checkoutDir}/output"
export DORIS_HOME
exit_flag=0
need_backup_doris_logs=false
echo "#### 1. try to kill old doris process and remove old doris binary"
stop_doris && rm -rf output
echo "#### 2. download doris binary tar ball"
cd "${teamcity_build_checkoutDir}" || exit 1
if download_oss_file "${pull_request_id:-}_${commit_id:-}.tar.gz"; then
if ! command -v pigz >/dev/null; then sudo apt install -y pigz; fi
tar -I pigz -xf "${pull_request_id:-}_${commit_id:-}.tar.gz"
if [[ -d output && -d output/fe && -d output/be ]]; then
echo "INFO: be version: $(./output/be/lib/doris_be --version)"
rm -rf "${pull_request_id}_${commit_id}.tar.gz"
fi
else
echo "ERROR: download compiled binary failed" && exit 1
fi
echo "#### 3. copy conf from regression-test/pipeline/tpch/tpch-sf100/conf/"
rm -f "${DORIS_HOME}"/fe/conf/fe_custom.conf "${DORIS_HOME}"/be/conf/be_custom.conf
if [[ -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf &&
-f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf ]]; then
cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf "${DORIS_HOME}"/fe/conf/
cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf "${DORIS_HOME}"/be/conf/
else
echo "ERROR: doris conf file missing in ${teamcity_build_checkoutDir}/regression-test/pipeline/tpch/tpch-sf100/conf/"
exit 1
fi
echo "#### 4. start Doris"
meta_dir=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf meta_dir)
storage_root_path=$(get_doris_conf_value "${DORIS_HOME}"/be/conf/be.conf storage_root_path)
mkdir -p "${meta_dir}"
mkdir -p "${storage_root_path}"
if ! start_doris_fe; then
echo "WARNING: Start doris fe failed at first time"
print_doris_fe_log
echo "WARNING: delete meta_dir and storage_root_path, then retry"
rm -rf "${meta_dir:?}/"*
rm -rf "${storage_root_path:?}/"*
if ! start_doris_fe; then
need_backup_doris_logs=true
exit_flag=1
fi
fi
if ! start_doris_be; then
echo "WARNING: Start doris be failed at first time"
print_doris_be_log
echo "WARNING: delete storage_root_path, then retry"
rm -rf "${storage_root_path:?}/"*
if ! start_doris_be; then
need_backup_doris_logs=true
exit_flag=1
fi
fi
if ! add_doris_be_to_fe; then
need_backup_doris_logs=true
exit_flag=1
else
# wait 10s for doris to be fully started; otherwise we may encounter the error below:
# ERROR 1105 (HY000) at line 102: errCode = 2, detailMessage = Failed to find enough backend, please check the replication num,replication tag and storage medium.
sleep 10s
fi
echo "#### 5. set session variables"
echo "TODO"
echo "#### 6. check if need backup doris logs"
if ${need_backup_doris_logs}; then
print_doris_fe_log
print_doris_be_log
if archive_doris_logs "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"; then
upload_doris_log_to_oss "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"
fi
fi
exit "${exit_flag}"

View File

@ -0,0 +1,71 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Build Step: Command Line
: <<EOF
#!/bin/bash
teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
if [[ -f "${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/prepare.sh ]]; then
cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
bash -x prepare.sh
else
echo "Build Step file missing: regression-test/pipeline/tpch/tpch-sf100/prepare.sh" && exit 1
fi
EOF
## prepare.sh content ##
echo "#### Check env"
if [[ -z "${commit_id_from_trigger}" || -z ${commit_id:-} || -z ${pull_request_id:-} ]]; then
echo "ERROR: env commit_id_from_trigger or commit_id or pull_request_id not set" && exit 1
else
commit_id_from_checkout=${commit_id}
fi
if ${DEBUG:-false}; then commit_id_from_trigger=${commit_id}; fi
echo "Prepare to run tpch sf100 test"
echo "#### 1. check if need run"
if [[ "${commit_id_from_trigger}" != "${commit_id_from_checkout}" ]]; then
echo -e "目前是在 clickbench 流水线 compile 完后触发本 tpch 流水线的,
有可能 pr 在 clickbench 流水线还在跑的时候新提交了commit,
这时候 tpch 流水线 checkout 出来的 commit 就不是触发时的传过来的 commit了,
这种情况不需要跑"
echo -e "ERROR: PR(${pull_request_id}),
the lastest commit id
${commit_id_from_checkout}
not equail to the commit_id_from_trigger
${commit_id_from_trigger}
commit_id_from_trigger is outdate"
exit 1
fi
echo "#### 2. check if depending files exist"
if ! [[ -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/fe.conf &&
-f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/conf/be.conf &&
-f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/deploy.sh &&
-f "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/run.sh &&
-f "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh &&
-f "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh &&
-f "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh &&
-f "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/load-tpch-data.sh &&
-f "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/create-tpch-tables.sh &&
-f "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh ]]; then
echo "ERROR: depending files missing" && exit 1
fi

View File

@ -0,0 +1,143 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Build Step: Command Line
: <<EOF
#!/bin/bash
teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
if [[ -f "${teamcity_build_checkoutDir:-}"/regression-test/pipeline/tpch/tpch-sf100/run.sh ]]; then
cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/tpch/tpch-sf100/
bash -x run.sh
else
echo "Build Step file missing: regression-test/pipeline/tpch/tpch-sf100/run.sh" && exit 1
fi
EOF
## run.sh content ##
# check_tpch_table_rows, stop_doris, set_session_variable
source ../../common/doris-utils.sh
# create_an_issue_comment
source ../../common/github-utils.sh
# upload_doris_log_to_oss
source ../../common/oss-utils.sh
echo "#### Check env"
if [[ -z "${teamcity_build_checkoutDir}" ||
-z "${pull_request_id}" ||
-z "${commit_id}" ]]; then
echo "ERROR: env teamcity_build_checkoutDir or pull_request_id or commit_id not set"
exit 1
fi
echo "#### Run tpch-sf100 test on Doris ####"
DORIS_HOME="${teamcity_build_checkoutDir}/output"
exit_flag=0
(
set -e
shopt -s inherit_errexit
echo "#### 1. check if need to load data"
SF="100" # SCALE FACTOR
if ${DEBUG:-false}; then
SF="100"
fi
TPCH_DATA_DIR="/data/tpch/sf_${SF}" # no / at the end
TPCH_DATA_DIR_LINK="${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/tpch-data # no / at the end
db_name="tpch_sf${SF}"
sed -i "s|^export DB=.*$|export DB='${db_name}'|g" \
"${teamcity_build_checkoutDir}"/tools/tpch-tools/conf/doris-cluster.conf
if ! check_tpch_table_rows "${db_name}" "${SF}"; then
echo "INFO: need to load tpch-sf${SF} data"
# prepare data
mkdir -p "${TPCH_DATA_DIR}"
(
cd "${TPCH_DATA_DIR}" || exit 1
declare -A table_file_count
table_file_count=(['region']=1 ['nation']=1 ['supplier']=1 ['customer']=1 ['part']=1 ['partsupp']=10 ['orders']=10 ['lineitem']=10)
for table_name in ${!table_file_count[*]}; do
if [[ ${table_file_count[${table_name}]} -eq 1 ]]; then
url="https://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/tpch/sf${SF}/${table_name}.tbl"
if ! wget --continue -t3 -q "${url}"; then echo "ERROR: wget --continue ${url}" && exit 1; fi
elif [[ ${table_file_count[${table_name}]} -eq 10 ]]; then
(
for i in {1..10}; do
url="https://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/tpch/sf${SF}/${table_name}.tbl.${i}"
if ! wget --continue -t3 -q "${url}"; then echo "ERROR: wget --continue ${url}" && exit 1; fi
done
) &
wait
fi
done
)
# create table and load data
sed -i "s|^SCALE_FACTOR=[0-9]\+$|SCALE_FACTOR=${SF}|g" "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/create-tpch-tables.sh
bash "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/create-tpch-tables.sh
rm -rf "${TPCH_DATA_DIR_LINK}"
ln -s "${TPCH_DATA_DIR}" "${TPCH_DATA_DIR_LINK}"
bash "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/load-tpch-data.sh -c 10
if ! check_tpch_table_rows "${db_name}" "${SF}"; then
exit 1
fi
echo "INFO: sleep 10min to wait compaction done" && sleep 10m
data_reload="true"
fi
echo "#### 2. run tpch-sf${SF} query"
set_session_variable runtime_filter_mode global
sed -i "s|^SCALE_FACTOR=[0-9]\+$|SCALE_FACTOR=${SF}|g" "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh
bash "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh | tee "${teamcity_build_checkoutDir}"/run-tpch-queries.log
if ! grep '^Total hot run time' "${teamcity_build_checkoutDir}"/run-tpch-queries.log >/dev/null; then exit 1; fi
line_end=$(sed -n '/^Total hot run time/=' "${teamcity_build_checkoutDir}"/run-tpch-queries.log)
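# the 24 lines ending at 'Total hot run time' are assumed to be the 22 query timings plus the cold and hot totals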
line_begin=$((line_end - 23))
comment_body="Tpch sf${SF} test result on commit ${commit_id:-}, data reload: ${data_reload:-"false"}
run tpch-sf${SF} query with default conf and session variables
$(sed -n "${line_begin},${line_end}p" "${teamcity_build_checkoutDir}"/run-tpch-queries.log)"
echo "#### 3. run tpch-sf${SF} query with runtime_filter_mode=off"
set_session_variable runtime_filter_mode off
bash "${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh | tee "${teamcity_build_checkoutDir}"/run-tpch-queries.log
if ! grep '^Total hot run time' "${teamcity_build_checkoutDir}"/run-tpch-queries.log >/dev/null; then exit 1; fi
line_end=$(sed -n '/^Total hot run time/=' "${teamcity_build_checkoutDir}"/run-tpch-queries.log)
line_begin=$((line_end - 23))
comment_body="${comment_body}
run tpch-sf${SF} query with default conf and set session variable runtime_filter_mode=off
$(sed -n "${line_begin},${line_end}p" "${teamcity_build_checkoutDir}"/run-tpch-queries.log)"
echo "#### 4. comment result on tpch"
comment_body=$(echo "${comment_body}" | sed -e ':a;N;$!ba;s/\t/\\t/g;s/\n/\\n/g') # escape tabs as \t and newlines as \n
create_an_issue_comment_tpch "${pull_request_id:-}" "${comment_body}"
stop_doris
)
exit_flag="$?"
echo "#### 5. check if need backup doris logs"
if [[ ${exit_flag} != "0" ]]; then
print_doris_fe_log
print_doris_be_log
if archive_doris_logs "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"; then
upload_doris_log_to_oss "${DORIS_HOME}/${pull_request_id}_${commit_id}_doris_logs.tar.gz"
fi
fi
exit "${exit_flag}"

View File

@ -106,7 +106,7 @@ echo "SF: ${SCALE_FACTOR}"
mysql -h"${FE_HOST}" -u"${USER}" -P"${FE_QUERY_PORT}" -e "CREATE DATABASE IF NOT EXISTS ${DB}"
if [[ ${SCALE_FACTOR} -eq 1 ]]; then
echo "Run SQLs from ${CURDIR}/../ddl/create-tpch-tables.sql"
echo "Run SQLs from ${CURDIR}/../ddl/create-tpch-tables-sf1.sql"
mysql -h"${FE_HOST}" -u"${USER}" -P"${FE_QUERY_PORT}" -D"${DB}" <"${CURDIR}"/../ddl/create-tpch-tables-sf1.sql
elif [[ ${SCALE_FACTOR} -eq 100 ]]; then
echo "Run SQLs from ${CURDIR}/../ddl/create-tpch-tables-sf100.sql"

View File

@ -116,49 +116,49 @@ echo "DB: ${DB}"
function load_region() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: r_regionkey, r_name, r_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/region/_stream_load
}
function load_nation() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: n_nationkey, n_name, n_regionkey, n_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/nation/_stream_load
}
function load_supplier() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: s_suppkey, s_name, s_address, s_nationkey, s_phone, s_acctbal, s_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/supplier/_stream_load
}
function load_customer() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: c_custkey, c_name, c_address, c_nationkey, c_phone, c_acctbal, c_mktsegment, c_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/customer/_stream_load
}
function load_part() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: p_partkey, p_name, p_mfgr, p_brand, p_type, p_size, p_container, p_retailprice, p_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/part/_stream_load
}
function load_partsupp() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: ps_partkey, ps_suppkey, ps_availqty, ps_supplycost, ps_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/partsupp/_stream_load
}
function load_orders() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: o_orderkey, o_custkey, o_orderstatus, o_totalprice, o_orderdate, o_orderpriority, o_clerk, o_shippriority, o_comment, temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/orders/_stream_load
}
function load_lineitem() {
echo "$*"
curl --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
curl -s --location-trusted -u "${USER}":"${PASSWORD}" -H "column_separator:|" \
-H "columns: l_orderkey, l_partkey, l_suppkey, l_linenumber, l_quantity, l_extendedprice, l_discount, l_tax, l_returnflag,l_linestatus, l_shipdate,l_commitdate,l_receiptdate,l_shipinstruct,l_shipmode,l_comment,temp" \
-T "$*" http://"${FE_HOST}":"${FE_HTTP_PORT}"/api/"${DB}"/lineitem/_stream_load
}

View File

@ -180,5 +180,6 @@ for i in ${query_array[@]}; do
done
echo "Total cold run time: ${cold_run_sum} ms"
# the tpch pipeline depends on this 'Total hot run time' string
echo "Total hot run time: ${best_hot_run_sum} ms"
echo 'Finish tpch queries.'