[enhance](regression case) add external group mark 0727 (#22287)

* add external group mark 0727

* add external pipeline regression conf 0727

* update pipeline regression config 0727

* open es config from docker 0727
This commit is contained in:
zhangguoqiang
2023-07-28 17:11:19 +08:00
committed by GitHub
parent ef218d79da
commit 3eeca7ee55
24 changed files with 331 additions and 19 deletions

View File

@ -59,7 +59,7 @@ jobs:
if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "buildall" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then
trigger_pipelines="Doris_Doris_FeUt Doris_DorisBeUt_BeUt Doris_DorisCompile_Compile Doris_Performance_Clickbench_ClickbenchNew Doris_ArmPipeline_P0Regression ${trigger_pipelines}"
fi
if [[ "${comment_message}" =~ "run" && ( "${comment_message}" =~ "p0" || "${comment_message}" =~ "external" ) && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then
if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "p0" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then
trigger_pipelines="Doris_DorisRegression_P0Regression ${trigger_pipelines}"
fi
if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "nereids_p0" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then
@ -83,6 +83,9 @@ jobs:
if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "arm" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then
trigger_pipelines="Doris_ArmPipeline_P0Regression ${trigger_pipelines}"
fi
if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "external" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then
trigger_pipelines="Doris_External_Regression ${trigger_pipelines}"
fi
if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "just_for_test" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then
trigger_pipelines="Doris_DorisRegression_ExternalRegression ${trigger_pipelines}"
fi

View File

@ -92,7 +92,7 @@ else
done
if [[ "${COMPONENTS}"x == ""x ]]; then
if [[ "${STOP}" -eq 1 ]]; then
COMPONENTS="mysql,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino"
COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino"
fi
fi
fi

View File

@ -0,0 +1,71 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
PPROF_TMPDIR="$DORIS_HOME/log/"
# INFO, WARNING, ERROR, FATAL
sys_log_level = INFO
# ports for admin, web, heartbeat service
be_port = 9162
webserver_port = 8142
heartbeat_service_port = 9152
brpc_port = 8162
mem_limit = 50%
disable_minidump = true
path_gc_check_interval_second=1
max_garbage_sweep_interval=180
rowbatch_align_tuple_offset=true
buffer_pool_limit = 2%
storage_page_cache_limit = 0%
disable_storage_page_cache = true
chunk_reserved_bytes_limit = 134217728
# Choose one if there are more than one ip except loopback address.
# Note that there should be at most one ip matching this list.
# If no ip match this rule, will choose one randomly.
# use CIDR format, e.g. 10.10.10.0/24
# Default value is empty.
# priority_networks = 10.10.10.0/24;192.168.0.0/16
priority_networks=172.19.0.0/24
# data root path, separate by ';'
# you can specify the storage medium of each root path, HDD or SSD
# you can add capacity limit at the end of each root path, separated by ','
# eg:
# /home/disk2/doris, capacity limit is disk capacity, HDD(default)
#
# you also can specify the properties by setting '<property>:<value>', separated by ','
# property 'medium' has a higher priority than the extension of path
#
# Default value is ${DORIS_HOME}/storage, you should create it by hand.
# Advanced configurations
# sys_log_dir = ${DORIS_HOME}/log
# sys_log_roll_mode = SIZE-MB-1024
# sys_log_roll_num = 10
# sys_log_verbose_modules = *
# log_buffer_level = -1
# palo_cgroups
disable_auto_compaction=true
tablet_map_shard_size=256
fragment_pool_thread_num_max=5000
enable_fuzzy_mode=true
enable_set_in_bitmap_value=true
enable_feature_binlog=true

View File

@ -0,0 +1,89 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#####################################################################
## The uppercase properties are read and exported by bin/start_fe.sh.
## To see all Frontend configurations,
## see fe/src/org/apache/doris/common/Config.java
#####################################################################
# the output dir of stderr and stdout
LOG_DIR = ${DORIS_HOME}/log
DATE = `date +%Y%m%d-%H%M%S`
JAVA_OPTS="-Xmx4096m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$DORIS_HOME/log/fe.jmap -XX:+UseMembar -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+PrintGCDateStamps -XX:+PrintGCDetails -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xloggc:$DORIS_HOME/log/fe.gc.log.$DATE"
# For jdk 9+, this JAVA_OPTS will be used as default JVM options
JAVA_OPTS_FOR_JDK_9="-Xmx4096m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$DORIS_HOME/log/fe.jmap -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xlog:gc*:$DORIS_HOME/log/fe.gc.log.$DATE:time"
##
## the lowercase properties are read by main program.
##
# INFO, WARN, ERROR, FATAL
sys_log_level = INFO
# store metadata, must be created before start FE.
# Default value is ${DORIS_HOME}/doris-meta
# meta_dir = ${DORIS_HOME}/doris-meta
disable_decimalv2 = false
disable_datev1 = false
catalog_trash_expire_second=1
# Choose one if there are more than one ip except loopback address.
# Note that there should be at most one ip matching this list.
# If no ip match this rule, will choose one randomly.
# use CIDR format, e.g. 10.10.10.0/24
# Default value is empty.
# priority_networks = 10.10.10.0/24;192.168.0.0/16
priority_networks=172.19.0.0/24
# Advanced configurations
# log_roll_size_mb = 1024
# sys_log_dir = ${DORIS_HOME}/log
# sys_log_roll_num = 10
# sys_log_verbose_modules = org.apache.doris
# audit_log_dir = ${DORIS_HOME}/log
# audit_log_modules = slow_query, query
# audit_log_roll_num = 10
# meta_delay_toleration_second = 10
# qe_max_connection = 1024
# qe_query_timeout_second = 300
# qe_slow_log_ms = 5000
#
#enable ssl for test
enable_ssl = true
enable_outfile_to_local = true
tablet_create_timeout_second=100
remote_fragment_exec_timeout_ms=60000
fuzzy_test_type=p1
use_fuzzy_session_variable=true
# enable mtmv
enable_mtmv = true
# enable auto collect statistics
enable_auto_collect_statistics=true
auto_check_statistics_in_sec=60
dynamic_partition_check_interval_seconds=3
enable_feature_binlog=true
auth_token = 5ff161c3-2c08-4079-b108-26c8850b6598

View File

@ -0,0 +1,43 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Example driver definitions, you should not use the example odbc driver
# before you prepare env in your server
# Driver from the postgresql-odbc package
# Setup from the unixODBC package
[PostgreSQL]
Description = ODBC for PostgreSQL
Driver = /usr/lib/psqlodbc.so
Setup = /usr/lib/libodbcpsqlS.so
FileUsage = 1
# Driver from the mysql-connector-odbc package
# Setup from the unixODBC package
[MySQL ODBC 8.0 Unicode Driver]
Description = ODBC for MySQL
Driver = /usr/lib64/libmyodbc8w.so
FileUsage = 1
# Driver from the oracle-connector-odbc package
# Setup from the unixODBC package
[Oracle 19 ODBC driver]
Description=Oracle ODBC driver for Oracle 19
Driver=/usr/lib/libsqora.so.19.1

View File

@ -0,0 +1,101 @@
package pipeline.external.conf
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/* ******* Do not commit this file unless you know what you are doing ******* */
// **Note**: default db will be create if not exist
defaultDb = "regression_test"
jdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
targetJdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
jdbcUser = "root"
jdbcPassword = ""
feSourceThriftAddress = "127.0.0.1:9020"
feTargetThriftAddress = "127.0.0.1:9020"
feSyncerUser = "root"
feSyncerPassword = ""
feHttpAddress = "172.19.0.2:8131"
feHttpUser = "root"
feHttpPassword = ""
// set DORIS_HOME by system properties
// e.g. java -DDORIS_HOME=./
suitePath = "${DORIS_HOME}/regression-test/suites"
dataPath = "${DORIS_HOME}/regression-test/data"
pluginPath = "${DORIS_HOME}/regression-test/plugins"
realDataPath = "${DORIS_HOME}/regression-test/realdata"
// sf1DataPath can be url like "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com" or local path like "/data"
//sf1DataPath = "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com"
// will test <group>/<suite>.groovy
// empty group will test all group
testGroups = ""
// empty suite will test all suite
testSuites = ""
// empty directories will test all directories
testDirectories = ""
// this groups will not be executed
excludeGroups = ""
// this suites will not be executed
excludeSuites = "test_cast_string_to_array,test_broker_load,test_spark_load,test_analyze_stats_p1,test_refresh_mtmv"
// this directories will not be executed
excludeDirectories = ""
customConf1 = "test_custom_conf_value"
// for test csv with header
enableHdfs=false // set to true if hdfs is ready
hdfsFs = "hdfs://127.0.0.1:9000"
hdfsUser = "doris-test"
hdfsPasswd = ""
brokerName = "broker_name"
// broker load test config
enableBrokerLoad=true
// jdbc connector test config
// To enable the jdbc tests, you need to first start the mysql/pg containers.
// See `docker/thirdparties/start-thirdparties-docker.sh`
enableJdbcTest=true
mysql_57_port=3316
pg_14_port=7121
oracle_11_port=1521
sqlserver_2022_port=1433
clickhouse_22_port=8123
// hive catalog test config
// To enable the hive tests, you need to first start the hive container.
// See `docker/thirdparties/start-thirdparties-docker.sh`
enableHiveTest=true
hms_port=7141
enableEsTest=true
es_6_port=19200
es_7_port=29200
es_8_port=39200
cacheDataPath = "/data/regression/"
s3Endpoint = "cos.ap-hongkong.myqcloud.com"
s3BucketName = "doris-build-hk-1308700295"
s3Region = "ap-hongkong"
max_failure_num=50

View File

@ -83,6 +83,11 @@ pg_14_port=7121
enableHiveTest=false
hms_port=7141
enableEsTest=false
es_6_port=19200
es_7_port=29200
es_8_port=39200
cacheDataPath = "/data/regression/"
s3Endpoint = "cos.ap-hongkong.myqcloud.com"

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hdfs_tvf") {
suite("test_hdfs_tvf","external,hive") {
String hdfs_port = context.config.otherConfigs.get("hdfs_port")
// It's okay to use random `hdfsUser`, but can not be empty.
def hdfsUserName = "doris"

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_es_query", "p0") {
suite("test_es_query", "p0,external,es") {
String enabled = context.config.otherConfigs.get("enableEsTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hive_orc", "all_types") {
suite("test_hive_orc", "all_types,external,hive") {
// Ensure that all types are parsed correctly
def select_top50 = {
qt_select_top50 """select * from orc_all_types order by int_col desc limit 50;"""

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hive_other", "p0") {
suite("test_hive_other", "p0,external,hive") {
def q01 = {
qt_q24 """ select name, count(1) as c from student group by name order by c desc;"""

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hive_parquet", "p0") {
suite("test_hive_parquet", "p0,external,hive") {
def q01 = {
qt_q01 """
select * from partition_table order by l_orderkey, l_partkey, l_suppkey;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hive_partitions", "p0") {
suite("test_hive_partitions", "p0,external,hive") {
def q01 = {
qt_q01 """
select id, data from table_with_pars where dt_par = '2023-02-01' order by id;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hive_schema_evolution", "p0") {
suite("test_hive_schema_evolution", "p0,external,hive") {
def q_text = {
qt_q01 """
select * from schema_evo_test_text order by id;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_mysql_jdbc_catalog", "p0") {
suite("test_mysql_jdbc_catalog", "p0,external,mysql") {
qt_sql """select current_catalog()"""
String enabled = context.config.otherConfigs.get("enableJdbcTest")

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_pg_jdbc_catalog", "p0") {
suite("test_pg_jdbc_catalog", "p0,external,pg") {
String enabled = context.config.otherConfigs.get("enableJdbcTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String catalog_name = "pg_jdbc_catalog";

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_jdbc_query_mysql", "p0") {
suite("test_jdbc_query_mysql", "p0,external,mysql") {
String enabled = context.config.otherConfigs.get("enableJdbcTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {

View File

@ -17,7 +17,7 @@
import java.nio.charset.Charset;
suite("test_jdbc_query_pg", "p0") {
suite("test_jdbc_query_pg", "p0,external,pg") {
String enabled = context.config.otherConfigs.get("enableJdbcTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hdfs_json_load", "p0") {
suite("test_hdfs_json_load", "p0,external,hive") {
// define a sql table
def testTable = "test_hdfs_json_load"

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_load_with_decimal", "p0") {
suite("test_load_with_decimal", "p0,external,hive") {
def tableName = "test_load_with_decimal"
sql """ DROP TABLE IF EXISTS ${tableName} """

View File

@ -16,7 +16,7 @@
// under the License.
suite("test_show_where", "query") {
suite("test_show_where", "query,external,mysql") {
sql "SET enable_nereids_planner=true"
sql "SET enable_fallback_to_original_planner=false"
String ex_db_name = "doris_test";

View File

@ -16,7 +16,7 @@
// under the License.
suite("test_show_where", "query") {
suite("test_show_where", "query,external,mysql") {
String ex_db_name = "doris_test";
String ex_tb0 = "ex_tb0";
String ex_tb1 = "ex_tb1";

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_catalog_hive_orc", "p0") {
suite("test_catalog_hive_orc", "p0,external,hive") {
def q01 = {
sql """set exec_mem_limit=8589934592"""

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
suite("test_catalog_hive_parquet", "p0") {
suite("test_catalog_hive_parquet", "p0,external,hive") {
def q01 = {
sql """set exec_mem_limit=8589934592"""