From 3eeca7ee55c00db7594f1d0db4cde11c4fed2d83 Mon Sep 17 00:00:00 2001 From: zhangguoqiang <18372634969@163.com> Date: Fri, 28 Jul 2023 17:11:19 +0800 Subject: [PATCH] [enhance](regresstion case)add external group mark 0727 (#22287) * add external group mark 0727 * add external pipeline regression conf 0727 * update pipeline regression config 0727 * open es config from docker 0727 --- .github/workflows/auto_trigger_teamcity.yml | 5 +- .../thirdparties/run-thirdparties-docker.sh | 2 +- .../pipeline/external/conf/be.conf | 71 ++++++++++++ .../pipeline/external/conf/fe.conf | 89 +++++++++++++++ .../pipeline/external/conf/odbcinst.ini | 43 ++++++++ .../external/conf/regression-conf.groovy | 101 ++++++++++++++++++ .../pipeline/p0/conf/regression-conf.groovy | 5 + .../test_hdfs_tvf.groovy | 2 +- .../suites/es_p0/test_es_query.groovy | 2 +- .../hive/test_hive_orc.groovy | 2 +- .../hive/test_hive_other.groovy | 2 +- .../hive/test_hive_parquet.groovy | 2 +- .../hive/test_hive_partitions.groovy | 2 +- .../hive/test_hive_schema_evolution.groovy | 2 +- .../test_mysql_jdbc_catalog.groovy | 2 +- .../test_pg_jdbc_catalog.groovy | 2 +- .../jdbc_p0/test_jdbc_query_mysql.groovy | 2 +- .../suites/jdbc_p0/test_jdbc_query_pg.groovy | 2 +- .../stream_load/test_hdfs_json_load.groovy | 2 +- .../stream_load/test_load_with_decimal.groovy | 2 +- .../nereids_p0/show/test_show_where.groovy | 2 +- .../query_p0/show/test_show_where.groovy | 2 +- .../hive_catalog_orc.groovy | 2 +- .../hive_catalog_parquet.groovy | 2 +- 24 files changed, 331 insertions(+), 19 deletions(-) create mode 100644 regression-test/pipeline/external/conf/be.conf create mode 100644 regression-test/pipeline/external/conf/fe.conf create mode 100644 regression-test/pipeline/external/conf/odbcinst.ini create mode 100644 regression-test/pipeline/external/conf/regression-conf.groovy diff --git a/.github/workflows/auto_trigger_teamcity.yml b/.github/workflows/auto_trigger_teamcity.yml index 4ffd242c57..5a2a561880 100644 --- a/.github/workflows/auto_trigger_teamcity.yml +++ b/.github/workflows/auto_trigger_teamcity.yml @@ -59,7 +59,7 @@ jobs: if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "buildall" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then trigger_pipelines="Doris_Doris_FeUt Doris_DorisBeUt_BeUt Doris_DorisCompile_Compile Doris_Performance_Clickbench_ClickbenchNew Doris_ArmPipeline_P0Regression ${trigger_pipelines}" fi - if [[ "${comment_message}" =~ "run" && ( "${comment_message}" =~ "p0" || "${comment_message}" =~ "external" ) && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then + if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "p0" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then trigger_pipelines="Doris_DorisRegression_P0Regression ${trigger_pipelines}" fi if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "nereids_p0" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then @@ -83,6 +83,9 @@ jobs: if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "arm" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then trigger_pipelines="Doris_ArmPipeline_P0Regression ${trigger_pipelines}" fi + if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "external" && ! "${comment_message}" =~ "Thanks for your contribution" ]]; then + trigger_pipelines="Doris_External_Regression ${trigger_pipelines}" + fi if [[ "${comment_message}" =~ "run" && "${comment_message}" =~ "just_for_test" && ! 
"${comment_message}" =~ "Thanks for your contribution" ]]; then trigger_pipelines="Doris_DorisRegression_ExternalRegression ${trigger_pipelines}" fi diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh index 5d0821da18..ebdb1209d0 100755 --- a/docker/thirdparties/run-thirdparties-docker.sh +++ b/docker/thirdparties/run-thirdparties-docker.sh @@ -92,7 +92,7 @@ else done if [[ "${COMPONENTS}"x == ""x ]]; then if [[ "${STOP}" -eq 1 ]]; then - COMPONENTS="mysql,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino" + COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino" fi fi fi diff --git a/regression-test/pipeline/external/conf/be.conf b/regression-test/pipeline/external/conf/be.conf new file mode 100644 index 0000000000..730cfaba3d --- /dev/null +++ b/regression-test/pipeline/external/conf/be.conf @@ -0,0 +1,71 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +PPROF_TMPDIR="$DORIS_HOME/log/" + +# INFO, WARNING, ERROR, FATAL +sys_log_level = INFO + +# ports for admin, web, heartbeat service +be_port = 9162 +webserver_port = 8142 +heartbeat_service_port = 9152 +brpc_port = 8162 + +mem_limit = 50% +disable_minidump = true +path_gc_check_interval_second=1 +max_garbage_sweep_interval=180 +rowbatch_align_tuple_offset=true + +buffer_pool_limit = 2% +storage_page_cache_limit = 0% +disable_storage_page_cache = true +chunk_reserved_bytes_limit = 134217728 +# Choose one if there are more than one ip except loopback address. +# Note that there should at most one ip match this list. +# If no ip match this rule, will choose one randomly. +# use CIDR format, e.g. 10.10.10.0/24 +# Default value is empty. +# priority_networks = 10.10.10.0/24;192.168.0.0/16 +priority_networks=172.19.0.0/24 + +# data root path, separate by ';' +# you can specify the storage medium of each root path, HDD or SSD +# you can add capacity limit at the end of each root path, seperate by ',' +# eg: +# /home/disk2/doris, capacity limit is disk capacity, HDD(default) +# +# you also can specify the properties by setting ':', seperate by ',' +# property 'medium' has a higher priority than the extension of path +# +# Default value is ${DORIS_HOME}/storage, you should create it by hand. 
+ +# Advanced configurations +# sys_log_dir = ${DORIS_HOME}/log +# sys_log_roll_mode = SIZE-MB-1024 +# sys_log_roll_num = 10 +# sys_log_verbose_modules = * +# log_buffer_level = -1 +# palo_cgroups + +disable_auto_compaction=true +tablet_map_shard_size=256 +fragment_pool_thread_num_max=5000 +enable_fuzzy_mode=true +enable_set_in_bitmap_value=true +enable_feature_binlog=true diff --git a/regression-test/pipeline/external/conf/fe.conf b/regression-test/pipeline/external/conf/fe.conf new file mode 100644 index 0000000000..adc042357c --- /dev/null +++ b/regression-test/pipeline/external/conf/fe.conf @@ -0,0 +1,89 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +##################################################################### +## The uppercase properties are read and exported by bin/start_fe.sh. +## To see all Frontend configurations, +## see fe/src/org/apache/doris/common/Config.java +##################################################################### + +# the output dir of stderr and stdout +LOG_DIR = ${DORIS_HOME}/log + +DATE = `date +%Y%m%d-%H%M%S` +JAVA_OPTS="-Xmx4096m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$DORIS_HOME/log/fe.jmap -XX:+UseMembar -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+PrintGCDateStamps -XX:+PrintGCDetails -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xloggc:$DORIS_HOME/log/fe.gc.log.$DATE" + +# For jdk 9+, this JAVA_OPTS will be used as default JVM options +JAVA_OPTS_FOR_JDK_9="-Xmx4096m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$DORIS_HOME/log/fe.jmap -XX:SurvivorRatio=8 -XX:MaxTenuringThreshold=7 -XX:+CMSClassUnloadingEnabled -XX:-CMSParallelRemarkEnabled -XX:CMSInitiatingOccupancyFraction=80 -XX:SoftRefLRUPolicyMSPerMB=0 -Xlog:gc*:$DORIS_HOME/log/fe.gc.log.$DATE:time" + +## +## the lowercase properties are read by main program. +## + +# INFO, WARN, ERROR, FATAL +sys_log_level = INFO + +# store metadata, must be created before start FE. +# Default value is ${DORIS_HOME}/doris-meta +# meta_dir = ${DORIS_HOME}/doris-meta + +disable_decimalv2 = false +disable_datev1 = false +catalog_trash_expire_second=1 +# Choose one if there are more than one ip except loopback address. +# Note that there should at most one ip match this list. +# If no ip match this rule, will choose one randomly. +# use CIDR format, e.g. 10.10.10.0/24 +# Default value is empty. 
+# priority_networks = 10.10.10.0/24;192.168.0.0/16 +priority_networks=172.19.0.0/24 + +# Advanced configurations +# log_roll_size_mb = 1024 +# sys_log_dir = ${DORIS_HOME}/log +# sys_log_roll_num = 10 +# sys_log_verbose_modules = org.apache.doris +# audit_log_dir = ${DORIS_HOME}/log +# audit_log_modules = slow_query, query +# audit_log_roll_num = 10 +# meta_delay_toleration_second = 10 +# qe_max_connection = 1024 +# qe_query_timeout_second = 300 +# qe_slow_log_ms = 5000 +# + +#enable ssl for test +enable_ssl = true + +enable_outfile_to_local = true +tablet_create_timeout_second=100 +remote_fragment_exec_timeout_ms=60000 +fuzzy_test_type=p1 +use_fuzzy_session_variable=true + +# enable mtmv +enable_mtmv = true + +# enable auto collect statistics +enable_auto_collect_statistics=true +auto_check_statistics_in_sec=60 + +dynamic_partition_check_interval_seconds=3 + +enable_feature_binlog=true + +auth_token = 5ff161c3-2c08-4079-b108-26c8850b6598 diff --git a/regression-test/pipeline/external/conf/odbcinst.ini b/regression-test/pipeline/external/conf/odbcinst.ini new file mode 100644 index 0000000000..41e21f9227 --- /dev/null +++ b/regression-test/pipeline/external/conf/odbcinst.ini @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Example driver definitions, you should not use the example odbc driver +# before you prepare env in your server + +# Driver from the postgresql-odbc package +# Setup from the unixODBC package +[PostgreSQL] +Description = ODBC for PostgreSQL +Driver = /usr/lib/psqlodbc.so +Setup = /usr/lib/libodbcpsqlS.so +FileUsage = 1 + + +# Driver from the mysql-connector-odbc package +# Setup from the unixODBC package +[MySQL ODBC 8.0 Unicode Driver] +Description = ODBC for MySQL +Driver = /usr/lib64/libmyodbc8w.so +FileUsage = 1 + +# Driver from the oracle-connector-odbc package +# Setup from the unixODBC package +[Oracle 19 ODBC driver] +Description=Oracle ODBC driver for Oracle 19 +Driver=/usr/lib/libsqora.so.19.1 + + diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy new file mode 100644 index 0000000000..c6593865e1 --- /dev/null +++ b/regression-test/pipeline/external/conf/regression-conf.groovy @@ -0,0 +1,101 @@ +package pipeline.external.conf +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +/* ******* Do not commit this file unless you know what you are doing ******* */ + +// **Note**: default db will be create if not exist +defaultDb = "regression_test" + +jdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true" +targetJdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true" +jdbcUser = "root" +jdbcPassword = "" + +feSourceThriftAddress = "127.0.0.1:9020" +feTargetThriftAddress = "127.0.0.1:9020" +feSyncerUser = "root" +feSyncerPassword = "" + +feHttpAddress = "172.19.0.2:8131" +feHttpUser = "root" +feHttpPassword = "" + +// set DORIS_HOME by system properties +// e.g. java -DDORIS_HOME=./ +suitePath = "${DORIS_HOME}/regression-test/suites" +dataPath = "${DORIS_HOME}/regression-test/data" +pluginPath = "${DORIS_HOME}/regression-test/plugins" +realDataPath = "${DORIS_HOME}/regression-test/realdata" +// sf1DataPath can be url like "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com" or local path like "/data" +//sf1DataPath = "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com" + +// will test /.groovy +// empty group will test all group +testGroups = "" +// empty suite will test all suite +testSuites = "" +// empty directories will test all directories +testDirectories = "" + +// this groups will not be executed +excludeGroups = "" +// this suites will not be executed +excludeSuites = "test_cast_string_to_array,test_broker_load,test_spark_load,test_analyze_stats_p1,test_refresh_mtmv" +// this directories will not be executed +excludeDirectories = "" + +customConf1 = "test_custom_conf_value" + +// for test csv with header +enableHdfs=false // set to true if hdfs is ready +hdfsFs = "hdfs://127.0.0.1:9000" +hdfsUser = "doris-test" +hdfsPasswd = "" +brokerName = "broker_name" + +// broker load test config +enableBrokerLoad=true + +// jdbc connector test config +// To enable jdbc test, you need first start mysql/pg container. +// See `docker/thirdparties/start-thirdparties-docker.sh` +enableJdbcTest=true +mysql_57_port=3316 +pg_14_port=7121 +oracle_11_port=1521 +sqlserver_2022_port=1433 +clickhouse_22_port=8123 + +// hive catalog test config +// To enable jdbc test, you need first start hive container. 
+// See `docker/thirdparties/start-thirdparties-docker.sh` +enableHiveTest=true +hms_port=7141 + +enableEsTest=true +es_6_port=19200 +es_7_port=29200 +es_8_port=39200 + +cacheDataPath = "/data/regression/" + +s3Endpoint = "cos.ap-hongkong.myqcloud.com" +s3BucketName = "doris-build-hk-1308700295" +s3Region = "ap-hongkong" + +max_failure_num=50 diff --git a/regression-test/pipeline/p0/conf/regression-conf.groovy b/regression-test/pipeline/p0/conf/regression-conf.groovy index 0ea3300d19..59fa53a119 100644 --- a/regression-test/pipeline/p0/conf/regression-conf.groovy +++ b/regression-test/pipeline/p0/conf/regression-conf.groovy @@ -83,6 +83,11 @@ pg_14_port=7121 enableHiveTest=false hms_port=7141 +enableEsTest=false +es_6_port=19200 +es_7_port=29200 +es_8_port=39200 + cacheDataPath = "/data/regression/" s3Endpoint = "cos.ap-hongkong.myqcloud.com" diff --git a/regression-test/suites/correctness_p0/table_valued_function/test_hdfs_tvf.groovy b/regression-test/suites/correctness_p0/table_valued_function/test_hdfs_tvf.groovy index 162af3c722..2ea9b47b5b 100644 --- a/regression-test/suites/correctness_p0/table_valued_function/test_hdfs_tvf.groovy +++ b/regression-test/suites/correctness_p0/table_valued_function/test_hdfs_tvf.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_hdfs_tvf") { +suite("test_hdfs_tvf","external,hive") { String hdfs_port = context.config.otherConfigs.get("hdfs_port") // It's okay to use random `hdfsUser`, but can not be empty. def hdfsUserName = "doris" diff --git a/regression-test/suites/es_p0/test_es_query.groovy b/regression-test/suites/es_p0/test_es_query.groovy index 24b1817732..41bd89b297 100644 --- a/regression-test/suites/es_p0/test_es_query.groovy +++ b/regression-test/suites/es_p0/test_es_query.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_es_query", "p0") { +suite("test_es_query", "p0,external,es") { String enabled = context.config.otherConfigs.get("enableEsTest") if (enabled != null && enabled.equalsIgnoreCase("true")) { diff --git a/regression-test/suites/external_catalog_p0/hive/test_hive_orc.groovy b/regression-test/suites/external_catalog_p0/hive/test_hive_orc.groovy index 35877387f6..ad53436665 100644 --- a/regression-test/suites/external_catalog_p0/hive/test_hive_orc.groovy +++ b/regression-test/suites/external_catalog_p0/hive/test_hive_orc.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_hive_orc", "all_types") { +suite("test_hive_orc", "all_types,external,hive") { // Ensure that all types are parsed correctly def select_top50 = { qt_select_top50 """select * from orc_all_types order by int_col desc limit 50;""" diff --git a/regression-test/suites/external_catalog_p0/hive/test_hive_other.groovy b/regression-test/suites/external_catalog_p0/hive/test_hive_other.groovy index 07435dd1e5..f87ff337b2 100644 --- a/regression-test/suites/external_catalog_p0/hive/test_hive_other.groovy +++ b/regression-test/suites/external_catalog_p0/hive/test_hive_other.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-suite("test_hive_other", "p0") { +suite("test_hive_other", "p0,external,hive") { def q01 = { qt_q24 """ select name, count(1) as c from student group by name order by c desc;""" diff --git a/regression-test/suites/external_catalog_p0/hive/test_hive_parquet.groovy b/regression-test/suites/external_catalog_p0/hive/test_hive_parquet.groovy index bbe324ff2e..9c8529fac1 100644 --- a/regression-test/suites/external_catalog_p0/hive/test_hive_parquet.groovy +++ b/regression-test/suites/external_catalog_p0/hive/test_hive_parquet.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_hive_parquet", "p0") { +suite("test_hive_parquet", "p0,external,hive") { def q01 = { qt_q01 """ select * from partition_table order by l_orderkey, l_partkey, l_suppkey; diff --git a/regression-test/suites/external_catalog_p0/hive/test_hive_partitions.groovy b/regression-test/suites/external_catalog_p0/hive/test_hive_partitions.groovy index 7e70f5729a..50499afbce 100644 --- a/regression-test/suites/external_catalog_p0/hive/test_hive_partitions.groovy +++ b/regression-test/suites/external_catalog_p0/hive/test_hive_partitions.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_hive_partitions", "p0") { +suite("test_hive_partitions", "p0,external,hive") { def q01 = { qt_q01 """ select id, data from table_with_pars where dt_par = '2023-02-01' order by id; diff --git a/regression-test/suites/external_table_emr_p2/hive/test_hive_schema_evolution.groovy b/regression-test/suites/external_table_emr_p2/hive/test_hive_schema_evolution.groovy index 2cbe589e8b..5a97cc01c0 100644 --- a/regression-test/suites/external_table_emr_p2/hive/test_hive_schema_evolution.groovy +++ b/regression-test/suites/external_table_emr_p2/hive/test_hive_schema_evolution.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_hive_schema_evolution", "p0") { +suite("test_hive_schema_evolution", "p0,external,hive") { def q_text = { qt_q01 """ select * from schema_evo_test_text order by id; diff --git a/regression-test/suites/jdbc_catalog_p0/test_mysql_jdbc_catalog.groovy b/regression-test/suites/jdbc_catalog_p0/test_mysql_jdbc_catalog.groovy index b37c83bba6..25287c5f1f 100644 --- a/regression-test/suites/jdbc_catalog_p0/test_mysql_jdbc_catalog.groovy +++ b/regression-test/suites/jdbc_catalog_p0/test_mysql_jdbc_catalog.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_mysql_jdbc_catalog", "p0") { +suite("test_mysql_jdbc_catalog", "p0,external,mysql") { qt_sql """select current_catalog()""" String enabled = context.config.otherConfigs.get("enableJdbcTest") diff --git a/regression-test/suites/jdbc_catalog_p0/test_pg_jdbc_catalog.groovy b/regression-test/suites/jdbc_catalog_p0/test_pg_jdbc_catalog.groovy index 9d74c70c8e..a4076475b4 100644 --- a/regression-test/suites/jdbc_catalog_p0/test_pg_jdbc_catalog.groovy +++ b/regression-test/suites/jdbc_catalog_p0/test_pg_jdbc_catalog.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-suite("test_pg_jdbc_catalog", "p0") { +suite("test_pg_jdbc_catalog", "p0,external,mysql") { String enabled = context.config.otherConfigs.get("enableJdbcTest") if (enabled != null && enabled.equalsIgnoreCase("true")) { String catalog_name = "pg_jdbc_catalog"; diff --git a/regression-test/suites/jdbc_p0/test_jdbc_query_mysql.groovy b/regression-test/suites/jdbc_p0/test_jdbc_query_mysql.groovy index 36fa0b0ced..475b6c37dd 100644 --- a/regression-test/suites/jdbc_p0/test_jdbc_query_mysql.groovy +++ b/regression-test/suites/jdbc_p0/test_jdbc_query_mysql.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_jdbc_query_mysql", "p0") { +suite("test_jdbc_query_mysql", "p0,external,mysql") { String enabled = context.config.otherConfigs.get("enableJdbcTest") if (enabled != null && enabled.equalsIgnoreCase("true")) { diff --git a/regression-test/suites/jdbc_p0/test_jdbc_query_pg.groovy b/regression-test/suites/jdbc_p0/test_jdbc_query_pg.groovy index 9e8a451069..cc48990f0b 100644 --- a/regression-test/suites/jdbc_p0/test_jdbc_query_pg.groovy +++ b/regression-test/suites/jdbc_p0/test_jdbc_query_pg.groovy @@ -17,7 +17,7 @@ import java.nio.charset.Charset; -suite("test_jdbc_query_pg", "p0") { +suite("test_jdbc_query_pg", "p0,external,pg") { String enabled = context.config.otherConfigs.get("enableJdbcTest") if (enabled != null && enabled.equalsIgnoreCase("true")) { diff --git a/regression-test/suites/load_p0/stream_load/test_hdfs_json_load.groovy b/regression-test/suites/load_p0/stream_load/test_hdfs_json_load.groovy index 6565357e96..ffa8c9bea2 100644 --- a/regression-test/suites/load_p0/stream_load/test_hdfs_json_load.groovy +++ b/regression-test/suites/load_p0/stream_load/test_hdfs_json_load.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_hdfs_json_load", "p0") { +suite("test_hdfs_json_load", "p0,external,hive") { // define a sql table def testTable = "test_hdfs_json_load" diff --git a/regression-test/suites/load_p0/stream_load/test_load_with_decimal.groovy b/regression-test/suites/load_p0/stream_load/test_load_with_decimal.groovy index 380bcb278a..f5260778cd 100644 --- a/regression-test/suites/load_p0/stream_load/test_load_with_decimal.groovy +++ b/regression-test/suites/load_p0/stream_load/test_load_with_decimal.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_load_with_decimal", "p0") { +suite("test_load_with_decimal", "p0,external,hive") { def tableName = "test_load_with_decimal" sql """ DROP TABLE IF EXISTS ${tableName} """ diff --git a/regression-test/suites/nereids_p0/show/test_show_where.groovy b/regression-test/suites/nereids_p0/show/test_show_where.groovy index 7f2d9b2e5d..c65c23c68f 100644 --- a/regression-test/suites/nereids_p0/show/test_show_where.groovy +++ b/regression-test/suites/nereids_p0/show/test_show_where.groovy @@ -16,7 +16,7 @@ // under the License. 
-suite("test_show_where", "query") { +suite("test_show_where", "query,external,mysql") { sql "SET enable_nereids_planner=true" sql "SET enable_fallback_to_original_planner=false" String ex_db_name = "doris_test"; diff --git a/regression-test/suites/query_p0/show/test_show_where.groovy b/regression-test/suites/query_p0/show/test_show_where.groovy index 921722d89b..665ddab0d5 100644 --- a/regression-test/suites/query_p0/show/test_show_where.groovy +++ b/regression-test/suites/query_p0/show/test_show_where.groovy @@ -16,7 +16,7 @@ // under the License. -suite("test_show_where", "query") { +suite("test_show_where", "query,external,mysql") { String ex_db_name = "doris_test"; String ex_tb0 = "ex_tb0"; String ex_tb1 = "ex_tb1"; diff --git a/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_orc.groovy b/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_orc.groovy index 8902190a88..c7faf2e030 100644 --- a/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_orc.groovy +++ b/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_orc.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_catalog_hive_orc", "p0") { +suite("test_catalog_hive_orc", "p0,external,hive") { def q01 = { sql """set exec_mem_limit=8589934592""" diff --git a/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_parquet.groovy b/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_parquet.groovy index 31acb64310..04a28609a9 100644 --- a/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_parquet.groovy +++ b/regression-test/suites/tpch_sf1_p0/multi_catalog_query/hive_catalog_parquet.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_catalog_hive_parquet", "p0") { +suite("test_catalog_hive_parquet", "p0,external,hive") { def q01 = { sql """set exec_mem_limit=8589934592"""