[test]Mv external p2 test case to p0. (#37070) (#37140)

backport: https://github.com/apache/doris/pull/37070
This commit is contained in:
Jibing-Li
2024-07-04 11:19:31 +08:00
committed by GitHub
parent fb344b66ca
commit bf3ea1839c
42 changed files with 646 additions and 139 deletions

View File

@ -0,0 +1,20 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `orc_partitioned_columns`(
`t_timestamp` timestamp)
PARTITIONED BY (
`t_int` int,
`t_float` float,
`t_string` string)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/orc_partitioned_columns';
msck repair table orc_partitioned_columns;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,20 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `orc_partitioned_one_column`(
`t_float` float,
`t_string` string,
`t_timestamp` timestamp)
PARTITIONED BY (
`t_int` int)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/orc_partitioned_one_column';
msck repair table orc_partitioned_one_column;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,20 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `parquet_partitioned_columns`(
`t_timestamp` timestamp)
PARTITIONED BY (
`t_int` int,
`t_float` float,
`t_string` string)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/parquet_partitioned_columns';
msck repair table parquet_partitioned_columns;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,20 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `parquet_partitioned_one_column`(
`t_float` float,
`t_string` string,
`t_timestamp` timestamp)
PARTITIONED BY (
`t_int` int)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/parquet_partitioned_one_column';
msck repair table parquet_partitioned_one_column;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,22 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `test_mixed_par_locations_orc`(
`id` int,
`name` string,
`age` int,
`city` string,
`sex` string)
PARTITIONED BY (
`par` string)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/test_mixed_par_locations_orc';
msck repair table test_mixed_par_locations_orc;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,22 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `test_mixed_par_locations_parquet`(
`id` int,
`name` string,
`age` int,
`city` string,
`sex` string)
PARTITIONED BY (
`par` string)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/test_mixed_par_locations_parquet';
msck repair table test_mixed_par_locations_parquet;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,18 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `test_truncate_char_or_varchar_columns_orc`(
`id` int,
`city` varchar(3),
`country` char(3))
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_orc';
msck repair table test_truncate_char_or_varchar_columns_orc;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,18 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `test_truncate_char_or_varchar_columns_parquet`(
`id` int,
`city` varchar(3),
`country` char(3))
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_parquet';
msck repair table test_truncate_char_or_varchar_columns_parquet;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,18 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `test_truncate_char_or_varchar_columns_text`(
`id` int,
`city` varchar(3),
`country` char(3))
ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
'serialization.format' = '1')
STORED AS INPUTFORMAT
'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_text';
msck repair table test_truncate_char_or_varchar_columns_text;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,21 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `text_partitioned_columns`(
`t_timestamp` timestamp)
PARTITIONED BY (
`t_int` int,
`t_float` float,
`t_string` string)
ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
'serialization.format' = ',',
'field.delim' = ',')
STORED AS INPUTFORMAT
'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/text_partitioned_columns';
msck repair table text_partitioned_columns;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -0,0 +1,21 @@
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;
CREATE TABLE `text_partitioned_one_column`(
`t_float` float,
`t_string` string,
`t_timestamp` timestamp)
PARTITIONED BY (
`t_int` int)
ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
'serialization.format' = ',',
'field.delim' = ',')
STORED AS INPUTFORMAT
'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/text_partitioned_one_column';
msck repair table text_partitioned_one_column;

View File

@ -0,0 +1,12 @@
#!/bin/bash
set -x
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
# create table
hive -f "${CUR_DIR}/create_table.hql"

View File

@ -23,30 +23,29 @@
-- !q06 --
2023-01-03T00:00 100 0.3 test3
-- !q07 --
1994 50063846 1820677
1995 58220229 1820677
1995 66859335 1820677
1997 77350500 1820677
1995 98899109 1820677
1996 122310373 1820677
1996 138664326 1820677
1995 145803300 1820677
1998 187514084 1820677
1994 197627203 1820677
1993 216217095 1820677
1997 260737890 1820677
1998 279581856 1820677
1992 296560224 1820677
1993 306190854 1820677
1997 329189126 1820677
1992 389043491 1820677
1997 435247522 1820677
1998 449388167 1820677
1994 526241665 1820677
1998 533034534 1820677
1996 576018657 1820677
1997 582732039 1820677
-- !q01 --
0.1 test1 2023-01-01T00:00 \N
0.2 test2 2023-01-02T00:00 \N
0.3 test3 2023-01-03T00:00 100
-- !q02 --
0.1 test1 2023-01-01T00:00 \N
0.2 test2 2023-01-02T00:00 \N
-- !q03 --
0.3 test3 2023-01-03T00:00 100
-- !q04 --
2023-01-01T00:00 \N 0.1 test1
2023-01-02T00:00 \N 0.2 test2
2023-01-03T00:00 100 0.3 test3
-- !q05 --
2023-01-01T00:00 \N 0.1 test1
2023-01-02T00:00 \N 0.2 test2
-- !q06 --
2023-01-03T00:00 100 0.3 test3
-- !q01 --
0.1 test1 2023-01-01T00:00 \N
@ -72,30 +71,53 @@
-- !q06 --
2023-01-03T00:00 100 0.3 test3
-- !q07 --
1994 50063846 1820677
1995 58220229 1820677
1995 66859335 1820677
1997 77350500 1820677
1995 98899109 1820677
1996 122310373 1820677
1996 138664326 1820677
1995 145803300 1820677
1998 187514084 1820677
1994 197627203 1820677
1993 216217095 1820677
1997 260737890 1820677
1998 279581856 1820677
1992 296560224 1820677
1993 306190854 1820677
1997 329189126 1820677
1992 389043491 1820677
1997 435247522 1820677
1998 449388167 1820677
1994 526241665 1820677
1998 533034534 1820677
1996 576018657 1820677
1997 582732039 1820677
-- !q01 --
0.1 test1 2023-01-01T00:00 \N
0.2 test2 2023-01-02T00:00 \N
0.3 test3 2023-01-03T00:00 100
-- !q02 --
0.1 test1 2023-01-01T00:00 \N
0.2 test2 2023-01-02T00:00 \N
-- !q03 --
0.3 test3 2023-01-03T00:00 100
-- !q04 --
2023-01-01T00:00 \N 0.1 test1
2023-01-02T00:00 \N 0.2 test2
2023-01-03T00:00 100 0.3 test3
-- !q05 --
2023-01-01T00:00 \N 0.1 test1
2023-01-02T00:00 \N 0.2 test2
-- !q06 --
2023-01-03T00:00 100 0.3 test3
-- !q01 --
0.1 test1 2023-01-01T00:00 \N
0.2 test2 2023-01-02T00:00 \N
0.3 test3 2023-01-03T00:00 100
-- !q02 --
0.1 test1 2023-01-01T00:00 \N
0.2 test2 2023-01-02T00:00 \N
-- !q03 --
0.3 test3 2023-01-03T00:00 100
-- !q04 --
2023-01-01T00:00 \N 0.1 test1
2023-01-02T00:00 \N 0.2 test2
2023-01-03T00:00 100 0.3 test3
-- !q05 --
2023-01-01T00:00 \N 0.1 test1
2023-01-02T00:00 \N 0.2 test2
-- !q06 --
2023-01-03T00:00 100 0.3 test3
-- !q01 --
0.1 test1 2023-01-01T00:00 \N

View File

@ -35,3 +35,39 @@ guangzhou 2
hangzhou 2
shanghai 2
-- !01 --
1 Tom 48 shanghai male 20230101
2 Jerry 35 guangzhou male 20230101
3 Frank 25 hangzhou male 20230101
4 Ada 22 beijing female 20230101
5 Jason 46 shanghai male 20230102
6 Andy 38 guangzhou male 20230102
7 Sam 29 hangzhou male 20230102
8 Chloea 18 beijing female 20230102
-- !02 --
8
-- !03 --
guangzhou 2
hangzhou 2
shanghai 2
-- !01 --
1 Tom 48 shanghai male 20230101
2 Jerry 35 guangzhou male 20230101
3 Frank 25 hangzhou male 20230101
4 Ada 22 beijing female 20230101
5 Jason 46 shanghai male 20230102
6 Andy 38 guangzhou male 20230102
7 Sam 29 hangzhou male 20230102
8 Chloea 18 beijing female 20230102
-- !02 --
8
-- !03 --
guangzhou 2
hangzhou 2
shanghai 2

View File

@ -83,3 +83,87 @@ beijing at beijing in china
Boston at Boston in 美利坚合众国
哈尔滨 at 哈尔滨 in 中华人民共和国
-- !q01 --
1 han chi
2 bei chi
3 杭州 中华人
4 Bos 美利坚
5 哈尔滨 中华人
-- !q02 --
han at han in chi
bei at bei in chi
杭州 at 杭州 in 中华人
Bos at Bos in 美利坚
哈尔滨 at 哈尔滨 in 中华人
-- !q01 --
1 han chi
2 bei chi
3 杭州 中华人
4 Bos 美利坚
5 哈尔滨 中华人
-- !q02 --
han at han in chi
bei at bei in chi
杭州 at 杭州 in 中华人
Bos at Bos in 美利坚
哈尔滨 at 哈尔滨 in 中华人
-- !q01 --
1 han chi
2 bei chi
3 杭州 中华人
4 Bos 美利坚
5 哈尔滨 中华人
-- !q02 --
han at han in chi
bei at bei in chi
杭州 at 杭州 in 中华人
Bos at Bos in 美利坚
哈尔滨 at 哈尔滨 in 中华人
-- !q01 --
1 hangzhou china
2 beijing china
3 杭州 中华人民共和国
4 Boston 美利坚合众国
5 哈尔滨 中华人民共和国
-- !q02 --
hangzhou at hangzhou in china
beijing at beijing in china
杭州 at 杭州 in 中华人民共和国
Boston at Boston in 美利坚合众国
哈尔滨 at 哈尔滨 in 中华人民共和国
-- !q01 --
1 hangzhou china
2 beijing china
3 杭州 中华人民共和国
4 Boston 美利坚合众国
5 哈尔滨 中华人民共和国
-- !q02 --
hangzhou at hangzhou in china
beijing at beijing in china
杭州 at 杭州 in 中华人民共和国
Boston at Boston in 美利坚合众国
哈尔滨 at 哈尔滨 in 中华人民共和国
-- !q01 --
1 hangzhou china
2 beijing china
3 杭州 中华人民共和国
4 Boston 美利坚合众国
5 哈尔滨 中华人民共和国
-- !q02 --
hangzhou at hangzhou in china
beijing at beijing in china
杭州 at 杭州 in 中华人民共和国
Boston at Boston in 美利坚合众国
哈尔滨 at 哈尔滨 in 中华人民共和国

View File

@ -15,12 +15,16 @@
// specific language governing permissions and limitations
// under the License.
suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote,external_remote_hive") {
String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
String catalog_name = "test_external_catalog_hive_partition"
suite("test_external_catalog_hive_partition", "p0,external,hive,external_docker,external_docker_hive") {
String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable Hive test.")
return;
}
for (String hivePrefix : ["hive2", "hive3"]) {
String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
String catalog_name = "${hivePrefix}_test_external_catalog_hive_partition"
sql """drop catalog if exists ${catalog_name};"""
sql """
@ -39,7 +43,7 @@ suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote,
qt_q04 """ select * from multi_catalog.parquet_partitioned_columns order by t_float """
qt_q05 """ select * from multi_catalog.parquet_partitioned_columns where t_int is null order by t_float """
qt_q06 """ select * from multi_catalog.parquet_partitioned_columns where t_int is not null order by t_float """
qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_parquet where o_custkey=1820677 order by o_orderkey """
//qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_parquet where o_custkey=1820677 order by o_orderkey """
}
// test orc format
def q01_orc = {
@ -49,7 +53,7 @@ suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote,
qt_q04 """ select * from multi_catalog.orc_partitioned_columns order by t_float """
qt_q05 """ select * from multi_catalog.orc_partitioned_columns where t_int is null order by t_float """
qt_q06 """ select * from multi_catalog.orc_partitioned_columns where t_int is not null order by t_float """
qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_orc where o_custkey=1820677 order by o_orderkey """
//qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_orc where o_custkey=1820677 order by o_orderkey """
}
// test text format
def q01_text = {

View File

@ -15,12 +15,16 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hive_statistic_auto", "p2,external,hive,external_remote,external_remote_hive") {
String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
String catalog_name = "test_hive_statistic_auto"
suite("test_hive_statistic_auto", "p0,external,hive,external_docker,external_docker_hive") {
String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable Hive test.")
return;
}
for (String hivePrefix : ["hive2", "hive3"]) {
String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
String catalog_name = "${hivePrefix}_test_hive_statistic_auto"
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (

View File

@ -15,17 +15,20 @@
// specific language governing permissions and limitations
// under the License.
suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_remote_hive") {
String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
String catalog_name = "test_hive_statistic_clean"
suite("test_hive_statistic_clean", "p0,external,hive,external_docker,external_docker_hive") {
String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable Hive test.")
return;
}
for (String hivePrefix : ["hive2", "hive3"]) {
String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
String catalog_name = "${hivePrefix}_test_hive_statistic_clean"
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (
'type'='hms',
'hadoop.username' = 'hadoop',
'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
);
"""
@ -68,6 +71,7 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re
assertEquals(result[0][7], "1")
assertEquals(result[0][8], "7")
/*
sql """drop expired stats"""
result = sql """show column stats `statistics` (lo_quantity)"""
assertEquals(result.size(), 1)
@ -101,6 +105,7 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re
assertEquals(result[0][6], "4.0")
assertEquals(result[0][7], "1")
assertEquals(result[0][8], "7")
*/
def ctlId
result = sql """show catalogs"""
@ -111,8 +116,9 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re
}
}
sql """drop catalog ${catalog_name}"""
sql """drop expired stats"""
// sql """drop catalog ${catalog_name}"""
// sql """drop expired stats"""
sql """drop stats `statistics`"""
result = sql """select * from internal.__internal_schema.column_statistics where catalog_id=${ctlId}"""
assertEquals(result.size(), 0)

View File

@ -0,0 +1,57 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_mixed_par_locations", "p0,external,hive,external_docker,external_docker_hive") {
def formats = ["_parquet", "_orc"]
def q1 = """select * from test_mixed_par_locationsSUFFIX order by id;"""
def q2 = """select count(id) from test_mixed_par_locationsSUFFIX;"""
def q3 = """select city, count(*) from test_mixed_par_locations_parquet where sex = 'male' group by city order by city;"""
String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable Hive test.")
return;
}
for (String hivePrefix : ["hive2", "hive3"]) {
String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
String catalog_name = "${hivePrefix}_test_mixed_par_locations"
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (
'type'='hms',
'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
);
"""
logger.info("catalog " + catalog_name + " created")
sql """switch ${catalog_name};"""
logger.info("switched to catalog " + catalog_name)
sql """use multi_catalog;"""
logger.info("use multi_catalog")
for (String format in formats) {
logger.info("Process format " + format)
qt_01 q1.replace("SUFFIX", format)
qt_02 q2.replace("SUFFIX", format)
qt_03 q3.replace("SUFFIX", format)
}
sql """drop catalog if exists ${catalog_name}"""
}
}

View File

@ -15,12 +15,16 @@
// specific language governing permissions and limitations
// under the License.
suite("test_truncate_char_or_varchar_columns", "p2,external,hive,external_remote,external_remote_hive") {
String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
String catalog_name = "test_truncate_char_or_varchar_columns"
suite("test_truncate_char_or_varchar_columns", "p0,external,hive,external_docker,external_docker_hive") {
String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable Hive test.")
return;
}
for (String hivePrefix : ["hive2", "hive3"]) {
String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
String catalog_name = "${hivePrefix}_test_truncate_char_or_varchar_columns"
sql """drop catalog if exists ${catalog_name};"""
sql """

View File

@ -1,62 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_mixed_par_locations", "p2,external,hive,external_remote,external_remote_hive") {
def formats = ["_parquet", "_orc"]
def q1 = """select * from test_mixed_par_locationsSUFFIX order by id;"""
def q2 = """select count(id) from test_mixed_par_locationsSUFFIX;"""
def q3 = """select city, count(*) from test_mixed_par_locations_parquet where sex = 'male' group by city order by city;"""
String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
try {
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
String extAk = context.config.otherConfigs.get("extAk");
String extSk = context.config.otherConfigs.get("extSk");
String extS3Endpoint = context.config.otherConfigs.get("extS3Endpoint");
String extS3Region = context.config.otherConfigs.get("extS3Region");
String catalog_name = "test_mixed_par_locations"
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (
'type'='hms',
'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}',
'cos.access_key' = '${extAk}',
'cos.secret_key' = '${extSk}',
'cos.endpoint' = '${extS3Endpoint}'
);
"""
logger.info("catalog " + catalog_name + " created")
sql """switch ${catalog_name};"""
logger.info("switched to catalog " + catalog_name)
sql """use multi_catalog;"""
logger.info("use multi_catalog")
for (String format in formats) {
logger.info("Process format " + format)
qt_01 q1.replace("SUFFIX", format)
qt_02 q2.replace("SUFFIX", format)
qt_03 q3.replace("SUFFIX", format)
}
sql """drop catalog if exists ${catalog_name}"""
} finally {
}
}
}