From bf3ea1839c6df9b4bb6d40402aced1f8fe5301ca Mon Sep 17 00:00:00 2001
From: Jibing-Li <64681310+Jibing-Li@users.noreply.github.com>
Date: Thu, 4 Jul 2024 11:19:31 +0800
Subject: [PATCH] [test]Mv external p2 test case to p0. (#37070) (#37140)

backport: https://github.com/apache/doris/pull/37070
---
 .../orc_partitioned_columns/create_table.hql  |  20 +++
 .../orc_partitioned_columns/data.tar.gz       | Bin 0 -> 722 bytes
 .../orc_partitioned_columns/run.sh            |  12 ++
 .../create_table.hql                          |  20 +++
 .../orc_partitioned_one_column/data.tar.gz    | Bin 0 -> 1064 bytes
 .../orc_partitioned_one_column/run.sh         |  12 ++
 .../create_table.hql                          |  20 +++
 .../parquet_partitioned_columns/data.tar.gz   | Bin 0 -> 639 bytes
 .../parquet_partitioned_columns/run.sh        |  12 ++
 .../create_table.hql                          |  20 +++
 .../data.tar.gz                               | Bin 0 -> 729 bytes
 .../parquet_partitioned_one_column/run.sh     |  12 ++
 .../create_table.hql                          |  22 ++++
 .../test_mixed_par_locations_orc/data.tar.gz  | Bin 0 -> 1325 bytes
 .../test_mixed_par_locations_orc/run.sh       |  12 ++
 .../create_table.hql                          |  22 ++++
 .../data.tar.gz                               | Bin 0 -> 952 bytes
 .../test_mixed_par_locations_parquet/run.sh   |  12 ++
 .../create_table.hql                          |  18 +++
 .../data.tar.gz                               | Bin 0 -> 781 bytes
 .../run.sh                                    |  12 ++
 .../create_table.hql                          |  18 +++
 .../data.tar.gz                               | Bin 0 -> 611 bytes
 .../run.sh                                    |  12 ++
 .../create_table.hql                          |  18 +++
 .../data.tar.gz                               | Bin 0 -> 316 bytes
 .../run.sh                                    |  12 ++
 .../text_partitioned_columns/create_table.hql |  21 ++++
 .../text_partitioned_columns/data.tar.gz      | Bin 0 -> 410 bytes
 .../text_partitioned_columns/run.sh           |  12 ++
 .../create_table.hql                          |  21 ++++
 .../text_partitioned_one_column/data.tar.gz   | Bin 0 -> 321 bytes
 .../text_partitioned_one_column/run.sh        |  12 ++
 .../test_external_catalog_hive_partition.out  | 118 +++++++++++-------
 .../hive/test_mixed_par_locations.out         |  36 ++++++
 .../test_truncate_char_or_varchar_columns.out |  84 +++++++++++++
 ...est_external_catalog_hive_partition.groovy |  20 +--
 .../hive/test_hive_statistic_auto.groovy      |  16 ++-
 .../hive/test_hive_statistic_clean.groovy     |  24 ++--
 .../hive/test_mixed_par_locations.groovy      |  57 +++++++++
 ...st_truncate_char_or_varchar_columns.groovy |  16 ++-
 .../hive/test_mixed_par_locations.groovy      |  62 ---------
 42 files changed, 646 insertions(+), 139 deletions(-)
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/run.sh
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/create_table.hql
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/data.tar.gz
 create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/run.sh
 rename regression-test/data/{external_table_p2 => external_table_p0}/hive/test_external_catalog_hive_partition.out (51%)
 rename regression-test/data/{external_table_p2 => external_table_p0}/hive/test_mixed_par_locations.out (53%)
 rename regression-test/data/{external_table_p2 => external_table_p0}/hive/test_truncate_char_or_varchar_columns.out (51%)
 rename regression-test/suites/{external_table_p2 => external_table_p0}/hive/test_external_catalog_hive_partition.groovy (82%)
 rename regression-test/suites/{external_table_p2 => external_table_p0}/hive/test_hive_statistic_auto.groovy (88%)
 rename regression-test/suites/{external_table_p2 => external_table_p0}/hive/test_hive_statistic_clean.groovy (88%)
 create mode 100644 regression-test/suites/external_table_p0/hive/test_mixed_par_locations.groovy
 rename regression-test/suites/{external_table_p2 => external_table_p0}/hive/test_truncate_char_or_varchar_columns.groovy (88%)
 delete mode 100644 regression-test/suites/external_table_p2/hive/test_mixed_par_locations.groovy

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/create_table.hql
new file mode 100644
index 0000000000..3cc9ce6703
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `orc_partitioned_columns`(
+  `t_timestamp` timestamp)
+PARTITIONED BY (
+  `t_int` int,
+  `t_float` float,
+  `t_string` string)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/orc_partitioned_columns';
+
+msck repair table orc_partitioned_columns;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..ea87f4489b0c968338e45b0decad0b6ceb5b7fc7
GIT binary patch
literal 722
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/create_table.hql
new file mode 100644
index 0000000000..21e42866ab
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `orc_partitioned_one_column`(
+  `t_float` float,
+  `t_string` string,
+  `t_timestamp` timestamp)
+PARTITIONED BY (
+  `t_int` int)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/orc_partitioned_one_column';
+
+msck repair table orc_partitioned_one_column;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..91dd8646e79a8fdd4f923c58f257a19cc21edd37
GIT binary patch
literal 1064
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/create_table.hql
new file mode 100644
index 0000000000..8df497e249
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `parquet_partitioned_columns`(
+  `t_timestamp` timestamp)
+PARTITIONED BY (
+  `t_int` int,
+  `t_float` float,
+  `t_string` string)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_partitioned_columns';
+
+msck repair table parquet_partitioned_columns;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..c212294940ecde69da38564899d3224bc41945a5
GIT binary patch
literal 639
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/create_table.hql
new file mode 100644
index 0000000000..ad839449a0
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `parquet_partitioned_one_column`(
+  `t_float` float,
+  `t_string` string,
+  `t_timestamp` timestamp)
+PARTITIONED BY (
+  `t_int` int)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_partitioned_one_column';
+
+msck repair table parquet_partitioned_one_column;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..193de4c019130438ad3e1e59e3a201f57c8660bd
GIT binary patch
literal 729
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/create_table.hql
new file mode 100644
index 0000000000..9521cd80fb
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/create_table.hql
@@ -0,0 +1,22 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_mixed_par_locations_orc`(
+  `id` int,
+  `name` string,
+  `age` int,
+  `city` string,
+  `sex` string)
+PARTITIONED BY (
+  `par` string)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_mixed_par_locations_orc';
+
+msck repair table test_mixed_par_locations_orc;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..047a4e85bcd398dfbd8c553b8467a00752c14938
GIT binary patch
literal 1325
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/create_table.hql
new file mode 100644
index 0000000000..951b2f724a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/create_table.hql
@@ -0,0 +1,22 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_mixed_par_locations_parquet`(
+  `id` int,
+  `name` string,
+  `age` int,
+  `city` string,
+  `sex` string)
+PARTITIONED BY (
+  `par` string)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_mixed_par_locations_parquet';
+
+msck repair table test_mixed_par_locations_parquet;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..9994d85e4ebaf59e34c0cee237a8d4c4830272de
GIT binary patch
literal 952
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/create_table.hql
new file mode 100644
index 0000000000..19cb03245a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/create_table.hql
@@ -0,0 +1,18 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_truncate_char_or_varchar_columns_orc`(
+  `id` int,
+  `city` varchar(3),
+  `country` char(3))
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_orc';
+
+msck repair table test_truncate_char_or_varchar_columns_orc;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..78316632106695c0bb2f0f0ae046a83094c0eb20
GIT binary patch
literal 781
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/create_table.hql
new file mode 100644
index 0000000000..d038dbe4f5
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/create_table.hql
@@ -0,0 +1,18 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_truncate_char_or_varchar_columns_parquet`(
+  `id` int,
+  `city` varchar(3),
+  `country` char(3))
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_parquet';
+
+msck repair table test_truncate_char_or_varchar_columns_parquet;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..1e65dca1bb32d8101f210d92f64f179b0b506999
GIT binary patch
literal 611
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/create_table.hql
new file mode 100644
index 0000000000..c52bbf4a2d
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/create_table.hql
@@ -0,0 +1,18 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_truncate_char_or_varchar_columns_text`(
+  `id` int,
+  `city` varchar(3),
+  `country` char(3))
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+  'serialization.format' = '1')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_text';
+
+msck repair table test_truncate_char_or_varchar_columns_text;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..ce107f7438e6ee16ef545312986434bd035f49f4
GIT binary patch
literal 316
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/create_table.hql
new file mode 100644
index 0000000000..863155230f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `text_partitioned_columns`(
+  `t_timestamp` timestamp)
+PARTITIONED BY (
+  `t_int` int,
+  `t_float` float,
+  `t_string` string)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+  'serialization.format' = ',',
+  'field.delim' = ',')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/text_partitioned_columns';
+
+msck repair table text_partitioned_columns;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..f70f44b00a4e1b24836c625ae0a76736572219bf
GIT binary patch
literal 410
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/create_table.hql
new file mode 100644
index 0000000000..1eff2e0909
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `text_partitioned_one_column`(
+  `t_float` float,
+  `t_string` string,
+  `t_timestamp` timestamp)
+PARTITIONED BY (
+  `t_int` int)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+  'serialization.format' = ',',
+  'field.delim' = ',')
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/text_partitioned_one_column';
+
+msck repair table text_partitioned_one_column;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/data.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..443e52b561f8649041f729f36f17d5f4004b10d9
GIT binary patch
literal 321
[... base85-encoded binary data omitted ...]

literal 0
HcmV?d00001

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/run.sh
new file mode 100644
index 0000000000..f3136eaa20
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/regression-test/data/external_table_p2/hive/test_external_catalog_hive_partition.out b/regression-test/data/external_table_p0/hive/test_external_catalog_hive_partition.out
similarity index 51%
rename from regression-test/data/external_table_p2/hive/test_external_catalog_hive_partition.out
rename to regression-test/data/external_table_p0/hive/test_external_catalog_hive_partition.out
index c823189e68..aa1e48a439 100644
--- a/regression-test/data/external_table_p2/hive/test_external_catalog_hive_partition.out
+++ b/regression-test/data/external_table_p0/hive/test_external_catalog_hive_partition.out
@@ -23,30 +23,29 @@
 -- !q06 --
 2023-01-03T00:00 100 0.3 test3
 
--- !q07 --
-1994 50063846 1820677
-1995 58220229 1820677
-1995 66859335 1820677
-1997 77350500 1820677
-1995 98899109 1820677
-1996 122310373 1820677
-1996 138664326 1820677
-1995 145803300 1820677
-1998 187514084 1820677
-1994 197627203 1820677
-1993 216217095 1820677
-1997 260737890 1820677
-1998 279581856 1820677
-1992 296560224 1820677
-1993 306190854 1820677
-1997 329189126 1820677
-1992 389043491 1820677
-1997 435247522 1820677
-1998 449388167 1820677
-1994 526241665 1820677
-1998 533034534 1820677
-1996 576018657 1820677
-1997 582732039 1820677
+-- !q01 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+0.3 test3 2023-01-03T00:00 100
+
+-- !q02 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+
+-- !q03 --
+0.3 test3 2023-01-03T00:00 100
+
+-- !q04 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+2023-01-03T00:00 100 0.3 test3
+
+-- !q05 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+
+-- !q06 --
+2023-01-03T00:00 100 0.3 test3
 
 -- !q01 --
 0.1 test1 2023-01-01T00:00 \N
 0.2 test2 2023-01-02T00:00 \N
 0.3 test3 2023-01-03T00:00 100
 
 -- !q02 --
 0.1 test1 2023-01-01T00:00 \N
 0.2 test2 2023-01-02T00:00 \N
 
 -- !q03 --
 0.3 test3 2023-01-03T00:00 100
 
 -- !q04 --
 2023-01-01T00:00 \N 0.1 test1
 2023-01-02T00:00 \N 0.2 test2
 2023-01-03T00:00 100 0.3 test3
 
 -- !q05 --
 2023-01-01T00:00 \N 0.1 test1
 2023-01-02T00:00 \N 0.2 test2
 
@@ -72,30 +71,53 @@
 -- !q06 --
 2023-01-03T00:00 100 0.3 test3
 
--- !q07 --
-1994 50063846 1820677
-1995 58220229 1820677
-1995 66859335 1820677
-1997 77350500 1820677
-1995 98899109 1820677
-1996 122310373 1820677
-1996 138664326 1820677
-1995 145803300 1820677
-1998 187514084 1820677
-1994 197627203 1820677
-1993 216217095 1820677
-1997 260737890 1820677
-1998 279581856 1820677
-1992 296560224 1820677
-1993 306190854 1820677
-1997 329189126 1820677
-1992 389043491 1820677
-1997 435247522 1820677
-1998 449388167 1820677
-1994 526241665 1820677
-1998 533034534 1820677
-1996 576018657 1820677
-1997 582732039 1820677
+-- !q01 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+0.3 test3 2023-01-03T00:00 100
+
+-- !q02 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+
+-- !q03 --
+0.3 test3 2023-01-03T00:00 100
+
+-- !q04 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+2023-01-03T00:00 100 0.3 test3
+
+-- !q05 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+
+-- !q06 --
+2023-01-03T00:00 100 0.3 test3
+
+-- !q01 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+0.3 test3 2023-01-03T00:00 100
+
+-- !q02 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+
+-- !q03 --
+0.3 test3 2023-01-03T00:00 100
+
+-- !q04 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+2023-01-03T00:00 100 0.3 test3
+
+-- !q05 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+
+-- !q06 --
+2023-01-03T00:00 100 0.3 test3
 
 -- !q01 --
 0.1 test1 2023-01-01T00:00 \N
diff --git a/regression-test/data/external_table_p2/hive/test_mixed_par_locations.out b/regression-test/data/external_table_p0/hive/test_mixed_par_locations.out
similarity index 53%
rename from regression-test/data/external_table_p2/hive/test_mixed_par_locations.out
rename to regression-test/data/external_table_p0/hive/test_mixed_par_locations.out
index e4344d897f..7e57e2d847 100644
--- a/regression-test/data/external_table_p2/hive/test_mixed_par_locations.out
+++ b/regression-test/data/external_table_p0/hive/test_mixed_par_locations.out
@@ -35,3 +35,39 @@
 guangzhou 2
 hangzhou 2
 shanghai 2
+-- !01 --
+1 Tom 48 shanghai male 20230101
+2 Jerry 35 guangzhou male 20230101
+3 Frank 25 hangzhou male 20230101
+4 Ada 22 beijing female 20230101
+5 Jason 46 shanghai male 20230102
+6 Andy 38 guangzhou male 20230102
+7 Sam 29 hangzhou male 20230102
+8 Chloea 18 beijing female 20230102
+
+-- !02 --
+8
+
+-- !03 --
+guangzhou 2
+hangzhou 2
+shanghai 2
+
+-- !01 --
+1 Tom 48 shanghai male 20230101
+2 Jerry 35 guangzhou male 20230101
+3 Frank 25 hangzhou male 20230101
+4 Ada 22 beijing female 20230101
+5 Jason 46 shanghai male 20230102
+6 Andy 38 guangzhou male 20230102
+7 Sam 29 hangzhou male 20230102
+8 Chloea 18 beijing female 20230102
+
+-- !02 --
+8
+
+-- !03 --
+guangzhou 2
+hangzhou 2
+shanghai 2
+
diff --git a/regression-test/data/external_table_p2/hive/test_truncate_char_or_varchar_columns.out b/regression-test/data/external_table_p0/hive/test_truncate_char_or_varchar_columns.out
similarity index 51%
rename from regression-test/data/external_table_p2/hive/test_truncate_char_or_varchar_columns.out
rename to regression-test/data/external_table_p0/hive/test_truncate_char_or_varchar_columns.out
index 5dab20925f..5c56200040 100644
--- a/regression-test/data/external_table_p2/hive/test_truncate_char_or_varchar_columns.out
+++ b/regression-test/data/external_table_p0/hive/test_truncate_char_or_varchar_columns.out
@@ -83,3 +83,87 @@
 beijing at beijing in china
 Boston at Boston in 美利坚合众国
 哈尔滨 at 哈尔滨 in 中华人民共和国
+-- !q01 --
+1 han chi
+2 bei chi
+3 杭州 中华人
+4 Bos 美利坚
+5 哈尔滨 中华人
+
+-- !q02 --
+han at han in chi
+bei at bei in chi
+杭州 at 杭州 in 中华人
+Bos at Bos in 美利坚
+哈尔滨 at 哈尔滨 in 中华人
+
+-- !q01 --
+1 han chi
+2 bei chi
+3 杭州 中华人
+4 Bos 美利坚
+5 哈尔滨 中华人
+
+-- !q02 --
+han at han in chi
+bei at bei in chi
+杭州 at 杭州 in 中华人
+Bos at Bos in 美利坚
+哈尔滨 at 哈尔滨 in 中华人
+
+-- !q01 --
+1 han chi
+2 bei chi
+3 杭州 中华人
+4 Bos 美利坚
+5 哈尔滨 中华人
+
+-- !q02 --
+han at han in chi
+bei at bei in chi
+杭州 at 杭州 in 中华人
+Bos at Bos in 美利坚
+哈尔滨 at 哈尔滨 in 中华人
+
+-- !q01 --
+1 hangzhou china
+2 beijing china
+3 杭州 中华人民共和国
+4 Boston 美利坚合众国
+5 哈尔滨 中华人民共和国
+
+-- !q02 --
+hangzhou at hangzhou in china
+beijing at beijing in china
+杭州 at 杭州 in 中华人民共和国
+Boston at Boston in 美利坚合众国
+哈尔滨 at 哈尔滨 in 中华人民共和国
+
+-- !q01 --
+1 hangzhou china
+2 beijing china
+3 杭州 中华人民共和国
+4 Boston 美利坚合众国
+5 哈尔滨 中华人民共和国
+
+-- !q02 --
+hangzhou at hangzhou in china
+beijing at beijing in china
+杭州 at 杭州 in 中华人民共和国
+Boston at Boston in 美利坚合众国
+哈尔滨 at 哈尔滨 in 中华人民共和国
+
+-- !q01 --
+1 hangzhou china
+2 beijing china
+3 杭州 中华人民共和国
+4 Boston 美利坚合众国
+5 哈尔滨 中华人民共和国
+
+-- !q02 --
+hangzhou at hangzhou in china
+beijing at beijing in china
+杭州 at 杭州 in 中华人民共和国
+Boston at Boston in 美利坚合众国
+哈尔滨 at 哈尔滨 in 中华人民共和国
+
diff --git a/regression-test/suites/external_table_p2/hive/test_external_catalog_hive_partition.groovy b/regression-test/suites/external_table_p0/hive/test_external_catalog_hive_partition.groovy
similarity index 82%
rename from regression-test/suites/external_table_p2/hive/test_external_catalog_hive_partition.groovy
rename to regression-test/suites/external_table_p0/hive/test_external_catalog_hive_partition.groovy
index 196625b3b4..32b80f5650 100644
--- a/regression-test/suites/external_table_p2/hive/test_external_catalog_hive_partition.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_external_catalog_hive_partition.groovy
@@ -15,12 +15,16 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote,external_remote_hive") { - String enabled = context.config.otherConfigs.get("enableExternalHiveTest") - if (enabled != null && enabled.equalsIgnoreCase("true")) { - String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost") - String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort") - String catalog_name = "test_external_catalog_hive_partition" +suite("test_external_catalog_hive_partition", "p0,external,hive,external_docker,external_docker_hive") { + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disable Hive test.") + return; + } + for (String hivePrefix : ["hive2", "hive3"]) { + String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp") + String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String catalog_name = "${hivePrefix}_test_external_catalog_hive_partition" sql """drop catalog if exists ${catalog_name};""" sql """ @@ -39,7 +43,7 @@ suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote, qt_q04 """ select * from multi_catalog.parquet_partitioned_columns order by t_float """ qt_q05 """ select * from multi_catalog.parquet_partitioned_columns where t_int is null order by t_float """ qt_q06 """ select * from multi_catalog.parquet_partitioned_columns where t_int is not null order by t_float """ - qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_parquet where o_custkey=1820677 order by o_orderkey """ + //qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_parquet where o_custkey=1820677 order by o_orderkey """ } // test orc format def q01_orc = { @@ -49,7 +53,7 @@ suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote, qt_q04 """ select * from multi_catalog.orc_partitioned_columns order by t_float """ qt_q05 """ select * from multi_catalog.orc_partitioned_columns where t_int is null order by t_float """ qt_q06 """ select * from multi_catalog.orc_partitioned_columns where t_int is not null order by t_float """ - qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_orc where o_custkey=1820677 order by o_orderkey """ + //qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_orc where o_custkey=1820677 order by o_orderkey """ } // test text format def q01_text = { diff --git a/regression-test/suites/external_table_p2/hive/test_hive_statistic_auto.groovy b/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy similarity index 88% rename from regression-test/suites/external_table_p2/hive/test_hive_statistic_auto.groovy rename to regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy index 8a7591daeb..8a34bf9204 100644 --- a/regression-test/suites/external_table_p2/hive/test_hive_statistic_auto.groovy +++ b/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy @@ -15,12 +15,16 @@ // specific language governing permissions and limitations // under the License. 
-suite("test_hive_statistic_auto", "p2,external,hive,external_remote,external_remote_hive") { - String enabled = context.config.otherConfigs.get("enableExternalHiveTest") - if (enabled != null && enabled.equalsIgnoreCase("true")) { - String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost") - String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort") - String catalog_name = "test_hive_statistic_auto" +suite("test_hive_statistic_auto", "p0,external,hive,external_docker,external_docker_hive") { + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disable Hive test.") + return; + } + for (String hivePrefix : ["hive2", "hive3"]) { + String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp") + String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String catalog_name = "${hivePrefix}_test_hive_statistic_auto" sql """drop catalog if exists ${catalog_name};""" sql """ create catalog if not exists ${catalog_name} properties ( diff --git a/regression-test/suites/external_table_p2/hive/test_hive_statistic_clean.groovy b/regression-test/suites/external_table_p0/hive/test_hive_statistic_clean.groovy similarity index 88% rename from regression-test/suites/external_table_p2/hive/test_hive_statistic_clean.groovy rename to regression-test/suites/external_table_p0/hive/test_hive_statistic_clean.groovy index e04f9fbe88..2813f1ffc9 100644 --- a/regression-test/suites/external_table_p2/hive/test_hive_statistic_clean.groovy +++ b/regression-test/suites/external_table_p0/hive/test_hive_statistic_clean.groovy @@ -15,17 +15,20 @@ // specific language governing permissions and limitations // under the License. -suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_remote_hive") { - String enabled = context.config.otherConfigs.get("enableExternalHiveTest") - if (enabled != null && enabled.equalsIgnoreCase("true")) { - String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost") - String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort") - String catalog_name = "test_hive_statistic_clean" +suite("test_hive_statistic_clean", "p0,external,hive,external_docker,external_docker_hive") { + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disable Hive test.") + return; + } + for (String hivePrefix : ["hive2", "hive3"]) { + String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp") + String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String catalog_name = "${hivePrefix}_test_hive_statistic_clean" sql """drop catalog if exists ${catalog_name};""" sql """ create catalog if not exists ${catalog_name} properties ( 'type'='hms', - 'hadoop.username' = 'hadoop', 'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}' ); """ @@ -68,6 +71,7 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re assertEquals(result[0][7], "1") assertEquals(result[0][8], "7") + /* sql """drop expired stats""" result = sql """show column stats `statistics` (lo_quantity)""" assertEquals(result.size(), 1) @@ -101,6 +105,7 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re assertEquals(result[0][6], "4.0") assertEquals(result[0][7], "1") assertEquals(result[0][8], "7") + */ def ctlId result = sql """show catalogs""" @@ -111,8 
@@ -111,8 +116,9 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_remote_hive") {
             }
         }
 
-        sql """drop catalog ${catalog_name}"""
-        sql """drop expired stats"""
+        // sql """drop catalog ${catalog_name}"""
+        // sql """drop expired stats"""
+        sql """drop stats `statistics`"""
 
         result = sql """select * from internal.__internal_schema.column_statistics where catalog_id=${ctlId}"""
         assertEquals(result.size(), 0)
diff --git a/regression-test/suites/external_table_p0/hive/test_mixed_par_locations.groovy b/regression-test/suites/external_table_p0/hive/test_mixed_par_locations.groovy
new file mode 100644
index 0000000000..3936787710
--- /dev/null
+++ b/regression-test/suites/external_table_p0/hive/test_mixed_par_locations.groovy
@@ -0,0 +1,57 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_mixed_par_locations", "p0,external,hive,external_docker,external_docker_hive") {
+
+    def formats = ["_parquet", "_orc"]
+    def q1 = """select * from test_mixed_par_locationsSUFFIX order by id;"""
+    def q2 = """select count(id) from test_mixed_par_locationsSUFFIX;"""
+    def q3 = """select city, count(*) from test_mixed_par_locations_parquet where sex = 'male' group by city order by city;"""
+
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable Hive test.")
+        return;
+    }
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+        String catalog_name = "${hivePrefix}_test_mixed_par_locations"
+
+        sql """drop catalog if exists ${catalog_name};"""
+        sql """
+            create catalog if not exists ${catalog_name} properties (
+                'type'='hms',
+                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
+            );
+        """
+        logger.info("catalog " + catalog_name + " created")
+        sql """switch ${catalog_name};"""
+        logger.info("switched to catalog " + catalog_name)
+        sql """use multi_catalog;"""
+        logger.info("use multi_catalog")
+
+        for (String format in formats) {
+            logger.info("Process format " + format)
+            qt_01 q1.replace("SUFFIX", format)
+            qt_02 q2.replace("SUFFIX", format)
+            qt_03 q3.replace("SUFFIX", format)
+        }
+        sql """drop catalog if exists ${catalog_name}"""
+    }
+}
+
diff --git a/regression-test/suites/external_table_p2/hive/test_truncate_char_or_varchar_columns.groovy b/regression-test/suites/external_table_p0/hive/test_truncate_char_or_varchar_columns.groovy
similarity index 88%
rename from regression-test/suites/external_table_p2/hive/test_truncate_char_or_varchar_columns.groovy
rename to regression-test/suites/external_table_p0/hive/test_truncate_char_or_varchar_columns.groovy
index b597e3d457..88ba9afca6 100644
--- a/regression-test/suites/external_table_p2/hive/test_truncate_char_or_varchar_columns.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_truncate_char_or_varchar_columns.groovy
@@ -15,12 +15,16 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_truncate_char_or_varchar_columns", "p2,external,hive,external_remote,external_remote_hive") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "test_truncate_char_or_varchar_columns"
+suite("test_truncate_char_or_varchar_columns", "p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable Hive test.")
+        return;
+    }
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+        String catalog_name = "${hivePrefix}_test_truncate_char_or_varchar_columns"
 
         sql """drop catalog if exists ${catalog_name};"""
         sql """
diff --git a/regression-test/suites/external_table_p2/hive/test_mixed_par_locations.groovy b/regression-test/suites/external_table_p2/hive/test_mixed_par_locations.groovy
deleted file mode 100644
index c6ac330db4..0000000000
--- a/regression-test/suites/external_table_p2/hive/test_mixed_par_locations.groovy
+++ /dev/null
@@ -1,62 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_mixed_par_locations", "p2,external,hive,external_remote,external_remote_hive") {
-
-    def formats = ["_parquet", "_orc"]
-    def q1 = """select * from test_mixed_par_locationsSUFFIX order by id;"""
-    def q2 = """select count(id) from test_mixed_par_locationsSUFFIX;"""
-    def q3 = """select city, count(*) from test_mixed_par_locations_parquet where sex = 'male' group by city order by city;"""
-
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        try {
-            String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-            String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-            String extAk = context.config.otherConfigs.get("extAk");
-            String extSk = context.config.otherConfigs.get("extSk");
-            String extS3Endpoint = context.config.otherConfigs.get("extS3Endpoint");
-            String extS3Region = context.config.otherConfigs.get("extS3Region");
-            String catalog_name = "test_mixed_par_locations"
-
-            sql """drop catalog if exists ${catalog_name};"""
-            sql """
-                create catalog if not exists ${catalog_name} properties (
-                    'type'='hms',
-                    'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}',
-                    'cos.access_key' = '${extAk}',
-                    'cos.secret_key' = '${extSk}',
-                    'cos.endpoint' = '${extS3Endpoint}'
-                );
-            """
-            logger.info("catalog " + catalog_name + " created")
-            sql """switch ${catalog_name};"""
-            logger.info("switched to catalog " + catalog_name)
-            sql """use multi_catalog;"""
-            logger.info("use multi_catalog")
-
-            for (String format in formats) {
-                logger.info("Process format " + format)
-                qt_01 q1.replace("SUFFIX", format)
-                qt_02 q2.replace("SUFFIX", format)
-                qt_03 q3.replace("SUFFIX", format)
-            }
-            sql """drop catalog if exists ${catalog_name}"""
-        } finally {
-        }
-    }
-}