diff --git a/docker/thirdparties/docker-compose/kerberos/ccache/create_kerberos_credential_cache_files.sh b/docker/thirdparties/docker-compose/kerberos/ccache/create_kerberos_credential_cache_files.sh
new file mode 100644
index 0000000000..2bba3f928b
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/ccache/create_kerberos_credential_cache_files.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -exuo pipefail
+
+TICKET_LIFETIME='30m'
+
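+# Each kinit below reads its key from a keytab (-kt), obtains a forwardable
+# (-f) TGT valid for TICKET_LIFETIME (-l), and writes it to the credential
+# cache path given with -c.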
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/presto-server-krbcc \
+ -kt /etc/trino/conf/presto-server.keytab presto-server/$(hostname -f)@LABS.TERADATA.COM
+
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/hive-presto-master-krbcc \
+ -kt /etc/trino/conf/hive-presto-master.keytab hive/$(hostname -f)@LABS.TERADATA.COM
+
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/hdfs-krbcc \
+ -kt /etc/hadoop/conf/hdfs.keytab hdfs/hadoop-master@LABS.TERADATA.COM
+
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/hive-krbcc \
+ -kt /etc/hive/conf/hive.keytab hive/hadoop-master@LABS.TERADATA.COM
diff --git a/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf b/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf
new file mode 100644
index 0000000000..7624b94e6a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf
@@ -0,0 +1,50 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+[logging]
+ default = FILE:/var/log/krb5libs.log
+ kdc = FILE:/var/log/krb5kdc.log
+ admin_server = FILE:/var/log/kadmind.log
+
+[libdefaults]
+ default_realm = LABS.TERADATA.COM
+ dns_lookup_realm = false
+ dns_lookup_kdc = false
+ ticket_lifetime = 24h
+ # this setting is causing a Message stream modified (41) error when talking to KDC running on CentOS 7: https://stackoverflow.com/a/60978520
+ # renew_lifetime = 7d
+ forwardable = true
+ udp_preference_limit = 1
+
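+# The KDCs for LABS.TERADATA.COM and OTHERLABS.TERADATA.COM run on
+# hadoop-master (ports 88 and 89); the KDC for OTHERREALM.COM runs on hadoop-master-2.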
+[realms]
+ LABS.TERADATA.COM = {
+ kdc = hadoop-master:88
+ admin_server = hadoop-master
+ }
+ OTHERLABS.TERADATA.COM = {
+ kdc = hadoop-master:89
+ admin_server = hadoop-master
+ }
+ OTHERREALM.COM = {
+ kdc = hadoop-master-2:88
+ admin_server = hadoop-master
+ }
+
+[domain_realm]
+ hadoop-master-2 = OTHERREALM.COM
diff --git a/docker/thirdparties/docker-compose/kerberos/common/hadoop/apply-config-overrides.sh b/docker/thirdparties/docker-compose/kerberos/common/hadoop/apply-config-overrides.sh
new file mode 100755
index 0000000000..ec2dc074e7
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/common/hadoop/apply-config-overrides.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# test whether OVERRIDES_DIR is set
+if [[ -n "${OVERRIDES_DIR+x}" ]]; then
+ echo "The OVERRIDES_DIR (${OVERRIDES_DIR}) support is disabled as it was deemed unused." >&2
+ echo "It is being removed." >&2
+ exit 16
+fi
+
+if test -e /overrides; then
+ find /overrides >&2
+ echo "The /overrides handling is disabled as it was deemed unused." >&2
+ echo "It is being removed." >&2
+ exit 17
+fi
diff --git a/docker/thirdparties/docker-compose/kerberos/common/hadoop/hadoop-run.sh b/docker/thirdparties/docker-compose/kerberos/common/hadoop/hadoop-run.sh
new file mode 100755
index 0000000000..b8bfd8715e
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/common/hadoop/hadoop-run.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+if test $# -gt 0; then
+ echo "$0 does not accept arguments" >&2
+ exit 32
+fi
+
+set -x
+
+HADOOP_INIT_D=${HADOOP_INIT_D:-/etc/hadoop-init.d/}
+
+echo "Applying hadoop init.d scripts from ${HADOOP_INIT_D}"
+if test -d "${HADOOP_INIT_D}"; then
+ for init_script in "${HADOOP_INIT_D}"*; do
+ chmod a+x "${init_script}"
+ "${init_script}"
+ done
+fi
+
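+# Run "exit" when SIGINT is received so the script terminates cleanly.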
+trap exit INT
+
+echo "Running services with supervisord"
+
+supervisord -c /etc/supervisord.conf
diff --git a/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master-2.sh b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master-2.sh
new file mode 100755
index 0000000000..c21460c3a5
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master-2.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+echo "Copying kerberos keytabs to /keytabs/"
+mkdir -p /etc/hadoop-init.d/
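+# Publish this cluster's keytabs under "other-" names so tests can reach both realms.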
+cp /etc/trino/conf/hive-presto-master.keytab /keytabs/other-hive-presto-master.keytab
+cp /etc/trino/conf/presto-server.keytab /keytabs/other-presto-server.keytab
+cp /keytabs/update-location.sh /etc/hadoop-init.d/update-location.sh
+/usr/local/hadoop-run.sh &
+
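+# Give HDFS and the metastore time to start before loading test data.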
+sleep 30
+
+echo "Init kerberos test data"
+kinit -kt /etc/hive/conf/hive.keytab hive/hadoop-master-2@OTHERREALM.COM
+hive -f /usr/local/sql/create_kerberos_hive_table.sql
+
+sleep 20
+
+tail -f /dev/null
diff --git a/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master.sh b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master.sh
new file mode 100755
index 0000000000..6292499221
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+echo "Copying kerberos keytabs to keytabs/"
+mkdir -p /etc/hadoop-init.d/
+cp /etc/trino/conf/* /keytabs/
+/usr/local/hadoop-run.sh &
+
+sleep 30
+
+echo "Init kerberos test data"
+kinit -kt /etc/hive/conf/hive.keytab hive/hadoop-master@LABS.TERADATA.COM
+hive -f /usr/local/sql/create_kerberos_hive_table.sql
+
+sleep 20
+
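+# Keep the container alive after initialization.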
+tail -f /dev/null
diff --git a/docker/thirdparties/docker-compose/kerberos/health-checks/hadoop-health-check.sh b/docker/thirdparties/docker-compose/kerberos/health-checks/hadoop-health-check.sh
new file mode 100755
index 0000000000..190fa838d6
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/health-checks/hadoop-health-check.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+if test $# -gt 0; then
+ echo "$0 does not accept arguments" >&2
+ exit 32
+fi
+
+# Supervisord is not running yet, so there is nothing to check; report healthy
+if ! test -f /tmp/supervisor.sock; then
+ exit 0
+fi
+
+# Check if all Hadoop services are running
+FAILED=$(supervisorctl status | grep -v RUNNING || true)
+
+if [ "$FAILED" == "" ]; then
+ exit 0
+else
+ echo "Some of the services are failing: ${FAILED}"
+ exit 1
+fi
diff --git a/docker/thirdparties/docker-compose/kerberos/health-checks/health.sh b/docker/thirdparties/docker-compose/kerberos/health-checks/health.sh
new file mode 100644
index 0000000000..515f37e36a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/health-checks/health.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+if test $# -gt 0; then
+ echo "$0 does not accept arguments" >&2
+ exit 32
+fi
+
+set -x
+
+HEALTH_D=${HEALTH_D:-/etc/health.d/}
+
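+# Run every script in HEALTH_D; any failing script fails the whole health check.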
+if test -d "${HEALTH_D}"; then
+ for health_script in "${HEALTH_D}"/*; do
+ "${health_script}" &>> /var/log/container-health.log || exit 1
+ done
+fi
diff --git a/docker/thirdparties/docker-compose/kerberos/kerberos.yaml.tpl b/docker/thirdparties/docker-compose/kerberos/kerberos.yaml.tpl
new file mode 100644
index 0000000000..6f175ab9c6
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/kerberos.yaml.tpl
@@ -0,0 +1,80 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
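+#
+# Template for the kerberized Hive clusters. The "doris--" prefix in container
+# and network names is replaced with CONTAINER_UID by run-thirdparties-docker.sh
+# when it renders this template into kerberos.yaml.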
+version: "3"
+services:
+ hive-krb:
+ image: ghcr.io/trinodb/testing/hdp3.1-hive-kerberized
+ container_name: doris--kerberos1
+ volumes:
+ - ./two-kerberos-hives:/keytabs
+ - ./sql:/usr/local/sql
+ - ./common/hadoop/apply-config-overrides.sh:/etc/hadoop-init.d/00-apply-config-overrides.sh
+ - ./common/hadoop/hadoop-run.sh:/usr/local/hadoop-run.sh
+ - ./health-checks/hadoop-health-check.sh:/etc/health.d/hadoop-health-check.sh
+ - ./entrypoint-hive-master.sh:/usr/local/entrypoint-hive-master.sh
+ hostname: hadoop-master
+ entrypoint: /usr/local/entrypoint-hive-master.sh
+ healthcheck:
+ test: ./health-checks/health.sh
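+    # Container ports are published on offset host ports (e.g. HMS 9083 on
+    # 9883, HDFS 8020 on 8820), matching kerberosHmsPort/kerberosHdfsPort in
+    # regression-conf.groovy.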
+    ports:
+      - "5806:5006"
+      - "8820:8020"
+      - "8842:8042"
+      - "9800:9000"
+      - "9883:9083"
+      - "18000:10000"
+    networks:
+      doris--krb_net:
+        ipv4_address: 172.31.71.25
+
+  hive-krb2:
+    image: ghcr.io/trinodb/testing/hdp3.1-hive-kerberized-2:96
+    container_name: doris--kerberos2
+    hostname: hadoop-master-2
+    volumes:
+      - ./two-kerberos-hives:/keytabs
+      - ./sql:/usr/local/sql
+      - ./common/hadoop/apply-config-overrides.sh:/etc/hadoop-init.d/00-apply-config-overrides.sh
+      - ./common/hadoop/hadoop-run.sh:/usr/local/hadoop-run.sh
+      - ./health-checks/hadoop-health-check.sh:/etc/health.d/hadoop-health-check.sh
+      - ./entrypoint-hive-master-2.sh:/usr/local/entrypoint-hive-master-2.sh
+    entrypoint: /usr/local/entrypoint-hive-master-2.sh
+    healthcheck:
+      test: ./health-checks/health.sh
+    ports:
+      - "15806:5006"
+      - "18820:8020"
+      - "18842:8042"
+      - "19800:9000"
+      - "19883:9083"
+      - "18800:10000"
+    networks:
+      doris--krb_net:
+        ipv4_address: 172.31.71.26
+
+networks:
+  doris--krb_net:
+    ipam:
+      config:
+        - subnet: 172.31.71.0/24
diff --git a/docker/thirdparties/docker-compose/kerberos/sql/create_kerberos_hive_table.sql b/docker/thirdparties/docker-compose/kerberos/sql/create_kerberos_hive_table.sql
new file mode 100644
index 0000000000..ecf58e8815
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/sql/create_kerberos_hive_table.sql
@@ -0,0 +1,18 @@
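+-- Kerberized Hive test data queried by the test_single_hive_kerberos and test_two_hive_kerberos suites.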
+CREATE DATABASE IF NOT EXISTS `test_krb_hive_db`;
+CREATE TABLE IF NOT EXISTS `test_krb_hive_db`.`test_krb_hive_tbl`(
+ `id_key` int,
+ `string_key` string,
+ `rate_val` double,
+ `comment` string)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat';
+
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(1, 'a', 3.16, 'cc0');
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(2, 'b', 41.2, 'cc1');
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(3, 'c', 6.2, 'cc2');
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(4, 'd', 1.4, 'cc3');
diff --git a/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/auth-to-local.xml b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/auth-to-local.xml
new file mode 100755
index 0000000000..c0ce38e3cd
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/auth-to-local.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<configuration>
+    <property>
+        <name>hadoop.security.auth_to_local</name>
+        <value>
+            RULE:[2:$1@$0](.*@OTHERREALM.COM)s/@.*//
+            RULE:[2:$1@$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+            DEFAULT
+        </value>
+    </property>
+</configuration>
diff --git a/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/hive2-default-fs-site.xml b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/hive2-default-fs-site.xml
new file mode 100755
index 0000000000..4541c1328a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/hive2-default-fs-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<configuration>
+    <property>
+        <name>fs.default.name</name>
+        <value>hdfs://hadoop-master-2:9000</value>
+    </property>
+</configuration>
diff --git a/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/update-location.sh b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/update-location.sh
new file mode 100755
index 0000000000..8d727b2308
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/update-location.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
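+# Start MySQL just long enough to rewrite the metastore warehouse location
+# from hdfs://hadoop-master:9000 to hdfs://hadoop-master-2:9000.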
+/usr/bin/mysqld_safe &
+while ! mysqladmin ping -proot --silent; do sleep 1; done
+
+hive --service metatool -updateLocation hdfs://hadoop-master-2:9000/user/hive/warehouse hdfs://hadoop-master:9000/user/hive/warehouse
+
+killall mysqld
+while pgrep mysqld; do sleep 1; done
diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index 67ab8e2317..0be76706f4 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -37,7 +37,7 @@ Usage: $0
--stop stop the specified components
All valid components:
- mysql,pg,oracle,sqlserver,clickhouse,es,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2
+ mysql,pg,oracle,sqlserver,clickhouse,es,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2,kerberos
"
exit 1
}
@@ -60,7 +60,7 @@ STOP=0
if [[ "$#" == 1 ]]; then
# default
- COMPONENTS="mysql,es,hive2,hive3,pg,oracle,sqlserver,clickhouse,mariadb,iceberg,db2"
+ COMPONENTS="mysql,es,hive2,hive3,pg,oracle,sqlserver,clickhouse,mariadb,iceberg,db2,kerberos"
else
while true; do
case "$1" in
@@ -92,7 +92,7 @@ else
done
if [[ "${COMPONENTS}"x == ""x ]]; then
if [[ "${STOP}" -eq 1 ]]; then
- COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2"
+ COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2,kerberos,lakesoul"
fi
fi
fi
@@ -135,6 +135,7 @@ RUN_KAFKA=0
RUN_SPARK=0
RUN_MARIADB=0
RUN_DB2=0
+RUN_KERBEROS=0
for element in "${COMPONENTS_ARR[@]}"; do
if [[ "${element}"x == "mysql"x ]]; then
@@ -167,6 +168,8 @@ for element in "${COMPONENTS_ARR[@]}"; do
RUN_MARIADB=1
elif [[ "${element}"x == "db2"x ]];then
RUN_DB2=1
+ elif [[ "${element}"x == "kerberos"x ]]; then
+ RUN_KERBEROS=1
else
echo "Invalid component: ${element}"
usage
@@ -519,3 +522,32 @@ if [[ "${RUN_MARIADB}" -eq 1 ]]; then
sudo docker compose -f "${ROOT}"/docker-compose/mariadb/mariadb-10.yaml --env-file "${ROOT}"/docker-compose/mariadb/mariadb-10.env up -d
fi
fi
+
+if [[ "${RUN_KERBEROS}" -eq 1 ]]; then
+ echo "RUN_KERBEROS"
+ cp "${ROOT}"/docker-compose/kerberos/kerberos.yaml.tpl "${ROOT}"/docker-compose/kerberos/kerberos.yaml
+ sed -i "s/doris--/${CONTAINER_UID}/g" "${ROOT}"/docker-compose/kerberos/kerberos.yaml
+ sudo docker compose -f "${ROOT}"/docker-compose/kerberos/kerberos.yaml down
+ sudo rm -rf "${ROOT}"/docker-compose/kerberos/data
+ if [[ "${STOP}" -ne 1 ]]; then
+ echo "PREPARE KERBEROS DATA"
+ rm -rf "${ROOT}"/docker-compose/kerberos/two-kerberos-hives/*.keytab
+ rm -rf "${ROOT}"/docker-compose/kerberos/two-kerberos-hives/*.jks
+ rm -rf "${ROOT}"/docker-compose/kerberos/two-kerberos-hives/*.conf
+ sudo docker compose -f "${ROOT}"/docker-compose/kerberos/kerberos.yaml up -d
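+ # Expose the generated keytabs and krb5.conf on the host under /keytabs,
+ # the path referenced by the regression tests and the FE/BE configs.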
+ sudo rm -f /keytabs
+ sudo ln -s "${ROOT}"/docker-compose/kerberos/two-kerberos-hives /keytabs
+ sudo cp "${ROOT}"/docker-compose/kerberos/common/conf/doris-krb5.conf /keytabs/krb5.conf
+ sudo cp "${ROOT}"/docker-compose/kerberos/common/conf/doris-krb5.conf /etc/krb5.conf
+
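+ # Make the kerberized hosts resolvable from the host machine.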
+ sudo chmod a+w /etc/hosts
+ echo '172.31.71.25 hadoop-master' >> /etc/hosts
+ echo '172.31.71.26 hadoop-master-2' >> /etc/hosts
+ sleep 2
+ fi
+fi
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
index db8b9093b0..1a86b9e327 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
@@ -122,7 +122,10 @@ public class HadoopUGI {
UserGroupInformation ugi = HadoopUGI.loginWithUGI(authConf);
try {
if (ugi != null) {
- ugi.checkTGTAndReloginFromKeytab();
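+ // Only a kerberos login has a TGT to check and renew; skip it for simple auth.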
+ if (authConf instanceof KerberosAuthenticationConfig) {
+ ugi.checkTGTAndReloginFromKeytab();
+ }
return ugi.doAs(action);
} else {
return action.run();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
index 82f4d309c8..f9253aa03f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
@@ -51,6 +51,7 @@ import org.apache.doris.datasource.operations.ExternalMetadataOps;
import org.apache.doris.datasource.paimon.PaimonExternalDatabase;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.test.TestExternalDatabase;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.persist.gson.GsonPostProcessable;
import org.apache.doris.persist.gson.GsonUtils;
import org.apache.doris.qe.ConnectContext;
@@ -67,7 +68,6 @@ import lombok.Data;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
@@ -148,7 +148,8 @@ public abstract class ExternalCatalog
}
public Configuration getConfiguration() {
- Configuration conf = new HdfsConfiguration();
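+ // Build the base conf through DFSFileSystem so the simple-auth fallback is enabled when the property is unset.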
+ Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
Map<String, String> catalogProperties = catalogProperty.getHadoopProperties();
for (Map.Entry<String, String> entry : catalogProperties.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
@@ -181,6 +181,11 @@ public abstract class ExternalCatalog
Boolean.valueOf(catalogProperty.getOrDefault(USE_META_CACHE, String.valueOf(DEFAULT_USE_META_CACHE))));
}
+ // Check whether the fallback-to-simple-auth property is unset, so callers can default it and support both kerberos and simple auth.
+ public boolean ifNotSetFallbackToSimpleAuth() {
+ return catalogProperty.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty();
+ }
+
// Will be called when creating catalog(not replaying).
// Subclass can override this method to do some check when creating catalog.
public void checkWhenCreating() throws DdlException {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
index 243dfb3c24..91192b63c2 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
@@ -37,6 +37,7 @@ import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.fs.FileSystemProvider;
import org.apache.doris.fs.FileSystemProviderImpl;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.transaction.TransactionManagerFactory;
import com.google.common.base.Strings;
@@ -59,7 +60,6 @@ public class HMSExternalCatalog extends ExternalCatalog {
public static final String FILE_META_CACHE_TTL_SECOND = "file.meta.cache.ttl-second";
// broker name for file split and query scan.
public static final String BIND_BROKER_NAME = "broker.name";
- private static final String PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH = "ipc.client.fallback-to-simple-auth-allowed";
// -1 means file cache no ttl set
public static final int FILE_META_CACHE_NO_TTL = -1;
@@ -244,9 +244,9 @@ public class HMSExternalCatalog extends ExternalCatalog {
@Override
public void setDefaultPropsIfMissing(boolean isReplay) {
super.setDefaultPropsIfMissing(isReplay);
- if (catalogProperty.getOrDefault(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty()) {
+ if (ifNotSetFallbackToSimpleAuth()) {
// always allow fallback to simple auth, so to support both kerberos and simple auth
- catalogProperty.addProperty(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
+ catalogProperty.addProperty(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
index da90fadf83..7f23385d84 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
@@ -41,6 +41,7 @@ import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.fs.FileSystemCache;
import org.apache.doris.fs.remote.RemoteFile;
import org.apache.doris.fs.remote.RemoteFileSystem;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.metric.GaugeMetric;
import org.apache.doris.metric.Metric;
import org.apache.doris.metric.MetricLabel;
@@ -66,7 +67,6 @@ import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hive.common.ValidWriteIdList;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -433,7 +433,7 @@ public class HiveMetaStoreCache {
}
private synchronized void setJobConf() {
- Configuration configuration = new HdfsConfiguration();
+ Configuration configuration = DFSFileSystem.getHdfsConf(catalog.ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> entry : catalog.getCatalogProperty().getHadoopProperties().entrySet()) {
configuration.set(entry.getKey(), entry.getValue());
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
index 7ad7621f7c..22bf13755a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
@@ -42,13 +42,13 @@ import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.datasource.ExternalCatalog;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.thrift.TExprOpcode;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
@@ -843,7 +843,7 @@ public class HiveMetaStoreClientHelper {
}
public static Configuration getConfiguration(HMSExternalTable table) {
- Configuration conf = new HdfsConfiguration();
+ Configuration conf = DFSFileSystem.getHdfsConf(table.getCatalog().ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> entry : table.getHadoopProperties().entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
index 68064c4e43..dc11a6cacc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
@@ -25,13 +25,13 @@ import org.apache.doris.datasource.CatalogIf;
import org.apache.doris.datasource.hive.HMSExternalCatalog;
import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
import org.apache.doris.datasource.property.constants.HMSProperties;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.thrift.TIcebergMetadataParams;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.SerializableTable;
import org.apache.iceberg.Snapshot;
@@ -177,7 +177,9 @@ public class IcebergMetadataCache {
private Catalog createIcebergHiveCatalog(String uri, Map<String, String> hdfsConf, Map<String, String> props) {
// set hdfs configure
- Configuration conf = new HdfsConfiguration();
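+ // Allow the simple-auth fallback unless the property was explicitly configured.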
+ Configuration conf = DFSFileSystem.getHdfsConf(
+ hdfsConf.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty());
for (Map.Entry<String, String> entry : hdfsConf.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
index 1169122468..8f187e6d7c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
@@ -25,6 +25,7 @@ import org.apache.doris.datasource.InitCatalogLog;
import org.apache.doris.datasource.SessionContext;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.datasource.property.constants.PaimonProperties;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
@@ -60,7 +61,7 @@ public abstract class PaimonExternalCatalog extends ExternalCatalog {
@Override
protected void initLocalObjectsImpl() {
- Configuration conf = new Configuration();
+ Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> propEntry : this.catalogProperty.getHadoopProperties().entrySet()) {
conf.set(propEntry.getKey(), propEntry.getValue());
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java
index 311532794f..68de3a8fde 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java
@@ -21,6 +21,7 @@ import org.apache.doris.analysis.StorageBackend;
import org.apache.doris.backup.Status;
import org.apache.doris.common.UserException;
import org.apache.doris.fs.PersistentFileSystem;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.fs.FileStatus;
@@ -46,6 +47,11 @@ public abstract class RemoteFileSystem extends PersistentFileSystem {
throw new UserException("Not support to getFileSystem.");
}
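+ // True when the fallback-to-simple-auth property has not been configured for this file system.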
+ public boolean ifNotSetFallbackToSimpleAuth() {
+ return properties.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty();
+ }
+
@Override
public Status listFiles(String remotePath, boolean recursive, List<RemoteFile> result) {
try {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java
index 3130a0cea5..525d80d679 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java
@@ -22,6 +22,7 @@ import org.apache.doris.backup.Status;
import org.apache.doris.common.UserException;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.fs.obj.S3ObjStorage;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.google.common.annotations.VisibleForTesting;
@@ -60,7 +61,7 @@ public class S3FileSystem extends ObjFileSystem {
if (dfsFileSystem == null) {
synchronized (this) {
if (dfsFileSystem == null) {
- Configuration conf = new Configuration();
+ Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
System.setProperty("com.amazonaws.services.s3.enableV4", "true");
// the entry value in properties may be null, and
PropertyConverter.convertToHadoopFSProperties(properties).entrySet().stream()
diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
index d608653024..944051e874 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
@@ -56,6 +56,7 @@ import java.util.Map;
public class DFSFileSystem extends RemoteFileSystem {
+ public static final String PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH = "ipc.client.fallback-to-simple-auth-allowed";
private static final Logger LOG = LogManager.getLogger(DFSFileSystem.class);
private HDFSFileOperations operations = null;
@@ -75,7 +76,7 @@ public class DFSFileSystem extends RemoteFileSystem {
if (dfsFileSystem == null) {
synchronized (this) {
if (dfsFileSystem == null) {
- Configuration conf = new HdfsConfiguration();
+ Configuration conf = getHdfsConf(ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> propEntry : properties.entrySet()) {
conf.set(propEntry.getKey(), propEntry.getValue());
}
@@ -87,13 +88,26 @@
throw new RuntimeException(e);
}
});
- operations = new HDFSFileOperations(dfsFileSystem);
}
}
}
+ operations = new HDFSFileOperations(dfsFileSystem);
return dfsFileSystem;
}
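+ /**
+ * Builds the base HdfsConfiguration. When fallbackToSimpleAuth is true,
+ * the returned conf allows falling back to simple auth (mixed-auth clusters).
+ */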
+ public static Configuration getHdfsConf(boolean fallbackToSimpleAuth) {
+ Configuration hdfsConf = new HdfsConfiguration();
+ if (fallbackToSimpleAuth) {
+ // Support falling back to simple auth when the cluster mixes kerberos and simple auth.
+ hdfsConf.set(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
+ }
+ return hdfsConf;
+ }
+
@Override
public Status downloadWithFileSize(String remoteFilePath, String localFilePath, long fileSize) {
if (LOG.isDebugEnabled()) {
diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index 08b51ce46a..5dde9df043 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -215,3 +215,8 @@ max_failure_num=0
s3ExportBucketName = ""
externalEnvIp="127.0.0.1"
+
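+// kerberos docker config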
+enableKerberosTest=false
+kerberosHmsPort=9883
+kerberosHdfsPort=8820
diff --git a/regression-test/data/external_table_p0/kerberos/test_single_hive_kerberos.out b/regression-test/data/external_table_p0/kerberos/test_single_hive_kerberos.out
new file mode 100644
index 0000000000..95640fecb5
--- /dev/null
+++ b/regression-test/data/external_table_p0/kerberos/test_single_hive_kerberos.out
@@ -0,0 +1,6 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !q01 --
+1 a 3.16 cc0
+2 b 41.2 cc1
+3 c 6.2 cc2
+4 d 1.4 cc3
diff --git a/regression-test/data/external_table_p0/kerberos/test_two_hive_kerberos.out b/regression-test/data/external_table_p0/kerberos/test_two_hive_kerberos.out
new file mode 100644
index 0000000000..9415efd787
--- /dev/null
+++ b/regression-test/data/external_table_p0/kerberos/test_two_hive_kerberos.out
@@ -0,0 +1,12 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !q01 --
+1 a 3.16 cc0
+2 b 41.2 cc1
+3 c 6.2 cc2
+4 d 1.4 cc3
+
+-- !q02 --
+1 a 3.16 cc0
+2 b 41.2 cc1
+3 c 6.2 cc2
+4 d 1.4 cc3
diff --git a/regression-test/pipeline/external/conf/be.conf b/regression-test/pipeline/external/conf/be.conf
index 78140d80a1..5da70da23c 100644
--- a/regression-test/pipeline/external/conf/be.conf
+++ b/regression-test/pipeline/external/conf/be.conf
@@ -84,3 +84,5 @@ enable_missing_rows_correctness_check=true
#enable_jvm_monitor = true
+KRB5_CONFIG=/keytabs/krb5.conf
+kerberos_krb5_conf_path=/keytabs/krb5.conf
diff --git a/regression-test/pipeline/external/conf/fe.conf b/regression-test/pipeline/external/conf/fe.conf
index 6f0cfb7925..bdbc56564a 100644
--- a/regression-test/pipeline/external/conf/fe.conf
+++ b/regression-test/pipeline/external/conf/fe.conf
@@ -94,3 +94,5 @@ enable_feature_binlog=true
auth_token = 5ff161c3-2c08-4079-b108-26c8850b6598
infodb_support_ext_catalog=true
+
+KRB5_CONFIG=/keytabs/krb5.conf
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy
index 6c57fc9f89..ae4b89bd9f 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -130,3 +130,7 @@ max_failure_num=50
externalEnvIp="127.0.0.1"
+// kerberos docker config
+enableKerberosTest = true
+kerberosHmsPort=9883
+kerberosHdfsPort=8820
diff --git a/regression-test/suites/external_table_p0/kerberos/test_single_hive_kerberos.groovy b/regression-test/suites/external_table_p0/kerberos/test_single_hive_kerberos.groovy
new file mode 100644
index 0000000000..7a0864923f
--- /dev/null
+++ b/regression-test/suites/external_table_p0/kerberos/test_single_hive_kerberos.groovy
@@ -0,0 +1,102 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_single_hive_kerberos", "p0,external,kerberos,external_docker,external_docker_kerberos") {
+ String enabled = context.config.otherConfigs.get("enableKerberosTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ String hms_catalog_name = "test_single_hive_kerberos"
+ sql """drop catalog if exists hms_kerberos;"""
+ sql """
+ CREATE CATALOG IF NOT EXISTS hms_kerberos
+ PROPERTIES (
+ "type" = "hms",
+ "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+ "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.kerberos.principal"="presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+ "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab",
+ "hive.metastore.sasl.enabled " = "true",
+ "hive.metastore.kerberos.principal" = "hive/_HOST@LABS.TERADATA.COM"
+ );
+ """
+ sql """ switch hms_kerberos """
+ sql """ show databases """
+ order_qt_q01 """ select * from hms_kerberos.test_krb_hive_db.test_krb_hive_tbl """
+ sql """drop catalog hms_kerberos;"""
+
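+ // Without "hive.metastore.sasl.enabled" = "true", the metastore handshake is expected to fail.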
+ try {
+ sql """drop catalog if exists hms_kerberos_hadoop_err1;"""
+ sql """
+ CREATE CATALOG IF NOT EXISTS hms_kerberos_hadoop_err1
+ PROPERTIES (
+ "type" = "hms",
+ "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+ "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.kerberos.principal"="presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+ "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab"
+ );
+ """
+ sql """ switch hms_kerberos_hadoop_err1 """
+ sql """ show databases """
+ } catch (Exception e) {
+ logger.info(e.toString())
+ // Raised when SASL is enabled on the Hive side but "hive.metastore.sasl.enabled" is not set to true; the server logs:
+ // "set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it."
+ assertTrue(e.toString().contains("org.apache.thrift.transport.TTransportException: null"))
+ }
+
+ try {
+ sql """drop catalog if exists hms_kerberos_hadoop_err2;"""
+ sql """
+ CREATE CATALOG IF NOT EXISTS hms_kerberos_hadoop_err2
+ PROPERTIES (
+ "type" = "hms",
+ "hive.metastore.sasl.enabled " = "true",
+ "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+ "fs.defaultFS" = "hdfs://172.31.71.25:8020"
+ );
+ """
+ sql """ switch hms_kerberos_hadoop_err2 """
+ sql """ show databases """
+ } catch (Exception e) {
+ // org.apache.thrift.transport.TTransportException: GSS initiate failed
+ assertTrue(e.toString().contains("Could not connect to meta store using any of the URIs provided. Most recent failure: shade.doris.hive.org.apache.thrift.transport.TTransportException: GSS initiate failed"))
+ }
+
+ // try {
+ // sql """
+ // CREATE CATALOG IF NOT EXISTS hms_keberos_ccache
+ // PROPERTIES (
+ // "type" = "hms",
+ // "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+ // "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+ // "hadoop.security.authentication" = "kerberos",
+ // "hadoop.kerberos.principal"="presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+ // "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab",
+ // "hive.metastore.thrift.impersonation.enabled" = true"
+ // "hive.metastore.client.credential-cache.location" = "hive-presto-master-krbcc"
+ // );
+ // """
+ // sql """ switch hms_keberos_ccache """
+ // sql """ show databases """
+ // } catch (Exception e) {
+ // logger.error(e.message)
+ // }
+ }
+}
diff --git a/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy
new file mode 100644
index 0000000000..a3b39d1221
--- /dev/null
+++ b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy
@@ -0,0 +1,74 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_two_hive_kerberos", "p0,external,kerberos,external_docker,external_docker_kerberos") {
+ String enabled = context.config.otherConfigs.get("enableKerberosTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ String hms_catalog_name = "test_two_hive_kerberos"
+ sql """drop catalog if exists ${hms_catalog_name};"""
+ sql """
+ CREATE CATALOG IF NOT EXISTS ${hms_catalog_name}
+ PROPERTIES (
+ "type" = "hms",
+ "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+ "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.kerberos.principal"="presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+ "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab",
+ "hive.metastore.sasl.enabled " = "true",
+ "hive.metastore.kerberos.principal" = "hive/_HOST@LABS.TERADATA.COM"
+ );
+ """
+
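+ // The second catalog targets the OTHERREALM.COM cluster; auth_to_local rules
+ // map principals from both realms to local user names.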
+ sql """drop catalog if exists other_${hms_catalog_name};"""
+ sql """
+ CREATE CATALOG IF NOT EXISTS other_${hms_catalog_name}
+ PROPERTIES (
+ "type" = "hms",
+ "hive.metastore.uris" = "thrift://172.31.71.26:9083",
+ "fs.defaultFS" = "hdfs://172.31.71.26:8020",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.kerberos.principal"="presto-server/presto-master.docker.cluster@OTHERREALM.COM",
+ "hadoop.kerberos.keytab" = "/keytabs/other-presto-server.keytab",
+ "hive.metastore.sasl.enabled " = "true",
+ "hive.metastore.kerberos.principal" = "hive/_HOST@OTHERREALM.COM",
+ "hadoop.security.auth_to_local" ="RULE:[2:\$1@\$0](.*@OTHERREALM.COM)s/@.*//
+ RULE:[2:\$1@\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+ DEFAULT"
+ );
+ """
+
+ // 1. catalogA
+ sql """switch ${hms_catalog_name};"""
+ logger.info("switched to catalog " + hms_catalog_name)
+ sql """ show databases """
+ sql """ use test_krb_hive_db """
+ order_qt_q01 """ select * from test_krb_hive_db.test_krb_hive_tbl """
+
+ // 2. catalogB
+ sql """switch other_${hms_catalog_name};"""
+ logger.info("switched to other catalog " + hms_catalog_name)
+ sql """ show databases """
+ sql """ use test_krb_hive_db """
+ order_qt_q02 """ select * from test_krb_hive_db.test_krb_hive_tbl """
+
+ sql """drop catalog ${hms_catalog_name};"""
+ sql """drop catalog other_${hms_catalog_name};"""
+ }
+}