From 4a33d9820ab93ef12c4ad01ce0a598278d18969a Mon Sep 17 00:00:00 2001
From: slothever <18522955+wsjz@users.noreply.github.com>
Date: Mon, 19 Feb 2024 20:19:36 +0800
Subject: [PATCH] [fix](multi-catalog) fix the methods that obtain UGI and unify them
(#30844)
Move all UGI login methods into a shared HadoopUGI helper in fe-common, so the
Hudi scanner, HMSExternalCatalog, HiveMetaStoreClientHelper, and DFSFileSystem
resolve credentials and log in the same way instead of each keeping its own
copy of the kerberos/simple login logic.
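
After this change a call site resolves an AuthenticationConfig from its Hadoop
Configuration and logs in through HadoopUGI instead of branching on the auth
type itself. A minimal sketch of the new call pattern (principal and keytab
values are placeholders):

    import org.apache.doris.common.security.authentication.AuthenticationConfig;
    import org.apache.doris.common.security.authentication.HadoopUGI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class LoginSketch {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            // "kerberos" here makes getKerberosConfig return a
            // KerberosAuthenticationConfig; any other value yields a
            // SimpleAuthenticationConfig built from hadoop.username.
            conf.set(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
            conf.set(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, "doris/_HOST@EXAMPLE.COM"); // placeholder
            conf.set(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, "/path/to/doris.keytab"); // placeholder
            AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(conf);
            // Reuses the current login user while its TGT is valid, otherwise
            // performs a fresh keytab login.
            UserGroupInformation ugi = HadoopUGI.loginWithUGI(config);
            System.out.println("logged in as: " + ugi.getUserName());
        }
    }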
---
.../org/apache/doris/hudi/HudiJniScanner.java | 4 +-
.../java/org/apache/doris/hudi/Utils.java | 34 +------
fe/fe-common/pom.xml | 15 +++
.../security/authentication}/AuthType.java | 2 +-
.../authentication/AuthenticationConfig.java | 69 +++++++++++++
.../security/authentication/HadoopUGI.java | 99 +++++++++++++++++++
.../KerberosAuthenticationConfig.java | 34 +++++++
.../SimpleAuthenticationConfig.java | 31 ++++++
.../apache/doris/catalog/HdfsResource.java | 13 +--
.../catalog/HiveMetaStoreClientHelper.java | 40 +++-----
.../org/apache/doris/catalog/HiveTable.java | 33 ++++---
.../doris/datasource/HMSExternalCatalog.java | 32 ++----
.../datasource/hive/HiveMetaStoreCache.java | 4 +-
.../doris/fs/remote/dfs/DFSFileSystem.java | 58 +----------
14 files changed, 305 insertions(+), 163 deletions(-)
rename fe/{fe-core/src/main/java/org/apache/doris/catalog => fe-common/src/main/java/org/apache/doris/common/security/authentication}/AuthType.java (96%)
create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java
create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java
create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/SimpleAuthenticationConfig.java
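
Note on key precedence: the HiveMetaStoreClientHelper.ugiDoAs hunk below now
prefers the hive metastore kerberos keys and only falls back to the plain
hadoop keys. A sketch of the lookup order (it mirrors the hunk, not new code):

    // 1. hive.metastore.kerberos.principal / hive.metastore.kerberos.keytab.file
    // 2. hadoop.kerberos.principal / hadoop.kerberos.keytab (when 1 is incomplete)
    // 3. hadoop.username as a simple-auth remote user (when
    //    hadoop.security.authentication is not "kerberos")
    AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(conf,
            AuthenticationConfig.HIVE_KERBEROS_PRINCIPAL,
            AuthenticationConfig.HIVE_KERBEROS_KEYTAB);
    if (!config.isValid()) {
        config = AuthenticationConfig.getKerberosConfig(conf,
                AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL,
                AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
    }
    UserGroupInformation ugi = HadoopUGI.loginWithUGI(config);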
diff --git a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java
index 17ca650675..f22a69255a 100644
--- a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java
+++ b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java
@@ -21,6 +21,8 @@ package org.apache.doris.hudi;
import org.apache.doris.common.jni.JniScanner;
import org.apache.doris.common.jni.vec.ColumnType;
import org.apache.doris.common.jni.vec.ScanPredicate;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
+import org.apache.doris.common.security.authentication.HadoopUGI;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.avro.generic.GenericDatumReader;
@@ -138,7 +140,7 @@ public class HudiJniScanner extends JniScanner {
predicates = new ScanPredicate[0];
}
}
- ugi = Utils.getUserGroupInformation(split.hadoopConf());
+ ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(split.hadoopConf()));
} catch (Exception e) {
LOG.error("Failed to initialize hudi scanner, split params:\n" + debugString, e);
throw e;
diff --git a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java
index 4e8d670dac..be5628d2ce 100644
--- a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java
+++ b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java
@@ -17,6 +17,9 @@
package org.apache.doris.hudi;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
+import org.apache.doris.common.security.authentication.HadoopUGI;
+
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
@@ -36,35 +39,6 @@ import java.util.LinkedList;
import java.util.List;
public class Utils {
- public static class Constants {
- public static String HADOOP_USER_NAME = "hadoop.username";
- public static String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
- public static String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
- public static String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
- }
-
- public static UserGroupInformation getUserGroupInformation(Configuration conf) {
- String authentication = conf.get(Constants.HADOOP_SECURITY_AUTHENTICATION, null);
- if ("kerberos".equals(authentication)) {
- conf.set("hadoop.security.authorization", "true");
- UserGroupInformation.setConfiguration(conf);
- String principal = conf.get(Constants.HADOOP_KERBEROS_PRINCIPAL);
- String keytab = conf.get(Constants.HADOOP_KERBEROS_KEYTAB);
- try {
- UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
- UserGroupInformation.setLoginUser(ugi);
- return ugi;
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- } else {
- String hadoopUserName = conf.get(Constants.HADOOP_USER_NAME);
- if (hadoopUserName != null) {
- return UserGroupInformation.createRemoteUser(hadoopUserName);
- }
- }
- return null;
- }
public static long getCurrentProcId() {
try {
@@ -114,7 +88,7 @@ public class Utils {
}
public static HoodieTableMetaClient getMetaClient(Configuration conf, String basePath) {
- UserGroupInformation ugi = getUserGroupInformation(conf);
+ UserGroupInformation ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(conf));
HoodieTableMetaClient metaClient;
if (ugi != null) {
try {
diff --git a/fe/fe-common/pom.xml b/fe/fe-common/pom.xml
index 6192ffd202..c4b1b29b35 100644
--- a/fe/fe-common/pom.xml
+++ b/fe/fe-common/pom.xml
@@ -93,6 +93,21 @@ under the License.
             <groupId>org.projectlombok</groupId>
             <artifactId>lombok</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>commons-collections</groupId>
+                    <artifactId>commons-collections</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.commons</groupId>
+                    <artifactId>commons-compress</artifactId>
+                </exclusion>
+            </exclusions>
+            <scope>provided</scope>
+        </dependency>
     </dependencies>
     <build>
         <finalName>doris-fe-common</finalName>
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/AuthType.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthType.java
similarity index 96%
rename from fe/fe-core/src/main/java/org/apache/doris/catalog/AuthType.java
rename to fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthType.java
index 25097bb24e..6cf3358fe7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/AuthType.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthType.java
@@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
-package org.apache.doris.catalog;
+package org.apache.doris.common.security.authentication;
/**
* Define different auth type for external table such as hive/iceberg
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java
new file mode 100644
index 0000000000..b3cb69f700
--- /dev/null
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java
@@ -0,0 +1,69 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.common.security.authentication;
+
+import lombok.Data;
+import org.apache.hadoop.conf.Configuration;
+
+@Data
+public abstract class AuthenticationConfig {
+
+ public static String HADOOP_USER_NAME = "hadoop.username";
+ public static String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
+ public static String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
+ public static String HADOOP_KERBEROS_AUTHORIZATION = "hadoop.security.authorization";
+ public static String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
+ public static String HIVE_KERBEROS_PRINCIPAL = "hive.metastore.kerberos.principal";
+ public static String HIVE_KERBEROS_KEYTAB = "hive.metastore.kerberos.keytab.file";
+
+ private boolean isValid;
+
+    /**
+     * get kerberos config from hadoop conf, using the default hadoop principal/keytab keys
+     * @param conf config
+     * @return kerberos config, or a simple auth config if authentication is not kerberos
+     */
+ public static AuthenticationConfig getKerberosConfig(Configuration conf) {
+ return AuthenticationConfig.getKerberosConfig(conf, HADOOP_KERBEROS_PRINCIPAL, HADOOP_KERBEROS_KEYTAB);
+ }
+
+    /**
+     * get kerberos config from hadoop conf with the given principal/keytab keys
+     * @param conf config
+     * @param krbPrincipalKey principal key
+     * @param krbKeytabKey keytab key
+     * @return kerberos config, or a simple auth config if authentication is not kerberos
+     */
+ public static AuthenticationConfig getKerberosConfig(Configuration conf,
+ String krbPrincipalKey,
+ String krbKeytabKey) {
+ String authentication = conf.get(HADOOP_SECURITY_AUTHENTICATION, null);
+ if (AuthType.KERBEROS.getDesc().equals(authentication)) {
+ KerberosAuthenticationConfig krbConfig = new KerberosAuthenticationConfig();
+ krbConfig.setKerberosPrincipal(conf.get(krbPrincipalKey));
+ krbConfig.setKerberosKeytab(conf.get(krbKeytabKey));
+ krbConfig.setConf(conf);
+ return krbConfig;
+ } else {
+ // AuthType.SIMPLE
+ SimpleAuthenticationConfig simpleAuthenticationConfig = new SimpleAuthenticationConfig();
+ simpleAuthenticationConfig.setUsername(conf.get(HADOOP_USER_NAME));
+ return simpleAuthenticationConfig;
+ }
+ }
+}
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
new file mode 100644
index 0000000000..5fb8f4fdab
--- /dev/null
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
@@ -0,0 +1,99 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.common.security.authentication;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.IOException;
+
+public class HadoopUGI {
+ private static final Logger LOG = LogManager.getLogger(HadoopUGI.class);
+
+ /**
+ * login and return hadoop ugi
+ * @param config auth config
+ * @return ugi
+ */
+ public static UserGroupInformation loginWithUGI(AuthenticationConfig config) {
+ UserGroupInformation ugi;
+ if (config instanceof KerberosAuthenticationConfig) {
+ KerberosAuthenticationConfig krbConfig = (KerberosAuthenticationConfig) config;
+ Configuration hadoopConf = krbConfig.getConf();
+ hadoopConf.set(AuthenticationConfig.HADOOP_KERBEROS_AUTHORIZATION, "true");
+ UserGroupInformation.setConfiguration(hadoopConf);
+ String principal = krbConfig.getKerberosPrincipal();
+ try {
+ // login hadoop with keytab and try checking TGT
+ ugi = UserGroupInformation.getLoginUser();
+ LOG.debug("Current login user: {}", ugi.getUserName());
+ if (ugi.hasKerberosCredentials() && StringUtils.equals(ugi.getUserName(), principal)) {
+ // if the current user is logged by kerberos and is the same user
+ // just use checkTGTAndReloginFromKeytab because this method will only relogin
+ // when the TGT is expired or is close to expiry
+ ugi.checkTGTAndReloginFromKeytab();
+ return ugi;
+ }
+ } catch (IOException e) {
+ LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e);
+ }
+ try {
+ ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, krbConfig.getKerberosKeytab());
+ UserGroupInformation.setLoginUser(ugi);
+ LOG.debug("Login by kerberos authentication with principal: {}", principal);
+ return ugi;
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ String hadoopUserName = ((SimpleAuthenticationConfig) config).getUsername();
+ if (hadoopUserName == null) {
+ hadoopUserName = "hadoop";
+ LOG.debug(AuthenticationConfig.HADOOP_USER_NAME + " is unset, use default user: hadoop");
+ }
+ ugi = UserGroupInformation.createRemoteUser(hadoopUserName);
+ UserGroupInformation.setLoginUser(ugi);
+ LOG.debug("Login by proxy user, hadoop.username: {}", hadoopUserName);
+ return ugi;
+ }
+ }
+
+    /**
+     * login with kerberos, used by HMSExternalCatalog
+     * @param catalogName catalog name, used in the error message on login failure
+     * @param config auth config
+     */
+ public static void tryKrbLogin(String catalogName, AuthenticationConfig config) {
+ if (config instanceof KerberosAuthenticationConfig) {
+ KerberosAuthenticationConfig krbConfig = (KerberosAuthenticationConfig) config;
+ try {
+ /**
+ * Because metastore client is created by using
+ * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient#getProxy}
+ * it will relogin when TGT is expired, so we don't need to relogin manually.
+ */
+ UserGroupInformation.loginUserFromKeytab(krbConfig.getKerberosPrincipal(),
+ krbConfig.getKerberosKeytab());
+ } catch (IOException e) {
+ throw new RuntimeException("login with kerberos auth failed for catalog: " + catalogName, e);
+ }
+ }
+ }
+}
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java
new file mode 100644
index 0000000000..722cd0352b
--- /dev/null
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java
@@ -0,0 +1,34 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.common.security.authentication;
+
+import lombok.Data;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+
+@Data
+public class KerberosAuthenticationConfig extends AuthenticationConfig {
+ private String kerberosPrincipal;
+ private String kerberosKeytab;
+ private Configuration conf;
+
+ @Override
+ public boolean isValid() {
+ return StringUtils.isNotEmpty(kerberosPrincipal) && StringUtils.isNotEmpty(kerberosKeytab);
+ }
+}
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/SimpleAuthenticationConfig.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/SimpleAuthenticationConfig.java
new file mode 100644
index 0000000000..57bb788737
--- /dev/null
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/SimpleAuthenticationConfig.java
@@ -0,0 +1,31 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.common.security.authentication;
+
+import lombok.Data;
+import org.apache.commons.lang3.StringUtils;
+
+@Data
+public class SimpleAuthenticationConfig extends AuthenticationConfig {
+ private String username;
+
+ @Override
+ public boolean isValid() {
+ return StringUtils.isNotEmpty(username);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java
index d2a03aaf90..8ebd66c709 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java
@@ -19,6 +19,7 @@ package org.apache.doris.catalog;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.proc.BaseProcResult;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.thrift.THdfsConf;
import org.apache.doris.thrift.THdfsParams;
@@ -44,12 +45,6 @@ import java.util.Map;
public class HdfsResource extends Resource {
public static final String HADOOP_FS_PREFIX = "dfs.";
public static String HADOOP_FS_NAME = "fs.defaultFS";
- // simple or kerberos
- public static String HADOOP_USER_NAME = "hadoop.username";
- public static String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
- public static String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
- public static String HADOOP_KERBEROS_AUTHORIZATION = "hadoop.security.authorization";
- public static String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
public static String HADOOP_SHORT_CIRCUIT = "dfs.client.read.shortcircuit";
public static String HADOOP_SOCKET_PATH = "dfs.domain.socket.path";
public static String DSF_NAMESERVICES = "dfs.nameservices";
@@ -107,11 +102,11 @@ public class HdfsResource extends Resource {
for (Map.Entry<String, String> property : properties.entrySet()) {
if (property.getKey().equalsIgnoreCase(HADOOP_FS_NAME)) {
tHdfsParams.setFsName(property.getValue());
- } else if (property.getKey().equalsIgnoreCase(HADOOP_USER_NAME)) {
+ } else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_USER_NAME)) {
tHdfsParams.setUser(property.getValue());
- } else if (property.getKey().equalsIgnoreCase(HADOOP_KERBEROS_PRINCIPAL)) {
+ } else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL)) {
tHdfsParams.setHdfsKerberosPrincipal(property.getValue());
- } else if (property.getKey().equalsIgnoreCase(HADOOP_KERBEROS_KEYTAB)) {
+ } else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB)) {
tHdfsParams.setHdfsKerberosKeytab(property.getValue());
} else {
THdfsConf hdfsConf = new THdfsConf();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java
index 6273b78b66..f8ddcb7f0a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java
@@ -33,6 +33,8 @@ import org.apache.doris.analysis.StringLiteral;
import org.apache.doris.catalog.external.HMSExternalTable;
import org.apache.doris.common.Config;
import org.apache.doris.common.DdlException;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
+import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.thrift.TExprOpcode;
@@ -775,36 +777,22 @@ public class HiveMetaStoreClientHelper {
return hudiSchema;
}
- public static UserGroupInformation getUserGroupInformation(Configuration conf) {
- UserGroupInformation ugi = null;
- String authentication = conf.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, null);
- if (AuthType.KERBEROS.getDesc().equals(authentication)) {
- conf.set("hadoop.security.authorization", "true");
- UserGroupInformation.setConfiguration(conf);
- String principal = conf.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
- String keytab = conf.get(HdfsResource.HADOOP_KERBEROS_KEYTAB);
- try {
- ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
- UserGroupInformation.setLoginUser(ugi);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- } else {
- String hadoopUserName = conf.get(HdfsResource.HADOOP_USER_NAME);
- if (hadoopUserName != null) {
- ugi = UserGroupInformation.createRemoteUser(hadoopUserName);
- }
- }
- return ugi;
- }
-
public static <T> T ugiDoAs(long catalogId, PrivilegedExceptionAction<T> action) {
return ugiDoAs(((ExternalCatalog) Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId)).getConfiguration(),
action);
}
public static <T> T ugiDoAs(Configuration conf, PrivilegedExceptionAction<T> action) {
- UserGroupInformation ugi = getUserGroupInformation(conf);
+ AuthenticationConfig krbConfig = AuthenticationConfig.getKerberosConfig(conf,
+ AuthenticationConfig.HIVE_KERBEROS_PRINCIPAL,
+ AuthenticationConfig.HIVE_KERBEROS_KEYTAB);
+ if (!krbConfig.isValid()) {
+ // if hive config is not ready, then use hadoop kerberos to login
+ krbConfig = AuthenticationConfig.getKerberosConfig(conf,
+ AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL,
+ AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
+ }
+ UserGroupInformation ugi = HadoopUGI.loginWithUGI(krbConfig);
try {
if (ugi != null) {
ugi.checkTGTAndReloginFromKeytab();
@@ -813,7 +801,7 @@ public class HiveMetaStoreClientHelper {
return action.run();
}
} catch (Exception e) {
- throw new RuntimeException(e.getMessage(), e.getCause());
+ throw new RuntimeException(e.getMessage(), e);
}
}
@@ -821,7 +809,7 @@ public class HiveMetaStoreClientHelper {
String hudiBasePath = table.getRemoteTable().getSd().getLocation();
Configuration conf = getConfiguration(table);
- UserGroupInformation ugi = getUserGroupInformation(conf);
+ UserGroupInformation ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(conf));
HoodieTableMetaClient metaClient;
if (ugi != null) {
try {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveTable.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveTable.java
index 550be96f74..385b79d493 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveTable.java
@@ -19,6 +19,8 @@ package org.apache.doris.catalog;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.io.Text;
+import org.apache.doris.common.security.authentication.AuthType;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.datasource.property.constants.S3Properties;
import org.apache.doris.thrift.THiveTable;
@@ -114,40 +116,41 @@ public class HiveTable extends Table {
}
// check auth type
- String authType = copiedProps.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION);
+ String authType = copiedProps.get(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION);
if (Strings.isNullOrEmpty(authType)) {
authType = AuthType.SIMPLE.getDesc();
}
if (!AuthType.isSupportedAuthType(authType)) {
throw new DdlException(String.format(PROPERTY_ERROR_MSG,
- HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authType));
+ AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, authType));
}
- copiedProps.remove(HdfsResource.HADOOP_SECURITY_AUTHENTICATION);
- hiveProperties.put(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authType);
+ copiedProps.remove(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION);
+ hiveProperties.put(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, authType);
if (AuthType.KERBEROS.getDesc().equals(authType)) {
// check principal
- String principal = copiedProps.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
+ String principal = copiedProps.get(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL);
if (Strings.isNullOrEmpty(principal)) {
throw new DdlException(String.format(PROPERTY_MISSING_MSG,
- HdfsResource.HADOOP_KERBEROS_PRINCIPAL, HdfsResource.HADOOP_KERBEROS_PRINCIPAL));
+ AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL,
+ AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL));
}
- hiveProperties.put(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, principal);
- copiedProps.remove(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
+ hiveProperties.put(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, principal);
+ copiedProps.remove(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL);
// check keytab
- String keytabPath = copiedProps.get(HdfsResource.HADOOP_KERBEROS_KEYTAB);
+ String keytabPath = copiedProps.get(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
if (Strings.isNullOrEmpty(keytabPath)) {
throw new DdlException(String.format(PROPERTY_MISSING_MSG,
- HdfsResource.HADOOP_KERBEROS_KEYTAB, HdfsResource.HADOOP_KERBEROS_KEYTAB));
+ AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, AuthenticationConfig.HADOOP_KERBEROS_KEYTAB));
} else {
- hiveProperties.put(HdfsResource.HADOOP_KERBEROS_KEYTAB, keytabPath);
- copiedProps.remove(HdfsResource.HADOOP_KERBEROS_KEYTAB);
+ hiveProperties.put(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, keytabPath);
+ copiedProps.remove(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
}
}
- String hdfsUserName = copiedProps.get(HdfsResource.HADOOP_USER_NAME);
+ String hdfsUserName = copiedProps.get(AuthenticationConfig.HADOOP_USER_NAME);
if (!Strings.isNullOrEmpty(hdfsUserName)) {
- hiveProperties.put(HdfsResource.HADOOP_USER_NAME, hdfsUserName);
- copiedProps.remove(HdfsResource.HADOOP_USER_NAME);
+ hiveProperties.put(AuthenticationConfig.HADOOP_USER_NAME, hdfsUserName);
+ copiedProps.remove(AuthenticationConfig.HADOOP_USER_NAME);
}
if (!copiedProps.isEmpty()) {
Iterator<Map.Entry<String, String>> iter = copiedProps.entrySet().iterator();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/HMSExternalCatalog.java
index 4f385fad1e..92a15badef 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/HMSExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/HMSExternalCatalog.java
@@ -17,7 +17,6 @@
package org.apache.doris.datasource;
-import org.apache.doris.catalog.AuthType;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.HdfsResource;
import org.apache.doris.catalog.external.ExternalDatabase;
@@ -26,6 +25,8 @@ import org.apache.doris.catalog.external.HMSExternalDatabase;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.Config;
import org.apache.doris.common.DdlException;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
+import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.datasource.hive.HMSCachedClient;
import org.apache.doris.datasource.hive.HMSCachedClientFactory;
import org.apache.doris.datasource.jdbc.client.JdbcClientConfig;
@@ -36,11 +37,9 @@ import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.security.UserGroupInformation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -143,27 +142,13 @@ public class HMSExternalCatalog extends ExternalCatalog {
}
hiveConf.set(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.name(),
String.valueOf(Config.hive_metastore_client_timeout_second));
- String authentication = catalogProperty.getOrDefault(
- HdfsResource.HADOOP_SECURITY_AUTHENTICATION, "");
- if (AuthType.KERBEROS.getDesc().equals(authentication)) {
- hiveConf.set(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authentication);
- UserGroupInformation.setConfiguration(hiveConf);
- try {
- /**
- * Because metastore client is created by using
- * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient#getProxy}
- * it will relogin when TGT is expired, so we don't need to relogin manually.
- */
- UserGroupInformation.loginUserFromKeytab(
- catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, ""),
- catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_KEYTAB, ""));
- } catch (IOException e) {
- throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName());
- }
- }
+ HadoopUGI.tryKrbLogin(this.getName(), AuthenticationConfig.getKerberosConfig(hiveConf,
+ AuthenticationConfig.HIVE_KERBEROS_PRINCIPAL,
+ AuthenticationConfig.HIVE_KERBEROS_KEYTAB));
+ client = HMSCachedClientFactory.createCachedClient(hiveConf,
+ Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size),
+ jdbcClientConfig);
}
- client = HMSCachedClientFactory.createCachedClient(hiveConf,
- Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size), jdbcClientConfig);
}
@Override
@@ -248,5 +233,4 @@ public class HMSExternalCatalog extends ExternalCatalog {
protected List<String> listDatabaseNames() {
return client.getAllDatabases();
}
-
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
index b65594060a..c7bf28cda9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
@@ -22,7 +22,6 @@ import org.apache.doris.backup.Status;
import org.apache.doris.backup.Status.ErrCode;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.Env;
-import org.apache.doris.catalog.HdfsResource;
import org.apache.doris.catalog.ListPartitionItem;
import org.apache.doris.catalog.PartitionItem;
import org.apache.doris.catalog.PartitionKey;
@@ -33,6 +32,7 @@ import org.apache.doris.common.Config;
import org.apache.doris.common.FeConstants;
import org.apache.doris.common.Pair;
import org.apache.doris.common.UserException;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.util.CacheBulkLoader;
import org.apache.doris.common.util.LocationPath;
import org.apache.doris.datasource.CacheException;
@@ -754,7 +754,7 @@ public class HiveMetaStoreCache {
public List<FileCacheValue> getFilesByTransaction(List<HivePartition> partitions, ValidWriteIdList validWriteIds,
            boolean isFullAcid, long tableId, String bindBrokerName) {
List<FileCacheValue> fileCacheValues = Lists.newArrayList();
- String remoteUser = jobConf.get(HdfsResource.HADOOP_USER_NAME);
+ String remoteUser = jobConf.get(AuthenticationConfig.HADOOP_USER_NAME);
try {
for (HivePartition partition : partitions) {
FileCacheValue fileCacheValue = new FileCacheValue();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
index 79c83b8089..4d21ebbdf9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
@@ -19,9 +19,9 @@ package org.apache.doris.fs.remote.dfs;
import org.apache.doris.analysis.StorageBackend;
import org.apache.doris.backup.Status;
-import org.apache.doris.catalog.AuthType;
-import org.apache.doris.catalog.HdfsResource;
import org.apache.doris.common.UserException;
+import org.apache.doris.common.security.authentication.AuthenticationConfig;
+import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.common.util.URI;
import org.apache.doris.fs.operations.HDFSFileOperations;
import org.apache.doris.fs.operations.HDFSOpParams;
@@ -82,7 +82,7 @@ public class DFSFileSystem extends RemoteFileSystem {
conf.set(propEntry.getKey(), propEntry.getValue());
}
- UserGroupInformation ugi = login(conf);
+ UserGroupInformation ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(conf));
try {
dfsFileSystem = ugi.doAs((PrivilegedAction<FileSystem>) () -> {
try {
@@ -100,58 +100,6 @@ public class DFSFileSystem extends RemoteFileSystem {
return dfsFileSystem;
}
- private UserGroupInformation login(Configuration conf) throws UserException {
- if (AuthType.KERBEROS.getDesc().equals(
- conf.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, null))) {
- try {
- UserGroupInformation ugi = UserGroupInformation.getLoginUser();
- String principal = conf.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
- LOG.debug("Current login user: {}", ugi.getUserName());
- if (ugi.hasKerberosCredentials() && ugi.getUserName().equals(principal)) {
- // if the current user is logged by kerberos and is the same user
- // just use checkTGTAndReloginFromKeytab because this method will only relogin
- // when the TGT is expired or is close to expiry
- ugi.checkTGTAndReloginFromKeytab();
- return ugi;
- }
- } catch (IOException e) {
- LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e);
- return doLogin(conf);
- }
- }
-
- return doLogin(conf);
- }
-
- private UserGroupInformation doLogin(Configuration conf) throws UserException {
- if (AuthType.KERBEROS.getDesc().equals(
- conf.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, null))) {
- conf.set(HdfsResource.HADOOP_KERBEROS_AUTHORIZATION, "true");
- String principal = conf.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
- String keytab = conf.get(HdfsResource.HADOOP_KERBEROS_KEYTAB);
-
- UserGroupInformation.setConfiguration(conf);
- try {
- UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
- UserGroupInformation.setLoginUser(ugi);
- LOG.info("Login by kerberos authentication with principal: {}", principal);
- return ugi;
- } catch (IOException e) {
- throw new UserException(e);
- }
- } else {
- String hadoopUserName = conf.get(HdfsResource.HADOOP_USER_NAME);
- if (hadoopUserName == null) {
- hadoopUserName = "hadoop";
- LOG.debug(HdfsResource.HADOOP_USER_NAME + " is unset, use default user: hadoop");
- }
- UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hadoopUserName);
- UserGroupInformation.setLoginUser(ugi);
- LOG.info("Login by proxy user, hadoop.username: {}", hadoopUserName);
- return ugi;
- }
- }
-
@Override
public Status downloadWithFileSize(String remoteFilePath, String localFilePath, long fileSize) {
LOG.debug("download from {} to {}, file size: {}.", remoteFilePath, localFilePath, fileSize);