[fix](multi-catalog) Fix the UGI-getting methods and unify them (#30844)

Move all UGI login methods into HadoopUGI.
authored by slothever, 2024-02-19 20:19:36 +08:00
committed by yiguolei
parent d55d796c74
commit 4a33d9820a
14 changed files with 305 additions and 163 deletions
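The description points at a new HadoopUGI helper, but that class is among the changed files not shown in this excerpt; only its call sites appear below (HadoopUGI.loginWithUGI and HadoopUGI.tryKrbLogin). As a minimal sketch, assuming an AuthenticationConfig with getConf/getKerberosPrincipal/getKerberosKeytab/getUsername accessors, a single login entry point folding together the branches removed from HiveMetaStoreClientHelper.getUserGroupInformation and DFSFileSystem.doLogin could look like this:

// Hypothetical sketch only: the signature loginWithUGI(AuthenticationConfig) is
// implied by the call sites in this commit; every accessor used here is assumed.
import java.io.IOException;

import org.apache.hadoop.security.UserGroupInformation;

public class HadoopUGI {
    /**
     * Kerberos keytab login when the config carries a complete principal/keytab,
     * otherwise a plain remote user for simple auth.
     */
    public static UserGroupInformation loginWithUGI(AuthenticationConfig config) {
        if (config == null) {
            return null;
        }
        try {
            UserGroupInformation ugi;
            if (config.isValid()) {
                // kerberos: log in from the configured keytab
                UserGroupInformation.setConfiguration(config.getConf());
                ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                        config.getKerberosPrincipal(), config.getKerberosKeytab());
            } else {
                // simple auth: impersonate hadoop.username (default "hadoop")
                ugi = UserGroupInformation.createRemoteUser(config.getUsername());
            }
            UserGroupInformation.setLoginUser(ugi);
            return ugi;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}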

View File (AuthType, deleted)

@@ -1,60 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.catalog;
/**
* Define different auth type for external table such as hive/iceberg
* so that BE could call secured under fileStorageSystem (enable kerberos)
*/
public enum AuthType {
SIMPLE(0, "simple"),
KERBEROS(1, "kerberos");
private int code;
private String desc;
AuthType(int code, String desc) {
this.code = code;
this.desc = desc;
}
public static boolean isSupportedAuthType(String authType) {
for (AuthType auth : values()) {
if (auth.getDesc().equals(authType)) {
return true;
}
}
return false;
}
public int getCode() {
return code;
}
public void setCode(int code) {
this.code = code;
}
public String getDesc() {
return desc;
}
public void setDesc(String desc) {
this.desc = desc;
}
}
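The enum is not gone from the codebase: the import added in the HiveTable diff below (org.apache.doris.common.security.authentication.AuthType) shows it was moved next to the new authentication classes. A presumed shape of the relocated file; only the new package name is confirmed by this excerpt, and the setters of the old version are dropped here for brevity:

// Presumed relocation of the enum deleted above; the body is assumed to be
// carried over from org.apache.doris.catalog.AuthType (license header and setters omitted).
package org.apache.doris.common.security.authentication;

public enum AuthType {
    SIMPLE(0, "simple"),
    KERBEROS(1, "kerberos");

    private final int code;
    private final String desc;

    AuthType(int code, String desc) {
        this.code = code;
        this.desc = desc;
    }

    public static boolean isSupportedAuthType(String authType) {
        for (AuthType auth : values()) {
            if (auth.getDesc().equals(authType)) {
                return true;
            }
        }
        return false;
    }

    public int getCode() {
        return code;
    }

    public String getDesc() {
        return desc;
    }
}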

View File (HdfsResource)

@@ -19,6 +19,7 @@ package org.apache.doris.catalog;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.proc.BaseProcResult;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.thrift.THdfsConf;
import org.apache.doris.thrift.THdfsParams;
@@ -44,12 +45,6 @@ import java.util.Map;
public class HdfsResource extends Resource {
public static final String HADOOP_FS_PREFIX = "dfs.";
public static String HADOOP_FS_NAME = "fs.defaultFS";
// simple or kerberos
public static String HADOOP_USER_NAME = "hadoop.username";
public static String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
public static String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
public static String HADOOP_KERBEROS_AUTHORIZATION = "hadoop.security.authorization";
public static String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
public static String HADOOP_SHORT_CIRCUIT = "dfs.client.read.shortcircuit";
public static String HADOOP_SOCKET_PATH = "dfs.domain.socket.path";
public static String DSF_NAMESERVICES = "dfs.nameservices";
@@ -107,11 +102,11 @@ public class HdfsResource extends Resource {
for (Map.Entry<String, String> property : properties.entrySet()) {
if (property.getKey().equalsIgnoreCase(HADOOP_FS_NAME)) {
tHdfsParams.setFsName(property.getValue());
} else if (property.getKey().equalsIgnoreCase(HADOOP_USER_NAME)) {
} else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_USER_NAME)) {
tHdfsParams.setUser(property.getValue());
} else if (property.getKey().equalsIgnoreCase(HADOOP_KERBEROS_PRINCIPAL)) {
} else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL)) {
tHdfsParams.setHdfsKerberosPrincipal(property.getValue());
} else if (property.getKey().equalsIgnoreCase(HADOOP_KERBEROS_KEYTAB)) {
} else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB)) {
tHdfsParams.setHdfsKerberosKeytab(property.getValue());
} else {
THdfsConf hdfsConf = new THdfsConf();

View File (HiveMetaStoreClientHelper)

@@ -33,6 +33,8 @@ import org.apache.doris.analysis.StringLiteral;
import org.apache.doris.catalog.external.HMSExternalTable;
import org.apache.doris.common.Config;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.thrift.TExprOpcode;
@@ -775,36 +777,22 @@ public class HiveMetaStoreClientHelper {
return hudiSchema;
}
public static UserGroupInformation getUserGroupInformation(Configuration conf) {
UserGroupInformation ugi = null;
String authentication = conf.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, null);
if (AuthType.KERBEROS.getDesc().equals(authentication)) {
conf.set("hadoop.security.authorization", "true");
UserGroupInformation.setConfiguration(conf);
String principal = conf.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
String keytab = conf.get(HdfsResource.HADOOP_KERBEROS_KEYTAB);
try {
ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
UserGroupInformation.setLoginUser(ugi);
} catch (IOException e) {
throw new RuntimeException(e);
}
} else {
String hadoopUserName = conf.get(HdfsResource.HADOOP_USER_NAME);
if (hadoopUserName != null) {
ugi = UserGroupInformation.createRemoteUser(hadoopUserName);
}
}
return ugi;
}
public static <T> T ugiDoAs(long catalogId, PrivilegedExceptionAction<T> action) {
return ugiDoAs(((ExternalCatalog) Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId)).getConfiguration(),
action);
}
public static <T> T ugiDoAs(Configuration conf, PrivilegedExceptionAction<T> action) {
UserGroupInformation ugi = getUserGroupInformation(conf);
AuthenticationConfig krbConfig = AuthenticationConfig.getKerberosConfig(conf,
AuthenticationConfig.HIVE_KERBEROS_PRINCIPAL,
AuthenticationConfig.HIVE_KERBEROS_KEYTAB);
if (!krbConfig.isValid()) {
// if hive config is not ready, then use hadoop kerberos to login
krbConfig = AuthenticationConfig.getKerberosConfig(conf,
AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL,
AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
}
UserGroupInformation ugi = HadoopUGI.loginWithUGI(krbConfig);
try {
if (ugi != null) {
ugi.checkTGTAndReloginFromKeytab();
@@ -813,7 +801,7 @@ public class HiveMetaStoreClientHelper {
return action.run();
}
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e.getCause());
throw new RuntimeException(e.getMessage(), e);
}
}
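As the inline comment above says, the hive.* principal/keytab pair is tried first and the generic hadoop.* pair is only a fallback. An illustrative caller of the refactored ugiDoAs (not taken from this commit; the configuration values and the metastore client variable are made up):

// Illustrative fragment only: conf values and `client` (an IMetaStoreClient) are made up.
Configuration conf = new Configuration();
conf.set(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
conf.set(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, "doris/_HOST@EXAMPLE.COM");
conf.set(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, "/etc/doris/doris.keytab");

// The action runs under the logged-in UGI; a hive.* principal/keytab pair in
// conf would take precedence over the hadoop.* pair shown here.
List<String> partitionNames = HiveMetaStoreClientHelper.ugiDoAs(conf,
        () -> client.listPartitionNames("db", "tbl", (short) -1));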
@@ -821,7 +809,7 @@ public class HiveMetaStoreClientHelper {
String hudiBasePath = table.getRemoteTable().getSd().getLocation();
Configuration conf = getConfiguration(table);
UserGroupInformation ugi = getUserGroupInformation(conf);
UserGroupInformation ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(conf));
HoodieTableMetaClient metaClient;
if (ugi != null) {
try {

View File (HiveTable)

@@ -19,6 +19,8 @@ package org.apache.doris.catalog;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.security.authentication.AuthType;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.datasource.property.constants.S3Properties;
import org.apache.doris.thrift.THiveTable;
@@ -114,40 +116,41 @@ public class HiveTable extends Table {
}
// check auth type
String authType = copiedProps.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION);
String authType = copiedProps.get(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION);
if (Strings.isNullOrEmpty(authType)) {
authType = AuthType.SIMPLE.getDesc();
}
if (!AuthType.isSupportedAuthType(authType)) {
throw new DdlException(String.format(PROPERTY_ERROR_MSG,
HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authType));
AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, authType));
}
copiedProps.remove(HdfsResource.HADOOP_SECURITY_AUTHENTICATION);
hiveProperties.put(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authType);
copiedProps.remove(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION);
hiveProperties.put(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, authType);
if (AuthType.KERBEROS.getDesc().equals(authType)) {
// check principal
String principal = copiedProps.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
String principal = copiedProps.get(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL);
if (Strings.isNullOrEmpty(principal)) {
throw new DdlException(String.format(PROPERTY_MISSING_MSG,
HdfsResource.HADOOP_KERBEROS_PRINCIPAL, HdfsResource.HADOOP_KERBEROS_PRINCIPAL));
AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL,
AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL));
}
hiveProperties.put(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, principal);
copiedProps.remove(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
hiveProperties.put(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, principal);
copiedProps.remove(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL);
// check keytab
String keytabPath = copiedProps.get(HdfsResource.HADOOP_KERBEROS_KEYTAB);
String keytabPath = copiedProps.get(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
if (Strings.isNullOrEmpty(keytabPath)) {
throw new DdlException(String.format(PROPERTY_MISSING_MSG,
HdfsResource.HADOOP_KERBEROS_KEYTAB, HdfsResource.HADOOP_KERBEROS_KEYTAB));
AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, AuthenticationConfig.HADOOP_KERBEROS_KEYTAB));
} else {
hiveProperties.put(HdfsResource.HADOOP_KERBEROS_KEYTAB, keytabPath);
copiedProps.remove(HdfsResource.HADOOP_KERBEROS_KEYTAB);
hiveProperties.put(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, keytabPath);
copiedProps.remove(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
}
}
String hdfsUserName = copiedProps.get(HdfsResource.HADOOP_USER_NAME);
String hdfsUserName = copiedProps.get(AuthenticationConfig.HADOOP_USER_NAME);
if (!Strings.isNullOrEmpty(hdfsUserName)) {
hiveProperties.put(HdfsResource.HADOOP_USER_NAME, hdfsUserName);
copiedProps.remove(HdfsResource.HADOOP_USER_NAME);
hiveProperties.put(AuthenticationConfig.HADOOP_USER_NAME, hdfsUserName);
copiedProps.remove(AuthenticationConfig.HADOOP_USER_NAME);
}
if (!copiedProps.isEmpty()) {
Iterator<Map.Entry<String, String>> iter = copiedProps.entrySet().iterator();

View File (HMSExternalCatalog)

@@ -17,7 +17,6 @@
package org.apache.doris.datasource;
import org.apache.doris.catalog.AuthType;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.HdfsResource;
import org.apache.doris.catalog.external.ExternalDatabase;
@@ -26,6 +25,8 @@ import org.apache.doris.catalog.external.HMSExternalDatabase;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.Config;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.datasource.hive.HMSCachedClient;
import org.apache.doris.datasource.hive.HMSCachedClientFactory;
import org.apache.doris.datasource.jdbc.client.JdbcClientConfig;
@@ -36,11 +37,9 @@ import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -143,27 +142,13 @@ public class HMSExternalCatalog extends ExternalCatalog {
}
hiveConf.set(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.name(),
String.valueOf(Config.hive_metastore_client_timeout_second));
String authentication = catalogProperty.getOrDefault(
HdfsResource.HADOOP_SECURITY_AUTHENTICATION, "");
if (AuthType.KERBEROS.getDesc().equals(authentication)) {
hiveConf.set(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authentication);
UserGroupInformation.setConfiguration(hiveConf);
try {
/**
* Because metastore client is created by using
* {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient#getProxy}
* it will relogin when TGT is expired, so we don't need to relogin manually.
*/
UserGroupInformation.loginUserFromKeytab(
catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, ""),
catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_KEYTAB, ""));
} catch (IOException e) {
throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName());
}
}
HadoopUGI.tryKrbLogin(this.getName(), AuthenticationConfig.getKerberosConfig(hiveConf,
AuthenticationConfig.HIVE_KERBEROS_PRINCIPAL,
AuthenticationConfig.HIVE_KERBEROS_KEYTAB));
client = HMSCachedClientFactory.createCachedClient(hiveConf,
Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size),
jdbcClientConfig);
}
client = HMSCachedClientFactory.createCachedClient(hiveConf,
Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size), jdbcClientConfig);
}
@Override
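The removed block above, including its note that RetryingMetaStoreClient#getProxy re-logs in when the TGT expires, collapses into a single HadoopUGI.tryKrbLogin call. A hedged sketch of that helper, keeping the old behavior of tagging login failures with the catalog name; only the tryKrbLogin(name, config) signature is visible in this diff, and the RuntimeException is a stand-in for the old HMSClientException:

// Hypothetical sketch; internals and the exception type are assumptions.
public static void tryKrbLogin(String catalogName, AuthenticationConfig config) {
    if (config == null || !config.isValid()) {
        return; // simple auth or incomplete kerberos config: nothing to log in
    }
    try {
        UserGroupInformation.setConfiguration(config.getConf());
        // RetryingMetaStoreClient#getProxy handles re-login on TGT expiry,
        // so a one-time login here is enough (the reasoning from the removed comment).
        UserGroupInformation.loginUserFromKeytab(config.getKerberosPrincipal(), config.getKerberosKeytab());
    } catch (IOException e) {
        throw new RuntimeException(
                String.format("login with kerberos auth failed for catalog %s", catalogName), e);
    }
}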
@@ -248,5 +233,4 @@ public class HMSExternalCatalog extends ExternalCatalog {
protected List<String> listDatabaseNames() {
return client.getAllDatabases();
}
}

View File (HiveMetaStoreCache)

@@ -22,7 +22,6 @@ import org.apache.doris.backup.Status;
import org.apache.doris.backup.Status.ErrCode;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.HdfsResource;
import org.apache.doris.catalog.ListPartitionItem;
import org.apache.doris.catalog.PartitionItem;
import org.apache.doris.catalog.PartitionKey;
@@ -33,6 +32,7 @@ import org.apache.doris.common.Config;
import org.apache.doris.common.FeConstants;
import org.apache.doris.common.Pair;
import org.apache.doris.common.UserException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.util.CacheBulkLoader;
import org.apache.doris.common.util.LocationPath;
import org.apache.doris.datasource.CacheException;
@@ -754,7 +754,7 @@ public class HiveMetaStoreCache {
public List<FileCacheValue> getFilesByTransaction(List<HivePartition> partitions, ValidWriteIdList validWriteIds,
boolean isFullAcid, long tableId, String bindBrokerName) {
List<FileCacheValue> fileCacheValues = Lists.newArrayList();
String remoteUser = jobConf.get(HdfsResource.HADOOP_USER_NAME);
String remoteUser = jobConf.get(AuthenticationConfig.HADOOP_USER_NAME);
try {
for (HivePartition partition : partitions) {
FileCacheValue fileCacheValue = new FileCacheValue();

View File (DFSFileSystem)

@@ -19,9 +19,9 @@ package org.apache.doris.fs.remote.dfs;
import org.apache.doris.analysis.StorageBackend;
import org.apache.doris.backup.Status;
import org.apache.doris.catalog.AuthType;
import org.apache.doris.catalog.HdfsResource;
import org.apache.doris.common.UserException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.common.util.URI;
import org.apache.doris.fs.operations.HDFSFileOperations;
import org.apache.doris.fs.operations.HDFSOpParams;
@@ -82,7 +82,7 @@ public class DFSFileSystem extends RemoteFileSystem {
conf.set(propEntry.getKey(), propEntry.getValue());
}
UserGroupInformation ugi = login(conf);
UserGroupInformation ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(conf));
try {
dfsFileSystem = ugi.doAs((PrivilegedAction<FileSystem>) () -> {
try {
@@ -100,58 +100,6 @@
return dfsFileSystem;
}
private UserGroupInformation login(Configuration conf) throws UserException {
if (AuthType.KERBEROS.getDesc().equals(
conf.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, null))) {
try {
UserGroupInformation ugi = UserGroupInformation.getLoginUser();
String principal = conf.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
LOG.debug("Current login user: {}", ugi.getUserName());
if (ugi.hasKerberosCredentials() && ugi.getUserName().equals(principal)) {
// if the current user is logged by kerberos and is the same user
// just use checkTGTAndReloginFromKeytab because this method will only relogin
// when the TGT is expired or is close to expiry
ugi.checkTGTAndReloginFromKeytab();
return ugi;
}
} catch (IOException e) {
LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e);
return doLogin(conf);
}
}
return doLogin(conf);
}
private UserGroupInformation doLogin(Configuration conf) throws UserException {
if (AuthType.KERBEROS.getDesc().equals(
conf.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, null))) {
conf.set(HdfsResource.HADOOP_KERBEROS_AUTHORIZATION, "true");
String principal = conf.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL);
String keytab = conf.get(HdfsResource.HADOOP_KERBEROS_KEYTAB);
UserGroupInformation.setConfiguration(conf);
try {
UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
UserGroupInformation.setLoginUser(ugi);
LOG.info("Login by kerberos authentication with principal: {}", principal);
return ugi;
} catch (IOException e) {
throw new UserException(e);
}
} else {
String hadoopUserName = conf.get(HdfsResource.HADOOP_USER_NAME);
if (hadoopUserName == null) {
hadoopUserName = "hadoop";
LOG.debug(HdfsResource.HADOOP_USER_NAME + " is unset, use default user: hadoop");
}
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hadoopUserName);
UserGroupInformation.setLoginUser(ugi);
LOG.info("Login by proxy user, hadoop.username: {}", hadoopUserName);
return ugi;
}
}
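The deleted login/doLogin pair carried behavior that has to survive the refactor, notably defaulting hadoop.username to "hadoop" for simple auth; that presumably now lives behind the AuthenticationConfig factory used at the call sites above. A hedged sketch of that factory, assuming a plain value-holder class: the two getKerberosConfig overloads, isValid(), and the constant names are the only parts confirmed by this excerpt, and the hive.* key values are guesses:

// Hypothetical sketch of the config factory used above; field handling, the
// getters, and the hive.* key values are assumptions.
import org.apache.hadoop.conf.Configuration;

public class AuthenticationConfig {
    public static final String HADOOP_USER_NAME = "hadoop.username";
    public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
    public static final String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
    public static final String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
    public static final String HIVE_KERBEROS_PRINCIPAL = "hive.metastore.kerberos.principal";   // value assumed
    public static final String HIVE_KERBEROS_KEYTAB = "hive.metastore.kerberos.keytab.file";    // value assumed

    private Configuration conf;
    private String kerberosPrincipal;
    private String kerberosKeytab;
    private String username;

    public static AuthenticationConfig getKerberosConfig(Configuration conf) {
        return getKerberosConfig(conf, HADOOP_KERBEROS_PRINCIPAL, HADOOP_KERBEROS_KEYTAB);
    }

    public static AuthenticationConfig getKerberosConfig(Configuration conf,
                                                          String principalKey, String keytabKey) {
        AuthenticationConfig config = new AuthenticationConfig();
        config.conf = conf;
        config.kerberosPrincipal = conf.get(principalKey);
        config.kerberosKeytab = conf.get(keytabKey);
        // keep the old doLogin default of user "hadoop" when hadoop.username is unset
        config.username = conf.get(HADOOP_USER_NAME, "hadoop");
        return config;
    }

    // Drives the hive -> hadoop fallback in ugiDoAs: the config is usable for a
    // kerberos login only if auth is "kerberos" and both principal and keytab are set.
    public boolean isValid() {
        return AuthType.KERBEROS.getDesc().equals(conf.get(HADOOP_SECURITY_AUTHENTICATION))
                && kerberosPrincipal != null && !kerberosPrincipal.isEmpty()
                && kerberosKeytab != null && !kerberosKeytab.isEmpty();
    }

    public Configuration getConf() {
        return conf;
    }

    public String getKerberosPrincipal() {
        return kerberosPrincipal;
    }

    public String getKerberosKeytab() {
        return kerberosKeytab;
    }

    public String getUsername() {
        return username;
    }
}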
@Override
public Status downloadWithFileSize(String remoteFilePath, String localFilePath, long fileSize) {
LOG.debug("download from {} to {}, file size: {}.", remoteFilePath, localFilePath, fileSize);