[regression](kerberos)add hive kerberos docker regression env (#37657)

## Proposed changes
pick:
[regression](kerberos)fix regression pipeline env when writing hosts 
(#37057)
[regression](kerberos)add hive kerberos docker regression env (#36430)
This commit is contained in:
slothever
2024-07-15 09:35:39 +08:00
committed by GitHub
parent 8f39143c14
commit 16de141743
32 changed files with 744 additions and 19 deletions

View File

@ -51,6 +51,7 @@ import org.apache.doris.datasource.operations.ExternalMetadataOps;
import org.apache.doris.datasource.paimon.PaimonExternalDatabase;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.test.TestExternalDatabase;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.persist.gson.GsonPostProcessable;
import org.apache.doris.persist.gson.GsonUtils;
import org.apache.doris.qe.ConnectContext;
@ -67,7 +68,6 @@ import lombok.Data;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
@ -148,7 +148,7 @@ public abstract class ExternalCatalog
}
public Configuration getConfiguration() {
Configuration conf = new HdfsConfiguration();
Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
Map<String, String> catalogProperties = catalogProperty.getHadoopProperties();
for (Map.Entry<String, String> entry : catalogProperties.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
@ -181,6 +181,11 @@ public abstract class ExternalCatalog
Boolean.valueOf(catalogProperty.getOrDefault(USE_META_CACHE, String.valueOf(DEFAULT_USE_META_CACHE))));
}
/**
 * Returns true when the catalog property
 * {@code ipc.client.fallback-to-simple-auth-allowed} is absent or empty,
 * i.e. the user has not explicitly configured whether a Kerberos client may
 * fall back to simple authentication. Callers use this to decide whether to
 * apply the default fallback behavior.
 */
public boolean ifNotSetFallbackToSimpleAuth() {
return catalogProperty.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty();
}
// Will be called when creating catalog(not replaying).
// Subclass can override this method to do some check when creating catalog.
public void checkWhenCreating() throws DdlException {

View File

@ -37,6 +37,7 @@ import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.fs.FileSystemProvider;
import org.apache.doris.fs.FileSystemProviderImpl;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.transaction.TransactionManagerFactory;
import com.google.common.base.Strings;
@ -59,7 +60,6 @@ public class HMSExternalCatalog extends ExternalCatalog {
public static final String FILE_META_CACHE_TTL_SECOND = "file.meta.cache.ttl-second";
// broker name for file split and query scan.
public static final String BIND_BROKER_NAME = "broker.name";
private static final String PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH = "ipc.client.fallback-to-simple-auth-allowed";
// -1 means file cache no ttl set
public static final int FILE_META_CACHE_NO_TTL = -1;
@ -244,9 +244,9 @@ public class HMSExternalCatalog extends ExternalCatalog {
@Override
public void setDefaultPropsIfMissing(boolean isReplay) {
super.setDefaultPropsIfMissing(isReplay);
if (catalogProperty.getOrDefault(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty()) {
if (ifNotSetFallbackToSimpleAuth()) {
// always allow fallback to simple auth, to support both kerberos and simple auth
catalogProperty.addProperty(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
catalogProperty.addProperty(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
}
}

View File

@ -41,6 +41,7 @@ import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.fs.FileSystemCache;
import org.apache.doris.fs.remote.RemoteFile;
import org.apache.doris.fs.remote.RemoteFileSystem;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.metric.GaugeMetric;
import org.apache.doris.metric.Metric;
import org.apache.doris.metric.MetricLabel;
@ -66,7 +67,6 @@ import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hive.common.ValidWriteIdList;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@ -433,7 +433,7 @@ public class HiveMetaStoreCache {
}
private synchronized void setJobConf() {
Configuration configuration = new HdfsConfiguration();
Configuration configuration = DFSFileSystem.getHdfsConf(catalog.ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> entry : catalog.getCatalogProperty().getHadoopProperties().entrySet()) {
configuration.set(entry.getKey(), entry.getValue());
}

View File

@ -42,13 +42,13 @@ import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.thrift.TExprOpcode;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
@ -843,7 +843,7 @@ public class HiveMetaStoreClientHelper {
}
public static Configuration getConfiguration(HMSExternalTable table) {
Configuration conf = new HdfsConfiguration();
Configuration conf = DFSFileSystem.getHdfsConf(table.getCatalog().ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> entry : table.getHadoopProperties().entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}

View File

@ -25,13 +25,13 @@ import org.apache.doris.datasource.CatalogIf;
import org.apache.doris.datasource.hive.HMSExternalCatalog;
import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.thrift.TIcebergMetadataParams;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.SerializableTable;
import org.apache.iceberg.Snapshot;
@ -177,7 +177,8 @@ public class IcebergMetadataCache {
private Catalog createIcebergHiveCatalog(String uri, Map<String, String> hdfsConf, Map<String, String> props) {
// set hdfs configuration
Configuration conf = new HdfsConfiguration();
Configuration conf = DFSFileSystem.getHdfsConf(
hdfsConf.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty());
for (Map.Entry<String, String> entry : hdfsConf.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}

View File

@ -25,6 +25,7 @@ import org.apache.doris.datasource.InitCatalogLog;
import org.apache.doris.datasource.SessionContext;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.datasource.property.constants.PaimonProperties;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
@ -60,7 +61,7 @@ public abstract class PaimonExternalCatalog extends ExternalCatalog {
@Override
protected void initLocalObjectsImpl() {
Configuration conf = new Configuration();
Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> propEntry : this.catalogProperty.getHadoopProperties().entrySet()) {
conf.set(propEntry.getKey(), propEntry.getValue());
}

View File

@ -21,6 +21,7 @@ import org.apache.doris.analysis.StorageBackend;
import org.apache.doris.backup.Status;
import org.apache.doris.common.UserException;
import org.apache.doris.fs.PersistentFileSystem;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.fs.FileStatus;
@ -46,6 +47,10 @@ public abstract class RemoteFileSystem extends PersistentFileSystem {
throw new UserException("Not support to getFileSystem.");
}
/**
 * Checks whether the Kerberos-to-simple-auth fallback switch
 * ({@code ipc.client.fallback-to-simple-auth-allowed}) has been left
 * unconfigured in this file system's properties.
 *
 * @return true if the property is missing or set to an empty string
 */
public boolean ifNotSetFallbackToSimpleAuth() {
    String fallbackSetting = properties.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "");
    return fallbackSetting.isEmpty();
}
@Override
public Status listFiles(String remotePath, boolean recursive, List<RemoteFile> result) {
try {

View File

@ -22,6 +22,7 @@ import org.apache.doris.backup.Status;
import org.apache.doris.common.UserException;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.fs.obj.S3ObjStorage;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.google.common.annotations.VisibleForTesting;
@ -60,7 +61,7 @@ public class S3FileSystem extends ObjFileSystem {
if (dfsFileSystem == null) {
synchronized (this) {
if (dfsFileSystem == null) {
Configuration conf = new Configuration();
Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
System.setProperty("com.amazonaws.services.s3.enableV4", "true");
// the entry value in properties may be null, and
PropertyConverter.convertToHadoopFSProperties(properties).entrySet().stream()

View File

@ -56,6 +56,7 @@ import java.util.Map;
public class DFSFileSystem extends RemoteFileSystem {
public static final String PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH = "ipc.client.fallback-to-simple-auth-allowed";
private static final Logger LOG = LogManager.getLogger(DFSFileSystem.class);
private HDFSFileOperations operations = null;
@ -75,7 +76,7 @@ public class DFSFileSystem extends RemoteFileSystem {
if (dfsFileSystem == null) {
synchronized (this) {
if (dfsFileSystem == null) {
Configuration conf = new HdfsConfiguration();
Configuration conf = getHdfsConf(ifNotSetFallbackToSimpleAuth());
for (Map.Entry<String, String> propEntry : properties.entrySet()) {
conf.set(propEntry.getKey(), propEntry.getValue());
}
@ -87,13 +88,22 @@ public class DFSFileSystem extends RemoteFileSystem {
throw new RuntimeException(e);
}
});
operations = new HDFSFileOperations(dfsFileSystem);
}
}
}
operations = new HDFSFileOperations(dfsFileSystem);
return dfsFileSystem;
}
/**
 * Builds a fresh HDFS {@link Configuration}.
 *
 * @param fallbackToSimpleAuth when true, enables
 *        {@code ipc.client.fallback-to-simple-auth-allowed} so a Kerberos
 *        client can still talk to simple-auth nodes in a mixed-auth cluster
 * @return a new HdfsConfiguration, optionally with the fallback flag set
 */
public static Configuration getHdfsConf(boolean fallbackToSimpleAuth) {
    Configuration conf = new HdfsConfiguration();
    if (!fallbackToSimpleAuth) {
        return conf;
    }
    // Mixed clusters (Kerberos + simple auth) need this flag to let a
    // Kerberos-enabled client fall back to simple authentication.
    conf.set(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
    return conf;
}
@Override
public Status downloadWithFileSize(String remoteFilePath, String localFilePath, long fileSize) {
if (LOG.isDebugEnabled()) {