[Fix](multi-catalog) Fix some hive partition issues. (#19513)

Fix some hive partition issues.
1. Fix a BE crash when using hive partition fields of `date`, `timestamp`, or `decimal` type.
2. Fix an HDFS URI decode error when using a `timestamp` partition field, whose value contains URL-encoded special characters (e.g. `:` is encoded as `%3A`).
This commit is contained in:
Qi Chen
2023-05-11 07:49:46 +08:00
committed by GitHub
parent 95833426e8
commit 4418eb36a3
41 changed files with 337 additions and 16 deletions

View File

@ -70,6 +70,9 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.List;
@ -210,6 +213,12 @@ public class HiveMetaStoreCache {
Map<Long, List<UniqueId>> idToUniqueIdsMap = Maps.newHashMapWithExpectedSize(partitionNames.size());
long idx = 0;
for (String partitionName : partitionNames) {
try {
partitionName = URLDecoder.decode(partitionName, StandardCharsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
// It should not be here
throw new RuntimeException(e);
}
long partitionId = idx++;
ListPartitionItem listPartitionItem = toListPartitionItem(partitionName, key.types);
idToPartitionItem.put(partitionId, listPartitionItem);

View File

@ -32,7 +32,6 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.FileNotFoundException;
import java.net.URI;
import java.util.List;
import java.util.Map;
@ -58,7 +57,7 @@ public class S3FileSystem extends ObjFileSystem {
System.setProperty("com.amazonaws.services.s3.enableV4", "true");
PropertyConverter.convertToHadoopFSProperties(properties).forEach(conf::set);
try {
dfsFileSystem = FileSystem.get(new URI(remotePath), conf);
dfsFileSystem = FileSystem.get(new Path(remotePath).toUri(), conf);
} catch (Exception e) {
throw new UserException("Failed to get S3 FileSystem for " + e.getMessage(), e);
}

View File

@ -92,9 +92,9 @@ public class DFSFileSystem extends RemoteFileSystem {
properties.get(HdfsResource.HADOOP_KERBEROS_KEYTAB));
}
if (username == null) {
dfsFileSystem = FileSystem.get(java.net.URI.create(remotePath), conf);
dfsFileSystem = FileSystem.get(new Path(remotePath).toUri(), conf);
} else {
dfsFileSystem = FileSystem.get(java.net.URI.create(remotePath), conf, username);
dfsFileSystem = FileSystem.get(new Path(remotePath).toUri(), conf, username);
}
} catch (Exception e) {
LOG.error("errors while connect to " + remotePath, e);