diff --git a/be/src/agent/task_worker_pool.cpp b/be/src/agent/task_worker_pool.cpp
index 41449fa82e..686de72e1b 100644
--- a/be/src/agent/task_worker_pool.cpp
+++ b/be/src/agent/task_worker_pool.cpp
@@ -1279,9 +1279,13 @@ void push_storage_policy_callback(StorageEngine& engine, const TAgentTaskRequest
         } else if (resource.__isset.hdfs_storage_param) {
             Status st;
             std::shared_ptr<io::HdfsFileSystem> fs;
+            std::string root_path = resource.hdfs_storage_param.__isset.root_path
+                                            ? resource.hdfs_storage_param.root_path
+                                            : "";
             if (existed_resource.fs == nullptr) {
                 st = io::HdfsFileSystem::create(resource.hdfs_storage_param,
-                                                std::to_string(resource.id), "", nullptr, &fs);
+                                                std::to_string(resource.id), root_path, nullptr,
+                                                &fs);
             } else {
                 fs = std::static_pointer_cast<io::HdfsFileSystem>(existed_resource.fs);
             }
@@ -1290,7 +1294,8 @@ void push_storage_policy_callback(StorageEngine& engine, const TAgentTaskRequest
             } else {
                 LOG_INFO("successfully update hdfs resource")
                         .tag("resource_id", resource.id)
-                        .tag("resource_name", resource.name);
+                        .tag("resource_name", resource.name)
+                        .tag("root_path", fs->root_path().string());
                 put_storage_resource(resource.id, {std::move(fs), resource.version});
             }
         } else {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java
index 6d44170742..c9cb77fbd9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/HdfsResource.java
@@ -45,6 +45,7 @@ import java.util.Map;
 public class HdfsResource extends Resource {
     public static final String HADOOP_FS_PREFIX = "dfs.";
     public static String HADOOP_FS_NAME = "fs.defaultFS";
+    public static String HADOOP_FS_ROOT_PATH = "root_path";
     public static String HADOOP_SHORT_CIRCUIT = "dfs.client.read.shortcircuit";
     public static String HADOOP_SOCKET_PATH = "dfs.domain.socket.path";
     public static String DSF_NAMESERVICES = "dfs.nameservices";
@@ -106,6 +107,8 @@ public class HdfsResource extends Resource {
         for (Map.Entry<String, String> property : properties.entrySet()) {
             if (property.getKey().equalsIgnoreCase(HADOOP_FS_NAME)) {
                 tHdfsParams.setFsName(property.getValue());
+            } else if (property.getKey().equalsIgnoreCase(HADOOP_FS_ROOT_PATH)) {
+                tHdfsParams.setRootPath(property.getValue());
             } else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_USER_NAME)) {
                 tHdfsParams.setUser(property.getValue());
             } else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL)) {
diff --git a/gensrc/thrift/PlanNodes.thrift b/gensrc/thrift/PlanNodes.thrift
index 622611536b..5f34a261c5 100644
--- a/gensrc/thrift/PlanNodes.thrift
+++ b/gensrc/thrift/PlanNodes.thrift
@@ -154,6 +154,8 @@ struct THdfsParams {
     3: optional string hdfs_kerberos_principal
     4: optional string hdfs_kerberos_keytab
     5: optional list<THdfsConf> hdfs_conf
+    // Used for Cold Heat Separation to specify the root path
+    6: optional string root_path
 }

 // One broker range information.
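
For context, here is a minimal, self-contained sketch of the __isset fallback the BE change relies on. THdfsParamsLike and select_root_path are hypothetical stand-ins for the Thrift-generated THdfsParams and the inline logic in push_storage_policy_callback; they are not part of this patch. The idea: read the new optional root_path field only when the FE actually sent it, otherwise keep the previously hard-coded empty root path.

// Sketch only: THdfsParamsLike mimics the generated Thrift struct's
// __isset bookkeeping; it is a hypothetical stand-in, not patch code.
#include <iostream>
#include <string>

struct THdfsParamsLike {
    std::string root_path;
    struct {
        bool root_path = false;  // true only when the field arrived from the FE
    } __isset;
};

// Mirrors the selection added in push_storage_policy_callback: fall back to ""
// (the previous hard-coded argument) when root_path was not set by the FE.
std::string select_root_path(const THdfsParamsLike& params) {
    return params.__isset.root_path ? params.root_path : "";
}

int main() {
    THdfsParamsLike old_fe;                      // FE that does not know the property
    THdfsParamsLike new_fe;                      // FE that configured "root_path"
    new_fe.root_path = "/user/doris/cold_data";  // illustrative value only
    new_fe.__isset.root_path = true;

    std::cout << "old FE -> \"" << select_root_path(old_fe) << "\"\n";  // prints ""
    std::cout << "new FE -> \"" << select_root_path(new_fe) << "\"\n";  // prints the path
    return 0;
}

An FE that never sets the field therefore keeps the existing behavior (an empty root path passed to io::HdfsFileSystem::create), so the BE-side change stays backward compatible.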