[fix](select outfile) Remove optional properties check of hdfs storage (#7272)
@@ -300,41 +300,6 @@ Planning example for concurrent export:
**But because the query statement has a top-level sorting node, even if the concurrent-export session variable is enabled for this query, it cannot be exported concurrently.**
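As a rough illustration (the session variable name `enable_parallel_outfile` below is an assumption, as are the table and column names; none of them appear in this hunk), the plan produced by `explain` can be used to confirm that a query with a top-level ORDER BY will not be exported concurrently:

```
-- assumed session variable that enables concurrent export
set enable_parallel_outfile = true;

-- the top-level ORDER BY forces a single result sink,
-- so the plan will not contain parallel outfile sinks
explain select k1, v1 from tbl order by k1
into outfile "hdfs://path/to/result_"
format as csv
properties
(
    "hdfs.fs.defaultfs" = "hdfs://namenode:port"
);
```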
8. Example 8
Export simple query results to the file `hdfs://path/to/result.txt` via HDFS. Specify the export format as CSV.
```
select * from tbl
into outfile "hdfs://path/to/result_"
format as csv
properties
(
"hdfs.fs.defaultfs" = "hdfs://namenode:port"
);
```
9. Example 9
Export simple query results to the file `hdfs://path/to/result.txt`. Specify the export format as CSV. Use the HDFS protocol directly and set Kerberos authentication information.
```
SELECT * FROM tbl
INTO OUTFILE "hdfs://path/to/result_"
FORMAT AS CSV
PROPERTIES
(
"hdfs.fs.defaultFS" = "hdfs://namenode_ip:namenode_port",
"hdfs.hadoop.security.authentication" = "kerberos",
"hdfs.kerberos_principal" = "doris@YOUR.COM",
"hdfs.kerberos_keytab" = "/home/doris/my.keytab",
"max_file_size" = "100MB"
);
```
If the result is less than 100MB, the file will be: `result_0.csv`.

If it is larger than 100MB, the output may be: `result_0.csv, result_1.csv, ...`.
## Return result
This is a synchronous command. When the command returns, the operation is complete.
@@ -298,39 +298,6 @@ explain select xxx from xxx where xxx into outfile "s3://xxx" format as csv pro
```
**But because the query statement has a top-level sorting node, even if the concurrent-export session variable is enabled for this query, it cannot be exported concurrently.**

7. Example 7

Export simple query results to the file `hdfs://path/to/result.txt` via HDFS. Specify the export format as CSV.
```
select * from tbl
into outfile "hdfs://path/to/result_"
format as csv
properties
(
"hdfs.fs.defaultfs" = "hdfs://namenode:port"
);
```
8. Example 8

Export simple query results to the file `hdfs://path/to/result.txt` via HDFS. Specify the export format as CSV, and set Kerberos authentication information.
```
select * from tbl
into outfile "hdfs://path/to/result_"
format as csv
properties
(
"hdfs.fs.defaultfs" = "hdfs://namenode:port",
"hdfs.hadoop.security.authentication" = "kerberos",
"hdfs.kerberos_principal" = "doris@your.com",
"hdfs.kerberos_keytab" = "/home/doris/my.keytab"
);
```
## Return result
@@ -408,7 +408,7 @@ public class OutFileClause {
 if (storageType == StorageBackend.StorageType.S3) {
     S3Storage.checkS3(new CaseInsensitiveMap(brokerProps));
 } else if (storageType == StorageBackend.StorageType.HDFS) {
-    HDFSStorage.checkHDFS(brokerProps);
+    HDFSStorage.checkHDFS(new CaseInsensitiveMap(brokerProps));
 }

 brokerDesc = new BrokerDesc(brokerName, storageType, brokerProps);
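The added line wraps `brokerProps` in a `CaseInsensitiveMap` before validation, so a key written as `fs.defaultfs` still satisfies the check against the `fs.defaultFS` constant. A minimal, self-contained sketch of the same idea using only the JDK (a `TreeMap` with `String.CASE_INSENSITIVE_ORDER`, rather than the Commons Collections class used in the diff):

```
import java.util.Map;
import java.util.TreeMap;

public class CaseInsensitiveLookupDemo {
    public static void main(String[] args) {
        // Property keys as a user might type them, with arbitrary casing.
        Map<String, String> props = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        props.put("fs.defaultfs", "hdfs://namenode:port");

        // Lookup against the canonical constant still succeeds because the
        // map compares keys case-insensitively.
        System.out.println(props.containsKey("fs.defaultFS")); // prints: true
    }
}
```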
@@ -19,39 +19,15 @@ package org.apache.doris.backup;
import org.apache.doris.common.UserException;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// TODO: extend BlobStorage
public class HDFSStorage {
    public static final String HDFS_DEFAULT_FS = "fs.defaultFS";
    public static final String USER = "hdfs_user";
    public static final String NAME_SERVICES = "dfs.nameservices";
    public static final String NAME_NODES = "dfs.ha.namenodes";
    public static final String RPC_ADDRESS = "dfs.namenode.rpc-address";
    public static final String FAILOVER_PROXY = "dfs.client.failover.proxy.provider";
    public static final String AUTHENTICATION = "hadoop.security.authentication";
    public static final String KERBEROS_PRINCIPAL = "kerberos_principal";
    public static final String KERB_TICKET_CACHE_PATH = "kerb_ticket_cache_path";
    public static final String TOKEN = "token";

    public static Set<String> keySets = new HashSet<>(Arrays.asList(HDFS_DEFAULT_FS, USER,
            NAME_SERVICES, NAME_NODES, RPC_ADDRESS, FAILOVER_PROXY,
            AUTHENTICATION,
            KERBEROS_PRINCIPAL, KERB_TICKET_CACHE_PATH,
            TOKEN));

    public static void checkHDFS(Map<String, String> properties) throws UserException {
        if (!properties.containsKey(HDFS_DEFAULT_FS)) {
            throw new UserException(HDFS_DEFAULT_FS + " not found. This is required field");
        }
        for (String key : properties.keySet()) {
            if (!keySets.contains(key)) {
                throw new UserException("Unknown properties " + key);
            }
        }
    }
}
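Given the commit title and the shrinking hunk (39 lines down to 15), the whitelist check over `keySets` appears to be what was removed. A hedged sketch of the simplified validator that would remain, assuming only the mandatory `fs.defaultFS` check is kept (this is an inference, not the exact post-change code):

```
// Assumed post-change shape: only the required fs.defaultFS key is verified;
// unknown optional properties are no longer rejected here.
public static void checkHDFS(Map<String, String> properties) throws UserException {
    if (!properties.containsKey(HDFS_DEFAULT_FS)) {
        throw new UserException(HDFS_DEFAULT_FS + " not found. This is required field");
    }
}
```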