[bug](multi-catalog) empty hadoop configuration when reading iceberg table (#10793)

This commit is contained in:
Ashin Gau
2022-07-14 10:18:59 +08:00
committed by GitHub
parent 4f4ce4674a
commit 077ec4b114
3 changed files with 3 additions and 8 deletions

View File

@@ -88,8 +88,6 @@ public class DescribeStmt extends ShowStmt {
private boolean isAllTables;
private boolean isOlapTable;
private List<List<String>> hmsSchema = null;
public DescribeStmt(TableName dbTableName, boolean isAllTables) {
this.dbTableName = dbTableName;
this.totalRows = new LinkedList<List<String>>();
@@ -236,9 +234,6 @@ public class DescribeStmt extends ShowStmt {
if (isAllTables) {
return totalRows;
} else {
if (hmsSchema != null) {
return hmsSchema;
}
Preconditions.checkNotNull(node);
return node.fetchResult().getRows();
}

View File

@@ -107,13 +107,13 @@ public class ExternalHiveScanProvider implements ExternalFileScanProvider {
return inputFormat.getSplits(jobConf, 0);
}
private Configuration setConfiguration() {
protected Configuration setConfiguration() {
Configuration conf = new Configuration();
Map<String, String> dfsProperties = hmsTable.getDfsProperties();
for (Map.Entry<String, String> entry : dfsProperties.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}
Map<String, String> s3Properties = hmsTable.getDfsProperties();
Map<String, String> s3Properties = hmsTable.getS3Properties();
for (Map.Entry<String, String> entry : s3Properties.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}

View File

@@ -101,7 +101,7 @@ public class ExternalIcebergScanProvider extends ExternalHiveScanProvider {
private org.apache.iceberg.Table getIcebergTable() throws MetaNotFoundException {
org.apache.iceberg.hive.HiveCatalog hiveCatalog = new org.apache.iceberg.hive.HiveCatalog();
Configuration conf = new Configuration();
Configuration conf = setConfiguration();
hiveCatalog.setConf(conf);
// initialize hive catalog
Map<String, String> catalogProperties = new HashMap<>();