@@ -34,6 +34,7 @@ import org.apache.doris.common.UserException;
 import org.apache.doris.common.security.authentication.AuthenticationConfig;
 import org.apache.doris.common.util.CacheBulkLoader;
 import org.apache.doris.common.util.LocationPath;
+import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.CacheException;
 import org.apache.doris.datasource.hive.AcidInfo.DeleteDeltaInfo;
 import org.apache.doris.datasource.property.PropertyConverter;
@@ -250,9 +251,8 @@ public class HiveMetaStoreCache {
         Map<Long, PartitionItem> idToPartitionItem = Maps.newHashMapWithExpectedSize(partitionNames.size());
         BiMap<String, Long> partitionNameToIdMap = HashBiMap.create(partitionNames.size());
         Map<Long, List<UniqueId>> idToUniqueIdsMap = Maps.newHashMapWithExpectedSize(partitionNames.size());
-        long idx = 0;
         for (String partitionName : partitionNames) {
-            long partitionId = idx++;
+            long partitionId = Util.genIdByName(catalog.getName(), key.dbName, key.tblName, partitionName);
             ListPartitionItem listPartitionItem = toListPartitionItem(partitionName, key.types);
             idToPartitionItem.put(partitionId, listPartitionItem);
             partitionNameToIdMap.put(partitionName, partitionId);
@@ -273,7 +273,7 @@ public class HiveMetaStoreCache {
             singleUidToColumnRangeMap = ListPartitionPrunerV2.genSingleUidToColumnRange(singleColumnRangeMap);
         }
         Map<Long, List<String>> partitionValuesMap = ListPartitionPrunerV2.getPartitionValuesMap(idToPartitionItem);
-        return new HivePartitionValues(idToPartitionItem, uidToPartitionRange, rangeToId, singleColumnRangeMap, idx,
+        return new HivePartitionValues(idToPartitionItem, uidToPartitionRange, rangeToId, singleColumnRangeMap,
                 partitionNameToIdMap, idToUniqueIdsMap, singleUidToColumnRangeMap, partitionValuesMap);
     }

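The two hunks above are the core of the change: the running counter idx is dropped, and a partition's cache id is now computed with Util.genIdByName(catalog.getName(), key.dbName, key.tblName, partitionName), i.e. it is a deterministic function of the fully qualified partition name rather than of the order in which partitions happened to be loaded. As a minimal sketch only, assuming a 64-bit hash over the name parts (the real Util.genIdByName in Doris may use a different scheme), such an id could be derived like this:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Hypothetical illustration, not the actual Doris implementation: derive a
// stable 64-bit id from the qualified name parts so that the same partition
// always maps to the same id, regardless of load order.
public final class NameBasedIdSketch {
    public static long genIdByName(String... parts) {
        try {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            for (String part : parts) {
                md5.update(part.getBytes(StandardCharsets.UTF_8));
                md5.update((byte) 0); // separator so ("a", "bc") differs from ("ab", "c")
            }
            byte[] digest = md5.digest();
            long id = 0;
            for (int i = 0; i < Long.BYTES; i++) {
                id = (id << 8) | (digest[i] & 0xFFL);
            }
            return id;
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalStateException("MD5 should always be available", e);
        }
    }
}

Because the id no longer depends on a counter, the remaining hunks below remove nextPartitionId and all of the code that threaded it through HivePartitionValues.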
@@ -638,13 +638,12 @@ public class HiveMetaStoreCache {
         Map<String, Long> partitionNameToIdMapBefore = copy.getPartitionNameToIdMap();
         Map<Long, List<UniqueId>> idToUniqueIdsMap = copy.getIdToUniqueIdsMap();
         Map<Long, PartitionItem> idToPartitionItem = new HashMap<>();
-        long idx = copy.getNextPartitionId();
         for (String partitionName : partitionNames) {
             if (partitionNameToIdMapBefore.containsKey(partitionName)) {
                 LOG.info("addPartitionsCache partitionName:[{}] has exist in table:[{}]", partitionName, tblName);
                 continue;
             }
-            long partitionId = idx++;
+            long partitionId = Util.genIdByName(catalog.getName(), dbName, tblName, partitionName);
             ListPartitionItem listPartitionItem = toListPartitionItem(partitionName, key.types);
             idToPartitionItemBefore.put(partitionId, listPartitionItem);
             idToPartitionItem.put(partitionId, listPartitionItem);
@@ -653,7 +652,6 @@ public class HiveMetaStoreCache {
         Map<Long, List<String>> partitionValuesMapBefore = copy.getPartitionValuesMap();
         Map<Long, List<String>> partitionValuesMap = ListPartitionPrunerV2.getPartitionValuesMap(idToPartitionItem);
         partitionValuesMapBefore.putAll(partitionValuesMap);
-        copy.setNextPartitionId(idx);
         if (key.types.size() > 1) {
             Map<UniqueId, Range<PartitionKey>> uidToPartitionRangeBefore = copy.getUidToPartitionRange();
             // uidToPartitionRange and rangeToId are only used for multi-column partition
@@ -1075,7 +1073,6 @@ public class HiveMetaStoreCache {

     @Data
     public static class HivePartitionValues {
-        private long nextPartitionId;
         private BiMap<String, Long> partitionNameToIdMap;
         private Map<Long, List<UniqueId>> idToUniqueIdsMap;
         private Map<Long, PartitionItem> idToPartitionItem;
@@ -1094,7 +1091,6 @@ public class HiveMetaStoreCache {
                 Map<UniqueId, Range<PartitionKey>> uidToPartitionRange,
                 Map<Range<PartitionKey>, UniqueId> rangeToId,
                 RangeMap<ColumnBound, UniqueId> singleColumnRangeMap,
-                long nextPartitionId,
                 BiMap<String, Long> partitionNameToIdMap,
                 Map<Long, List<UniqueId>> idToUniqueIdsMap,
                 Map<UniqueId, Range<ColumnBound>> singleUidToColumnRangeMap,
@@ -1103,7 +1099,6 @@ public class HiveMetaStoreCache {
             this.uidToPartitionRange = uidToPartitionRange;
             this.rangeToId = rangeToId;
             this.singleColumnRangeMap = singleColumnRangeMap;
-            this.nextPartitionId = nextPartitionId;
             this.partitionNameToIdMap = partitionNameToIdMap;
             this.idToUniqueIdsMap = idToUniqueIdsMap;
             this.singleUidToColumnRangeMap = singleUidToColumnRangeMap;
@@ -1112,7 +1107,6 @@ public class HiveMetaStoreCache {

         public HivePartitionValues copy() {
             HivePartitionValues copy = new HivePartitionValues();
-            copy.setNextPartitionId(nextPartitionId);
             copy.setPartitionNameToIdMap(partitionNameToIdMap == null ? null : HashBiMap.create(partitionNameToIdMap));
             copy.setIdToUniqueIdsMap(idToUniqueIdsMap == null ? null : Maps.newHashMap(idToUniqueIdsMap));
             copy.setIdToPartitionItem(idToPartitionItem == null ? null : Maps.newHashMap(idToPartitionItem));

@@ -548,7 +548,7 @@ public class CatalogMgrTest extends TestWithFeService {
             singleUidToColumnRangeMap = ListPartitionPrunerV2.genSingleUidToColumnRange(singleColumnRangeMap);
         }
         Map<Long, List<String>> partitionValuesMap = ListPartitionPrunerV2.getPartitionValuesMap(idToPartitionItem);
-        return new HivePartitionValues(idToPartitionItem, uidToPartitionRange, rangeToId, singleColumnRangeMap, idx,
+        return new HivePartitionValues(idToPartitionItem, uidToPartitionRange, rangeToId, singleColumnRangeMap,
                 partitionNameToIdMap, idToUniqueIdsMap, singleUidToColumnRangeMap, partitionValuesMap);
     }

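The test-side change mirrors the production code: the idx argument is dropped from the HivePartitionValues constructor call in CatalogMgrTest as well. The property the patch relies on is that the same qualified partition name always maps to the same id across independent loads; a hypothetical JUnit check (the catalog, database, table, and partition names below are made up for illustration) would look like:

import org.apache.doris.common.util.Util;
import org.junit.Assert;
import org.junit.Test;

public class PartitionIdStabilityTest {
    @Test
    public void sameNameYieldsSameId() {
        // Hypothetical test, not part of the patch: only the determinism
        // property of Util.genIdByName is being illustrated here.
        long first = Util.genIdByName("hive_catalog", "tpch", "lineitem", "dt=2024-01-01");
        long second = Util.genIdByName("hive_catalog", "tpch", "lineitem", "dt=2024-01-01");
        Assert.assertEquals(first, second);
    }
}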