[fix](hive) support partition prune for _HIVE_DEFAULT_PARTITION_ (#31736)
PR #23026 added partition pruning for Hive tables that contain `_HIVE_DEFAULT_PARTITION_`, but it always selects the `_HIVE_DEFAULT_PARTITION_` partition. PR #31613 added null-partition support for the list partitions of OLAP tables, so `_HIVE_DEFAULT_PARTITION_` can be treated as the null partition of a Hive table. This PR changes the partition-prune logic accordingly, so the default partition is pruned like any other list partition instead of always being selected.
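
To make the intent concrete, here is a minimal, self-contained sketch of the behaviour this change targets (illustrative only; `Partition`, `prune`, and the sample values are made up and are not the Doris classes touched in the diff below): the Hive default partition is modelled as a null list-partition value, so a predicate that cannot match NULL prunes it away, while a null-accepting predicate keeps it.

```java
import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;

// Minimal sketch (not Doris code): model the Hive default partition
// (`_HIVE_DEFAULT_PARTITION_`) as a null list-partition value so it goes
// through the normal list-partition pruning path instead of always being kept.
public class NullPartitionPruneSketch {
    // One single-column list partition; a null value stands for the Hive default partition.
    record Partition(String name, String value) {}

    static List<Partition> prune(List<Partition> partitions, Predicate<String> predicate) {
        // Evaluate the predicate against each partition's value; the default
        // partition (value == null) survives only when the predicate accepts null.
        return partitions.stream()
                .filter(p -> predicate.test(p.value()))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Partition> partitions = Arrays.asList(
                new Partition("p_2024", "2024"),
                new Partition("p_default", null)); // the Hive default partition

        // WHERE part_col = '2024' -> the default partition is pruned away.
        System.out.println(prune(partitions, "2024"::equals));
        // WHERE part_col IS NULL  -> only the default partition survives.
        System.out.println(prune(partitions, v -> v == null));
    }
}
```

With these inputs the first call keeps only `p_2024` and the second keeps only `p_default`, which is the pruning behaviour the diff below implements inside Doris.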
PartitionValue.java:

@@ -26,8 +26,7 @@ public class PartitionValue {
 
     public static final PartitionValue MAX_VALUE = new PartitionValue();
 
     private String value;
-    private boolean isHiveDefaultPartition;
-    private boolean isNullPartition;
+    private boolean isNullPartition = false;
 
     private PartitionValue() {

@@ -41,21 +40,12 @@ public class PartitionValue {
         this.value = value.toString();
     }
 
-    public PartitionValue(String value, boolean isHiveDefaultPartition) {
-        this.value = value;
-        this.isHiveDefaultPartition = isHiveDefaultPartition;
-    }
-
-    public PartitionValue(String value, boolean isNullPartition, boolean isHiveDefaultPartition) {
+    public PartitionValue(String value, boolean isNullPartition) {
         this.value = value;
         this.isNullPartition = isNullPartition;
-        this.isHiveDefaultPartition = isHiveDefaultPartition;
     }
 
     public LiteralExpr getValue(Type type) throws AnalysisException {
-        if (isHiveDefaultPartition) {
-            return new StringLiteral(value);
-        }
         if (isMax()) {
             return LiteralExpr.createInfinity(type, true);
         } else {

@@ -75,10 +65,6 @@ public class PartitionValue {
         }
     }
 
-    public boolean isHiveDefaultPartition() {
-        return isHiveDefaultPartition;
-    }
-
     @Override
     public boolean equals(Object o) {
         if (this == o) {

@@ -88,13 +74,13 @@ public class PartitionValue {
             return false;
         }
         PartitionValue that = (PartitionValue) o;
-        return isHiveDefaultPartition == that.isHiveDefaultPartition && isNullPartition == that.isNullPartition
+        return isNullPartition == that.isNullPartition
                 && Objects.equal(value, that.value);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hashCode(value, isHiveDefaultPartition, isNullPartition);
+        return Objects.hashCode(value, isNullPartition);
     }
 
     public boolean isNullPartition() {
ListPartitionItem.java:

@@ -180,15 +180,4 @@ public class ListPartitionItem extends PartitionItem {
 
         return sb.toString();
     }
-
-    // If any partition key is hive default partition, return true.
-    // Only used for hive table.
-    public boolean isHiveDefaultPartition() {
-        for (PartitionKey partitionKey : partitionKeys) {
-            if (partitionKey.isHiveDefaultPartition()) {
-                return true;
-            }
-        }
-        return false;
-    }
 }
PartitionKey.java:

@@ -29,7 +29,6 @@ import org.apache.doris.analysis.StringLiteral;
 import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
-import org.apache.doris.datasource.hive.HiveMetaStoreCache;
 import org.apache.doris.qe.SessionVariable;
 
 import com.google.common.base.Joiner;

@@ -148,10 +147,6 @@ public class PartitionKey implements Comparable<PartitionKey>, Writable {
                     partitionKey.originHiveKeys.add(values.get(i).getStringValue());
                 }
                 partitionKey.types.add(types.get(i).getPrimitiveType());
-                //If there is one default value, set `isDefaultListPartitionKey` to true
-                if (values.get(i).isHiveDefaultPartition()) {
-                    partitionKey.setDefaultListPartition(true);
-                }
             }
             if (values.isEmpty()) {
                 for (int i = 0; i < types.size(); ++i) {

@@ -557,19 +552,4 @@ public class PartitionKey implements Comparable<PartitionKey>, Writable {
             return result;
         }
     }
-
-    // if any of partition value is HIVE_DEFAULT_PARTITION
-    // return true to indicate that this is a hive default partition
-    public boolean isHiveDefaultPartition() {
-        for (LiteralExpr literalExpr : keys) {
-            if (!(literalExpr instanceof StringLiteral)) {
-                continue;
-            }
-            StringLiteral key = (StringLiteral) literalExpr;
-            if (key.getValue().equals(HiveMetaStoreCache.HIVE_DEFAULT_PARTITION)) {
-                return true;
-            }
-        }
-        return false;
-    }
 }
HiveMetaStoreCache.java:

@@ -296,8 +296,6 @@ public class HiveMetaStoreCache {
         try {
             PartitionKey key = PartitionKey.createListPartitionKeyWithTypes(values, types, true);
             ListPartitionItem listPartitionItem = new ListPartitionItem(Lists.newArrayList(key));
-            // if `PartitionKey` is default, set `PartitionItem` to default
-            listPartitionItem.setDefaultPartition(key.isHiveDefaultPartition());
             return listPartitionItem;
         } catch (AnalysisException e) {
             throw new CacheException("failed to convert hive partition %s to list partition in catalog %s",
HiveDefaultPartitionEvaluator.java (file removed):

@@ -1,68 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.nereids.rules.expression.rules;
-
-import org.apache.doris.nereids.trees.expressions.Expression;
-import org.apache.doris.nereids.trees.expressions.Slot;
-import org.apache.doris.nereids.trees.expressions.literal.BooleanLiteral;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Represents a hive default partition.
- * For any partition predicate, the evaluate() will always return true.
- */
-public class HiveDefaultPartitionEvaluator implements OnePartitionEvaluator {
-    private final long id;
-    private final List<Slot> partitionSlots;
-
-    public HiveDefaultPartitionEvaluator(long id, List<Slot> partitionSlots) {
-        this.id = id;
-        this.partitionSlots = partitionSlots;
-    }
-
-    @Override
-    public long getPartitionId() {
-        return id;
-    }
-
-    @Override
-    public List<Map<Slot, PartitionSlotInput>> getOnePartitionInputs() {
-        // this is mocked result.
-        PartitionSlotInput partitionSlotInput = new PartitionSlotInput(BooleanLiteral.TRUE, Maps.newHashMap());
-        Map<Slot, PartitionSlotInput> map = Maps.newHashMap();
-        map.put(partitionSlots.get(0), partitionSlotInput);
-        List<Map<Slot, PartitionSlotInput>> list = Lists.newArrayList();
-        list.add(map);
-        return list;
-    }
-
-    @Override
-    public Expression evaluate(Expression expression, Map<Slot, PartitionSlotInput> currentInputs) {
-        return BooleanLiteral.TRUE;
-    }
-
-    @Override
-    public boolean isDefaultPartition() {
-        return true;
-    }
-}
PartitionPruner.java:

@@ -125,13 +125,8 @@ public class PartitionPruner extends DefaultExpressionRewriter<Void> {
     public static final OnePartitionEvaluator toPartitionEvaluator(long id, PartitionItem partitionItem,
             List<Slot> partitionSlots, CascadesContext cascadesContext, PartitionTableType partitionTableType) {
         if (partitionItem instanceof ListPartitionItem) {
-            if (partitionTableType == PartitionTableType.HIVE
-                    && ((ListPartitionItem) partitionItem).isHiveDefaultPartition()) {
-                return new HiveDefaultPartitionEvaluator(id, partitionSlots);
-            } else {
-                return new OneListPartitionEvaluator(
-                        id, partitionSlots, (ListPartitionItem) partitionItem, cascadesContext);
-            }
+            return new OneListPartitionEvaluator(
+                    id, partitionSlots, (ListPartitionItem) partitionItem, cascadesContext);
         } else if (partitionItem instanceof RangePartitionItem) {
             return new OneRangePartitionEvaluator(
                     id, partitionSlots, (RangePartitionItem) partitionItem, cascadesContext);
PartitionDefinition.java:

@@ -161,7 +161,7 @@ public abstract class PartitionDefinition {
      */
     protected PartitionValue toLegacyPartitionValueStmt(Expression e) {
         if (e.isLiteral()) {
-            return new PartitionValue(((Literal) e).getStringValue(), e.isNullLiteral(), false);
+            return new PartitionValue(((Literal) e).getStringValue(), e.isNullLiteral());
         } else if (e instanceof MaxValue) {
             return PartitionValue.MAX_VALUE;
         }