[fix](nereids) Analyze failed for SQL that has count distinct with same col (#17928)

This problem was caused by slots with the same hash code being collapsed when they were put into a HashSet, which made the planner select the wrong implementation rule. Use a List instead of a Set as the return type of the getDistinctArguments method.
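
The effect is easy to reproduce in isolation: collecting the arguments of COUNT(DISTINCT id, id) into a set silently merges the two identical slots into one, while a list keeps both occurrences. Below is a minimal sketch of that difference, using plain strings in place of Nereids Expression slots, so the types are stand-ins rather than the planner's real ones:

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.ImmutableSet;

    import java.util.List;
    import java.util.Set;

    public class DistinctArgumentsDemo {
        public static void main(String[] args) {
            // The two arguments of COUNT(DISTINCT id, id): the same slot twice,
            // so both elements are equal and share one hash code.
            List<String> arguments = List.of("id", "id");

            // Old behavior: an ImmutableSet deduplicates, and the arity drops to 1.
            Set<String> asSet = arguments.stream().collect(ImmutableSet.toImmutableSet());

            // New behavior: an ImmutableList preserves both occurrences, so the arity stays 2.
            List<String> asList = arguments.stream().collect(ImmutableList.toImmutableList());

            System.out.println(asSet.size());  // 1
            System.out.println(asList.size()); // 2
        }
    }
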
AKIRA authored 2023-03-19 22:31:47 +09:00, committed by GitHub
parent 74dfdc00dc
commit 5c990fb737
5 changed files with 25 additions and 8 deletions


@@ -71,6 +71,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Lists;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -395,8 +396,7 @@ public class AggregateStrategies implements ImplementationRuleFactory {
     private List<PhysicalHashAggregate<Plan>> twoPhaseAggregateWithCountDistinctMulti(
             LogicalAggregate<? extends Plan> logicalAgg, CascadesContext cascadesContext) {
         AggregateParam inputToBufferParam = new AggregateParam(AggPhase.LOCAL, AggMode.INPUT_TO_BUFFER);
-        Set<Expression> countDistinctArguments = logicalAgg.getDistinctArguments();
+        Collection<Expression> countDistinctArguments = logicalAgg.getDistinctArguments();
         List<Expression> localAggGroupBy = ImmutableList.copyOf(ImmutableSet.<Expression>builder()
                 .addAll(logicalAgg.getGroupByExpressions())
@@ -513,7 +513,7 @@ public class AggregateStrategies implements ImplementationRuleFactory {
             LogicalAggregate<? extends Plan> logicalAgg, CascadesContext cascadesContext) {
         AggregateParam inputToBufferParam = new AggregateParam(AggPhase.LOCAL, AggMode.INPUT_TO_BUFFER);
-        Set<Expression> countDistinctArguments = logicalAgg.getDistinctArguments();
+        Collection<Expression> countDistinctArguments = logicalAgg.getDistinctArguments();
         List<Expression> localAggGroupBy = ImmutableList.copyOf(ImmutableSet.<Expression>builder()
                 .addAll(logicalAgg.getGroupByExpressions())
@@ -1134,7 +1134,7 @@ public class AggregateStrategies implements ImplementationRuleFactory {
                 RequireProperties.of(PhysicalProperties.GATHER), anyLocalAgg);
         if (logicalAgg.getGroupByExpressions().isEmpty()) {
-            Set<Expression> distinctArguments = logicalAgg.getDistinctArguments();
+            Collection<Expression> distinctArguments = logicalAgg.getDistinctArguments();
             RequireProperties requireDistinctHash = RequireProperties.of(PhysicalProperties.createHash(
                     distinctArguments, ShuffleType.AGGREGATE));
             PhysicalHashAggregate<? extends Plan> hashLocalGatherGlobalAgg = anyLocalGatherGlobalAgg
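
Why losing the duplicate matters: the strategy rules inspect the distinct arguments when choosing an implementation, so COUNT(DISTINCT id, id) has to report two arguments to be routed to the multi-argument count-distinct rewrite. The following is a simplified, hypothetical selector, not the actual AggregateStrategies code, that shows how a deduplicated arity picks the wrong path:

    import java.util.List;

    public class StrategySelectionSketch {
        // Hypothetical stand-in for rule selection keyed on distinct-argument arity.
        static String pickCountDistinctStrategy(List<String> distinctArguments) {
            // With a Set, COUNT(DISTINCT id, id) would report size() == 1 here,
            // the single-argument path would be taken, and analysis would later fail.
            return distinctArguments.size() > 1
                    ? "count-distinct-multi rewrite"
                    : "single-argument count-distinct";
        }

        public static void main(String[] args) {
            System.out.println(pickCountDistinctStrategy(List.of("id", "id")));
        }
    }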


@@ -23,6 +23,7 @@ import org.apache.doris.common.util.ProfileManager;
 import org.apache.doris.nereids.trees.plans.AbstractPlan;
 import org.apache.doris.persist.gson.GsonUtils;
 import org.apache.doris.planner.PlanNode;
+import org.apache.doris.statistics.Statistics;
 import org.apache.doris.thrift.TReportExecStatusParams;
 import org.apache.doris.thrift.TRuntimeProfileNode;
 import org.apache.doris.thrift.TUniqueId;
@@ -50,8 +51,15 @@ public class StatsErrorEstimator {
         legacyPlanIdStats = new HashMap<>();
     }
 
+    /**
+     * Map plan id to stats.
+     */
     public void updateLegacyPlanIdToPhysicalPlan(PlanNode planNode, AbstractPlan physicalPlan) {
-        legacyPlanIdStats.put(planNode.getId().asInt(), Pair.of(physicalPlan.getStats().getRowCount(),
+        Statistics statistics = physicalPlan.getStats();
+        if (statistics == null) {
+            return;
+        }
+        legacyPlanIdStats.put(planNode.getId().asInt(), Pair.of(statistics.getRowCount(),
                 (double) 0));
     }
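
The StatsErrorEstimator hunk adds a defensive guard: when the physical plan carries no derived statistics, getStats() can return null and the old one-liner would throw a NullPointerException on getRowCount(). Here is a minimal sketch of the same guard pattern, with a hypothetical Stats record standing in for org.apache.doris.statistics.Statistics:

    import java.util.HashMap;
    import java.util.Map;

    public class StatsGuardSketch {
        // Hypothetical stand-in for org.apache.doris.statistics.Statistics.
        record Stats(double rowCount) { }

        private final Map<Integer, Double> planIdToRowCount = new HashMap<>();

        void update(int planId, Stats stats) {
            // Skip plans without statistics instead of dereferencing null.
            if (stats == null) {
                return;
            }
            planIdToRowCount.put(planId, stats.rowCount());
        }
    }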


@@ -24,7 +24,7 @@ import org.apache.doris.nereids.trees.plans.Plan;
 import org.apache.doris.nereids.trees.plans.UnaryPlan;
 import org.apache.doris.nereids.util.ExpressionUtils;
-import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableList;
 import java.util.List;
 import java.util.Set;
@@ -47,10 +47,10 @@ public interface Aggregate<CHILD_TYPE extends Plan> extends UnaryPlan<CHILD_TYPE
         return ExpressionUtils.collect(getOutputExpressions(), AggregateFunction.class::isInstance);
     }
 
-    default Set<Expression> getDistinctArguments() {
+    default List<Expression> getDistinctArguments() {
         return getAggregateFunctions().stream()
                 .filter(AggregateFunction::isDistinct)
                 .flatMap(aggregateExpression -> aggregateExpression.getArguments().stream())
-                .collect(ImmutableSet.toImmutableSet());
+                .collect(ImmutableList.toImmutableList());
     }
 }


@@ -241,3 +241,10 @@ name_4 1 4
 -- !group_by_count_distinct --
 5
 
+-- !sql_distinct_same_col --
+1
+1
+1
+1
+1


@@ -211,4 +211,6 @@ suite("aggregate_strategies") {
             from numbers('number' = '10000', 'backend_num'='1')"""
         result([[10000L]])
     }
+
+    qt_sql_distinct_same_col """SELECT COUNT(DISTINCT id, id) FROM test_bucket10_table GROUP BY id """
 }