[fix](Nereids) refactor CTE and EliminateAliasNode, and fix the bug that CTE references reuse the same RelationId (#14534)

This PR contributes:
- Support EXPLAIN on statements that contain a CTE.
- Refine CTE handling and fix a bug: every reference to a CTE reused the same analyzed plan, so the LogicalOlapScan nodes of different references shared the same RelationId.
- Rename EliminateAliasNode to LogicalSubQueryAliasToLogicalProject and move it to the top of the rewrite stage, so the analyzed plan can still be observed through the LogicalSubQueryAlias nodes and their aliases.
- Make jobs traverse the left child first, so ExprIds grow from the left child to the right child.
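For illustration, here is a minimal, self-contained sketch of the RelationId reuse problem and the fix, using toy `CteReuseSketch`/`AnalyzedPlan` classes rather than the real Doris classes in the diff below: the first reference to a CTE consumes the plan analyzed at registration time, and every later reference re-runs the analyze builder so it receives a fresh RelationId.

```java
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

// Illustrative model only (toy classes, not the real Doris CTEContext):
// analyzing a CTE body assigns a RelationId to each scan. If two references
// to the same CTE share one analyzed plan, they share one RelationId and
// conflict in the memo, so every reference after the first must re-run the
// analysis and get a fresh id.
public class CteReuseSketch {
    static final AtomicInteger RELATION_ID = new AtomicInteger();

    static class AnalyzedPlan {
        final int relationId;

        AnalyzedPlan(int relationId) {
            this.relationId = relationId;
        }
    }

    static class CteContext {
        private AnalyzedPlan analyzedPlanCacheOnce;               // consumed by the first reference
        private final Supplier<AnalyzedPlan> analyzePlanBuilder;  // re-analyzes the parsed body

        CteContext(Supplier<AnalyzedPlan> analyzePlanBuilder) {
            this.analyzePlanBuilder = analyzePlanBuilder;
            // the CTE body is analyzed at least once when it is registered
            this.analyzedPlanCacheOnce = analyzePlanBuilder.get();
        }

        AnalyzedPlan getAnalyzedCte() {
            if (analyzedPlanCacheOnce != null) {                  // first use: hand out the cached plan
                AnalyzedPlan plan = analyzedPlanCacheOnce;
                analyzedPlanCacheOnce = null;
                return plan;
            }
            return analyzePlanBuilder.get();                      // later uses: fresh RelationId each time
        }
    }

    public static void main(String[] args) {
        // WITH cte1 AS (SELECT * FROM t) SELECT * FROM cte1 a JOIN cte1 b ON a.id = b.id
        CteContext cte1 = new CteContext(() -> new AnalyzedPlan(RELATION_ID.incrementAndGet()));
        System.out.println(cte1.getAnalyzedCte().relationId); // 1
        System.out.println(cte1.getAnalyzedCte().relationId); // 2 -- no longer reused
    }
}
```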
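The last point is easiest to see by treating the job pool as a stack, which is how the rewrite and optimize jobs in this diff push their children. This standalone sketch (plain Java collections, not the Doris job classes) shows why pushing children in reverse index order makes the left child be processed first, so ExprIds grow from left to right.

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

// Standalone sketch (not the Doris scheduler): the job pool behaves like a
// stack, so children pushed in reverse index order pop out left to right.
public class LeftChildFirst {
    public static void main(String[] args) {
        Deque<String> jobPool = new ArrayDeque<>();
        List<String> children = List.of("left child", "middle child", "right child");
        for (int i = children.size() - 1; i >= 0; i--) {
            jobPool.push(children.get(i));
        }
        while (!jobPool.isEmpty()) {
            // prints: left child, middle child, right child
            System.out.println(jobPool.pop());
        }
    }
}
```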
@@ -49,9 +49,12 @@ singleStatement
    ;

statement
    : cte? query #statementDefault
    | (EXPLAIN planType? | DESC | DESCRIBE)
          level=(VERBOSE | GRAPH | PLAN)? query #explain
    : explain? cte? query #statementDefault
    ;

explain
    : (EXPLAIN planType? | DESC | DESCRIBE)
          level=(VERBOSE | GRAPH | PLAN)?
    ;

planType

@@ -32,6 +32,7 @@ import org.apache.doris.nereids.properties.PhysicalProperties;
import org.apache.doris.nereids.rules.Rule;
import org.apache.doris.nereids.rules.RuleFactory;
import org.apache.doris.nereids.rules.RuleSet;
import org.apache.doris.nereids.rules.analysis.CTEContext;
import org.apache.doris.nereids.rules.analysis.Scope;
import org.apache.doris.nereids.trees.expressions.SubqueryExpr;
import org.apache.doris.nereids.trees.plans.Plan;
@@ -50,6 +51,8 @@ import java.util.Optional;
public class CascadesContext {
    private final Memo memo;
    private final StatementContext statementContext;

    private CTEContext cteContext;
    private RuleSet ruleSet;
    private JobPool jobPool;
    private final JobScheduler jobScheduler;
@@ -58,13 +61,17 @@ public class CascadesContext {
    private final Map<SubqueryExpr, Boolean> subqueryExprIsAnalyzed;
    private final RuntimeFilterContext runtimeFilterContext;

    public CascadesContext(Memo memo, StatementContext statementContext) {
        this(memo, statementContext, new CTEContext());
    }

    /**
     * Constructor of OptimizerContext.
     *
     * @param memo {@link Memo} reference
     * @param statementContext {@link StatementContext} reference
     */
    public CascadesContext(Memo memo, StatementContext statementContext) {
    public CascadesContext(Memo memo, StatementContext statementContext, CTEContext cteContext) {
        this.memo = memo;
        this.statementContext = statementContext;
        this.ruleSet = new RuleSet();
@@ -73,6 +80,7 @@ public class CascadesContext {
        this.currentJobContext = new JobContext(this, PhysicalProperties.ANY, Double.MAX_VALUE);
        this.subqueryExprIsAnalyzed = new HashMap<>();
        this.runtimeFilterContext = new RuntimeFilterContext(getConnectContext().getSessionVariable());
        this.cteContext = cteContext;
    }

    public static CascadesContext newContext(StatementContext statementContext, Plan initPlan) {
@@ -176,6 +184,14 @@ public class CascadesContext {
        return execute(new RewriteTopDownJob(memo.getRoot(), rules, currentJobContext));
    }

    public CTEContext getCteContext() {
        return cteContext;
    }

    public void setCteContext(CTEContext cteContext) {
        this.cteContext = cteContext;
    }

    private CascadesContext execute(Job job) {
        pushJob(job);
        jobScheduler.executeJobPool(this);

@@ -19,7 +19,6 @@ package org.apache.doris.nereids;

import org.apache.doris.analysis.StatementBase;
import org.apache.doris.common.IdGenerator;
import org.apache.doris.nereids.rules.analysis.CTEContext;
import org.apache.doris.nereids.trees.expressions.ExprId;
import org.apache.doris.nereids.trees.plans.RelationId;
import org.apache.doris.qe.ConnectContext;
@@ -40,21 +39,13 @@ public class StatementContext {

    private StatementBase parsedStatement;

    private CTEContext cteContext;

    public StatementContext() {
        this.connectContext = ConnectContext.get();
        this.cteContext = new CTEContext();
    }

    public StatementContext(ConnectContext connectContext, OriginStatement originStatement) {
        this(connectContext, originStatement, new CTEContext());
    }

    public StatementContext(ConnectContext connectContext, OriginStatement originStatement, CTEContext cteContext) {
        this.connectContext = connectContext;
        this.originStatement = originStatement;
        this.cteContext = cteContext;
    }

    public void setConnectContext(ConnectContext connectContext) {
@@ -85,14 +76,6 @@ public class StatementContext {
        return relationIdGenerator.getNextId();
    }

    public CTEContext getCteContext() {
        return cteContext;
    }

    public void setCteContext(CTEContext cteContext) {
        this.cteContext = cteContext;
    }

    public void setParsedStatement(StatementBase parsedStatement) {
        this.parsedStatement = parsedStatement;
    }

@@ -21,7 +21,6 @@ import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.jobs.batch.AnalyzeRulesJob;
import org.apache.doris.nereids.jobs.batch.AnalyzeSubqueryRulesJob;
import org.apache.doris.nereids.jobs.batch.CheckAnalysisJob;
import org.apache.doris.nereids.jobs.batch.FinalizeAnalyzeJob;
import org.apache.doris.nereids.jobs.batch.TypeCoercionJob;
import org.apache.doris.nereids.rules.analysis.Scope;

@@ -52,7 +51,6 @@ public class NereidsAnalyzer {
        new AnalyzeRulesJob(cascadesContext, outerScope).execute();
        new AnalyzeSubqueryRulesJob(cascadesContext).execute();
        new TypeCoercionJob(cascadesContext).execute();
        new FinalizeAnalyzeJob(cascadesContext).execute();
        // check whether analyze result is meaningful
        new CheckAnalysisJob(cascadesContext).execute();
    }

@@ -33,11 +33,11 @@ import com.google.common.collect.ImmutableList;
 * For the project and filter on AGG, try to adjust them to apply.
 * For the project and filter under AGG, bring the filter under AGG and merge it with agg.
 */
public class AdjustApplyFromCorrelatToUnCorrelatJob extends BatchRulesJob {
public class AdjustApplyFromCorrelateToUnCorrelateJob extends BatchRulesJob {
    /**
     * Constructor.
     */
    public AdjustApplyFromCorrelatToUnCorrelatJob(CascadesContext cascadesContext) {
    public AdjustApplyFromCorrelateToUnCorrelateJob(CascadesContext cascadesContext) {
        super(cascadesContext);
        rulesJob.addAll(ImmutableList.of(
                topDownBatch(ImmutableList.of(

@ -1,40 +0,0 @@
|
||||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package org.apache.doris.nereids.jobs.batch;
|
||||
|
||||
import org.apache.doris.nereids.CascadesContext;
|
||||
import org.apache.doris.nereids.rules.analysis.EliminateAliasNode;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
||||
/**
|
||||
* Job to eliminate the logical node of sub query and alias
|
||||
*/
|
||||
public class FinalizeAnalyzeJob extends BatchRulesJob {
|
||||
|
||||
/**
|
||||
* constructor
|
||||
* @param cascadesContext ctx
|
||||
*/
|
||||
public FinalizeAnalyzeJob(CascadesContext cascadesContext) {
|
||||
super(cascadesContext);
|
||||
rulesJob.addAll(ImmutableList.of(
|
||||
bottomUpBatch(ImmutableList.of(new EliminateAliasNode()))
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -21,6 +21,7 @@ import org.apache.doris.nereids.CascadesContext;
|
||||
import org.apache.doris.nereids.jobs.Job;
|
||||
import org.apache.doris.nereids.rules.RuleSet;
|
||||
import org.apache.doris.nereids.rules.analysis.CheckAfterRewrite;
|
||||
import org.apache.doris.nereids.rules.analysis.LogicalSubQueryAliasToLogicalProject;
|
||||
import org.apache.doris.nereids.rules.expression.rewrite.ExpressionNormalization;
|
||||
import org.apache.doris.nereids.rules.expression.rewrite.ExpressionOptimization;
|
||||
import org.apache.doris.nereids.rules.mv.SelectMaterializedIndexWithAggregate;
|
||||
@ -33,6 +34,7 @@ import org.apache.doris.nereids.rules.rewrite.logical.ExtractSingleTableExpressi
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.FindHashConditionForJoin;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.InferPredicates;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.LimitPushDown;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.MergeProjects;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.NormalizeAggregate;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.PruneOlapScanPartition;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.PushFilterInsideJoin;
|
||||
@ -53,6 +55,11 @@ public class NereidsRewriteJobExecutor extends BatchRulesJob {
|
||||
public NereidsRewriteJobExecutor(CascadesContext cascadesContext) {
|
||||
super(cascadesContext);
|
||||
ImmutableList<Job> jobs = new ImmutableList.Builder<Job>()
|
||||
// MergeProjects depends on this rule
|
||||
.add(bottomUpBatch(ImmutableList.of(new LogicalSubQueryAliasToLogicalProject())))
|
||||
// AdjustApplyFromCorrelateToUnCorrelateJob and ConvertApplyToJoinJob
|
||||
// and SelectMaterializedIndexWithAggregate depends on this rule
|
||||
.add(topDownBatch(ImmutableList.of(new MergeProjects())))
|
||||
/*
|
||||
* Subquery unnesting.
|
||||
* 1. Adjust the plan in correlated logicalApply
|
||||
@ -60,7 +67,7 @@ public class NereidsRewriteJobExecutor extends BatchRulesJob {
|
||||
* 2. Convert logicalApply to a logicalJoin.
|
||||
* TODO: group these rules to make sure the result plan is what we expected.
|
||||
*/
|
||||
.addAll(new AdjustApplyFromCorrelatToUnCorrelatJob(cascadesContext).rulesJob)
|
||||
.addAll(new AdjustApplyFromCorrelateToUnCorrelateJob(cascadesContext).rulesJob)
|
||||
.addAll(new ConvertApplyToJoinJob(cascadesContext).rulesJob)
|
||||
.add(topDownBatch(ImmutableList.of(new ExpressionNormalization(cascadesContext.getConnectContext()))))
|
||||
.add(topDownBatch(ImmutableList.of(new ExpressionOptimization())))
|
||||
|
||||
@ -23,6 +23,8 @@ import org.apache.doris.nereids.jobs.JobType;
|
||||
import org.apache.doris.nereids.memo.Group;
|
||||
import org.apache.doris.nereids.memo.GroupExpression;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Job to optimize {@link Group} in {@link org.apache.doris.nereids.memo.Memo}.
|
||||
*/
|
||||
@ -41,12 +43,16 @@ public class OptimizeGroupJob extends Job {
|
||||
return;
|
||||
}
|
||||
if (!group.isExplored()) {
|
||||
for (GroupExpression logicalGroupExpression : group.getLogicalExpressions()) {
|
||||
context.getCascadesContext().pushJob(new OptimizeGroupExpressionJob(logicalGroupExpression, context));
|
||||
List<GroupExpression> logicalExpressions = group.getLogicalExpressions();
|
||||
for (int i = logicalExpressions.size() - 1; i >= 0; i--) {
|
||||
context.getCascadesContext().pushJob(
|
||||
new OptimizeGroupExpressionJob(logicalExpressions.get(i), context));
|
||||
}
|
||||
}
|
||||
for (GroupExpression physicalGroupExpression : group.getPhysicalExpressions()) {
|
||||
context.getCascadesContext().pushJob(new CostAndEnforcerJob(physicalGroupExpression, context));
|
||||
|
||||
List<GroupExpression> physicalExpressions = group.getPhysicalExpressions();
|
||||
for (int i = physicalExpressions.size() - 1; i >= 0; i--) {
|
||||
context.getCascadesContext().pushJob(new CostAndEnforcerJob(physicalExpressions.get(i), context));
|
||||
}
|
||||
group.setExplored(true);
|
||||
}
|
||||
|
||||
@ -65,8 +65,9 @@ public class RewriteBottomUpJob extends Job {
|
||||
GroupExpression logicalExpression = group.getLogicalExpression();
|
||||
if (!childrenOptimized) {
|
||||
pushJob(new RewriteBottomUpJob(group, rules, context, true));
|
||||
for (Group childGroup : logicalExpression.children()) {
|
||||
pushJob(new RewriteBottomUpJob(childGroup, rules, context, false));
|
||||
List<Group> children = logicalExpression.children();
|
||||
for (int i = children.size() - 1; i >= 0; i--) {
|
||||
pushJob(new RewriteBottomUpJob(children.get(i), rules, context, false));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
@ -95,8 +95,9 @@ public class RewriteTopDownJob extends Job {
|
||||
}
|
||||
}
|
||||
|
||||
for (Group childGroup : group.getLogicalExpression().children()) {
|
||||
pushJob(new RewriteTopDownJob(childGroup, rules, context));
|
||||
List<Group> children = group.getLogicalExpression().children();
|
||||
for (int i = children.size() - 1; i >= 0; i--) {
|
||||
pushJob(new RewriteTopDownJob(children.get(i), rules, context));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -21,6 +21,7 @@ import org.apache.doris.common.IdGenerator;
|
||||
import org.apache.doris.nereids.CascadesContext;
|
||||
import org.apache.doris.nereids.StatementContext;
|
||||
import org.apache.doris.nereids.properties.LogicalProperties;
|
||||
import org.apache.doris.nereids.rules.analysis.CTEContext;
|
||||
import org.apache.doris.nereids.trees.expressions.Expression;
|
||||
import org.apache.doris.nereids.trees.plans.GroupPlan;
|
||||
import org.apache.doris.nereids.trees.plans.Plan;
|
||||
@ -174,6 +175,10 @@ public class Memo {
|
||||
return new CascadesContext(this, statementContext);
|
||||
}
|
||||
|
||||
public CascadesContext newCascadesContext(StatementContext statementContext, CTEContext cteContext) {
|
||||
return new CascadesContext(this, statementContext, cteContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* init memo by a first plan.
|
||||
* @param plan first plan
|
||||
|
||||
@ -139,7 +139,6 @@ import org.apache.doris.nereids.trees.expressions.literal.VarcharLiteral;
|
||||
import org.apache.doris.nereids.trees.plans.JoinType;
|
||||
import org.apache.doris.nereids.trees.plans.Plan;
|
||||
import org.apache.doris.nereids.trees.plans.algebra.Aggregate;
|
||||
import org.apache.doris.nereids.trees.plans.commands.Command;
|
||||
import org.apache.doris.nereids.trees.plans.commands.ExplainCommand;
|
||||
import org.apache.doris.nereids.trees.plans.commands.ExplainCommand.ExplainLevel;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalAggregate;
|
||||
@ -212,8 +211,9 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
|
||||
|
||||
@Override
|
||||
public LogicalPlan visitStatementDefault(StatementDefaultContext ctx) {
|
||||
LogicalPlan plan = visitQuery(ctx.query());
|
||||
return ctx.cte() == null ? plan : withCte(ctx.cte(), plan);
|
||||
LogicalPlan plan = plan(ctx.query());
|
||||
plan = withCte(plan, ctx.cte());
|
||||
return withExplain(plan, ctx.explain());
|
||||
}
|
||||
|
||||
/**
|
||||
@ -242,8 +242,11 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
|
||||
/**
|
||||
* process CTE and store the results in a logical plan node LogicalCTE
|
||||
*/
|
||||
public LogicalPlan withCte(CteContext ctx, LogicalPlan plan) {
|
||||
return new LogicalCTE<>(visit(ctx.aliasQuery(), LogicalSubQueryAlias.class), plan);
|
||||
public LogicalPlan withCte(LogicalPlan plan, CteContext ctx) {
|
||||
if (ctx == null) {
|
||||
return plan;
|
||||
}
|
||||
return new LogicalCTE<>((List) visit(ctx.aliasQuery(), LogicalSubQueryAlias.class), plan);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -263,29 +266,6 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public Command visitExplain(ExplainContext ctx) {
|
||||
return ParserUtils.withOrigin(ctx, () -> {
|
||||
LogicalPlan logicalPlan = plan(ctx.query());
|
||||
ExplainLevel explainLevel = ExplainLevel.NORMAL;
|
||||
|
||||
if (ctx.planType() != null) {
|
||||
if (ctx.level == null || !ctx.level.getText().equalsIgnoreCase("plan")) {
|
||||
throw new ParseException("Only explain plan can use plan type: " + ctx.planType().getText(), ctx);
|
||||
}
|
||||
}
|
||||
|
||||
if (ctx.level != null) {
|
||||
if (!ctx.level.getText().equalsIgnoreCase("plan")) {
|
||||
explainLevel = ExplainLevel.valueOf(ctx.level.getText().toUpperCase(Locale.ROOT));
|
||||
} else {
|
||||
explainLevel = parseExplainPlanType(ctx.planType());
|
||||
}
|
||||
}
|
||||
return new ExplainCommand(explainLevel, logicalPlan);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public LogicalPlan visitQuery(QueryContext ctx) {
|
||||
return ParserUtils.withOrigin(ctx, () -> {
|
||||
@ -298,18 +278,22 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
|
||||
@Override
|
||||
public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationContext ctx) {
|
||||
return ParserUtils.withOrigin(ctx, () -> {
|
||||
SelectClauseContext selectCtx = ctx.selectClause();
|
||||
LogicalPlan selectPlan;
|
||||
if (ctx.fromClause() == null) {
|
||||
return withOneRowRelation(ctx.selectClause());
|
||||
selectPlan = withOneRowRelation(selectCtx);
|
||||
} else {
|
||||
LogicalPlan relation = visitFromClause(ctx.fromClause());
|
||||
selectPlan = withSelectQuerySpecification(
|
||||
ctx, relation,
|
||||
selectCtx,
|
||||
Optional.ofNullable(ctx.whereClause()),
|
||||
Optional.ofNullable(ctx.aggClause()),
|
||||
Optional.ofNullable(ctx.havingClause())
|
||||
);
|
||||
}
|
||||
|
||||
LogicalPlan relation = visitFromClause(ctx.fromClause());
|
||||
return withSelectQuerySpecification(
|
||||
ctx, relation,
|
||||
ctx.selectClause(),
|
||||
Optional.ofNullable(ctx.whereClause()),
|
||||
Optional.ofNullable(ctx.aggClause()),
|
||||
Optional.ofNullable(ctx.havingClause())
|
||||
);
|
||||
return withSelectHint(selectPlan, selectCtx.selectHint());
|
||||
});
|
||||
}
|
||||
|
||||
@ -866,6 +850,30 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
|
||||
return typedVisit(ctx);
|
||||
}
|
||||
|
||||
private LogicalPlan withExplain(LogicalPlan inputPlan, ExplainContext ctx) {
|
||||
if (ctx == null) {
|
||||
return inputPlan;
|
||||
}
|
||||
return ParserUtils.withOrigin(ctx, () -> {
|
||||
ExplainLevel explainLevel = ExplainLevel.NORMAL;
|
||||
|
||||
if (ctx.planType() != null) {
|
||||
if (ctx.level == null || !ctx.level.getText().equalsIgnoreCase("plan")) {
|
||||
throw new ParseException("Only explain plan can use plan type: " + ctx.planType().getText(), ctx);
|
||||
}
|
||||
}
|
||||
|
||||
if (ctx.level != null) {
|
||||
if (!ctx.level.getText().equalsIgnoreCase("plan")) {
|
||||
explainLevel = ExplainLevel.valueOf(ctx.level.getText().toUpperCase(Locale.ROOT));
|
||||
} else {
|
||||
explainLevel = parseExplainPlanType(ctx.planType());
|
||||
}
|
||||
}
|
||||
return new ExplainCommand(explainLevel, inputPlan);
|
||||
});
|
||||
}
|
||||
|
||||
private LogicalPlan withQueryOrganization(LogicalPlan inputPlan, QueryOrganizationContext ctx) {
|
||||
Optional<SortClauseContext> sortClauseContext = Optional.ofNullable(ctx.sortClause());
|
||||
Optional<LimitClauseContext> limitClauseContext = Optional.ofNullable(ctx.limitClause());
|
||||
@ -929,8 +937,7 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
|
||||
LogicalPlan aggregate = withAggregate(filter, selectClause, aggClause);
|
||||
// TODO: replace and process having at this position
|
||||
LogicalPlan having = withHaving(aggregate, havingClause);
|
||||
LogicalPlan projection = withProjection(having, selectClause, aggClause);
|
||||
return withSelectHint(projection, selectClause.selectHint());
|
||||
return withProjection(having, selectClause, aggClause);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@ -19,6 +19,7 @@ package org.apache.doris.nereids.pattern;
|
||||
|
||||
import org.apache.doris.nereids.CascadesContext;
|
||||
import org.apache.doris.nereids.StatementContext;
|
||||
import org.apache.doris.nereids.rules.analysis.CTEContext;
|
||||
import org.apache.doris.nereids.trees.plans.Plan;
|
||||
import org.apache.doris.qe.ConnectContext;
|
||||
|
||||
@ -31,6 +32,7 @@ public class MatchingContext<TYPE extends Plan> {
|
||||
public final CascadesContext cascadesContext;
|
||||
public final StatementContext statementContext;
|
||||
public final ConnectContext connectContext;
|
||||
public final CTEContext cteContext;
|
||||
|
||||
/**
|
||||
* the MatchingContext is the param pass through the MatchedAction.
|
||||
@ -45,5 +47,6 @@ public class MatchingContext<TYPE extends Plan> {
|
||||
this.cascadesContext = cascadesContext;
|
||||
this.statementContext = cascadesContext.getStatementContext();
|
||||
this.connectContext = cascadesContext.getConnectContext();
|
||||
this.cteContext = cascadesContext.getCteContext();
|
||||
}
|
||||
}
|
||||
|
||||
@ -77,14 +77,7 @@ public enum RuleType {
|
||||
AGGREGATE_DISASSEMBLE(RuleTypeClass.REWRITE),
|
||||
COLUMN_PRUNE_PROJECTION(RuleTypeClass.REWRITE),
|
||||
ELIMINATE_UNNECESSARY_PROJECT(RuleTypeClass.REWRITE),
|
||||
ELIMINATE_ALIAS_NODE(RuleTypeClass.REWRITE),
|
||||
|
||||
PROJECT_ELIMINATE_ALIAS_NODE(RuleTypeClass.REWRITE),
|
||||
FILTER_ELIMINATE_ALIAS_NODE(RuleTypeClass.REWRITE),
|
||||
JOIN_ELIMINATE_ALIAS_NODE(RuleTypeClass.REWRITE),
|
||||
JOIN_LEFT_CHILD_ELIMINATE_ALIAS_NODE(RuleTypeClass.REWRITE),
|
||||
JOIN_RIGHT_CHILD_ELIMINATE_ALIAS_NODE(RuleTypeClass.REWRITE),
|
||||
AGGREGATE_ELIMINATE_ALIAS_NODE(RuleTypeClass.REWRITE),
|
||||
LOGICAL_SUB_QUERY_ALIAS_TO_LOGICAL_PROJECT(RuleTypeClass.REWRITE),
|
||||
|
||||
// subquery analyze
|
||||
ANALYZE_FILTER_SUBQUERY(RuleTypeClass.REWRITE),
|
||||
|
||||
@ -50,23 +50,23 @@ public class AnalyzeSubquery implements AnalysisRuleFactory {
|
||||
@Override
|
||||
public List<Rule> buildRules() {
|
||||
return ImmutableList.of(
|
||||
RuleType.ANALYZE_FILTER_SUBQUERY.build(
|
||||
logicalFilter().thenApply(ctx -> {
|
||||
LogicalFilter filter = ctx.root;
|
||||
Set<SubqueryExpr> subqueryExprs = filter.getPredicates()
|
||||
.collect(SubqueryExpr.class::isInstance);
|
||||
if (subqueryExprs.isEmpty()) {
|
||||
return filter;
|
||||
}
|
||||
RuleType.ANALYZE_FILTER_SUBQUERY.build(
|
||||
logicalFilter().thenApply(ctx -> {
|
||||
LogicalFilter filter = ctx.root;
|
||||
Set<SubqueryExpr> subqueryExprs = filter.getPredicates()
|
||||
.collect(SubqueryExpr.class::isInstance);
|
||||
if (subqueryExprs.isEmpty()) {
|
||||
return filter;
|
||||
}
|
||||
|
||||
// first step: Replace the subquery of predicate in LogicalFilter
|
||||
// second step: Replace subquery with LogicalApply
|
||||
return new LogicalFilter<>(new ReplaceSubquery().replace(filter.getPredicates()),
|
||||
analyzedSubquery(
|
||||
subqueryExprs, (LogicalPlan) filter.child(), ctx.cascadesContext
|
||||
));
|
||||
})
|
||||
)
|
||||
// first step: Replace the subquery of predicate in LogicalFilter
|
||||
// second step: Replace subquery with LogicalApply
|
||||
return new LogicalFilter<>(new ReplaceSubquery().replace(filter.getPredicates()),
|
||||
analyzedSubquery(
|
||||
subqueryExprs, (LogicalPlan) filter.child(), ctx.cascadesContext
|
||||
));
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@ -37,6 +37,7 @@ import org.apache.doris.qe.ConnectContext;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* Rule to bind relations in query plan.
|
||||
@ -76,9 +77,15 @@ public class BindRelation extends OneAnalysisRuleFactory {
|
||||
|
||||
private LogicalPlan bindWithCurrentDb(CascadesContext cascadesContext, String tableName) {
|
||||
// check if it is a CTE's name
|
||||
CTEContext cteContext = cascadesContext.getStatementContext().getCteContext();
|
||||
if (cteContext.containsCTE(tableName)) {
|
||||
return new LogicalSubQueryAlias<>(tableName, cteContext.getAnalyzedCTEPlan(tableName));
|
||||
CTEContext cteContext = cascadesContext.getCteContext();
|
||||
Optional<LogicalPlan> analyzedCte = cteContext.getAnalyzedCTE(tableName);
|
||||
if (analyzedCte.isPresent()) {
|
||||
LogicalPlan ctePlan = analyzedCte.get();
|
||||
if (ctePlan instanceof LogicalSubQueryAlias
|
||||
&& ((LogicalSubQueryAlias<?>) ctePlan).getAlias().equals(tableName)) {
|
||||
return ctePlan;
|
||||
}
|
||||
return new LogicalSubQueryAlias<>(tableName, ctePlan);
|
||||
}
|
||||
|
||||
String dbName = cascadesContext.getConnectContext().getDatabase();
|
||||
|
||||
@ -556,7 +556,7 @@ public class BindSlotReference implements AnalysisRuleFactory {
|
||||
|
||||
private AnalyzedResult analyzeSubquery(SubqueryExpr expr) {
|
||||
CascadesContext subqueryContext = new Memo(expr.getQueryPlan())
|
||||
.newCascadesContext((cascadesContext.getStatementContext()));
|
||||
.newCascadesContext((cascadesContext.getStatementContext()), cascadesContext.getCteContext());
|
||||
Scope subqueryScope = genScopeWithSubquery(expr);
|
||||
subqueryContext
|
||||
.newAnalyzer(Optional.of(subqueryScope))
|
||||
|
||||
@ -17,48 +17,89 @@
|
||||
|
||||
package org.apache.doris.nereids.rules.analysis;
|
||||
|
||||
import org.apache.doris.nereids.exceptions.AnalysisException;
|
||||
import org.apache.doris.nereids.trees.plans.Plan;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalSubQueryAlias;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
/**
|
||||
* Context used for CTE analysis and register
|
||||
*/
|
||||
public class CTEContext {
|
||||
private Map<String, CTEContext> cteContextMap;
|
||||
|
||||
// store CTE name and both initial and analyzed LogicalPlan of with query;
|
||||
// The initial LogicalPlan is used to inline a CTE if it is referenced by another CTE,
|
||||
// and the analyzed LogicalPlan will be if it is referenced by the main query.
|
||||
private Map<String, LogicalPlan> initialCtePlans;
|
||||
private Map<String, LogicalPlan> analyzedCtePlans;
|
||||
private String name;
|
||||
private LogicalSubQueryAlias<Plan> parsedPlan;
|
||||
// this cache only use once
|
||||
private LogicalPlan analyzedPlanCacheOnce;
|
||||
private Function<Plan, LogicalPlan> analyzePlanBuilder;
|
||||
|
||||
/* build head CTEContext */
|
||||
public CTEContext() {
|
||||
initialCtePlans = new HashMap<>();
|
||||
analyzedCtePlans = new HashMap<>();
|
||||
this(null, null);
|
||||
}
|
||||
|
||||
/** CTEContext */
|
||||
public CTEContext(@Nullable LogicalSubQueryAlias<Plan> parsedPlan, @Nullable CTEContext previousCteContext) {
|
||||
if ((parsedPlan == null && previousCteContext != null) || (parsedPlan != null && previousCteContext == null)) {
|
||||
throw new AnalysisException("Only first CteContext can contains null cte plan or previousCteContext");
|
||||
}
|
||||
this.parsedPlan = parsedPlan;
|
||||
this.name = parsedPlan == null ? null : parsedPlan.getAlias();
|
||||
this.cteContextMap = previousCteContext == null
|
||||
? ImmutableMap.of()
|
||||
: ImmutableMap.<String, CTEContext>builder()
|
||||
.putAll(previousCteContext.cteContextMap)
|
||||
.put(name, this)
|
||||
.build();
|
||||
}
|
||||
|
||||
public void setAnalyzedPlanCacheOnce(LogicalPlan analyzedPlan) {
|
||||
this.analyzedPlanCacheOnce = analyzedPlan;
|
||||
}
|
||||
|
||||
public void setAnalyzePlanBuilder(Function<Plan, LogicalPlan> analyzePlanBuilder) {
|
||||
this.analyzePlanBuilder = analyzePlanBuilder;
|
||||
}
|
||||
|
||||
/**
|
||||
* check if cteName can be found in current order
|
||||
*/
|
||||
public boolean containsCTE(String cteName) {
|
||||
return initialCtePlans.containsKey(cteName);
|
||||
return findCTEContext(cteName).isPresent();
|
||||
}
|
||||
|
||||
public LogicalPlan getInitialCTEPlan(String cteName) {
|
||||
return initialCtePlans.get(cteName);
|
||||
public Optional<LogicalSubQueryAlias<Plan>> getParsedCtePlan(String cteName) {
|
||||
return findCTEContext(cteName).map(cte -> cte.parsedPlan);
|
||||
}
|
||||
|
||||
public LogicalPlan getAnalyzedCTEPlan(String cteName) {
|
||||
return analyzedCtePlans.get(cteName);
|
||||
/** getAnalyzedCTE */
|
||||
public Optional<LogicalPlan> getAnalyzedCTE(String cteName) {
|
||||
return findCTEContext(cteName).map(CTEContext::doAnalyzeCTE);
|
||||
}
|
||||
|
||||
public void putInitialPlan(String cteName, LogicalPlan plan) {
|
||||
initialCtePlans.put(cteName, plan);
|
||||
/** findCTEContext */
|
||||
public Optional<CTEContext> findCTEContext(String cteName) {
|
||||
CTEContext cteContext = cteContextMap.get(cteName);
|
||||
return Optional.ofNullable(cteContext);
|
||||
}
|
||||
|
||||
public void putAnalyzedPlan(String cteName, LogicalPlan plan) {
|
||||
analyzedCtePlans.put(cteName, plan);
|
||||
private LogicalPlan doAnalyzeCTE() {
|
||||
// we always analyze a cte at least once; if the cte is only used once, we can return analyzedPlanCacheOnce.
// but if the cte is used more than once, we should return a different analyzed plan each time to generate
// different relation ids, so the relations will not conflict in the memo.
|
||||
if (analyzedPlanCacheOnce != null) {
|
||||
LogicalPlan analyzedPlan = analyzedPlanCacheOnce;
|
||||
analyzedPlanCacheOnce = null;
|
||||
return analyzedPlan;
|
||||
}
|
||||
return analyzePlanBuilder.apply(parsedPlan);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -1,66 +0,0 @@
|
||||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package org.apache.doris.nereids.rules.analysis;
|
||||
|
||||
import org.apache.doris.nereids.rules.Rule;
|
||||
import org.apache.doris.nereids.rules.RuleType;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Eliminate the logical sub query and alias node after analyze and before rewrite
|
||||
* If we match the alias node and return its child node, in the execute() of the job
|
||||
* <p>
|
||||
* TODO: refactor group merge strategy to support the feature above
|
||||
*/
|
||||
public class EliminateAliasNode implements AnalysisRuleFactory {
|
||||
@Override
|
||||
public List<Rule> buildRules() {
|
||||
return ImmutableList.of(
|
||||
RuleType.PROJECT_ELIMINATE_ALIAS_NODE.build(
|
||||
logicalProject(logicalSubQueryAlias())
|
||||
.then(project -> project.withChildren(ImmutableList.of(project.child().child())))
|
||||
),
|
||||
RuleType.FILTER_ELIMINATE_ALIAS_NODE.build(
|
||||
logicalFilter(logicalSubQueryAlias())
|
||||
.then(filter -> filter.withChildren(ImmutableList.of(filter.child().child())))
|
||||
),
|
||||
RuleType.AGGREGATE_ELIMINATE_ALIAS_NODE.build(
|
||||
aggregate(logicalSubQueryAlias())
|
||||
.then(agg -> agg.withChildren(ImmutableList.of(agg.child().child())))
|
||||
),
|
||||
RuleType.JOIN_ELIMINATE_ALIAS_NODE.build(
|
||||
logicalJoin(logicalSubQueryAlias(), logicalSubQueryAlias())
|
||||
.then(join -> join.withChildren(
|
||||
ImmutableList.of(join.left().child(), join.right().child())))
|
||||
),
|
||||
RuleType.JOIN_LEFT_CHILD_ELIMINATE_ALIAS_NODE.build(
|
||||
logicalJoin(logicalSubQueryAlias(), group())
|
||||
.then(join -> join.withChildren(
|
||||
ImmutableList.of(join.left().child(), join.right())))
|
||||
),
|
||||
RuleType.JOIN_RIGHT_CHILD_ELIMINATE_ALIAS_NODE.build(
|
||||
logicalJoin(group(), logicalSubQueryAlias())
|
||||
.then(join -> join.withChildren(
|
||||
ImmutableList.of(join.left(), join.right().child())))
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,43 @@
|
||||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package org.apache.doris.nereids.rules.analysis;
|
||||
|
||||
import org.apache.doris.nereids.rules.Rule;
|
||||
import org.apache.doris.nereids.rules.RuleType;
|
||||
import org.apache.doris.nereids.trees.expressions.Slot;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Eliminate the logical sub query and alias node after analyze and before rewrite
|
||||
* If we match the alias node and return its child node, in the execute() of the job
|
||||
* <p>
|
||||
* TODO: refactor group merge strategy to support the feature above
|
||||
*/
|
||||
public class LogicalSubQueryAliasToLogicalProject extends OneAnalysisRuleFactory {
|
||||
@Override
|
||||
public Rule build() {
|
||||
return RuleType.LOGICAL_SUB_QUERY_ALIAS_TO_LOGICAL_PROJECT.build(
|
||||
logicalSubQueryAlias().then(alias -> {
|
||||
List<Slot> output = alias.getOutput();
|
||||
return new LogicalProject<>((List) output, alias.child());
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -18,30 +18,23 @@
|
||||
package org.apache.doris.nereids.rules.analysis;
|
||||
|
||||
import org.apache.doris.nereids.CascadesContext;
|
||||
import org.apache.doris.nereids.StatementContext;
|
||||
import org.apache.doris.nereids.analyzer.UnboundAlias;
|
||||
import org.apache.doris.nereids.analyzer.UnboundRelation;
|
||||
import org.apache.doris.nereids.analyzer.UnboundSlot;
|
||||
import org.apache.doris.nereids.exceptions.AnalysisException;
|
||||
import org.apache.doris.nereids.memo.Memo;
|
||||
import org.apache.doris.nereids.rules.Rule;
|
||||
import org.apache.doris.nereids.rules.RuleType;
|
||||
import org.apache.doris.nereids.trees.expressions.Alias;
|
||||
import org.apache.doris.nereids.trees.expressions.NamedExpression;
|
||||
import org.apache.doris.nereids.trees.expressions.Slot;
|
||||
import org.apache.doris.nereids.trees.plans.GroupPlan;
|
||||
import org.apache.doris.nereids.trees.plans.Plan;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalCTE;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
|
||||
import org.apache.doris.nereids.trees.plans.logical.LogicalSubQueryAlias;
|
||||
import org.apache.doris.nereids.trees.plans.visitor.DefaultPlanRewriter;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
import java.util.function.Function;
|
||||
|
||||
/**
|
||||
* Register CTE, includes checking columnAliases, checking CTE name, analyzing each CTE and store the
|
||||
@ -55,78 +48,50 @@ public class RegisterCTE extends OneAnalysisRuleFactory {
|
||||
public Rule build() {
|
||||
return logicalCTE().thenApply(ctx -> {
|
||||
LogicalCTE<GroupPlan> logicalCTE = ctx.root;
|
||||
register(logicalCTE.getAliasQueries(), ctx.statementContext);
|
||||
return (LogicalPlan) logicalCTE.child();
|
||||
register(logicalCTE.getAliasQueries(), ctx.cascadesContext);
|
||||
return logicalCTE.child();
|
||||
}).toRule(RuleType.REGISTER_CTE);
|
||||
}
|
||||
|
||||
/**
|
||||
* register and store CTEs in CTEContext
|
||||
*/
|
||||
private void register(List<LogicalSubQueryAlias> aliasQueryList, StatementContext statementContext) {
|
||||
CTEContext cteContext = statementContext.getCteContext();
|
||||
|
||||
for (LogicalSubQueryAlias<LogicalPlan> aliasQuery : aliasQueryList) {
|
||||
private void register(List<LogicalSubQueryAlias<Plan>> aliasQueryList, CascadesContext cascadesContext) {
|
||||
CTEContext cteCtx = cascadesContext.getCteContext();
|
||||
for (LogicalSubQueryAlias<Plan> aliasQuery : aliasQueryList) {
|
||||
String cteName = aliasQuery.getAlias();
|
||||
if (cteContext.containsCTE(cteName)) {
|
||||
if (cteCtx.containsCTE(cteName)) {
|
||||
throw new AnalysisException("CTE name [" + cteName + "] cannot be used more than once.");
|
||||
}
|
||||
|
||||
// inline CTE's initialPlan if it is referenced by another CTE
|
||||
LogicalPlan plan = aliasQuery.child();
|
||||
plan = (LogicalPlan) new CTEVisitor().inlineCTE(cteContext, plan);
|
||||
cteContext.putInitialPlan(cteName, plan);
|
||||
// we should use a chain to ensure visible of cte
|
||||
CTEContext localCteContext = cteCtx;
|
||||
|
||||
// analyze CTE's initialPlan
|
||||
CascadesContext cascadesContext = new Memo(plan).newCascadesContext(statementContext);
|
||||
cascadesContext.newAnalyzer().analyze();
|
||||
LogicalPlan analyzedPlan = (LogicalPlan) cascadesContext.getMemo().copyOut(false);
|
||||
Function<Plan, LogicalPlan> analyzeCte = parsePlan -> {
|
||||
CascadesContext localCascadesContext = new Memo(parsePlan)
|
||||
.newCascadesContext(cascadesContext.getStatementContext(), localCteContext);
|
||||
localCascadesContext.newAnalyzer().analyze();
|
||||
return (LogicalPlan) localCascadesContext.getMemo().copyOut(false);
|
||||
};
|
||||
|
||||
LogicalPlan analyzedCteBody = analyzeCte.apply(aliasQuery.child());
|
||||
if (aliasQuery.getColumnAliases().isPresent()) {
|
||||
analyzedPlan = withColumnAliases(analyzedPlan, aliasQuery, cteContext);
|
||||
checkColumnAlias(aliasQuery, analyzedCteBody.getOutput());
|
||||
}
|
||||
|
||||
cteContext.putAnalyzedPlan(cteName, analyzedPlan);
|
||||
cteCtx = new CTEContext(aliasQuery, localCteContext);
|
||||
|
||||
// now we can simply wrap aliasQuery for the first usage of this cte
|
||||
cteCtx.setAnalyzedPlanCacheOnce(aliasQuery.withChildren(ImmutableList.of(analyzedCteBody)));
|
||||
cteCtx.setAnalyzePlanBuilder(analyzeCte);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* deal with columnAliases of CTE
|
||||
*/
|
||||
private LogicalPlan withColumnAliases(LogicalPlan analyzedPlan,
|
||||
LogicalSubQueryAlias<LogicalPlan> aliasQuery, CTEContext cteContext) {
|
||||
List<Slot> outputSlots = analyzedPlan.getOutput();
|
||||
List<String> columnAliases = aliasQuery.getColumnAliases().get();
|
||||
|
||||
checkColumnAlias(aliasQuery, outputSlots);
|
||||
|
||||
// if this CTE has columnAlias, we should add an extra LogicalProject to both its initialPlan and analyzedPlan,
|
||||
// which is used to store columnAlias
|
||||
|
||||
// projects for initialPlan
|
||||
List<NamedExpression> unboundProjects = IntStream.range(0, outputSlots.size())
|
||||
.mapToObj(i -> i >= columnAliases.size()
|
||||
? new UnboundSlot(outputSlots.get(i).getName())
|
||||
: new UnboundAlias(new UnboundSlot(outputSlots.get(i).getName()), columnAliases.get(i)))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
String name = aliasQuery.getAlias();
|
||||
LogicalPlan initialPlan = cteContext.getInitialCTEPlan(name);
|
||||
cteContext.putInitialPlan(name, new LogicalProject<>(unboundProjects, initialPlan));
|
||||
|
||||
// projects for analyzedPlan
|
||||
List<NamedExpression> boundedProjects = IntStream.range(0, outputSlots.size())
|
||||
.mapToObj(i -> i >= columnAliases.size()
|
||||
? outputSlots.get(i)
|
||||
: new Alias(outputSlots.get(i), columnAliases.get(i)))
|
||||
.collect(Collectors.toList());
|
||||
return new LogicalProject<>(boundedProjects, analyzedPlan);
|
||||
cascadesContext.setCteContext(cteCtx);
|
||||
}
|
||||
|
||||
/**
|
||||
* check columnAliases' size and name
|
||||
*/
|
||||
private void checkColumnAlias(LogicalSubQueryAlias<LogicalPlan> aliasQuery, List<Slot> outputSlots) {
|
||||
private void checkColumnAlias(LogicalSubQueryAlias<Plan> aliasQuery, List<Slot> outputSlots) {
|
||||
List<String> columnAlias = aliasQuery.getColumnAliases().get();
|
||||
// if the size of columnAlias is smaller than outputSlots' size, we will replace the corresponding number
|
||||
// of front slots with columnAlias.
|
||||
@ -146,24 +111,4 @@ public class RegisterCTE extends OneAnalysisRuleFactory {
|
||||
names.add(alias);
|
||||
});
|
||||
}
|
||||
|
||||
private class CTEVisitor extends DefaultPlanRewriter<CTEContext> {
|
||||
@Override
|
||||
public LogicalPlan visitUnboundRelation(UnboundRelation unboundRelation, CTEContext cteContext) {
|
||||
// confirm if it is a CTE
|
||||
if (unboundRelation.getNameParts().size() != 1) {
|
||||
return unboundRelation;
|
||||
}
|
||||
String name = unboundRelation.getTableName();
|
||||
if (cteContext.containsCTE(name)) {
|
||||
return new LogicalSubQueryAlias<>(name, cteContext.getInitialCTEPlan(name));
|
||||
}
|
||||
return unboundRelation;
|
||||
}
|
||||
|
||||
public Plan inlineCTE(CTEContext cteContext, LogicalPlan ctePlan) {
|
||||
return ctePlan.accept(this, cteContext);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -38,4 +38,8 @@ public abstract class Slot extends NamedExpression implements LeafExpression {
|
||||
public Slot withQualifier(List<String> qualifiers) {
|
||||
throw new RuntimeException("Do not implement");
|
||||
}
|
||||
|
||||
public Slot withName(String name) {
|
||||
throw new RuntimeException("Do not implement");
|
||||
}
|
||||
}
|
||||
|
||||
@ -182,6 +182,11 @@ public class SlotReference extends Slot {
|
||||
return new SlotReference(exprId, name, dataType, nullable, qualifiers, column);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Slot withName(String name) {
|
||||
return new SlotReference(exprId, name, dataType, nullable, qualifier, column);
|
||||
}
|
||||
|
||||
/** withCommonGroupingSetExpression */
|
||||
public Slot withCommonGroupingSetExpression(boolean isCommonGroupingSetExpression) {
|
||||
if (!isCommonGroupingSetExpression) {
|
||||
|
||||
@ -38,19 +38,19 @@ import java.util.Optional;
|
||||
*/
|
||||
public class LogicalCTE<CHILD_TYPE extends Plan> extends LogicalUnary<CHILD_TYPE> {
|
||||
|
||||
private final List<LogicalSubQueryAlias> aliasQueries;
|
||||
private final List<LogicalSubQueryAlias<Plan>> aliasQueries;
|
||||
|
||||
public LogicalCTE(List<LogicalSubQueryAlias> aliasQueries, CHILD_TYPE child) {
|
||||
public LogicalCTE(List<LogicalSubQueryAlias<Plan>> aliasQueries, CHILD_TYPE child) {
|
||||
this(aliasQueries, Optional.empty(), Optional.empty(), child);
|
||||
}
|
||||
|
||||
public LogicalCTE(List<LogicalSubQueryAlias> aliasQueries, Optional<GroupExpression> groupExpression,
|
||||
public LogicalCTE(List<LogicalSubQueryAlias<Plan>> aliasQueries, Optional<GroupExpression> groupExpression,
|
||||
Optional<LogicalProperties> logicalProperties, CHILD_TYPE child) {
|
||||
super(PlanType.LOGICAL_CTE, groupExpression, logicalProperties, child);
|
||||
this.aliasQueries = aliasQueries;
|
||||
}
|
||||
|
||||
public List<LogicalSubQueryAlias> getAliasQueries() {
|
||||
public List<LogicalSubQueryAlias<Plan>> getAliasQueries() {
|
||||
return aliasQueries;
|
||||
}
|
||||
|
||||
@ -105,12 +105,12 @@ public class LogicalCTE<CHILD_TYPE extends Plan> extends LogicalUnary<CHILD_TYPE
|
||||
}
|
||||
|
||||
@Override
|
||||
public Plan withGroupExpression(Optional<GroupExpression> groupExpression) {
|
||||
public LogicalCTE<CHILD_TYPE> withGroupExpression(Optional<GroupExpression> groupExpression) {
|
||||
return new LogicalCTE<>(aliasQueries, groupExpression, Optional.of(getLogicalProperties()), child());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Plan withLogicalProperties(Optional<LogicalProperties> logicalProperties) {
|
||||
public LogicalCTE<CHILD_TYPE> withLogicalProperties(Optional<LogicalProperties> logicalProperties) {
|
||||
return new LogicalCTE<>(aliasQueries, Optional.empty(), logicalProperties, child());
|
||||
}
|
||||
}
|
||||
|
||||
@ -33,7 +33,6 @@ import org.apache.commons.lang.StringUtils;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* The node of logical plan for sub query and alias
|
||||
@ -63,9 +62,26 @@ public class LogicalSubQueryAlias<CHILD_TYPE extends Plan> extends LogicalUnary<
|
||||
|
||||
@Override
|
||||
public List<Slot> computeOutput() {
|
||||
return child().getOutput().stream()
|
||||
.map(slot -> slot.withQualifier(ImmutableList.of(alias)))
|
||||
.collect(Collectors.toList());
|
||||
List<Slot> childOutput = child().getOutput();
|
||||
List<String> columnAliases = this.columnAliases.isPresent()
|
||||
? this.columnAliases.get()
|
||||
: ImmutableList.of();
|
||||
ImmutableList.Builder<Slot> currentOutput = ImmutableList.builder();
|
||||
String qualifier = alias;
|
||||
for (int i = 0; i < childOutput.size(); i++) {
|
||||
Slot originSlot = childOutput.get(i);
|
||||
String columnAlias;
|
||||
if (i < columnAliases.size()) {
|
||||
columnAlias = columnAliases.get(i);
|
||||
} else {
|
||||
columnAlias = originSlot.getName();
|
||||
}
|
||||
Slot qualified = originSlot
|
||||
.withQualifier(ImmutableList.of(qualifier))
|
||||
.withName(columnAlias);
|
||||
currentOutput.add(qualified);
|
||||
}
|
||||
return currentOutput.build();
|
||||
}
|
||||
|
||||
public String getAlias() {
|
||||
@ -107,14 +123,14 @@ public class LogicalSubQueryAlias<CHILD_TYPE extends Plan> extends LogicalUnary<
|
||||
}
|
||||
|
||||
@Override
|
||||
public Plan withChildren(List<Plan> children) {
|
||||
public LogicalSubQueryAlias<Plan> withChildren(List<Plan> children) {
|
||||
Preconditions.checkArgument(children.size() == 1);
|
||||
return new LogicalSubQueryAlias<>(alias, children.get(0));
|
||||
return new LogicalSubQueryAlias<>(alias, columnAliases, children.get(0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(PlanVisitor<R, C> visitor, C context) {
|
||||
return visitor.visitSubQueryAlias((LogicalSubQueryAlias<Plan>) this, context);
|
||||
return visitor.visitSubQueryAlias(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -123,13 +139,13 @@ public class LogicalSubQueryAlias<CHILD_TYPE extends Plan> extends LogicalUnary<
|
||||
}
|
||||
|
||||
@Override
|
||||
public Plan withGroupExpression(Optional<GroupExpression> groupExpression) {
|
||||
public LogicalSubQueryAlias<CHILD_TYPE> withGroupExpression(Optional<GroupExpression> groupExpression) {
|
||||
return new LogicalSubQueryAlias<>(alias, columnAliases, groupExpression,
|
||||
Optional.of(getLogicalProperties()), child());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Plan withLogicalProperties(Optional<LogicalProperties> logicalProperties) {
|
||||
public LogicalSubQueryAlias<CHILD_TYPE> withLogicalProperties(Optional<LogicalProperties> logicalProperties) {
|
||||
return new LogicalSubQueryAlias<>(alias, columnAliases, Optional.empty(),
|
||||
logicalProperties, child());
|
||||
}
|
||||
|
||||
@ -142,7 +142,8 @@ public class PhysicalLimit<CHILD_TYPE extends Plan> extends PhysicalUnary<CHILD_
|
||||
public String toString() {
|
||||
return Utils.toSqlString("PhysicalLimit",
|
||||
"limit", limit,
|
||||
"offset", offset
|
||||
"offset", offset,
|
||||
"stats", statsDeriveResult
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -93,7 +93,6 @@ import org.apache.doris.mysql.privilege.PrivPredicate;
|
||||
import org.apache.doris.nereids.NereidsPlanner;
|
||||
import org.apache.doris.nereids.StatementContext;
|
||||
import org.apache.doris.nereids.glue.LogicalPlanAdapter;
|
||||
import org.apache.doris.nereids.rules.analysis.CTEContext;
|
||||
import org.apache.doris.planner.OlapScanNode;
|
||||
import org.apache.doris.planner.OriginalPlanner;
|
||||
import org.apache.doris.planner.Planner;
|
||||
@ -214,7 +213,6 @@ public class StmtExecutor implements ProfileWriter {
|
||||
this.statementContext.setConnectContext(ctx);
|
||||
this.statementContext.setOriginStatement(originStmt);
|
||||
this.statementContext.setParsedStatement(parsedStmt);
|
||||
this.statementContext.setCteContext(new CTEContext());
|
||||
} else {
|
||||
this.statementContext = new StatementContext(ctx, originStmt);
|
||||
this.statementContext.setParsedStatement(parsedStmt);
|
||||
|
||||
@ -105,11 +105,15 @@ public class AnalyzeSubQueryTest extends TestWithFeService implements PatternMat
|
||||
public void testCaseSubQuery() {
|
||||
PlanChecker.from(connectContext)
|
||||
.analyze(testSql.get(0))
|
||||
.applyTopDown(new EliminateAliasNode())
|
||||
.applyTopDown(new LogicalSubQueryAliasToLogicalProject())
|
||||
.matchesFromRoot(
|
||||
logicalProject(
|
||||
logicalProject(
|
||||
logicalOlapScan().when(o -> true)
|
||||
logicalProject(
|
||||
logicalProject(
|
||||
logicalOlapScan().when(o -> true)
|
||||
)
|
||||
)
|
||||
).when(FieldChecker.check("projects", ImmutableList.of(
|
||||
new SlotReference(new ExprId(0), "id", BigIntType.INSTANCE, true, ImmutableList.of("T")),
|
||||
new SlotReference(new ExprId(1), "score", BigIntType.INSTANCE, true, ImmutableList.of("T"))))
|
||||
@ -125,13 +129,19 @@ public class AnalyzeSubQueryTest extends TestWithFeService implements PatternMat
|
||||
public void testCaseMixed() {
|
||||
PlanChecker.from(connectContext)
|
||||
.analyze(testSql.get(1))
|
||||
.applyTopDown(new EliminateAliasNode())
|
||||
.applyTopDown(new LogicalSubQueryAliasToLogicalProject())
|
||||
.matchesFromRoot(
|
||||
logicalProject(
|
||||
logicalJoin(
|
||||
logicalOlapScan(),
|
||||
logicalProject(
|
||||
logicalOlapScan()
|
||||
),
|
||||
logicalProject(
|
||||
logicalProject(
|
||||
logicalProject(
|
||||
logicalOlapScan()
|
||||
)
|
||||
)
|
||||
).when(FieldChecker.check("projects", ImmutableList.of(
|
||||
new SlotReference(new ExprId(0), "id", BigIntType.INSTANCE, true, ImmutableList.of("TT2")),
|
||||
new SlotReference(new ExprId(1), "score", BigIntType.INSTANCE, true, ImmutableList.of("TT2"))))
|
||||
@ -156,12 +166,14 @@ public class AnalyzeSubQueryTest extends TestWithFeService implements PatternMat
|
||||
public void testCaseJoinSameTable() {
|
||||
PlanChecker.from(connectContext)
|
||||
.analyze(testSql.get(5))
|
||||
.applyTopDown(new EliminateAliasNode())
|
||||
.applyTopDown(new LogicalSubQueryAliasToLogicalProject())
|
||||
.matchesFromRoot(
|
||||
logicalProject(
|
||||
logicalJoin(
|
||||
logicalOlapScan(),
|
||||
logicalOlapScan()
|
||||
logicalProject(
|
||||
logicalOlapScan()
|
||||
)
|
||||
)
|
||||
.when(FieldChecker.check("joinType", JoinType.INNER_JOIN))
|
||||
.when(FieldChecker.check("otherJoinConjuncts", ImmutableList.of(new EqualTo(
|
||||
|
||||
@ -30,6 +30,7 @@ import org.apache.doris.nereids.rules.rewrite.logical.ApplyPullFilterOnAgg;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.ApplyPullFilterOnProjectUnderAgg;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.ExistsApplyToJoin;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.InApplyToJoin;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.MergeProjects;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.PushApplyUnderFilter;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.PushApplyUnderProject;
|
||||
import org.apache.doris.nereids.rules.rewrite.logical.ScalarApplyToJoin;
|
||||
@ -369,27 +370,41 @@ public class AnalyzeWhereSubqueryTest extends TestWithFeService implements Patte
|
||||
// select * from t6 where t6.k1 < (select max(aa) from (select v1 as aa from t7 where t6.k2=t7.v2) t2 )
|
||||
PlanChecker.from(connectContext)
|
||||
.analyze(sql10)
|
||||
.matches(
|
||||
logicalApply(
|
||||
any(),
|
||||
logicalAggregate(
|
||||
logicalProject(
|
||||
.matchesFromRoot(
logicalProject(
logicalFilter(
logicalProject(
logicalApply(
any(),
logicalAggregate(
logicalSubQueryAlias(
logicalProject(
logicalFilter()
).when(FieldChecker.check("projects", ImmutableList.of(
).when(p -> p.getProjects().equals(ImmutableList.of(
new Alias(new ExprId(7), new SlotReference(new ExprId(5), "v1", BigIntType.INSTANCE,
true,
ImmutableList.of("default_cluster:test", "t7")), "aa")
)))
)
.when(a -> a.getAlias().equals("t2"))
.when(a -> a.getOutput().equals(ImmutableList.of(
new SlotReference(new ExprId(7), "aa", BigIntType.INSTANCE,
true, ImmutableList.of("t2"))
)))
).when(FieldChecker.check("outputExpressions", ImmutableList.of(
).when(agg -> agg.getOutputExpressions().equals(ImmutableList.of(
new Alias(new ExprId(8),
new Max(new SlotReference(new ExprId(7), "aa", BigIntType.INSTANCE,
true,
ImmutableList.of("t2"))), "max(aa)")
)))
.when(FieldChecker.check("groupByExpressions", ImmutableList.of()))
).when(FieldChecker.check("correlationSlot", ImmutableList.of(
new SlotReference(new ExprId(1), "k2", BigIntType.INSTANCE, true,
ImmutableList.of("default_cluster:test", "t6")))))
)))
.when(agg -> agg.getGroupByExpressions().equals(ImmutableList.of()))
)
.when(apply -> apply.getCorrelationSlot().equals(ImmutableList.of(
new SlotReference(new ExprId(1), "k2", BigIntType.INSTANCE, true,
ImmutableList.of("default_cluster:test", "t6")))))
)
)
)
);
}

@ -397,6 +412,8 @@ public class AnalyzeWhereSubqueryTest extends TestWithFeService implements Patte
public void testSql10AfterChangeProjectFilter() {
PlanChecker.from(connectContext)
.analyze(sql10)
.applyBottomUp(new LogicalSubQueryAliasToLogicalProject())
.applyTopDown(new MergeProjects())
.applyBottomUp(new ApplyPullFilterOnProjectUnderAgg())
.matches(
logicalApply(
@ -427,6 +444,8 @@ public class AnalyzeWhereSubqueryTest extends TestWithFeService implements Patte
public void testSql10AfterChangeAggFilter() {
PlanChecker.from(connectContext)
.analyze(sql10)
.applyBottomUp(new LogicalSubQueryAliasToLogicalProject())
.applyTopDown(new MergeProjects())
.applyBottomUp(new ApplyPullFilterOnProjectUnderAgg())
.applyBottomUp(new ApplyPullFilterOnAgg())
.matches(
@ -448,9 +467,11 @@ public class AnalyzeWhereSubqueryTest extends TestWithFeService implements Patte
}

@Test
public void testSql10AfterScalarTOJoin() {
public void testSql10AfterScalarToJoin() {
PlanChecker.from(connectContext)
.analyze(sql10)
.applyBottomUp(new LogicalSubQueryAliasToLogicalProject())
.applyTopDown(new MergeProjects())
.applyBottomUp(new ApplyPullFilterOnProjectUnderAgg())
.applyBottomUp(new ApplyPullFilterOnAgg())
.applyBottomUp(new ScalarApplyToJoin())
@ -460,13 +481,14 @@ public class AnalyzeWhereSubqueryTest extends TestWithFeService implements Patte
logicalAggregate(
logicalProject()
)
).when(FieldChecker.check("joinType", JoinType.LEFT_OUTER_JOIN))
.when(FieldChecker.check("otherJoinConjuncts", ImmutableList.of(
new EqualTo(new SlotReference(new ExprId(1), "k2", BigIntType.INSTANCE, true,
ImmutableList.of("default_cluster:test", "t6")),
new SlotReference(new ExprId(6), "v2", BigIntType.INSTANCE, true,
ImmutableList.of("default_cluster:test", "t7")))
)))
)
.when(j -> j.getJoinType().equals(JoinType.LEFT_OUTER_JOIN))
.when(j -> j.getOtherJoinConjuncts().equals(ImmutableList.of(
new EqualTo(new SlotReference(new ExprId(1), "k2", BigIntType.INSTANCE, true,
ImmutableList.of("default_cluster:test", "t6")),
new SlotReference(new ExprId(6), "v2", BigIntType.INSTANCE, true,
ImmutableList.of("default_cluster:test", "t7")))
)))
);
}
}

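For context on the assertion style used throughout this test class: the FieldChecker-based checks are being replaced by lambda predicates evaluated on the matched plan node, after first turning sub-query aliases into projects and merging them. A minimal sketch of the resulting shape, assuming sql10 is the query defined earlier in the class; the single-project expectation is purely illustrative:

// Sketch only: the matched pattern and the expected project count are illustrative assumptions.
PlanChecker.from(connectContext)
        .analyze(sql10)
        .applyBottomUp(new LogicalSubQueryAliasToLogicalProject())
        .applyTopDown(new MergeProjects())
        .matches(
                logicalProject().when(p -> p.getProjects().size() == 1)
        );
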
@ -20,8 +20,6 @@ package org.apache.doris.nereids.rules.analysis;
import org.apache.doris.common.NereidsException;
import org.apache.doris.nereids.NereidsPlanner;
import org.apache.doris.nereids.StatementContext;
import org.apache.doris.nereids.analyzer.UnboundAlias;
import org.apache.doris.nereids.analyzer.UnboundSlot;
import org.apache.doris.nereids.datasets.ssb.SSBUtils;
import org.apache.doris.nereids.exceptions.AnalysisException;
import org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator;
@ -37,7 +35,6 @@ import org.apache.doris.nereids.rules.rewrite.logical.PushApplyUnderProject;
import org.apache.doris.nereids.trees.expressions.Alias;
import org.apache.doris.nereids.trees.expressions.EqualTo;
import org.apache.doris.nereids.trees.expressions.ExprId;
import org.apache.doris.nereids.trees.expressions.NamedExpression;
import org.apache.doris.nereids.trees.expressions.NamedExpressionUtil;
import org.apache.doris.nereids.trees.expressions.SlotReference;
import org.apache.doris.nereids.trees.expressions.functions.agg.Count;
@ -60,7 +57,6 @@ import mockit.MockUp;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.List;

public class RegisterCTETest extends TestWithFeService implements PatternMatchSupported {
@ -111,7 +107,8 @@ public class RegisterCTETest extends TestWithFeService implements PatternMatchSu
private CTEContext getCTEContextAfterRegisterCTE(String sql) {
return PlanChecker.from(connectContext)
.analyze(sql)
.getCascadesContext().getStatementContext().getCteContext();
.getCascadesContext()
.getCteContext();
}

/* ********************************************************************************************
@ -145,21 +142,17 @@ public class RegisterCTETest extends TestWithFeService implements PatternMatchSu

Assertions.assertTrue(cteContext.containsCTE("cte1")
&& cteContext.containsCTE("cte2"));

LogicalPlan cte2InitialPlan = cteContext.getInitialCTEPlan("cte2");
PlanChecker.from(connectContext, cte2InitialPlan).matchesFromRoot(
logicalProject(
logicalFilter(
logicalSubQueryAlias(
logicalProject(
logicalFilter(
unboundRelation()
)
LogicalPlan cte2parsedPlan = cteContext.getParsedCtePlan("cte2").get();
PlanChecker.from(connectContext, cte2parsedPlan)
.matchesFromRoot(
logicalSubQueryAlias(
logicalProject(
logicalFilter(
unboundRelation()
)
)
)
)
);
);
}

@Test
@ -169,32 +162,31 @@ public class RegisterCTETest extends TestWithFeService implements PatternMatchSu
Assertions.assertTrue(cteContext.containsCTE("cte1")
&& cteContext.containsCTE("cte2"));

// check initial plan
LogicalPlan cte1InitialPlan = cteContext.getInitialCTEPlan("cte1");

List<NamedExpression> targetProjects = new ArrayList<>();
targetProjects.add(new UnboundAlias(new UnboundSlot("s_suppkey"), "skey"));
targetProjects.add(new UnboundSlot("s_nation"));

PlanChecker.from(connectContext, cte1InitialPlan)
.matches(
logicalProject(
).when(FieldChecker.check("projects", targetProjects))
);

// check analyzed plan
LogicalPlan cte1AnalyzedPlan = cteContext.getAnalyzedCTEPlan("cte1");
LogicalPlan cte1AnalyzedPlan = cteContext.getAnalyzedCTE("cte1").get();

targetProjects = new ArrayList<>();
targetProjects.add(new Alias(new ExprId(7),
new SlotReference(new ExprId(0), "s_suppkey", VarcharType.INSTANCE,
false, ImmutableList.of("defaulst_cluster:test", "supplier")), "skey"));
targetProjects.add(new SlotReference(new ExprId(4), "s_nation", VarcharType.INSTANCE,
false, ImmutableList.of("defaulst_cluster:test", "supplier")));
PlanChecker.from(connectContext, cte1AnalyzedPlan)
.matches(
logicalProject(
).when(FieldChecker.check("projects", targetProjects))
.matchesFromRoot(
logicalSubQueryAlias(
logicalProject()
.when(p -> p.getProjects().size() == 2
&& p.getProjects().get(0).getName().equals("s_suppkey")
&& p.getProjects().get(0).getExprId().asInt() == 14
&& p.getProjects().get(0).getQualifier().equals(ImmutableList.of("default_cluster:test", "supplier"))
&& p.getProjects().get(1).getName().equals("s_nation")
&& p.getProjects().get(1).getExprId().asInt() == 18
&& p.getProjects().get(1).getQualifier().equals(ImmutableList.of("default_cluster:test", "supplier"))
)
)
.when(a -> a.getAlias().equals("cte1"))
.when(a -> a.getOutput().size() == 2
&& a.getOutput().get(0).getName().equals("skey")
&& a.getOutput().get(0).getExprId().asInt() == 14
&& a.getOutput().get(0).getQualifier().equals(ImmutableList.of("cte1"))
&& a.getOutput().get(1).getName().equals("s_nation")
&& a.getOutput().get(1).getExprId().asInt() == 18
&& a.getOutput().get(1).getQualifier().equals(ImmutableList.of("cte1"))
)
);
}

@ -230,45 +222,52 @@ public class RegisterCTETest extends TestWithFeService implements PatternMatchSu

@Test
public void testCTEWithAlias() {
SlotReference skInCTE1 = new SlotReference(new ExprId(7), "sk", IntegerType.INSTANCE,
SlotReference skInCTE1 = new SlotReference(new ExprId(15), "sk", IntegerType.INSTANCE,
false, ImmutableList.of("cte1"));
SlotReference skInCTE2 = new SlotReference(new ExprId(15), "sk", IntegerType.INSTANCE,
SlotReference skInCTE2 = new SlotReference(new ExprId(7), "sk", IntegerType.INSTANCE,
false, ImmutableList.of("cte2"));
Alias skAlias = new Alias(new ExprId(7),
new SlotReference(new ExprId(0), "s_suppkey", IntegerType.INSTANCE,
false, ImmutableList.of("default_cluster:test", "supplier")), "sk");
Alias skAlias = new Alias(new ExprId(15),
new SlotReference(new ExprId(8), "sk", IntegerType.INSTANCE,
false, ImmutableList.of("default_cluster:test", "supplier")), "sk");

PlanChecker.from(connectContext)
.analyze(sql4)
.matches(
.matchesFromRoot(
logicalProject(
logicalJoin(
logicalProject().when(FieldChecker.check("projects", ImmutableList.of(skAlias))),
logicalProject().when(FieldChecker.check("projects", ImmutableList.of(skInCTE2)))
logicalSubQueryAlias(
logicalProject().when(p -> p.getProjects().equals(ImmutableList.of(skAlias)))
).when(a -> a.getAlias().equals("cte1")),
logicalSubQueryAlias(
logicalProject().when(p -> p.getProjects().equals(ImmutableList.of(skInCTE2)))
).when(a -> a.getAlias().equals("cte2"))
).when(FieldChecker.check("joinType", JoinType.INNER_JOIN))
.when(FieldChecker.check("otherJoinConjuncts", ImmutableList.of(
new EqualTo(skInCTE1, skInCTE2)
)))
).when(FieldChecker.check("projects", ImmutableList.of(skInCTE1, skInCTE2)))
.when(j -> j.getOtherJoinConjuncts().equals(ImmutableList.of(new EqualTo(skInCTE1, skInCTE2))))
).when(p -> p.getProjects().equals(ImmutableList.of(skInCTE1, skInCTE2)))
);
}

@Test
public void testCTEWithAnExistedTableOrViewName() {
SlotReference suppkeyInV1 = new SlotReference(new ExprId(7), "s_suppkey", IntegerType.INSTANCE,
SlotReference suppkeyInV1 = new SlotReference(new ExprId(0), "s_suppkey", IntegerType.INSTANCE,
false, ImmutableList.of("V1"));
SlotReference suppkeyInV2 = new SlotReference(new ExprId(7), "s_suppkey", IntegerType.INSTANCE,
SlotReference suppkeyInV2 = new SlotReference(new ExprId(0), "s_suppkey", IntegerType.INSTANCE,
false, ImmutableList.of("V2"));
SlotReference suppkeyInSupplier = new SlotReference(new ExprId(7), "s_suppkey", IntegerType.INSTANCE,
SlotReference suppkeyInSupplier = new SlotReference(new ExprId(0), "s_suppkey", IntegerType.INSTANCE,
false, ImmutableList.of("default_cluster:test", "supplier"));
PlanChecker.from(connectContext)
.analyze(sql5)
.matches(
.matchesFromRoot(
logicalProject(
logicalProject(
logicalProject()
.when(FieldChecker.check("projects", ImmutableList.of(suppkeyInSupplier)))
).when(FieldChecker.check("projects", ImmutableList.of(suppkeyInV1)))
).when(FieldChecker.check("projects", ImmutableList.of(suppkeyInV2)))
logicalSubQueryAlias(
logicalProject(
logicalSubQueryAlias(
logicalProject()
.when(p -> p.getProjects().equals(ImmutableList.of(suppkeyInSupplier)))
).when(a -> a.getAlias().equals("V1"))
).when(p -> p.getProjects().equals(ImmutableList.of(suppkeyInV1)))
).when(a -> a.getAlias().equals("V2"))
).when(p -> p.getProjects().equals(ImmutableList.of(suppkeyInV2)))
);

}
@ -337,4 +336,30 @@ public class RegisterCTETest extends TestWithFeService implements PatternMatchSu
}, "Not throw expected exception.");
Assertions.assertTrue(exception.getMessage().contains("[cte1] cannot be used more than once"));
}

@Test
public void testDifferenceRelationId() {
PlanChecker.from(connectContext)
.analyze("with s as (select * from supplier) select * from s as s1, s as s2")
.matchesFromRoot(
logicalProject(
logicalJoin(
logicalSubQueryAlias(// as s1
logicalSubQueryAlias(// as s
logicalProject(// select * from supplier
logicalOlapScan().when(scan -> scan.getId().asInt() == 0)
)
).when(a -> a.getAlias().equals("s"))
).when(a -> a.getAlias().equals("s1")),
logicalSubQueryAlias(
logicalSubQueryAlias(
logicalProject(
logicalOlapScan().when(scan -> scan.getId().asInt() == 1)
)
).when(a -> a.getAlias().equals("s"))
).when(a -> a.getAlias().equals("s2"))
)
)
);
}
}

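A condensed sketch of how a test now reaches the registered CTE plans after this refactor, using the accessors shown in the RegisterCTETest diff above; the CTE name and WITH-query are illustrative, and getParsedCtePlan/getAnalyzedCTE are assumed to return Optional, hence the .get() calls:

// Sketch only: "cte1" and the query string are placeholders for the fixtures used in the tests.
CTEContext cteContext = PlanChecker.from(connectContext)
        .analyze("with cte1 as (select * from supplier) select * from cte1")
        .getCascadesContext()
        .getCteContext();
Assertions.assertTrue(cteContext.containsCTE("cte1"));
LogicalPlan parsedPlan = cteContext.getParsedCtePlan("cte1").get();
LogicalPlan analyzedPlan = cteContext.getAnalyzedCTE("cte1").get();
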
@ -18,6 +18,8 @@
package org.apache.doris.nereids.rules.mv;

import org.apache.doris.common.FeConstants;
import org.apache.doris.nereids.rules.analysis.LogicalSubQueryAliasToLogicalProject;
import org.apache.doris.nereids.rules.rewrite.logical.MergeProjects;
import org.apache.doris.nereids.trees.plans.PreAggStatus;
import org.apache.doris.nereids.util.PatternMatchSupported;
import org.apache.doris.nereids.util.PlanChecker;
@ -182,6 +184,8 @@ class SelectRollupIndexTest extends BaseMaterializedIndexSelectTest implements P
+ " where c3>0 group by c2";
PlanChecker.from(connectContext)
.analyze(sql)
.applyBottomUp(new LogicalSubQueryAliasToLogicalProject())
.applyTopDown(new MergeProjects())
.applyTopDown(new SelectMaterializedIndexWithAggregate())
.applyTopDown(new SelectMaterializedIndexWithoutAggregate())
.matches(logicalOlapScan().when(scan -> {

@ -106,21 +106,25 @@ public class PushdownExpressionsInHashConditionTest extends TestWithFeService im
"SELECT * FROM (SELECT * FROM T1) X JOIN (SELECT * FROM T2) Y ON X.ID + 1 = Y.ID + 2 AND X.ID + 1 > 2")
.applyTopDown(new FindHashConditionForJoin())
.applyTopDown(new PushdownExpressionsInHashCondition())
.matches(
logicalProject(
logicalJoin(
logicalProject(
logicalProject(
logicalOlapScan()
)
),
logicalProject(
logicalProject(
logicalOlapScan()
)
)
.matchesFromRoot(
logicalProject(
logicalJoin(
logicalProject(
logicalSubQueryAlias(
logicalProject(
logicalOlapScan()
)
)
),
logicalProject(
logicalSubQueryAlias(
logicalProject(
logicalOlapScan()
)
)
)
)
)
);
}

@ -131,19 +135,21 @@ public class PushdownExpressionsInHashConditionTest extends TestWithFeService im
"SELECT * FROM T1 JOIN (SELECT ID, SUM(SCORE) SCORE FROM T2 GROUP BY ID) T ON T1.ID + 1 = T.ID AND T.SCORE = T1.SCORE + 10")
.applyTopDown(new FindHashConditionForJoin())
.applyTopDown(new PushdownExpressionsInHashCondition())
.matches(
logicalProject(
logicalJoin(
logicalProject(
logicalOlapScan()
),
logicalProject(
logicalAggregate(
logicalOlapScan()
)
)
.matchesFromRoot(
logicalProject(
logicalJoin(
logicalProject(
logicalOlapScan()
),
logicalProject(
logicalSubQueryAlias(
logicalAggregate(
logicalOlapScan()
)
)
)
)
)
);
}

@ -154,21 +160,23 @@ public class PushdownExpressionsInHashConditionTest extends TestWithFeService im
"SELECT * FROM T1 JOIN (SELECT ID, SUM(SCORE) SCORE FROM T2 GROUP BY ID ORDER BY ID) T ON T1.ID + 1 = T.ID AND T.SCORE = T1.SCORE + 10")
.applyTopDown(new FindHashConditionForJoin())
.applyTopDown(new PushdownExpressionsInHashCondition())
.matches(
logicalProject(
logicalJoin(
logicalProject(
.matchesFromRoot(
logicalProject(
logicalJoin(
logicalProject(
logicalOlapScan()
),
logicalProject(
logicalSubQueryAlias(
logicalSort(
logicalAggregate(
logicalOlapScan()
),
logicalProject(
logicalSort(
logicalAggregate(
logicalOlapScan()
)
)
)
)
)
)
)
)
);
}
}

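For the ViewTest changes that follow: LogicalSubQueryAliasToLogicalProject rewrites the alias node for a view into a plain project, and MergeProjects then collapses it with the projection underneath, which is why those tests can match a single project at the root afterwards. A hedged illustration of that pipeline, assuming V1 is the view fixture used in the test and leaving the matched pattern deliberately loose:

// Sketch only: after the two rules, the alias over V1 has been folded into the surrounding project.
PlanChecker.from(connectContext)
        .analyze("SELECT * FROM V1")
        .applyTopDown(new LogicalSubQueryAliasToLogicalProject())
        .applyTopDown(new MergeProjects())
        .matchesFromRoot(
                logicalProject()
        );
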
@ -23,7 +23,7 @@ import org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator;
import org.apache.doris.nereids.glue.translator.PlanTranslatorContext;
import org.apache.doris.nereids.parser.NereidsParser;
import org.apache.doris.nereids.properties.PhysicalProperties;
import org.apache.doris.nereids.rules.analysis.EliminateAliasNode;
import org.apache.doris.nereids.rules.analysis.LogicalSubQueryAliasToLogicalProject;
import org.apache.doris.nereids.rules.rewrite.logical.MergeProjects;
import org.apache.doris.nereids.trees.plans.physical.PhysicalPlan;
import org.apache.doris.nereids.util.MemoTestUtils;
@ -113,7 +113,7 @@ public class ViewTest extends TestWithFeService implements PatternMatchSupported
public void testSimpleViewMergeProjects() {
PlanChecker.from(connectContext)
.analyze("SELECT * FROM V1")
.applyTopDown(new EliminateAliasNode())
.applyTopDown(new LogicalSubQueryAliasToLogicalProject())
.applyTopDown(new MergeProjects())
.matchesFromRoot(
logicalProject(
@ -140,7 +140,7 @@ public class ViewTest extends TestWithFeService implements PatternMatchSupported
+ ") Y\n"
+ "ON X.ID1 = Y.ID3"
)
.applyTopDown(new EliminateAliasNode())
.applyTopDown(new LogicalSubQueryAliasToLogicalProject())
.applyTopDown(new MergeProjects())
.matchesFromRoot(
logicalProject(

@ -1,13 +1,13 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !cte_1 --
15 15
15 15
15 15
15 29
15 9
29 15
29 29
29 29
29 29
9 9
9 9
29 9
9 15
9 29
9 9

-- !cte_2 --
