[enhancement](Nereids) Support show process time and process steps by explain statement (#31339)

## Proposed changes

1. Show the time spent in each planning phase when executing `explain plan xxx` with the Nereids planner.
2. Add the `explain xxx plan process select ...` statement to show how the plan is transformed step by step; showing the memo shape (physical plan) is not supported yet.

Examples:

Show process time:
```
mysql> explain plan select * from tt;
+---------------------------------------------------------------------------------------------------------------+
| Explain String(Nereids Planner)                                                                               |
+---------------------------------------------------------------------------------------------------------------+
| ========== PARSED PLAN (time: 3ms) ==========                                                                 |
| UnboundResultSink[3] (  )                                                                                     |
| +--LogicalProject[2] ( distinct=false, projects=[*], excepts=[] )                                             |
|    +--LogicalCheckPolicy (  )                                                                                 |
|       +--UnboundRelation ( id=RelationId#0, nameParts=tt )                                                    |
|                                                                                                               |
| ========== ANALYZED PLAN (time: 6ms) ==========                                                               |
| LogicalResultSink[11] ( outputExprs=[id#0, name#1] )                                                          |
| +--LogicalProject[9] ( distinct=false, projects=[id#0, name#1], excepts=[] )                                  |
|    +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON ) |
|                                                                                                               |
| ========== REWRITTEN PLAN (time: 0ms)==========                                                               |
| LogicalResultSink[11] ( outputExprs=[id#0, name#1] )                                                          |
| +--LogicalProject[9] ( distinct=false, projects=[id#0, name#1], excepts=[] )                                  |
|    +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON ) |
|                                                                                                               |
| ========== OPTIMIZED PLAN (time: 2ms) ==========                                                              |
| PhysicalResultSink[56] ( outputExprs=[id#0, name#1] )                                                         |
| +--PhysicalDistribute[53]@1 ( stats=2, distributionSpec=DistributionSpecGather )                              |
|    +--PhysicalProject[50]@1 ( stats=2, projects=[id#0, name#1] )                                              |
|       +--PhysicalOlapScan[tt]@0 ( stats=2 )                                                                   |
+---------------------------------------------------------------------------------------------------------------+
21 rows in set (0.01 sec)
```

Show plan process:
```
mysql> explain plan process select * from tt\G
*************************** 1. row ***************************
  Rule: BINDING_RELATION
Before: UnboundResultSink[8] (  )
+--LogicalProject[7] ( distinct=false, projects=[*], excepts=[] )
   +--LogicalCheckPolicy (  )
      +--UnboundRelation ( id=RelationId#0, nameParts=tt )
 After: UnboundResultSink[11] (  )
+--LogicalProject[10] ( distinct=false, projects=[*], excepts=[] )
   +--LogicalCheckPolicy (  )
      +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
*************************** 2. row ***************************
  Rule: CHECK_ROW_POLICY
Before: UnboundResultSink[15] (  )
+--LogicalProject[14] ( distinct=false, projects=[*], excepts=[] )
   +--LogicalCheckPolicy (  )
      +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
 After: UnboundResultSink[17] (  )
+--LogicalProject[16] ( distinct=false, projects=[*], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
*************************** 3. row ***************************
  Rule: BINDING_PROJECT_SLOT
Before: UnboundResultSink[22] (  )
+--LogicalProject[21] ( distinct=false, projects=[*], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
 After: UnboundResultSink[23] (  )
+--LogicalProject[20] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
*************************** 4. row ***************************
  Rule: BINDING_RESULT_SINK
Before: UnboundResultSink[26] (  )
+--LogicalProject[20] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
 After: LogicalResultSink[25] ( outputExprs=[id#0, name#1] )
+--LogicalProject[20] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
*************************** 5. row ***************************
  Rule: ELIMINATE_UNNECESSARY_PROJECT
Before: LogicalResultSink[25] ( outputExprs=[id#0, name#1] )
+--LogicalProject[20] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
 After: LogicalResultSink[27] ( outputExprs=[id#0, name#1] )
+--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
*************************** 6. row ***************************
  Rule: PRUNE_EMPTY_PARTITION
Before: LogicalResultSink[29] ( outputExprs=[id#0, name#1] )
+--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
 After: LogicalResultSink[30] ( outputExprs=[id#0, name#1] )
+--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
*************************** 7. row ***************************
  Rule: MATERIALIZED_INDEX_SCAN
Before: LogicalResultSink[36] ( outputExprs=[id#0, name#1] )
+--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
 After: LogicalResultSink[37] ( outputExprs=[id#0, name#1] )
+--LogicalProject[35] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalProject[34] ( distinct=false, projects=[id#0, name#1], excepts=[] )
      +--LogicalOlapScan ( qualified=test.tt, indexName=tt, selectedIndexId=10361, preAgg=ON )
*************************** 8. row ***************************
  Rule: MERGE_PROJECTS
Before: LogicalResultSink[40] ( outputExprs=[id#0, name#1] )
+--LogicalProject[39] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalProject[34] ( distinct=false, projects=[id#0, name#1], excepts=[] )
      +--LogicalOlapScan ( qualified=test.tt, indexName=tt, selectedIndexId=10361, preAgg=ON )
 After: LogicalResultSink[41] ( outputExprs=[id#0, name#1] )
+--LogicalProject[38] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=tt, selectedIndexId=10361, preAgg=ON )
*************************** 9. row ***************************
  Rule: ELIMINATE_UNNECESSARY_PROJECT
Before: LogicalResultSink[42] ( outputExprs=[id#0, name#1] )
+--LogicalProject[38] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=tt, selectedIndexId=10361, preAgg=ON )
 After: LogicalResultSink[43] ( outputExprs=[id#0, name#1] )
+--LogicalOlapScan ( qualified=test.tt, indexName=tt, selectedIndexId=10361, preAgg=ON )
*************************** 10. row ***************************
  Rule: REWRITE_CTE_CHILDREN
Before: LogicalResultSink[25] ( outputExprs=[id#0, name#1] )
+--LogicalProject[20] ( distinct=false, projects=[id#0, name#1], excepts=[] )
   +--LogicalOlapScan ( qualified=test.tt, indexName=<index_not_selected>, selectedIndexId=10361, preAgg=ON )
 After: LogicalResultSink[43] ( outputExprs=[id#0, name#1] )
+--LogicalOlapScan ( qualified=test.tt, indexName=tt, selectedIndexId=10361, preAgg=ON )
10 rows in set (0.00 sec)
```
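Based on the grammar and planner changes below (the `showAnalyzeProcess`/`showRewriteProcess` checks only record steps for the matching explain level), `PROCESS` should also combine with a specific plan stage to narrow the output. The following usage is a sketch inferred from the patch, not output copied from it:
```
-- Assumed usage, inferred from the parser/planner changes below:
-- list only the analysis-stage rule applications
explain analyzed plan process select * from tt;
-- list only the rewrite-stage rule applications
explain rewritten plan process select * from tt;
```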
924060929 authored on 2024-02-27 18:40:56 +08:00; committed by yiguolei.
commit 9ffcf48cce (parent 47d112e4e6); 17 changed files with 337 additions and 52 deletions.

@ -409,6 +409,7 @@ PERIOD: 'PERIOD';
PERMISSIVE: 'PERMISSIVE';
PHYSICAL: 'PHYSICAL';
PLAN: 'PLAN';
PROCESS: 'PROCESS';
PLUGIN: 'PLUGIN';
PLUGINS: 'PLUGINS';
POLICY: 'POLICY';

@ -189,6 +189,7 @@ userIdentify
explain
: (EXPLAIN planType? | DESC | DESCRIBE)
level=(VERBOSE | TREE | GRAPH | PLAN)?
PROCESS?
;
planType
@ -1112,6 +1113,7 @@ nonReserved
| PERMISSIVE
| PHYSICAL
| PLAN
| PROCESS
| PLUGIN
| PLUGINS
| POLICY

@ -26,11 +26,13 @@ public class ExplainOptions {
private boolean isTree;
private boolean isGraph;
private boolean showPlanProcess;
private ExplainCommand.ExplainLevel explainLevel;
public ExplainOptions(ExplainCommand.ExplainLevel explainLevel) {
public ExplainOptions(ExplainCommand.ExplainLevel explainLevel, boolean showPlanProcess) {
this.explainLevel = explainLevel;
this.showPlanProcess = showPlanProcess;
}
public ExplainOptions(boolean isVerbose, boolean isTree, boolean isGraph) {
@ -58,4 +60,8 @@ public class ExplainOptions {
public ExplainLevel getExplainLevel() {
return explainLevel;
}
public boolean showPlanProcess() {
return showPlanProcess;
}
}

@ -71,6 +71,7 @@ public class SummaryProfile {
public static final String WRITE_RESULT_TIME = "Write Result Time";
public static final String WAIT_FETCH_RESULT_TIME = "Wait and Fetch Result Time";
public static final String PARSE_SQL_TIME = "Parse SQL Time";
public static final String NEREIDS_ANALYSIS_TIME = "Nereids Analysis Time";
public static final String NEREIDS_REWRITE_TIME = "Nereids Rewrite Time";
public static final String NEREIDS_OPTIMIZE_TIME = "Nereids Optimize Time";
@ -83,7 +84,7 @@ public class SummaryProfile {
START_TIME, END_TIME, TOTAL_TIME, TASK_STATE, USER, DEFAULT_DB, SQL_STATEMENT);
public static final ImmutableList<String> EXECUTION_SUMMARY_KEYS = ImmutableList.of(
NEREIDS_ANALYSIS_TIME, NEREIDS_REWRITE_TIME, NEREIDS_OPTIMIZE_TIME, NEREIDS_TRANSLATE_TIME,
PARSE_SQL_TIME, NEREIDS_ANALYSIS_TIME, NEREIDS_REWRITE_TIME, NEREIDS_OPTIMIZE_TIME, NEREIDS_TRANSLATE_TIME,
WORKLOAD_GROUP, ANALYSIS_TIME,
PLAN_TIME, JOIN_REORDER_TIME, CREATE_SINGLE_NODE_TIME, QUERY_DISTRIBUTED_TIME,
INIT_SCAN_NODE_TIME, FINALIZE_SCAN_NODE_TIME, GET_SPLITS_TIME, GET_PARTITIONS_TIME,
@ -108,6 +109,8 @@ public class SummaryProfile {
private RuntimeProfile summaryProfile;
private RuntimeProfile executionSummaryProfile;
private long parseSqlStartTime = -1;
private long parseSqlFinishTime = -1;
private long nereidsAnalysisFinishTime = -1;
private long nereidsRewriteFinishTime = -1;
private long nereidsOptimizeFinishTime = -1;
@ -189,6 +192,7 @@ public class SummaryProfile {
}
private void updateExecutionSummaryProfile() {
executionSummaryProfile.addInfoString(PARSE_SQL_TIME, getPrettyParseSqlTime());
executionSummaryProfile.addInfoString(NEREIDS_ANALYSIS_TIME, getPrettyNereidsAnalysisTime());
executionSummaryProfile.addInfoString(NEREIDS_REWRITE_TIME, getPrettyNereidsRewriteTime());
executionSummaryProfile.addInfoString(NEREIDS_OPTIMIZE_TIME, getPrettyNereidsOptimizeTime());
@ -212,6 +216,14 @@ public class SummaryProfile {
executionSummaryProfile.addInfoString(WAIT_FETCH_RESULT_TIME, getPrettyQueryFetchResultFinishTime());
}
public void setParseSqlStartTime(long parseSqlStartTime) {
this.parseSqlStartTime = parseSqlStartTime;
}
public void setParseSqlFinishTime(long parseSqlFinishTime) {
this.parseSqlFinishTime = parseSqlFinishTime;
}
public void setNereidsAnalysisTime() {
this.nereidsAnalysisFinishTime = TimeUtils.getStartTimeMs();
}
@ -410,28 +422,35 @@ public class SummaryProfile {
}
}
private String getPrettyNereidsAnalysisTime() {
public String getPrettyParseSqlTime() {
if (parseSqlStartTime == -1 || parseSqlFinishTime == -1) {
return "N/A";
}
return RuntimeProfile.printCounter(parseSqlFinishTime - parseSqlStartTime, TUnit.TIME_MS);
}
public String getPrettyNereidsAnalysisTime() {
if (nereidsAnalysisFinishTime == -1 || queryAnalysisFinishTime == -1) {
return "N/A";
}
return RuntimeProfile.printCounter(nereidsAnalysisFinishTime - queryBeginTime, TUnit.TIME_MS);
}
private String getPrettyNereidsRewriteTime() {
public String getPrettyNereidsRewriteTime() {
if (nereidsRewriteFinishTime == -1 || nereidsAnalysisFinishTime == -1) {
return "N/A";
}
return RuntimeProfile.printCounter(nereidsRewriteFinishTime - nereidsAnalysisFinishTime, TUnit.TIME_MS);
}
private String getPrettyNereidsOptimizeTime() {
public String getPrettyNereidsOptimizeTime() {
if (nereidsOptimizeFinishTime == -1 || nereidsRewriteFinishTime == -1) {
return "N/A";
}
return RuntimeProfile.printCounter(nereidsOptimizeFinishTime - nereidsRewriteFinishTime, TUnit.TIME_MS);
}
private String getPrettyNereidsTranslateTime() {
public String getPrettyNereidsTranslateTime() {
if (nereidsTranslateFinishTime == -1 || nereidsOptimizeFinishTime == -1) {
return "N/A";
}

@ -334,5 +334,4 @@ public class TimeUtils {
parts.length > 3 ? String.format(" %02d:%02d:%02d", Integer.parseInt(parts[3]),
Integer.parseInt(parts[4]), Integer.parseInt(parts[5])) : "");
}
}

@ -33,6 +33,7 @@ import org.apache.doris.nereids.jobs.JobContext;
import org.apache.doris.nereids.jobs.executor.Analyzer;
import org.apache.doris.nereids.jobs.rewrite.RewriteBottomUpJob;
import org.apache.doris.nereids.jobs.rewrite.RewriteTopDownJob;
import org.apache.doris.nereids.jobs.rewrite.RootPlanTreeRewriteJob.RootRewriteJobContext;
import org.apache.doris.nereids.jobs.scheduler.JobPool;
import org.apache.doris.nereids.jobs.scheduler.JobScheduler;
import org.apache.doris.nereids.jobs.scheduler.JobStack;
@ -93,6 +94,7 @@ public class CascadesContext implements ScheduleContext {
// in analyze/rewrite stage, the plan will storage in this field
private Plan plan;
private Optional<RootRewriteJobContext> currentRootRewriteJobContext;
// in optimize stage, the plan will storage in the memo
private Memo memo;
private final StatementContext statementContext;
@ -120,6 +122,11 @@ public class CascadesContext implements ScheduleContext {
private final Map<String, Hint> hintMap = Maps.newLinkedHashMap();
private final boolean shouldCheckRelationAuthentication;
private final ThreadLocal<Boolean> showPlanProcess = new ThreadLocal<>();
// This list records the plan change events triggered by rules;
// it is shown by the `explain plan process` statement
private final List<PlanProcess> planProcesses = new ArrayList<>();
/**
* Constructor of OptimizerContext.
@ -653,4 +660,40 @@ public class CascadesContext implements ScheduleContext {
public Map<String, Hint> getHintMap() {
return hintMap;
}
public void addPlanProcess(PlanProcess planProcess) {
planProcesses.add(planProcess);
}
public List<PlanProcess> getPlanProcesses() {
return planProcesses;
}
public Optional<RootRewriteJobContext> getCurrentRootRewriteJobContext() {
return currentRootRewriteJobContext;
}
public void setCurrentRootRewriteJobContext(RootRewriteJobContext currentRootRewriteJobContext) {
this.currentRootRewriteJobContext = Optional.ofNullable(currentRootRewriteJobContext);
}
public boolean showPlanProcess() {
Boolean show = showPlanProcess.get();
return show != null && show;
}
/** set showPlanProcess in task scope */
public void withPlanProcess(boolean showPlanProcess, Runnable task) {
Boolean originSetting = this.showPlanProcess.get();
try {
this.showPlanProcess.set(showPlanProcess);
task.run();
} finally {
if (originSetting == null) {
this.showPlanProcess.remove();
} else {
this.showPlanProcess.set(originSetting);
}
}
}
}

@ -25,6 +25,7 @@ import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.MTMV;
import org.apache.doris.common.NereidsException;
import org.apache.doris.common.Pair;
import org.apache.doris.common.profile.SummaryProfile;
import org.apache.doris.nereids.CascadesContext.Lock;
import org.apache.doris.nereids.exceptions.AnalysisException;
import org.apache.doris.nereids.glue.LogicalPlanAdapter;
@ -73,6 +74,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
@ -118,7 +120,8 @@ public class NereidsPlanner extends Planner {
setParsedPlan(parsedPlan);
PhysicalProperties requireProperties = buildInitRequireProperties();
statementContext.getStopwatch().start();
Plan resultPlan = plan(parsedPlan, requireProperties, explainLevel);
boolean showPlanProcess = showPlanProcess(queryStmt.getExplainOptions());
Plan resultPlan = plan(parsedPlan, requireProperties, explainLevel, showPlanProcess);
statementContext.getStopwatch().stop();
setOptimizedPlan(resultPlan);
if (explainLevel.isPlanLevel) {
@ -163,7 +166,11 @@ public class NereidsPlanner extends Planner {
}
public PhysicalPlan plan(LogicalPlan plan, PhysicalProperties outputProperties) {
return (PhysicalPlan) plan(plan, outputProperties, ExplainLevel.NONE);
return (PhysicalPlan) plan(plan, outputProperties, ExplainLevel.NONE, false);
}
public Plan plan(LogicalPlan plan, PhysicalProperties requireProperties, ExplainLevel explainLevel) {
return plan(plan, requireProperties, explainLevel, false);
}
/**
@ -174,7 +181,8 @@ public class NereidsPlanner extends Planner {
* @return plan generated by this planner
* @throws AnalysisException throw exception if failed in ant stage
*/
public Plan plan(LogicalPlan plan, PhysicalProperties requireProperties, ExplainLevel explainLevel) {
public Plan plan(LogicalPlan plan, PhysicalProperties requireProperties,
ExplainLevel explainLevel, boolean showPlanProcess) {
if (explainLevel == ExplainLevel.PARSED_PLAN || explainLevel == ExplainLevel.ALL_PLAN) {
parsedPlan = plan;
if (explainLevel == ExplainLevel.PARSED_PLAN) {
@ -190,7 +198,7 @@ public class NereidsPlanner extends Planner {
try (Lock lock = new Lock(plan, cascadesContext)) {
// resolve column, table and function
// analyze this query
analyze();
analyze(showAnalyzeProcess(explainLevel, showPlanProcess));
// minidump of input must be serialized first, this process ensure minidump string not null
try {
MinidumpUtils.serializeInputsToDumpFile(plan, cascadesContext.getTables());
@ -211,7 +219,11 @@ public class NereidsPlanner extends Planner {
}
// rule-based optimize
rewrite();
rewrite(showRewriteProcess(explainLevel, showPlanProcess));
if (statementContext.getConnectContext().getExecutor() != null) {
statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsRewriteTime();
}
if (explainLevel == ExplainLevel.REWRITTEN_PLAN || explainLevel == ExplainLevel.ALL_PLAN) {
rewrittenPlan = cascadesContext.getRewritePlan();
if (explainLevel == ExplainLevel.REWRITTEN_PLAN) {
@ -219,10 +231,6 @@ public class NereidsPlanner extends Planner {
}
}
if (statementContext.getConnectContext().getExecutor() != null) {
statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsRewriteTime();
}
optimize();
if (statementContext.getConnectContext().getExecutor() != null) {
statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsOptimizeTime();
@ -267,11 +275,11 @@ public class NereidsPlanner extends Planner {
}
}
private void analyze() {
private void analyze(boolean showPlanProcess) {
if (LOG.isDebugEnabled()) {
LOG.debug("Start analyze plan");
}
cascadesContext.newAnalyzer().analyze();
keepOrShowPlanProcess(showPlanProcess, () -> cascadesContext.newAnalyzer().analyze());
getHooks().forEach(hook -> hook.afterAnalyze(this));
NereidsTracer.logImportantTime("EndAnalyzePlan");
if (LOG.isDebugEnabled()) {
@ -282,11 +290,11 @@ public class NereidsPlanner extends Planner {
/**
* Logical plan rewrite based on a series of heuristic rules.
*/
private void rewrite() {
private void rewrite(boolean showPlanProcess) {
if (LOG.isDebugEnabled()) {
LOG.debug("Start rewrite plan");
}
Rewriter.getWholeTreeRewriter(cascadesContext).execute();
keepOrShowPlanProcess(showPlanProcess, () -> Rewriter.getWholeTreeRewriter(cascadesContext).execute());
NereidsTracer.logImportantTime("EndRewritePlan");
if (LOG.isDebugEnabled()) {
LOG.debug("End rewrite plan");
@ -422,19 +430,23 @@ public class NereidsPlanner extends Planner {
break;
case MEMO_PLAN:
plan = cascadesContext.getMemo().toString()
+ "\n\n========== OPTIMIZED PLAN ==========\n"
+ optimizedPlan.treeString()
+ "\n\n========== MATERIALIZATIONS ==========\n"
+ MaterializationContext.toString(cascadesContext.getMaterializationContexts());
+ "\n\n========== OPTIMIZED PLAN ==========\n"
+ optimizedPlan.treeString()
+ "\n\n========== MATERIALIZATIONS ==========\n"
+ MaterializationContext.toString(cascadesContext.getMaterializationContexts());
break;
case ALL_PLAN:
plan = "========== PARSED PLAN ==========\n"
plan = "========== PARSED PLAN "
+ getTimeMetricString(SummaryProfile::getPrettyParseSqlTime) + " ==========\n"
+ parsedPlan.treeString() + "\n\n"
+ "========== ANALYZED PLAN ==========\n"
+ "========== ANALYZED PLAN "
+ getTimeMetricString(SummaryProfile::getPrettyNereidsAnalysisTime) + " ==========\n"
+ analyzedPlan.treeString() + "\n\n"
+ "========== REWRITTEN PLAN ==========\n"
+ "========== REWRITTEN PLAN "
+ getTimeMetricString(SummaryProfile::getPrettyNereidsRewriteTime) + " ==========\n"
+ rewrittenPlan.treeString() + "\n\n"
+ "========== OPTIMIZED PLAN ==========\n"
+ "========== OPTIMIZED PLAN "
+ getTimeMetricString(SummaryProfile::getPrettyNereidsOptimizeTime) + " ==========\n"
+ optimizedPlan.treeString();
break;
default:
@ -564,4 +576,44 @@ public class NereidsPlanner extends Planner {
public void addHook(PlannerHook hook) {
this.hooks.add(hook);
}
private String getTimeMetricString(Function<SummaryProfile, String> profileSupplier) {
return getProfile(summaryProfile -> {
String metricString = profileSupplier.apply(summaryProfile);
return (metricString == null || "N/A".equals(metricString)) ? "" : "(time: " + metricString + ")";
}, "");
}
private <T> T getProfile(Function<SummaryProfile, T> profileSupplier, T defaultMetric) {
T metric = null;
if (statementContext.getConnectContext().getExecutor() != null) {
SummaryProfile summaryProfile = statementContext.getConnectContext().getExecutor().getSummaryProfile();
if (summaryProfile != null) {
metric = profileSupplier.apply(summaryProfile);
}
}
return metric == null ? defaultMetric : metric;
}
private boolean showAnalyzeProcess(ExplainLevel explainLevel, boolean showPlanProcess) {
return showPlanProcess
&& (explainLevel == ExplainLevel.ANALYZED_PLAN || explainLevel == ExplainLevel.ALL_PLAN);
}
private boolean showRewriteProcess(ExplainLevel explainLevel, boolean showPlanProcess) {
return showPlanProcess
&& (explainLevel == ExplainLevel.REWRITTEN_PLAN || explainLevel == ExplainLevel.ALL_PLAN);
}
private boolean showPlanProcess(ExplainOptions explainOptions) {
return explainOptions == null ? false : explainOptions.showPlanProcess();
}
private void keepOrShowPlanProcess(boolean showPlanProcess, Runnable task) {
if (showPlanProcess) {
cascadesContext.withPlanProcess(showPlanProcess, task);
} else {
task.run();
}
}
}

@ -0,0 +1,31 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.nereids;
/** PlanProcess */
public class PlanProcess {
public final String ruleName;
public final String beforeShape;
public final String afterShape;
public PlanProcess(String ruleName, String beforeShape, String afterShape) {
this.ruleName = ruleName;
this.beforeShape = beforeShape;
this.afterShape = afterShape;
}
}

@ -71,9 +71,12 @@ public class LogicalPlanAdapter extends StatementBase implements Queriable {
@Override
public ExplainOptions getExplainOptions() {
return logicalPlan instanceof ExplainCommand
? new ExplainOptions(((ExplainCommand) logicalPlan).getLevel())
: super.getExplainOptions();
if (logicalPlan instanceof ExplainCommand) {
ExplainCommand explain = (ExplainCommand) logicalPlan;
return new ExplainOptions(explain.getLevel(), explain.showPlanProcess());
} else {
return super.getExplainOptions();
}
}
public ArrayList<String> getColLabels() {

@ -17,6 +17,8 @@
package org.apache.doris.nereids.jobs.rewrite;
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.PlanProcess;
import org.apache.doris.nereids.jobs.Job;
import org.apache.doris.nereids.jobs.JobContext;
import org.apache.doris.nereids.rules.RuleType;
@ -52,7 +54,8 @@ public class CustomRewriteJob implements RewriteJob {
if (disableRules.contains(ruleType.type())) {
return;
}
Plan root = context.getCascadesContext().getRewritePlan();
CascadesContext cascadesContext = context.getCascadesContext();
Plan root = cascadesContext.getRewritePlan();
// COUNTER_TRACER.log(CounterEvent.of(Memo.getStateId(), CounterType.JOB_EXECUTION, group, logicalExpression,
// root));
Plan rewrittenRoot = customRewriter.get().rewriteRoot(root, context);
@ -62,12 +65,14 @@ public class CustomRewriteJob implements RewriteJob {
// don't remove this comment, it can help us to trace some bug when developing.
// if (!root.deepEquals(rewrittenRoot)) {
// String traceBefore = root.treeString();
// String traceAfter = root.treeString();
// printTraceLog(ruleType, traceBefore, traceAfter);
// }
context.getCascadesContext().setRewritePlan(rewrittenRoot);
if (!root.deepEquals(rewrittenRoot)) {
if (cascadesContext.showPlanProcess()) {
PlanProcess planProcess = new PlanProcess(
ruleType.name(), root.treeString(), rewrittenRoot.treeString());
cascadesContext.addPlanProcess(planProcess);
}
}
cascadesContext.setRewritePlan(rewrittenRoot);
}
@Override

@ -18,6 +18,7 @@
package org.apache.doris.nereids.jobs.rewrite;
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.PlanProcess;
import org.apache.doris.nereids.jobs.Job;
import org.apache.doris.nereids.jobs.JobContext;
import org.apache.doris.nereids.jobs.JobType;
@ -42,6 +43,8 @@ public abstract class PlanTreeRewriteJob extends Job {
boolean isRewriteRoot = rewriteJobContext.isRewriteRoot();
CascadesContext cascadesContext = context.getCascadesContext();
cascadesContext.setIsRewriteRoot(isRewriteRoot);
boolean showPlanProcess = cascadesContext.showPlanProcess();
for (Rule rule : rules) {
if (disableRules.contains(rule.getRuleType().type())) {
continue;
@ -56,17 +59,18 @@ public abstract class PlanTreeRewriteJob extends Job {
// don't remove this comment, it can help us to trace some bug when developing.
NereidsTracer.logRewriteEvent(rule.toString(), pattern, plan, newPlan);
// String traceBefore = null;
// if (traceEnable) {
// traceBefore = getCurrentPlanTreeString();
// }
String traceBefore = null;
if (showPlanProcess) {
traceBefore = getCurrentPlanTreeString();
}
rewriteJobContext.result = newPlan;
context.setRewritten(true);
rule.acceptPlan(newPlan);
// if (traceEnable) {
// String traceAfter = getCurrentPlanTreeString();
// printTraceLog(rule, traceBefore, traceAfter);
// }
if (showPlanProcess) {
String traceAfter = getCurrentPlanTreeString();
PlanProcess planProcess = new PlanProcess(rule.getRuleType().name(), traceBefore, traceAfter);
cascadesContext.addPlanProcess(planProcess);
}
return new RewriteResult(true, newPlan);
}
}
@ -96,6 +100,13 @@ public abstract class PlanTreeRewriteJob extends Job {
return changed ? plan.withChildren(newChildren) : plan;
}
private String getCurrentPlanTreeString() {
return context.getCascadesContext()
.getCurrentRootRewriteJobContext().get()
.getNewestPlan()
.treeString();
}
static class RewriteResult {
final boolean hasNewPlan;
final Plan plan;

@ -20,6 +20,8 @@ package org.apache.doris.nereids.jobs.rewrite;
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.jobs.Job;
import org.apache.doris.nereids.jobs.JobContext;
import org.apache.doris.nereids.jobs.JobType;
import org.apache.doris.nereids.jobs.scheduler.JobStack;
import org.apache.doris.nereids.rules.Rule;
import org.apache.doris.nereids.trees.plans.Plan;
@ -50,6 +52,8 @@ public class RootPlanTreeRewriteJob implements RewriteJob {
context.getScheduleContext().pushJob(rewriteJob);
cascadesContext.getJobScheduler().executeJobPool(cascadesContext);
cascadesContext.setCurrentRootRewriteJobContext(null);
}
@Override
@ -70,6 +74,7 @@ public class RootPlanTreeRewriteJob implements RewriteJob {
RootRewriteJobContext(Plan plan, boolean childrenVisited, JobContext jobContext) {
super(plan, null, -1, childrenVisited);
this.jobContext = Objects.requireNonNull(jobContext, "jobContext cannot be null");
jobContext.getCascadesContext().setCurrentRootRewriteJobContext(this);
}
@Override
@ -96,5 +101,72 @@ public class RootPlanTreeRewriteJob implements RewriteJob {
public RewriteJobContext withPlanAndChildrenVisited(Plan plan, boolean childrenVisited) {
return new RootRewriteJobContext(plan, childrenVisited, jobContext);
}
/** linkChildren */
public Plan getNewestPlan() {
JobStack jobStack = new JobStack();
LinkPlanJob linkPlanJob = new LinkPlanJob(
jobContext, this, null, false, jobStack);
jobStack.push(linkPlanJob);
while (!jobStack.isEmpty()) {
Job job = jobStack.pop();
job.execute();
}
return linkPlanJob.result;
}
}
/** use to assemble the rewriting plan */
private static class LinkPlanJob extends Job {
LinkPlanJob parentJob;
RewriteJobContext rewriteJobContext;
Plan[] childrenResult;
Plan result;
boolean linked;
JobStack jobStack;
private LinkPlanJob(JobContext context, RewriteJobContext rewriteJobContext,
LinkPlanJob parentJob, boolean linked, JobStack jobStack) {
super(JobType.LINK_PLAN, context);
this.rewriteJobContext = rewriteJobContext;
this.parentJob = parentJob;
this.linked = linked;
this.childrenResult = new Plan[rewriteJobContext.plan.arity()];
this.jobStack = jobStack;
}
@Override
public void execute() {
if (!linked) {
linked = true;
jobStack.push(this);
for (int i = rewriteJobContext.childrenContext.length - 1; i >= 0; i--) {
RewriteJobContext childContext = rewriteJobContext.childrenContext[i];
if (childContext != null) {
jobStack.push(new LinkPlanJob(context, childContext, this, false, jobStack));
}
}
} else if (rewriteJobContext.result != null) {
linkResult(rewriteJobContext.result);
} else {
Plan[] newChildren = new Plan[childrenResult.length];
for (int i = 0; i < newChildren.length; i++) {
Plan childResult = childrenResult[i];
if (childResult == null) {
childResult = rewriteJobContext.plan.child(i);
}
newChildren[i] = childResult;
}
linkResult(rewriteJobContext.plan.withChildren(newChildren));
}
}
private void linkResult(Plan result) {
if (parentJob != null) {
parentJob.childrenResult[rewriteJobContext.childIndexInParentContext] = result;
} else {
this.result = result;
}
}
}
}

@ -2713,14 +2713,19 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
}
}
boolean showPlanProcess = false;
if (ctx.level != null) {
if (!ctx.level.getText().equalsIgnoreCase("plan")) {
explainLevel = ExplainLevel.valueOf(ctx.level.getText().toUpperCase(Locale.ROOT));
} else {
explainLevel = parseExplainPlanType(ctx.planType());
if (ctx.PROCESS() != null) {
showPlanProcess = true;
}
}
}
return new ExplainCommand(explainLevel, inputPlan);
return new ExplainCommand(explainLevel, inputPlan, showPlanProcess);
});
}

@ -61,11 +61,13 @@ public class ExplainCommand extends Command implements NoForward {
private final ExplainLevel level;
private final LogicalPlan logicalPlan;
private final boolean showPlanProcess;
public ExplainCommand(ExplainLevel level, LogicalPlan logicalPlan) {
public ExplainCommand(ExplainLevel level, LogicalPlan logicalPlan, boolean showPlanProcess) {
super(PlanType.EXPLAIN_COMMAND);
this.level = level;
this.logicalPlan = logicalPlan;
this.showPlanProcess = showPlanProcess;
}
@Override
@ -76,7 +78,8 @@ public class ExplainCommand extends Command implements NoForward {
}
explainPlan = ((LogicalPlan) ((Explainable) logicalPlan).getExplainPlan(ctx));
LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(explainPlan, ctx.getStatementContext());
logicalPlanAdapter.setIsExplain(new ExplainOptions(level));
ExplainOptions explainOptions = new ExplainOptions(level, showPlanProcess);
logicalPlanAdapter.setIsExplain(explainOptions);
executor.setParsedStmt(logicalPlanAdapter);
NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext());
if (ctx.getSessionVariable().isEnableMaterializedViewRewrite()) {
@ -85,7 +88,11 @@ public class ExplainCommand extends Command implements NoForward {
planner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift());
executor.setPlanner(planner);
executor.checkBlockRules();
executor.handleExplainStmt(planner.getExplainString(new ExplainOptions(level)), true);
if (showPlanProcess) {
executor.handleExplainPlanProcessStmt(planner.getCascadesContext().getPlanProcesses());
} else {
executor.handleExplainStmt(planner.getExplainString(explainOptions), true);
}
}
@Override
@ -100,4 +107,8 @@ public class ExplainCommand extends Command implements NoForward {
public LogicalPlan getLogicalPlan() {
return logicalPlan;
}
public boolean showPlanProcess() {
return showPlanProcess;
}
}

@ -197,6 +197,7 @@ public abstract class ConnectProcessor {
List<StatementBase> stmts = null;
Exception nereidsParseException = null;
long parseSqlStartTime = System.currentTimeMillis();
// Nereids do not support prepare and execute now, so forbid prepare command, only process query command
if (mysqlCommand == MysqlCommand.COM_QUERY && ctx.getSessionVariable().isEnableNereidsPlanner()) {
try {
@ -241,6 +242,7 @@ public abstract class ConnectProcessor {
LOG.warn("Try to parse multi origSingleStmt failed, originStmt: \"{}\"", convertedStmt);
}
}
long parseSqlFinishTime = System.currentTimeMillis();
boolean usingOrigSingleStmt = origSingleStmtList != null && origSingleStmtList.size() == stmts.size();
for (int i = 0; i < stmts.size(); ++i) {
@ -255,6 +257,8 @@ public abstract class ConnectProcessor {
parsedStmt.setOrigStmt(new OriginStatement(convertedStmt, i));
parsedStmt.setUserInfo(ctx.getCurrentUserIdentity());
executor = new StmtExecutor(ctx, parsedStmt);
executor.getProfile().getSummaryProfile().setParseSqlStartTime(parseSqlStartTime);
executor.getProfile().getSummaryProfile().setParseSqlFinishTime(parseSqlFinishTime);
ctx.setExecutor(executor);
try {

@ -121,6 +121,7 @@ import org.apache.doris.mysql.MysqlSerializer;
import org.apache.doris.mysql.ProxyMysqlChannel;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.nereids.NereidsPlanner;
import org.apache.doris.nereids.PlanProcess;
import org.apache.doris.nereids.StatementContext;
import org.apache.doris.nereids.exceptions.ParseException;
import org.apache.doris.nereids.glue.LogicalPlanAdapter;
@ -637,7 +638,7 @@ public class StmtExecutor {
}
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, e.getMessage());
throw new NereidsException("Command (" + originStmt.originStmt + ") process failed.",
new AnalysisException(e.getMessage(), e));
new AnalysisException(e.getMessage() == null ? e.toString() : e.getMessage(), e));
}
} else {
context.getState().setIsQuery(true);
@ -2446,6 +2447,26 @@ public class StmtExecutor {
sendResultSet(resultSet);
}
public void handleExplainPlanProcessStmt(List<PlanProcess> result) throws IOException {
ShowResultSetMetaData metaData = ShowResultSetMetaData.builder()
.addColumn(new Column("Rule", ScalarType.createVarchar(-1)))
.addColumn(new Column("Before", ScalarType.createVarchar(-1)))
.addColumn(new Column("After", ScalarType.createVarchar(-1)))
.build();
if (context.getConnectType() == ConnectType.MYSQL) {
sendMetaData(metaData);
for (PlanProcess row : result) {
serializer.reset();
serializer.writeLenEncodedString(row.ruleName);
serializer.writeLenEncodedString(row.beforeShape);
serializer.writeLenEncodedString(row.afterShape);
context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
}
context.getState().setEof();
}
public void handleExplainStmt(String result, boolean isNereids) throws IOException {
ShowResultSetMetaData metaData = ShowResultSetMetaData.builder()
.addColumn(new Column("Explain String" + (isNereids ? "(Nereids Planner)" : "(Old Planner)"),

@ -532,7 +532,7 @@ public class PlanChecker {
NereidsPlanner nereidsPlanner = new NereidsPlanner(
new StatementContext(connectContext, new OriginStatement(sql, 0)));
LogicalPlanAdapter adapter = LogicalPlanAdapter.of(parsed);
adapter.setIsExplain(new ExplainOptions(ExplainLevel.ALL_PLAN));
adapter.setIsExplain(new ExplainOptions(ExplainLevel.ALL_PLAN, false));
nereidsPlanner.plan(adapter);
consumer.accept(nereidsPlanner);
return this;