[enhancement](Nereids): make HyperGraph buildable from a plan (#26592)

谢健
2023-11-15 19:06:14 +08:00
committed by GitHub
parent ea4b6e9975
commit ad7dfd75d5
11 changed files with 278 additions and 151 deletions
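For illustration only (this block is not part of the commit): the change removes JoinOrderJob.buildGraph and lets HyperGraph build itself, either from a memo Group for DPhyper join ordering or directly from a Plan for struct-info (materialized-view) matching. A minimal sketch of the two new entry points added in this diff; the wrapper class and method names are hypothetical:

import org.apache.doris.nereids.jobs.joinorder.hypergraph.HyperGraph;
import org.apache.doris.nereids.memo.Group;
import org.apache.doris.nereids.trees.plans.Plan;

public final class HyperGraphUsageSketch {
    // Join-ordering path: the graph is derived from the group's first logical expression.
    static HyperGraph forJoinOrdering(Group root) {
        return HyperGraph.toDPhyperGraph(root);
    }

    // MV-matching path: the plan must carry a GroupExpression (checked by a precondition).
    static HyperGraph forStructInfo(Plan plan) {
        return HyperGraph.toStructInfo(plan);
    }
}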

View File

@@ -17,7 +17,6 @@
package org.apache.doris.nereids.jobs.joinorder;
import org.apache.doris.common.Pair;
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.exceptions.AnalysisException;
import org.apache.doris.nereids.jobs.Job;
@@ -27,31 +26,21 @@ import org.apache.doris.nereids.jobs.cascades.DeriveStatsJob;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.GraphSimplifier;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.HyperGraph;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.SubgraphEnumerator;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.bitmap.LongBitmap;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.AbstractNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.DPhyperNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.receiver.PlanReceiver;
import org.apache.doris.nereids.memo.Group;
import org.apache.doris.nereids.memo.GroupExpression;
import org.apache.doris.nereids.trees.expressions.Alias;
import org.apache.doris.nereids.trees.expressions.NamedExpression;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
import com.google.common.collect.Lists;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashSet;
import java.util.Set;
/**
* Join Order job with DPHyp
*/
public class JoinOrderJob extends Job {
public static final Logger LOG = LogManager.getLogger(JoinOrderJob.class);
private final Group group;
private final Set<NamedExpression> otherProject = new HashSet<>();
public JoinOrderJob(Group group, JobContext context) {
super(JobType.JOIN_ORDER, context);
@@ -72,7 +61,7 @@ public class JoinOrderJob extends Job {
}
private Group optimizePlan(Group group) {
if (group.isValidJoinGroup()) {
if (HyperGraph.isValidJoin(group.getLogicalExpression().getPlan())) {
return optimizeJoin(group);
}
GroupExpression rootExpr = group.getLogicalExpression();
@@ -84,8 +73,11 @@ public class JoinOrderJob extends Job {
}
private Group optimizeJoin(Group group) {
HyperGraph hyperGraph = new HyperGraph();
buildGraph(group, hyperGraph);
HyperGraph hyperGraph = HyperGraph.toDPhyperGraph(group);
for (AbstractNode node : hyperGraph.getNodes()) {
DPhyperNode dPhyperNode = (DPhyperNode) node;
hyperGraph.updateNode(node.getIndex(), optimizePlan(dPhyperNode.getGroup()));
}
// TODO: Right now, we just hardcode the limit with 10000, maybe we need a better way to set it
int limit = 1000;
PlanReceiver planReceiver = new PlanReceiver(this.context, limit, hyperGraph,
@@ -93,16 +85,7 @@ public class JoinOrderJob extends Job {
if (!tryEnumerateJoin(hyperGraph, planReceiver, limit)) {
return group;
}
Group optimized = planReceiver.getBestPlan(hyperGraph.getNodesMap());
// For other projects, such as project constant or project nullable, we construct a new project above root
if (!otherProject.isEmpty()) {
otherProject.addAll(optimized.getLogicalExpression().getPlan().getOutput());
LogicalProject<Plan> logicalProject = new LogicalProject<>(new ArrayList<>(otherProject),
optimized.getLogicalExpression().getPlan());
GroupExpression groupExpression = new GroupExpression(logicalProject, Lists.newArrayList(group));
optimized = context.getCascadesContext().getMemo().copyInGroupExpression(groupExpression);
}
return optimized;
return planReceiver.getBestPlan(hyperGraph.getNodesMap());
}
private boolean tryEnumerateJoin(HyperGraph hyperGraph, PlanReceiver planReceiver, int limit) {
@@ -113,47 +96,4 @@ public class JoinOrderJob extends Job {
}
return true;
}
/**
* build a hyperGraph for the root group
*
* @param group root group, should be join type
* @param hyperGraph build hyperGraph
*
* @return return edges of group's child and subTreeNodes of this group
*/
public Pair<BitSet, Long> buildGraph(Group group, HyperGraph hyperGraph) {
if (group.isProjectGroup()) {
Pair<BitSet, Long> res = buildGraph(group.getLogicalExpression().child(0), hyperGraph);
processProjectPlan(hyperGraph, group, res.second);
return res;
}
if (!group.isValidJoinGroup()) {
int idx = hyperGraph.addNode(optimizePlan(group));
return Pair.of(new BitSet(), LongBitmap.newBitmap(idx));
}
Pair<BitSet, Long> left = buildGraph(group.getLogicalExpression().child(0), hyperGraph);
Pair<BitSet, Long> right = buildGraph(group.getLogicalExpression().child(1), hyperGraph);
return Pair.of(hyperGraph.addEdge(group, left, right), LongBitmap.or(left.second, right.second));
}
/**
* Process project expression in HyperGraph
* 1. If it's a simple expression for column pruning, we just ignore it
* 2. If it's an alias that may be used in the join operator, we need to add it to graph
* 3. If it's other expression, we can ignore them and add it after optimizing
*/
private void processProjectPlan(HyperGraph hyperGraph, Group group, long subTreeNodes) {
LogicalProject<? extends Plan> logicalProject
= (LogicalProject<? extends Plan>) group.getLogicalExpression()
.getPlan();
for (NamedExpression expr : logicalProject.getProjects()) {
if (expr instanceof Alias) {
hyperGraph.addAlias((Alias) expr, subTreeNodes);
} else if (!expr.isSlot()) {
otherProject.add(expr);
}
}
}
}

View File

@@ -19,6 +19,8 @@ package org.apache.doris.nereids.jobs.joinorder.hypergraph;
import org.apache.doris.common.Pair;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.bitmap.LongBitmap;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.AbstractNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.DPhyperNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.receiver.Counter;
import org.apache.doris.nereids.stats.JoinEstimation;
import org.apache.doris.nereids.trees.expressions.Expression;
@@ -84,9 +86,10 @@ public class GraphSimplifier {
BestSimplification bestSimplification = new BestSimplification();
simplifications.add(bestSimplification);
}
for (Node node : graph.getNodes()) {
cacheStats.put(node.getNodeMap(), node.getGroup().getStatistics());
cacheCost.put(node.getNodeMap(), node.getRowCount());
for (AbstractNode node : graph.getNodes()) {
DPhyperNode dPhyperNode = (DPhyperNode) node;
cacheStats.put(node.getNodeMap(), dPhyperNode.getGroup().getStatistics());
cacheCost.put(node.getNodeMap(), dPhyperNode.getRowCount());
}
validEdges = graph.getEdges().stream()
.filter(e -> {

View File

@@ -19,15 +19,21 @@ package org.apache.doris.nereids.jobs.joinorder.hypergraph;
import org.apache.doris.common.Pair;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.bitmap.LongBitmap;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.AbstractNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.DPhyperNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.StructInfoNode;
import org.apache.doris.nereids.memo.Group;
import org.apache.doris.nereids.memo.GroupExpression;
import org.apache.doris.nereids.trees.expressions.Alias;
import org.apache.doris.nereids.trees.expressions.Expression;
import org.apache.doris.nereids.trees.expressions.NamedExpression;
import org.apache.doris.nereids.trees.expressions.Slot;
import org.apache.doris.nereids.trees.plans.GroupPlan;
import org.apache.doris.nereids.trees.plans.JoinHint;
import org.apache.doris.nereids.trees.plans.JoinType;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.trees.plans.logical.LogicalJoin;
import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
import org.apache.doris.nereids.util.PlanUtils;
import com.google.common.base.Preconditions;
@@ -36,7 +42,6 @@ import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -47,8 +52,7 @@ import java.util.Set;
*/
public class HyperGraph {
private final List<Edge> edges = new ArrayList<>();
private final List<Node> nodes = new ArrayList<>();
private final HashSet<Group> nodeSet = new HashSet<>();
private final List<AbstractNode> nodes = new ArrayList<>();
private final HashMap<Slot, Long> slotToNodeMap = new HashMap<>();
// record all edges that can be placed on the subgraph
private final Map<Long, BitSet> treeEdgesCache = new HashMap<>();
@@ -62,7 +66,7 @@ public class HyperGraph {
return edges;
}
public List<Node> getNodes() {
public List<AbstractNode> getNodes() {
return nodes;
}
@@ -74,7 +78,7 @@
return edges.get(index);
}
public Node getNode(int index) {
public AbstractNode getNode(int index) {
return nodes.get(index);
}
@@ -123,19 +127,33 @@
* @param group The group that is the end node in graph
* @return return the node index
*/
public int addNode(Group group) {
Preconditions.checkArgument(!group.isValidJoinGroup());
public int addDPHyperNode(Group group) {
for (Slot slot : group.getLogicalExpression().getPlan().getOutput()) {
Preconditions.checkArgument(!slotToNodeMap.containsKey(slot));
slotToNodeMap.put(slot, LongBitmap.newBitmap(nodes.size()));
}
nodeSet.add(group);
nodes.add(new Node(nodes.size(), group));
nodes.add(new DPhyperNode(nodes.size(), group));
return nodes.size() - 1;
}
public boolean isNodeGroup(Group group) {
return nodeSet.contains(group);
/**
* add end node to HyperGraph
*
* @param plan The plan that is the end node in graph
* @return return the node index
*/
public int addStructInfoNode(Plan plan) {
for (Slot slot : plan.getOutput()) {
Preconditions.checkArgument(!slotToNodeMap.containsKey(slot));
slotToNodeMap.put(slot, LongBitmap.newBitmap(nodes.size()));
}
nodes.add(new StructInfoNode(nodes.size(), plan));
return nodes.size() - 1;
}
public void updateNode(int idx, Group group) {
Preconditions.checkArgument(nodes.get(idx) instanceof DPhyperNode);
nodes.set(idx, ((DPhyperNode) nodes.get(idx)).withGroup(group));
}
public HashMap<Long, List<NamedExpression>> getComplexProject() {
@@ -145,11 +163,9 @@
/**
* try to add edge for join group
*
* @param group The join group
* @param join The join plan
*/
public BitSet addEdge(Group group, Pair<BitSet, Long> leftEdgeNodes, Pair<BitSet, Long> rightEdgeNodes) {
Preconditions.checkArgument(group.isValidJoinGroup());
LogicalJoin<? extends Plan, ? extends Plan> join = (LogicalJoin) group.getLogicalExpression().getPlan();
public BitSet addEdge(LogicalJoin<?, ?> join, Pair<BitSet, Long> leftEdgeNodes, Pair<BitSet, Long> rightEdgeNodes) {
HashMap<Pair<Long, Long>, Pair<List<Expression>, List<Expression>>> conjuncts = new HashMap<>();
for (Expression expression : join.getHashJoinConjuncts()) {
// TODO: avoid calling calculateEnds if calNodeMap's results are same
@@ -304,6 +320,108 @@
return bitmap;
}
public static HyperGraph toStructInfo(Plan plan) {
Preconditions.checkArgument(plan.getGroupExpression().isPresent(),
"HyperGraph requires a GroupExpression in ", plan);
HyperGraph hyperGraph = new HyperGraph();
hyperGraph.buildStructInfo(plan);
return hyperGraph;
}
public static HyperGraph toDPhyperGraph(Group group) {
HyperGraph hyperGraph = new HyperGraph();
hyperGraph.buildDPhyperGraph(group.getLogicalExpressions().get(0));
return hyperGraph;
}
// Build Graph for DPhyper
private Pair<BitSet, Long> buildDPhyperGraph(GroupExpression groupExpression) {
// process Project
if (isValidProject(groupExpression.getPlan())) {
LogicalProject<?> project = (LogicalProject<?>) groupExpression.getPlan();
Pair<BitSet, Long> res = this.buildDPhyperGraph(groupExpression.child(0).getLogicalExpressions().get(0));
for (NamedExpression expr : project.getProjects()) {
if (expr instanceof Alias) {
this.addAlias((Alias) expr, res.second);
}
}
return res;
}
// process Join
if (isValidJoin(groupExpression.getPlan())) {
LogicalJoin<?, ?> join = (LogicalJoin<?, ?>) groupExpression.getPlan();
Pair<BitSet, Long> left = this.buildDPhyperGraph(groupExpression.child(0).getLogicalExpressions().get(0));
Pair<BitSet, Long> right = this.buildDPhyperGraph(groupExpression.child(1).getLogicalExpressions().get(0));
return Pair.of(this.addEdge(join, left, right),
LongBitmap.or(left.second, right.second));
}
// process Other Node
int idx = this.addDPHyperNode(groupExpression.getOwnerGroup());
return Pair.of(new BitSet(), LongBitmap.newBitmap(idx));
}
// Build Graph for matching mv
private Pair<BitSet, Long> buildStructInfo(Plan plan) {
if (plan instanceof GroupPlan) {
Group group = ((GroupPlan) plan).getGroup();
if (group.getHyperGraph() == null) {
buildStructInfo(group.getLogicalExpressions().get(0).getPlan());
} else {
//TODO: merge Group
}
}
// process Project
if (isValidProject(plan)) {
LogicalProject<?> project = (LogicalProject<?>) plan;
Pair<BitSet, Long> res = this.buildStructInfo(plan.child(0));
for (NamedExpression expr : project.getProjects()) {
if (expr instanceof Alias) {
this.addAlias((Alias) expr, res.second);
}
}
return res;
}
// process Join
if (isValidJoin(plan)) {
LogicalJoin<?, ?> join = (LogicalJoin<?, ?>) plan;
Pair<BitSet, Long> left = this.buildStructInfo(plan.child(0));
Pair<BitSet, Long> right = this.buildStructInfo(plan.child(1));
return Pair.of(this.addEdge(join, left, right),
LongBitmap.or(left.second, right.second));
}
// process Other Node
int idx = this.addStructInfoNode(plan);
return Pair.of(new BitSet(), LongBitmap.newBitmap(idx));
}
/**
* inner join group without mark slot
*/
public static boolean isValidJoin(Plan plan) {
if (!(plan instanceof LogicalJoin)) {
return false;
}
LogicalJoin<?, ?> join = (LogicalJoin<?, ?>) plan;
return join.getJoinType() == JoinType.INNER_JOIN
&& !join.isMarkJoin()
&& !join.getExpressions().isEmpty();
}
/**
* the project with alias and slot
*/
public static boolean isValidProject(Plan plan) {
if (!(plan instanceof LogicalProject)) {
return false;
}
return ((LogicalProject<? extends Plan>) plan).getProjects().stream()
.allMatch(e -> e instanceof Slot || e instanceof Alias);
}
/**
* Graph simplifier need to update the edge for join ordering
*
@@ -349,15 +467,18 @@
StringBuilder builder = new StringBuilder();
builder.append(String.format("digraph G { # %d edges\n", edges.size()));
List<String> graphvisNodes = new ArrayList<>();
for (Node node : nodes) {
for (AbstractNode node : nodes) {
String nodeName = node.getName();
// nodeID is used to identify the node with the same name
String nodeID = nodeName;
while (graphvisNodes.contains(nodeID)) {
nodeID += "_";
}
double rowCount = (node instanceof DPhyperNode)
? ((DPhyperNode) node).getRowCount()
: -1;
builder.append(String.format(" %s [label=\"%s \n rowCount=%.2f\"];\n",
nodeID, nodeName, node.getRowCount()));
nodeID, nodeName, rowCount));
graphvisNodes.add(nodeName);
}
for (int i = 0; i < edges.size(); i += 1) {
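For illustration only (this block is not part of the commit): the two predicates added above, isValidJoin and isValidProject, decide how buildDPhyperGraph/buildStructInfo classify each operator while walking the plan. A hedged sketch of that classification using only the public API shown in this file; the helper class and method names are hypothetical:

import org.apache.doris.nereids.jobs.joinorder.hypergraph.HyperGraph;
import org.apache.doris.nereids.trees.plans.Plan;

final class PlanClassifierSketch {
    // Mirrors the recursion order used by buildStructInfo/buildDPhyperGraph above.
    static String classify(Plan plan) {
        if (HyperGraph.isValidJoin(plan)) {
            return "edge";        // INNER join, not a mark join, with at least one conjunct
        }
        if (HyperGraph.isValidProject(plan)) {
            return "transparent"; // only Slots and Aliases; aliases are recorded, recursion continues
        }
        return "leaf";            // anything else becomes a graph node
    }
}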

View File

@@ -19,6 +19,8 @@ package org.apache.doris.nereids.jobs.joinorder.hypergraph;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.bitmap.LongBitmap;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.bitmap.LongBitmapSubsetIterator;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.AbstractNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.DPhyperNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.receiver.AbstractReceiver;
import org.apache.doris.qe.ConnectContext;
@@ -68,16 +70,17 @@
traceBuilder.append("Query Graph Graphviz: ").append(hyperGraph.toDottyHyperGraph()).append("\n");
}
receiver.reset();
List<Node> nodes = hyperGraph.getNodes();
List<AbstractNode> nodes = hyperGraph.getNodes();
// Init all nodes in Receiver
for (Node node : nodes) {
receiver.addGroup(node.getNodeMap(), node.getGroup());
for (AbstractNode node : nodes) {
DPhyperNode dPhyperNode = (DPhyperNode) node;
receiver.addGroup(node.getNodeMap(), dPhyperNode.getGroup());
}
int size = nodes.size();
// Init edgeCalculator
edgeCalculator = new EdgeCalculator(hyperGraph.getEdges());
for (Node node : nodes) {
for (AbstractNode node : nodes) {
edgeCalculator.initSubgraph(node.getNodeMap());
}

View File

@@ -15,29 +15,28 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.nereids.jobs.joinorder.hypergraph;
package org.apache.doris.nereids.jobs.joinorder.hypergraph.node;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.Edge;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.bitmap.LongBitmap;
import org.apache.doris.nereids.memo.Group;
import org.apache.doris.nereids.trees.expressions.Slot;
import org.apache.doris.nereids.trees.plans.Plan;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* HyperGraph Node.
*/
public class Node {
private final int index;
// Due to group in Node is base group, so mergeGroup() don't need to consider it.
private final Group group;
private final List<Edge> edges = new ArrayList<>();
public class AbstractNode {
protected final int index;
protected final List<Edge> edges;
protected final Plan plan;
public Node(int index, Group group) {
this.group = group;
protected AbstractNode(Plan plan, int index, List<Edge> edges) {
this.index = index;
this.edges = edges;
this.plan = plan;
}
public List<Edge> getEdges() {
@@ -53,7 +52,7 @@ public class Node {
}
public Plan getPlan() {
return group.getLogicalExpression().getPlan();
return plan;
}
/**
@@ -78,15 +77,7 @@
return getPlan().getType().name() + index;
}
public double getRowCount() {
return group.getStatistics().getRowCount();
}
public Group getGroup() {
return group;
}
public Set<Slot> getOutput() {
return group.getLogicalExpression().getPlan().getOutputSet();
return plan.getOutputSet();
}
}

View File

@@ -0,0 +1,57 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.nereids.jobs.joinorder.hypergraph.node;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.Edge;
import org.apache.doris.nereids.memo.Group;
import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.List;
/**
* HyperGraph Node.
*/
public class DPhyperNode extends AbstractNode {
private final Group group;
public DPhyperNode(int index, Group group, List<Edge> edges) {
super(group.getLogicalExpression().getPlan(), index, edges);
Preconditions.checkArgument(group != null,
"DPhyper requires Group is not null");
this.group = group;
}
public DPhyperNode(int index, Group group) {
this(index, group, new ArrayList<>());
}
public DPhyperNode withGroup(Group group) {
return new DPhyperNode(index, group, edges);
}
public Group getGroup() {
return group;
}
public double getRowCount() {
return group.getStatistics().getRowCount();
}
}

View File

@@ -0,0 +1,37 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.nereids.jobs.joinorder.hypergraph.node;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.Edge;
import org.apache.doris.nereids.trees.plans.Plan;
import java.util.ArrayList;
import java.util.List;
/**
* HyperGraph Node.
*/
public class StructInfoNode extends AbstractNode {
public StructInfoNode(int index, Plan plan, List<Edge> edges) {
super(plan, index, edges);
}
public StructInfoNode(int index, Plan plan) {
this(index, plan, new ArrayList<>());
}
}
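For illustration only (this block is not part of the commit): the two new files above split the old Node into a plan-only base class (AbstractNode) and a group-backed subclass (DPhyperNode), with StructInfoNode as a plain plan wrapper for materialized-view matching. A small sketch of how the two concrete types differ, using only constructors and methods shown in the diff:

import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.AbstractNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.DPhyperNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.StructInfoNode;
import org.apache.doris.nereids.memo.Group;
import org.apache.doris.nereids.trees.plans.Plan;

final class NodeHierarchySketch {
    // DPhyper path: the node is backed by a memo Group, so statistics are available.
    static double rowCountOf(Group group) {
        DPhyperNode node = new DPhyperNode(0, group);
        return node.getRowCount(); // delegates to group.getStatistics().getRowCount()
    }

    // MV-matching path: wraps the Plan directly; no Group and no statistics.
    static AbstractNode structNodeOf(Plan plan) {
        return new StructInfoNode(0, plan);
    }
}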

View File

@@ -19,6 +19,7 @@ package org.apache.doris.nereids.memo;
import org.apache.doris.common.Pair;
import org.apache.doris.nereids.cost.Cost;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.HyperGraph;
import org.apache.doris.nereids.properties.LogicalProperties;
import org.apache.doris.nereids.properties.PhysicalProperties;
import org.apache.doris.nereids.trees.expressions.literal.Literal;
@@ -47,6 +48,7 @@ import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
/**
* Representation for group in cascades optimizer.
@@ -414,6 +416,10 @@ public class Group {
return false;
}
public @Nullable HyperGraph getHyperGraph() {
return null;
}
public boolean isProjectGroup() {
return getLogicalExpression().getPlan() instanceof LogicalProject;
}

View File

@@ -18,6 +18,7 @@
package org.apache.doris.nereids.jobs.joinorder.hypergraph;
import org.apache.doris.common.Pair;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.DPhyperNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.receiver.Counter;
import org.apache.doris.nereids.trees.expressions.Alias;
import org.apache.doris.nereids.trees.expressions.EqualTo;
@@ -61,9 +62,9 @@ class GraphSimplifierTest {
.join(project3, JoinType.INNER_JOIN, Lists.newArrayList(new EqualTo(alias2.toSlot(), alias3.toSlot())), new ArrayList<>())
.build();
HyperGraph hyperGraph = HyperGraphBuilder.buildHyperGraphFromPlan(join);
for (Node node : hyperGraph.getNodes()) {
node.getGroup().setStatistics(new Statistics(1, new HashMap<>()));
}
hyperGraph.getNodes().forEach(
n -> ((DPhyperNode) n).getGroup().setStatistics(new Statistics(1, new HashMap<>()))
);
GraphSimplifier graphSimplifier = new GraphSimplifier(hyperGraph);
while (graphSimplifier.applySimplificationStep()) {
}

View File

@@ -20,7 +20,6 @@ package org.apache.doris.nereids.util;
import org.apache.doris.catalog.Env;
import org.apache.doris.common.Pair;
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.jobs.joinorder.JoinOrderJob;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.HyperGraph;
import org.apache.doris.nereids.memo.Group;
import org.apache.doris.nereids.memo.GroupExpression;
@@ -315,28 +314,20 @@ public class HyperGraphBuilder {
private HyperGraph buildHyperGraph(Plan plan) {
CascadesContext cascadesContext = MemoTestUtils.createCascadesContext(MemoTestUtils.createConnectContext(),
plan);
JoinOrderJob joinOrderJob = new JoinOrderJob(cascadesContext.getMemo().getRoot(),
cascadesContext.getCurrentJobContext());
cascadesContext.getJobScheduler().executeJobPool(cascadesContext);
injectRowcount(cascadesContext.getMemo().getRoot());
HyperGraph hyperGraph = new HyperGraph();
joinOrderJob.buildGraph(cascadesContext.getMemo().getRoot(), hyperGraph);
return hyperGraph;
return HyperGraph.toDPhyperGraph(cascadesContext.getMemo().getRoot());
}
public static HyperGraph buildHyperGraphFromPlan(Plan plan) {
CascadesContext cascadesContext = MemoTestUtils.createCascadesContext(MemoTestUtils.createConnectContext(),
plan);
JoinOrderJob joinOrderJob = new JoinOrderJob(cascadesContext.getMemo().getRoot(),
cascadesContext.getCurrentJobContext());
cascadesContext.getJobScheduler().executeJobPool(cascadesContext);
HyperGraph hyperGraph = new HyperGraph();
joinOrderJob.buildGraph(cascadesContext.getMemo().getRoot(), hyperGraph);
return hyperGraph;
return HyperGraph.toDPhyperGraph(cascadesContext.getMemo().getRoot());
}
private void injectRowcount(Group group) {
if (!group.isValidJoinGroup()) {
if (!HyperGraph.isValidJoin(group.getLogicalExpression().getPlan())) {
LogicalOlapScan scanPlan = (LogicalOlapScan) group.getLogicalExpression().getPlan();
Statistics stats = injectRowcount(scanPlan);
group.setStatistics(stats);

View File

@@ -50,12 +50,10 @@ import org.apache.doris.nereids.rules.RuleSet;
import org.apache.doris.nereids.rules.RuleType;
import org.apache.doris.nereids.rules.analysis.BindRelation.CustomTableResolver;
import org.apache.doris.nereids.rules.rewrite.OneRewriteRuleFactory;
import org.apache.doris.nereids.trees.expressions.Slot;
import org.apache.doris.nereids.trees.plans.GroupPlan;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.trees.plans.commands.ExplainCommand.ExplainLevel;
import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
import org.apache.doris.nereids.trees.plans.physical.PhysicalDistribute;
import org.apache.doris.nereids.trees.plans.physical.PhysicalPlan;
import org.apache.doris.nereids.trees.plans.physical.PhysicalQuickSort;
@@ -429,27 +427,6 @@ public class PlanChecker {
return this;
}
public PlanChecker orderJoin() {
Group root = cascadesContext.getMemo().getRoot();
boolean changeRoot = false;
if (root.isValidJoinGroup()) {
List<Slot> outputs = root.getLogicalExpression().getPlan().getOutput();
// FIXME: can't match type, convert List<Slot> to List<NamedExpression>
GroupExpression newExpr = new GroupExpression(
new LogicalProject(outputs, root.getLogicalExpression().getPlan()),
Lists.newArrayList(root));
// FIXME: use wrong constructor.
root = new Group(null, newExpr, null);
changeRoot = true;
}
cascadesContext.pushJob(new JoinOrderJob(root, cascadesContext.getCurrentJobContext()));
cascadesContext.getJobScheduler().executeJobPool(cascadesContext);
if (changeRoot) {
cascadesContext.getMemo().setRoot(root.getLogicalExpression().child(0));
}
return this;
}
public PlanChecker matchesFromRoot(PatternDescriptor<? extends Plan> patternDesc) {
Memo memo = cascadesContext.getMemo();
assertMatches(memo, () -> new GroupExpressionMatching(patternDesc.pattern,