[enhancement](Nereids) merge consecutive LogicalLimit plans (#12096)

This rule aims to merge consecutive limits.

LogicalLimit(limit=10, offset=4)
+---LogicalLimit(limit=3, offset=5)

is transformed to

LogicalLimit(limit=3, offset=5)

where

newLimit.limit  = min(topLimit.limit, bottomLimit.limit)
newLimit.offset = bottomLimit.offset
topLimit.offset is ignored
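
As a minimal, self-contained sketch of the merge arithmetic above (the class and method names are illustrative only, not part of the Nereids API):

public class MergeLimitsSketch {
    // Merges a top limit stacked directly on a bottom limit, per the rule above:
    // the merged limit is the smaller of the two limits, the merged offset is the
    // bottom offset, and the top offset is dropped.
    static long[] merge(long topLimit, long topOffset, long bottomLimit, long bottomOffset) {
        long mergedLimit = Math.min(topLimit, bottomLimit);
        long mergedOffset = bottomOffset; // topOffset is ignored
        return new long[] {mergedLimit, mergedOffset};
    }

    public static void main(String[] args) {
        // LogicalLimit(limit=10, offset=4) on top of LogicalLimit(limit=3, offset=5)
        long[] merged = merge(10, 4, 3, 5);
        System.out.println("limit=" + merged[0] + ", offset=" + merged[1]); // limit=3, offset=5
    }
}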
Author: minghong
Date: 2022-08-26 15:53:44 +08:00 (committed by GitHub)
Commit: 4636d6c476 (parent 3af0745c8f)
4 changed files with 109 additions and 0 deletions


@@ -22,6 +22,8 @@ import org.apache.doris.nereids.jobs.Job;
import org.apache.doris.nereids.rules.expression.rewrite.ExpressionNormalization;
import org.apache.doris.nereids.rules.rewrite.AggregateDisassemble;
import org.apache.doris.nereids.rules.rewrite.logical.FindHashConditionForJoin;
import org.apache.doris.nereids.rules.rewrite.logical.MergeConsecutiveFilters;
import org.apache.doris.nereids.rules.rewrite.logical.MergeConsecutiveLimits;
import org.apache.doris.nereids.rules.rewrite.logical.MergeConsecutiveProjects;
import org.apache.doris.nereids.rules.rewrite.logical.PushPredicateThroughJoin;
import org.apache.doris.nereids.rules.rewrite.logical.ReorderJoin;
@@ -42,6 +44,8 @@ public class RewriteJob extends BatchRulesJob {
        super(cascadesContext);
        ImmutableList<Job> jobs = new ImmutableList.Builder<Job>()
                .add(bottomUpBatch(ImmutableList.of(new MergeConsecutiveProjects())))
                .add(bottomUpBatch(ImmutableList.of(new MergeConsecutiveFilters())))
                .add(bottomUpBatch(ImmutableList.of(new MergeConsecutiveLimits())))
                .add(topDownBatch(ImmutableList.of(new ExpressionNormalization())))
                .add(topDownBatch(ImmutableList.of(new ReorderJoin())))
                .add(topDownBatch(ImmutableList.of(new FindHashConditionForJoin())))


@@ -72,6 +72,7 @@ public enum RuleType {
    REORDER_JOIN(RuleTypeClass.REWRITE),
    MERGE_CONSECUTIVE_FILTERS(RuleTypeClass.REWRITE),
    MERGE_CONSECUTIVE_PROJECTS(RuleTypeClass.REWRITE),
    MERGE_CONSECUTIVE_LIMITS(RuleTypeClass.REWRITE),
    FIND_HASH_CONDITION_FOR_JOIN(RuleTypeClass.REWRITE),
    REWRITE_SENTINEL(RuleTypeClass.REWRITE),


@@ -0,0 +1,54 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.nereids.rules.rewrite.logical;

import org.apache.doris.nereids.rules.Rule;
import org.apache.doris.nereids.rules.RuleType;
import org.apache.doris.nereids.rules.rewrite.OneRewriteRuleFactory;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.trees.plans.logical.LogicalLimit;

import java.util.List;

/**
 * This rule aims to merge consecutive limits.
 * LIMIT1(limit=10, offset=4)
 *   |
 * LIMIT2(limit=3, offset=5)
 *
 * transformed to
 * LIMIT(limit=3, offset=5)
 * where
 * LIMIT.limit = min(LIMIT1.limit, LIMIT2.limit)
 * LIMIT.offset = LIMIT2.offset
 * LIMIT1.offset is ignored
 */
public class MergeConsecutiveLimits extends OneRewriteRuleFactory {
    @Override
    public Rule build() {
        return logicalLimit(logicalLimit()).then(upperLimit -> {
            LogicalLimit bottomLimit = upperLimit.child();
            List<Plan> children = bottomLimit.children();
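            // merged limit = min(top limit, bottom limit); merged offset = bottom offset (the top offset is ignored)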
            return new LogicalLimit(
                    Math.min(upperLimit.getLimit(), bottomLimit.getLimit()),
                    bottomLimit.getOffset(),
                    children.get(0)
            );
        }).toRule(RuleType.MERGE_CONSECUTIVE_LIMITS);
    }
}


@@ -0,0 +1,50 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.nereids.rules.rewrite.logical;

import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.analyzer.UnboundRelation;
import org.apache.doris.nereids.rules.Rule;
import org.apache.doris.nereids.trees.plans.logical.LogicalLimit;
import org.apache.doris.nereids.util.MemoTestUtils;

import com.google.common.collect.Lists;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.util.List;

public class MergeConsecutiveLimitsTest {
    @Test
    public void testMergeConsecutiveLimits() {
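        // stack three consecutive limits over an unbound relation: limit1(10, 2) -> limit2(2, 0) -> limit3(3, 5)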
        LogicalLimit limit3 = new LogicalLimit(3, 5, new UnboundRelation(Lists.newArrayList("db", "t")));
        LogicalLimit limit2 = new LogicalLimit(2, 0, limit3);
        LogicalLimit limit1 = new LogicalLimit(10, 2, limit2);
        CascadesContext context = MemoTestUtils.createCascadesContext(limit1);
        List<Rule> rules = Lists.newArrayList(new MergeConsecutiveLimits().build());
        context.topDownRewrite(rules);
        LogicalLimit limit = (LogicalLimit) context.getMemo().copyOut();
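        // after merging: limit = min(10, 2, 3) = 2, offset = 5 (the offset of the bottom-most limit)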
        Assertions.assertEquals(2, limit.getLimit());
        Assertions.assertEquals(5, limit.getOffset());
        Assertions.assertEquals(1, limit.children().size());
        Assertions.assertTrue(limit.child(0) instanceof UnboundRelation);
    }
}