[fix](planner) cannot process more than one subquery in disjunct (#16506)

Before this PR, Doris could not process SQL like the following:
```sql
CREATE TABLE `test_sq_dj1` (
    `c1` int(11) NULL,
    `c2` int(11) NULL,
    `c3` int(11) NULL
) ENGINE=OLAP
DUPLICATE KEY(`c1`)
COMMENT 'OLAP'
DISTRIBUTED BY HASH(`c1`) BUCKETS 3
PROPERTIES (
    "replication_allocation" = "tag.location.default: 1",
    "in_memory" = "false",
    "storage_format" = "V2",
    "disable_auto_compaction" = "false"
);

CREATE TABLE `test_sq_dj2` (
    `c1` int(11) NULL,
    `c2` int(11) NULL,
    `c3` int(11) NULL
) ENGINE=OLAP
DUPLICATE KEY(`c1`)
COMMENT 'OLAP'
DISTRIBUTED BY HASH(`c1`) BUCKETS 3
PROPERTIES (
    "replication_allocation" = "tag.location.default: 1",
    "in_memory" = "false",
    "storage_format" = "V2",
    "disable_auto_compaction" = "false"
);

insert into test_sq_dj1 values(1, 2, 3), (10, 20, 30), (100, 200, 300);
insert into test_sq_dj2 values(10, 20, 30);

-- triggers a core dump
SELECT * FROM test_sq_dj1 WHERE c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 < 10;

-- triggers an "invalid slot" error
SELECT * FROM test_sq_dj1 WHERE c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 IN (SELECT c2 FROM test_sq_dj2) OR c1 < 10;
```

There are two problems:
1. We should remove redundant sub-queries within one conjunct to avoid generating useless join nodes (a minimal sketch of the deduplication follows this list).
2. When there is more than one sub-query in one disjunct, we should assign the conjunct containing the disjunct to the top node of the set of mark join nodes, and pop the mark slots up to that top node.
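
A minimal sketch of the deduplication in problem 1, using a plain Java stand-in (the `SubqueryExpr` and `DedupSketch` names below are hypothetical, not Doris classes; the real code does the same `contains()` check on `Expr` objects):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

// Hypothetical stand-in for the planner's Expr type; only equals()/hashCode() matter here.
final class SubqueryExpr {
    private final String sql;

    SubqueryExpr(String sql) {
        this.sql = sql;
    }

    @Override
    public boolean equals(Object o) {
        return o instanceof SubqueryExpr && ((SubqueryExpr) o).sql.equals(this.sql);
    }

    @Override
    public int hashCode() {
        return Objects.hash(sql);
    }
}

public class DedupSketch {
    public static void main(String[] args) {
        // Both OR branches of the failing query contain the same subquery expression.
        SubqueryExpr first = new SubqueryExpr("c1 IN (SELECT c1 FROM test_sq_dj2)");
        SubqueryExpr second = new SubqueryExpr("c1 IN (SELECT c1 FROM test_sq_dj2)");

        // Collect subquery expressions found in disjuncts, skipping duplicates,
        // so the rewriter generates a single mark join instead of two.
        List<SubqueryExpr> subqueryExprInDisjunct = new ArrayList<>();
        for (SubqueryExpr expr : List.of(first, second)) {
            if (!subqueryExprInDisjunct.contains(expr)) {
                subqueryExprInDisjunct.add(expr);
            }
        }
        System.out.println(subqueryExprInDisjunct.size()); // prints 1
    }
}
```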
Author: morrySnow, 2023-02-08 18:46:06 +08:00 (committed by GitHub)
Commit: a512469537 (parent bb334de00f)
14 changed files with 220 additions and 29 deletions


@ -396,6 +396,8 @@ public class Analyzer {
private final Map<TableRef, TupleId> markTupleIdByInnerRef = Maps.newHashMap();
private final Set<TupleId> markTupleIdsNotProcessed = Sets.newHashSet();
public GlobalState(Env env, ConnectContext context) {
this.env = env;
this.context = context;
@ -672,12 +674,18 @@ public class Analyzer {
tableRefMap.put(result.getId(), ref);
// for mark join
// for mark join, init three contexts:
// 1. markTuples records all tuples that belong to mark slots
// 2. markTupleIdByInnerRef records the relationship between the inner table of a mark join and its mark tuple
// 3. markTupleIdsNotProcessed records mark tuple ids that have not been processed yet. if an expr contains
// a slot belonging to an unprocessed mark tuple, it must not be assigned to the current join node; its
// mark slot is popped up until all mark tuples in the expr have been processed.
if (ref.getJoinOp() != null && ref.isMark()) {
TupleDescriptor markTuple = getDescTbl().createTupleDescriptor();
markTuple.setAliases(new String[]{ref.getMarkTupleName()}, true);
globalState.markTuples.put(ref.getMarkTupleName(), markTuple);
globalState.markTupleIdByInnerRef.put(ref, markTuple.getId());
globalState.markTupleIdsNotProcessed.add(markTuple.getId());
}
return result;
@ -1592,12 +1600,38 @@ public class Analyzer {
return result;
}
public boolean needPopUpMarkTuple(TableRef ref) {
TupleId id = globalState.markTupleIdByInnerRef.get(ref);
if (id == null) {
return false;
}
List<Expr> exprs = getAllConjuncts(id);
for (Expr expr : exprs) {
List<TupleId> tupleIds = Lists.newArrayList();
expr.getIds(tupleIds, null);
if (tupleIds.stream().anyMatch(globalState.markTupleIdsNotProcessed::contains)) {
return true;
}
}
return false;
}
public List<Expr> getMarkConjuncts(TableRef ref) {
TupleId id = globalState.markTupleIdByInnerRef.get(ref);
if (id == null) {
return Collections.emptyList();
}
return getAllConjuncts(id);
globalState.markTupleIdsNotProcessed.remove(id);
List<Expr> retExprs = Lists.newArrayList();
List<Expr> exprs = getAllConjuncts(id);
for (Expr expr : exprs) {
List<TupleId> tupleIds = Lists.newArrayList();
expr.getIds(tupleIds, null);
if (tupleIds.stream().noneMatch(globalState.markTupleIdsNotProcessed::contains)) {
retExprs.add(expr);
}
}
return retExprs;
}
public TupleDescriptor getMarkTuple(TableRef ref) {
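
Taken together, the structures introduced above form a small protocol: every mark join registers its mark tuple as not-yet-processed; a conjunct that still references an unprocessed mark tuple is held back and its mark slot is popped up; the conjunct is finally assigned at the topmost mark join, where all of its mark tuples have been processed. The following self-contained model illustrates that protocol under simplifying assumptions (String refs, int tuple ids, and the `MarkTupleSketch`/`Conjunct` names are all hypothetical; this is not the real Analyzer API):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Simplified model of the mark-tuple bookkeeping; NOT the real Analyzer API.
// A "conjunct" is just a string tagged with the mark tuple ids it references.
public class MarkTupleSketch {
    static final class Conjunct {
        final String text;
        final Set<Integer> markTupleIds;

        Conjunct(String text, Set<Integer> markTupleIds) {
            this.text = text;
            this.markTupleIds = markTupleIds;
        }
    }

    private final Map<String, Integer> markTupleIdByInnerRef = new HashMap<>();
    private final Set<Integer> markTupleIdsNotProcessed = new HashSet<>();
    private final Map<Integer, List<Conjunct>> conjunctsByMarkTupleId = new HashMap<>();

    void registerMarkJoin(String innerRef, int markTupleId) {
        markTupleIdByInnerRef.put(innerRef, markTupleId);
        markTupleIdsNotProcessed.add(markTupleId);
    }

    void addConjunct(int markTupleId, Conjunct conjunct) {
        conjunctsByMarkTupleId.computeIfAbsent(markTupleId, k -> new ArrayList<>()).add(conjunct);
    }

    // True if a conjunct of this ref still references an unprocessed mark tuple,
    // i.e. the mark slot must be popped up to a join node higher in the plan.
    boolean needPopUpMarkTuple(String innerRef) {
        Integer id = markTupleIdByInnerRef.get(innerRef);
        if (id == null) {
            return false;
        }
        return conjunctsByMarkTupleId.getOrDefault(id, List.of()).stream()
                .anyMatch(c -> c.markTupleIds.stream().anyMatch(markTupleIdsNotProcessed::contains));
    }

    // Marks this ref's mark tuple as processed and returns only the conjuncts
    // whose referenced mark tuples have all been processed by now.
    List<Conjunct> getMarkConjuncts(String innerRef) {
        Integer id = markTupleIdByInnerRef.get(innerRef);
        if (id == null) {
            return List.of();
        }
        markTupleIdsNotProcessed.remove(id);
        List<Conjunct> ready = new ArrayList<>();
        for (Conjunct c : conjunctsByMarkTupleId.getOrDefault(id, List.of())) {
            if (c.markTupleIds.stream().noneMatch(markTupleIdsNotProcessed::contains)) {
                ready.add(c);
            }
        }
        return ready;
    }

    public static void main(String[] args) {
        MarkTupleSketch analyzer = new MarkTupleSketch();
        // Two mark joins produced by the two IN subqueries of the example query.
        analyzer.registerMarkJoin("sq1", 1);
        analyzer.registerMarkJoin("sq2", 2);
        // The rewritten WHERE clause references both mark slots: m1 OR m2 OR c1 < 10.
        Conjunct disjunct = new Conjunct("m1 OR m2 OR c1 < 10", Set.of(1, 2));
        analyzer.addConjunct(1, disjunct);
        analyzer.addConjunct(2, disjunct);

        // Lower mark join (sq1): mark tuple 2 is still unprocessed, so the
        // disjunct is held back and mark slot m1 is popped up to the join above.
        System.out.println(analyzer.getMarkConjuncts("sq1").size()); // 0
        System.out.println(analyzer.needPopUpMarkTuple("sq1"));      // true

        // Top mark join (sq2): all mark tuples in the disjunct are processed,
        // so the disjunct is finally assigned here and nothing is popped up.
        System.out.println(analyzer.getMarkConjuncts("sq2").size()); // 1
        System.out.println(analyzer.needPopUpMarkTuple("sq2"));      // false
    }
}
```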


@ -365,10 +365,15 @@ public class StmtRewriter {
List<Expr> subqueryExprInConjunct, List<Expr> subqueryExprInDisjunct) {
if (!(expr instanceof CompoundPredicate)) {
if (expr.contains(Subquery.class)) {
// remove redundant sub-queries by comparing them with equals()
if (inDisjunct) {
subqueryExprInDisjunct.add(expr);
if (!subqueryExprInDisjunct.contains(expr)) {
subqueryExprInDisjunct.add(expr);
}
} else {
subqueryExprInConjunct.add(expr);
if (!subqueryExprInConjunct.contains(expr)) {
subqueryExprInConjunct.add(expr);
}
}
}
} else {
@ -432,10 +437,12 @@ public class StmtRewriter {
* ON $a$1.a = T1.a
* WHERE T1.c < 10;
*/
// TODO(mark join) need support mark join
private static void rewriteWhereClauseSubqueries(SelectStmt stmt, Analyzer analyzer)
throws AnalysisException {
int numTableRefs = stmt.fromClause.size();
// we must use two separate structures to process conjuncts and disjuncts,
// because the same sub-query could appear in both at the same time.
// if we used a single ExprSubstitutionMap, the sub-query could be replaced by the wrong expr.
ArrayList<Expr> exprsWithSubqueriesInConjuncts = Lists.newArrayList();
ArrayList<Expr> exprsWithSubqueriesInDisjuncts = Lists.newArrayList();
ExprSubstitutionMap conjunctsSmap = new ExprSubstitutionMap();
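
The comment above about needing two substitution maps can be illustrated with plain maps as a hypothetical toy (ExprSubstitutionMap is not a Map, and the replacement strings below are only stand-ins): the same sub-query needs a literal TRUE replacement in the conjunct context but a mark-slot reference in the disjunct context, and a single map can only keep one of the two.

```java
import java.util.HashMap;
import java.util.Map;

// Toy illustration (plain Maps instead of ExprSubstitutionMap) of why conjuncts
// and disjuncts need separate substitution maps when the same sub-query text
// appears in both contexts.
public class TwoSmapsSketch {
    public static void main(String[] args) {
        String subquery = "c1 IN (SELECT c1 FROM test_sq_dj2)";

        // Separate maps keep both intended replacements.
        Map<String, String> conjunctsSmap = new HashMap<>();
        Map<String, String> disjunctsSmap = new HashMap<>();
        conjunctsSmap.put(subquery, "TRUE");         // conjunct occurrence: rewritten join, literal TRUE
        disjunctsSmap.put(subquery, "$mark_slot_1"); // disjunct occurrence: mark join slot

        // A single map silently overwrites one replacement with the other,
        // so one of the occurrences would be substituted with the wrong expr.
        Map<String, String> singleSmap = new HashMap<>();
        singleSmap.put(subquery, "TRUE");
        singleSmap.put(subquery, "$mark_slot_1");
        System.out.println(singleSmap.get(subquery)); // $mark_slot_1 -- the TRUE replacement is lost
    }
}
```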
@ -445,7 +452,6 @@ public class StmtRewriter {
List<Expr> subqueryInDisjunct = Lists.newArrayList();
// Check if all the conjuncts in the WHERE clause that contain subqueries
// can currently be rewritten as a join.
// TODO(mark join) traverse expr tree to process subquery.
extractExprWithSubquery(false, stmt.whereClause, subqueryInConjunct, subqueryInDisjunct);
for (Expr conjunct : subqueryInConjunct) {
processOneSubquery(stmt, exprsWithSubqueriesInConjuncts,
@ -458,8 +464,7 @@ public class StmtRewriter {
stmt.whereClause = stmt.whereClause.substitute(conjunctsSmap, disjunctsSmap, analyzer, false);
boolean hasNewVisibleTuple = false;
// Recursively equal all the exprs that contain subqueries and merge them
// with 'stmt'.
// Recursively equal all the exprs that contain subqueries and merge them with 'stmt'.
for (Expr expr : exprsWithSubqueriesInConjuncts) {
if (mergeExpr(stmt, rewriteExpr(expr, analyzer), analyzer, null)) {
hasNewVisibleTuple = true;
@ -515,7 +520,6 @@ public class StmtRewriter {
// Replace all the supported exprs with subqueries with true BoolLiterals
// using a smap.
if (isMark) {
// TODO(mark join) if need mark join, we should replace a SlotRef instead of BoolLiteral
TupleDescriptor markTuple = analyzer.getDescTbl().createTupleDescriptor();
markTuple.setAliases(new String[]{stmt.getTableAliasGenerator().getNextAlias()}, true);
SlotDescriptor markSlot = analyzer.addSlotDescriptor(markTuple);
@ -840,9 +844,6 @@ public class StmtRewriter {
&& ((ExistsPredicate) expr).isNotExists()) {
// For the case of a NOT IN with an eq join conjunct, replace the join
// conjunct with a conjunct that uses the null-matching eq operator.
// TODO: mark join only works on nested loop join now, and NLJ do NOT support NULL_AWARE_LEFT_ANTI_JOIN
// remove markTuple == null when nested loop join support NULL_AWARE_LEFT_ANTI_JOIN
// or plan mark join on hash join
if (expr instanceof InPredicate && markTuple == null) {
joinOp = VectorizedUtil.isVectorized()
? JoinOperator.NULL_AWARE_LEFT_ANTI_JOIN : JoinOperator.LEFT_ANTI_JOIN;


@ -94,8 +94,11 @@ public class TableRef implements ParseNode, Writable {
// Indicates whether this table ref is given an explicit alias,
protected boolean hasExplicitAlias;
protected JoinOperator joinOp;
// for mark join
protected boolean isMark;
// we must record mark tuple name for re-analyze
protected String markTupleName;
protected List<String> usingColNames;
protected ArrayList<LateralViewRef> lateralViewRefs;
protected Expr onClause;


@ -758,7 +758,7 @@ public class HashJoinNode extends JoinNodeBase {
StringBuilder output =
new StringBuilder().append(detailPrefix).append("join op: ").append(joinOp.toString()).append("(")
.append(distrModeStr).append(")").append("[").append(colocateReason).append("]\n");
output.append(detailPrefix).append("is mark: ").append(isMarkJoin()).append("\n");
if (detailLevel == TExplainLevel.BRIEF) {
output.append(detailPrefix).append(
String.format("cardinality=%,d", cardinality)).append("\n");


@ -114,7 +114,6 @@ public abstract class JoinNodeBase extends PlanNode {
}
protected void computeOutputTuple(Analyzer analyzer) throws UserException {
// TODO(mark join) if it is mark join use mark tuple instead?
// 1. create new tuple
vOutputTupleDesc = analyzer.getDescTbl().createTupleDescriptor();
boolean copyLeft = false;
@ -208,6 +207,15 @@ public abstract class JoinNodeBase extends PlanNode {
}
}
}
// add mark slot if needed
if (isMarkJoin() && analyzer.needPopUpMarkTuple(innerRef)) {
SlotDescriptor markSlot = analyzer.getMarkTuple(innerRef).getSlots().get(0);
SlotDescriptor outputSlotDesc =
analyzer.getDescTbl().copySlotDescriptor(vOutputTupleDesc, markSlot);
srcTblRefToOutputTupleSmap.put(new SlotRef(markSlot), new SlotRef(outputSlotDesc));
}
// 2. compute srcToOutputMap
vSrcToOutputSMap = ExprSubstitutionMap.subtraction(outputSmap, srcTblRefToOutputTupleSmap, analyzer);
for (int i = 0; i < vSrcToOutputSMap.size(); i++) {
@ -220,6 +228,7 @@ public abstract class JoinNodeBase extends PlanNode {
rSlotRef.getDesc().setIsMaterialized(true);
}
}
vOutputTupleDesc.computeStatAndMemLayout();
// 3. add tupleisnull in null-side
Preconditions.checkState(srcTblRefToOutputTupleSmap.getLhs().size() == vSrcToOutputSMap.getLhs().size());
@ -313,7 +322,7 @@ public abstract class JoinNodeBase extends PlanNode {
}
@Override
public void projectOutputTuple() throws NotImplementedException {
public void projectOutputTuple() {
if (vOutputTupleDesc == null) {
return;
}
@ -343,15 +352,19 @@ public abstract class JoinNodeBase extends PlanNode {
protected abstract void computeOtherConjuncts(Analyzer analyzer, ExprSubstitutionMap originToIntermediateSmap);
protected void computeIntermediateTuple(Analyzer analyzer, TupleDescriptor markTuple) throws AnalysisException {
protected void computeIntermediateTuple(Analyzer analyzer) throws AnalysisException {
// 1. create new tuple
TupleDescriptor vIntermediateLeftTupleDesc = analyzer.getDescTbl().createTupleDescriptor();
TupleDescriptor vIntermediateRightTupleDesc = analyzer.getDescTbl().createTupleDescriptor();
vIntermediateTupleDescList = new ArrayList<>();
vIntermediateTupleDescList.add(vIntermediateLeftTupleDesc);
vIntermediateTupleDescList.add(vIntermediateRightTupleDesc);
if (markTuple != null) {
vIntermediateTupleDescList.add(markTuple);
// if join type is MARK, add mark tuple to intermediate tuple. mark slot will be generated after join.
if (isMarkJoin()) {
TupleDescriptor markTuple = analyzer.getMarkTuple(innerRef);
if (markTuple != null) {
vIntermediateTupleDescList.add(markTuple);
}
}
boolean leftNullable = false;
boolean rightNullable = false;
@ -454,11 +467,7 @@ public abstract class JoinNodeBase extends PlanNode {
public void finalize(Analyzer analyzer) throws UserException {
super.finalize(analyzer);
if (VectorizedUtil.isVectorized()) {
TupleDescriptor markTuple = null;
if (innerRef != null) {
markTuple = analyzer.getMarkTuple(innerRef);
}
computeIntermediateTuple(analyzer, markTuple);
computeIntermediateTuple(analyzer);
}
}


@ -228,6 +228,7 @@ public class NestedLoopJoinNode extends JoinNodeBase {
StringBuilder output =
new StringBuilder().append(detailPrefix).append("join op: ").append(joinOp.toString()).append("(")
.append(distrModeStr).append(")\n");
output.append(detailPrefix).append("is mark: ").append(isMarkJoin()).append("\n");
if (detailLevel == TExplainLevel.BRIEF) {
output.append(detailPrefix).append(


@ -58,3 +58,11 @@
10 20 30
100 200 300
-- !same_subquery_in_conjuncts --
1 2 3
10 20 30
-- !two_subquery_in_one_conjuncts --
1 2 3
10 20 30


@ -0,0 +1,8 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q10 --
F D Advanced Degree 1 3000 1 High Risk 1 2 1 4 1 5 1
F D Unknown 1 1500 1 Good 1 6 1 5 1 4 1
M D College 1 8500 1 Low Risk 1 3 1 0 1 1 1
M D Primary 1 7000 1 Unknown 1 2 1 1 1 1 1
M W Unknown 1 4500 1 Good 1 5 1 0 1 1 1


@ -0,0 +1,103 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q35 --
F D 0 1 0 0 0.0 1 1 1 1 1.0 2 1 2 2 2.0
F D 0 1 0 0 0.0 3 1 3 3 3.0 0 1 0 0 0.0
F D 0 1 0 0 0.0 5 1 5 5 5.0 0 1 0 0 0.0
F D 0 1 0 0 0.0 6 1 6 6 6.0 0 1 0 0 0.0
F D 1 1 1 1 1.0 1 1 1 1 1.0 1 1 1 1 1.0
F D 1 1 1 1 1.0 5 1 5 5 5.0 3 1 3 3 3.0
F D 3 1 3 3 3.0 0 1 0 0 0.0 5 1 5 5 5.0
F D 3 1 3 3 3.0 1 1 1 1 1.0 3 1 3 3 3.0
F D 3 1 3 3 3.0 5 1 5 5 5.0 4 1 4 4 4.0
F D 4 1 4 4 4.0 2 1 2 2 2.0 5 1 5 5 5.0
F D 4 1 4 4 4.0 4 1 4 4 4.0 4 1 4 4 4.0
F D 6 1 6 6 6.0 0 1 0 0 0.0 6 1 6 6 6.0
F D 6 1 6 6 6.0 5 1 5 5 5.0 4 1 4 4 4.0
F M 0 1 0 0 0.0 0 1 0 0 0.0 3 1 3 3 3.0
F M 0 1 0 0 0.0 3 1 3 3 3.0 6 1 6 6 6.0
F M 1 2 1 1 1.0 1 2 1 1 1.0 0 2 0 0 0.0
F M 1 1 1 1 1.0 2 1 2 2 2.0 4 1 4 4 4.0
F M 1 1 1 1 1.0 3 1 3 3 3.0 3 1 3 3 3.0
F M 1 1 1 1 1.0 4 1 4 4 4.0 2 1 2 2 2.0
F M 1 1 1 1 1.0 6 1 6 6 6.0 2 1 2 2 2.0
F M 2 1 2 2 2.0 1 1 1 1 1.0 0 1 0 0 0.0
F M 2 1 2 2 2.0 5 1 5 5 5.0 4 1 4 4 4.0
F M 3 1 3 3 3.0 0 1 0 0 0.0 4 1 4 4 4.0
F M 4 1 4 4 4.0 2 1 2 2 2.0 1 1 1 1 1.0
F M 4 1 4 4 4.0 4 1 4 4 4.0 1 1 1 1 1.0
F M 5 1 5 5 5.0 1 1 1 1 1.0 2 1 2 2 2.0
F M 5 1 5 5 5.0 3 1 3 3 3.0 3 1 3 3 3.0
F M 6 1 6 6 6.0 2 1 2 2 2.0 2 1 2 2 2.0
F M 6 1 6 6 6.0 2 1 2 2 2.0 3 1 3 3 3.0
F M 6 1 6 6 6.0 3 1 3 3 3.0 4 1 4 4 4.0
F M 6 1 6 6 6.0 5 1 5 5 5.0 1 1 1 1 1.0
F S 0 1 0 0 0.0 0 1 0 0 0.0 3 1 3 3 3.0
F S 0 1 0 0 0.0 2 1 2 2 2.0 3 1 3 3 3.0
F S 0 1 0 0 0.0 3 1 3 3 3.0 6 1 6 6 6.0
F S 0 1 0 0 0.0 4 1 4 4 4.0 5 1 5 5 5.0
F S 1 1 1 1 1.0 0 1 0 0 0.0 3 1 3 3 3.0
F S 1 1 1 1 1.0 3 1 3 3 3.0 4 1 4 4 4.0
F S 1 1 1 1 1.0 4 1 4 4 4.0 0 1 0 0 0.0
F S 1 1 1 1 1.0 4 1 4 4 4.0 4 1 4 4 4.0
F S 1 1 1 1 1.0 5 1 5 5 5.0 0 1 0 0 0.0
F S 2 1 2 2 2.0 0 1 0 0 0.0 3 1 3 3 3.0
F S 2 1 2 2 2.0 1 1 1 1 1.0 6 1 6 6 6.0
F S 2 1 2 2 2.0 2 1 2 2 2.0 2 1 2 2 2.0
F S 2 1 2 2 2.0 4 1 4 4 4.0 6 1 6 6 6.0
F S 2 1 2 2 2.0 6 1 6 6 6.0 1 1 1 1 1.0
F S 2 1 2 2 2.0 6 1 6 6 6.0 5 1 5 5 5.0
F S 3 1 3 3 3.0 1 1 1 1 1.0 0 1 0 0 0.0
F S 3 1 3 3 3.0 1 1 1 1 1.0 2 1 2 2 2.0
F S 3 1 3 3 3.0 3 1 3 3 3.0 0 1 0 0 0.0
F S 3 1 3 3 3.0 4 1 4 4 4.0 6 1 6 6 6.0
F S 3 1 3 3 3.0 6 1 6 6 6.0 6 1 6 6 6.0
F S 4 1 4 4 4.0 0 1 0 0 0.0 2 1 2 2 2.0
F S 5 1 5 5 5.0 4 1 4 4 4.0 2 1 2 2 2.0
F S 5 1 5 5 5.0 6 1 6 6 6.0 6 1 6 6 6.0
F S 6 1 6 6 6.0 0 1 0 0 0.0 0 1 0 0 0.0
F S 6 1 6 6 6.0 0 1 0 0 0.0 2 1 2 2 2.0
F S 6 1 6 6 6.0 4 1 4 4 4.0 5 1 5 5 5.0
F U 0 1 0 0 0.0 4 1 4 4 4.0 1 1 1 1 1.0
F U 0 1 0 0 0.0 5 1 5 5 5.0 1 1 1 1 1.0
F U 0 1 0 0 0.0 6 1 6 6 6.0 6 1 6 6 6.0
F U 1 1 1 1 1.0 5 1 5 5 5.0 2 1 2 2 2.0
F U 2 1 2 2 2.0 4 1 4 4 4.0 4 1 4 4 4.0
F U 3 1 3 3 3.0 1 1 1 1 1.0 6 1 6 6 6.0
F U 3 1 3 3 3.0 3 1 3 3 3.0 6 1 6 6 6.0
F U 3 1 3 3 3.0 4 1 4 4 4.0 3 1 3 3 3.0
F U 3 1 3 3 3.0 5 1 5 5 5.0 1 1 1 1 1.0
F U 3 1 3 3 3.0 6 1 6 6 6.0 0 1 0 0 0.0
F U 3 1 3 3 3.0 6 1 6 6 6.0 3 1 3 3 3.0
F U 6 1 6 6 6.0 1 1 1 1 1.0 3 1 3 3 3.0
F U 6 1 6 6 6.0 2 1 2 2 2.0 1 1 1 1 1.0
F U 6 1 6 6 6.0 5 1 5 5 5.0 6 1 6 6 6.0
F W 0 1 0 0 0.0 2 1 2 2 2.0 4 1 4 4 4.0
F W 0 1 0 0 0.0 6 1 6 6 6.0 2 1 2 2 2.0
F W 2 1 2 2 2.0 5 1 5 5 5.0 1 1 1 1 1.0
F W 4 1 4 4 4.0 0 1 0 0 0.0 4 1 4 4 4.0
F W 4 1 4 4 4.0 1 1 1 1 1.0 1 1 1 1 1.0
F W 4 1 4 4 4.0 2 1 2 2 2.0 6 1 6 6 6.0
F W 4 1 4 4 4.0 4 1 4 4 4.0 3 1 3 3 3.0
F W 5 1 5 5 5.0 6 1 6 6 6.0 1 1 1 1 1.0
F W 6 1 6 6 6.0 2 1 2 2 2.0 6 1 6 6 6.0
F W 6 1 6 6 6.0 3 1 3 3 3.0 4 1 4 4 4.0
F W 6 1 6 6 6.0 4 1 4 4 4.0 0 1 0 0 0.0
F W 6 1 6 6 6.0 5 1 5 5 5.0 6 1 6 6 6.0
M D 1 1 1 1 1.0 5 1 5 5 5.0 6 1 6 6 6.0
M D 2 1 2 2 2.0 3 1 3 3 3.0 3 1 3 3 3.0
M D 2 1 2 2 2.0 6 1 6 6 6.0 1 1 1 1 1.0
M D 2 1 2 2 2.0 6 1 6 6 6.0 6 1 6 6 6.0
M D 3 1 3 3 3.0 5 1 5 5 5.0 4 1 4 4 4.0
M D 3 1 3 3 3.0 6 1 6 6 6.0 6 1 6 6 6.0
M D 4 1 4 4 4.0 5 1 5 5 5.0 5 1 5 5 5.0
M D 6 1 6 6 6.0 1 1 1 1 1.0 1 1 1 1 1.0
M D 6 1 6 6 6.0 2 1 2 2 2.0 5 1 5 5 5.0
M D 6 1 6 6 6.0 4 1 4 4 4.0 3 1 3 3 3.0
M D 6 1 6 6 6.0 4 1 4 4 4.0 4 1 4 4 4.0
M M 0 1 0 0 0.0 0 1 0 0 0.0 2 1 2 2 2.0
M M 0 1 0 0 0.0 1 1 1 1 1.0 6 1 6 6 6.0
M M 0 1 0 0 0.0 2 1 2 2 2.0 0 1 0 0 0.0
M M 0 1 0 0 0.0 6 1 6 6 6.0 1 1 1 1 1.0
M M 0 1 0 0 0.0 6 1 6 6 6.0 2 1 2 2 2.0
M M 1 1 1 1 1.0 1 1 1 1 1.0 6 1 6 6 6.0


@ -0,0 +1,22 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q45 --
10150 Bunker Hill 20.250000000
16098 Five Points 18.050000000
28048 Salem 9.810000000
29584 Oakdale 41.210000000
31289 Lincoln 1.260000000
34975 Kingston 56.670000000
36971 Wilson 202.630000000
38354 Sulphur Springs 25.680000000
44975 Kingston 12.620000000
62808 Hamilton 85.390000000
62812 Shady Grove 48.770000000
66557 Arlington 2.680000000
68252 Maple Grove 11.370000000
69583 Jackson 54.420000000
73003 Hillcrest 36.030000000
78222 Clinton 85.870000000
83683 Plainview 43.820000000
84536 Friendship 64.440000000
88370 Oak Grove 52.420000000


@ -112,4 +112,12 @@ suite("test_subquery_in_disjunction") {
qt_hash_join_with_other_conjuncts8 """
SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 < test_sq_dj2.c2) OR c1 < 11 ORDER BY c1;
"""
qt_same_subquery_in_conjuncts """
SELECT * FROM test_sq_dj1 WHERE c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 < 10 ORDER BY c1;
"""
qt_two_subquery_in_one_conjuncts """
SELECT * FROM test_sq_dj1 WHERE c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 IN (SELECT c2 FROM test_sq_dj2) OR c1 < 10 ORDER BY c1;
"""
}


@ -1,4 +1,3 @@
/*
SELECT
cd_gender
, cd_marital_status
@ -54,4 +53,3 @@ WHERE (c.c_current_addr_sk = ca.ca_address_sk)
GROUP BY cd_gender, cd_marital_status, cd_education_status, cd_purchase_estimate, cd_credit_rating, cd_dep_count, cd_dep_employed_count, cd_dep_college_count
ORDER BY cd_gender ASC, cd_marital_status ASC, cd_education_status ASC, cd_purchase_estimate ASC, cd_credit_rating ASC, cd_dep_count ASC, cd_dep_employed_count ASC, cd_dep_college_count ASC
LIMIT 100
*/


@ -1,4 +1,3 @@
/*
SELECT
ca_state
, cd_gender
@ -57,4 +56,3 @@ WHERE (c.c_current_addr_sk = ca.ca_address_sk)
GROUP BY ca_state, cd_gender, cd_marital_status, cd_dep_count, cd_dep_employed_count, cd_dep_college_count
ORDER BY ca_state ASC, cd_gender ASC, cd_marital_status ASC, cd_dep_count ASC, cd_dep_employed_count ASC, cd_dep_college_count ASC
LIMIT 100
*/


@ -1,4 +1,3 @@
/*
SELECT
ca_zip
, ca_city
@ -25,4 +24,3 @@ WHERE (ws_bill_customer_sk = c_customer_sk)
GROUP BY ca_zip, ca_city
ORDER BY ca_zip ASC, ca_city ASC
LIMIT 100
*/