[enhancement](sequence col) add session variable to skip sequence column check while INSERT INTO (#41655) (#41720)

cp #41655
zhannngchen
2024-10-12 15:30:20 +08:00
committed by GitHub
parent 90d6985f91
commit ae56739f88
5 changed files with 70 additions and 4 deletions
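
In short, this change introduces a `require_sequence_in_insert` session variable (default `true`). When it is set to `false`, an INSERT INTO on a unique-key table that has a sequence column is no longer required to supply a value for that column. A minimal SQL sketch of the intended usage, mirroring the regression test below; the table name `t` and its schema are hypothetical:

CREATE TABLE t (
    k1 INT,
    v1 INT
) UNIQUE KEY(k1)
DISTRIBUTED BY HASH(k1) BUCKETS 1
PROPERTIES (
    "function_column.sequence_type" = "int",
    "replication_num" = "1"
);

-- Default behavior (require_sequence_in_insert = true): rejected with
-- "Table t has sequence column, need to specify the sequence column".
INSERT INTO t (k1, v1) VALUES (1, 10);

-- The usual fix: provide the hidden sequence column explicitly.
INSERT INTO t (k1, v1, __DORIS_SEQUENCE_COL__) VALUES (1, 10, 3);

-- With the new variable turned off, the check is skipped and the first
-- statement is accepted as-is.
SET require_sequence_in_insert = false;
INSERT INTO t (k1, v1) VALUES (1, 10);
SET require_sequence_in_insert = true;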

View File

@@ -500,7 +500,8 @@ public class NativeInsertStmt extends InsertStmt {
         }
         if (!haveInputSeqCol && !isPartialUpdate && !isFromDeleteOrUpdateStmt
-                && !analyzer.getContext().getSessionVariable().isEnableUniqueKeyPartialUpdate()) {
+                && !analyzer.getContext().getSessionVariable().isEnableUniqueKeyPartialUpdate()
+                && analyzer.getContext().getSessionVariable().isRequireSequenceInInsert()) {
             if (!seqColInTable.isPresent() || seqColInTable.get().getDefaultValue() == null
                     || !seqColInTable.get().getDefaultValue()
                     .equalsIgnoreCase(DefaultValue.CURRENT_TIMESTAMP)) {

View File

@@ -74,6 +74,7 @@ import org.apache.doris.nereids.types.coercion.CharacterType;
 import org.apache.doris.nereids.util.ExpressionUtils;
 import org.apache.doris.nereids.util.RelationUtil;
 import org.apache.doris.nereids.util.TypeCoercionUtils;
+import org.apache.doris.qe.ConnectContext;

 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
@@ -198,9 +199,12 @@ public class BindSink implements AnalysisRuleFactory {
             // including the following cases:
             // 1. it's a load job with `partial_columns=true`
             // 2. UPDATE and DELETE, planner will automatically add these hidden columns
+            // 3. the session variable `require_sequence_in_insert` is false
             if (!haveInputSeqCol && !isPartialUpdate && (
                     boundSink.getDmlCommandType() != DMLCommandType.UPDATE
-                    && boundSink.getDmlCommandType() != DMLCommandType.DELETE)) {
+                    && boundSink.getDmlCommandType() != DMLCommandType.DELETE) && (
+                    boundSink.getDmlCommandType() != DMLCommandType.INSERT
+                    || ConnectContext.get().getSessionVariable().isRequireSequenceInInsert())) {
                 if (!seqColInTable.isPresent() || seqColInTable.get().getDefaultValue() == null
                         || !seqColInTable.get().getDefaultValue()
                         .equalsIgnoreCase(DefaultValue.CURRENT_TIMESTAMP)) {
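
For context, the comment above lists the cases in which the planner skips this check; the new third case is what this commit adds. A short sketch of cases 2 and 3, assuming the same hypothetical unique-key table `t` as above (case 1, a load job with `partial_columns=true`, is a load header setting rather than a SQL statement):

-- Case 2: UPDATE and DELETE never need the sequence column spelled out;
-- the planner adds the hidden columns itself.
UPDATE t SET v1 = 20 WHERE k1 = 1;
DELETE FROM t WHERE k1 = 1;

-- Case 3 (this commit): a plain INSERT is also exempt once the session
-- variable is disabled.
SET require_sequence_in_insert = false;
INSERT INTO t (k1, v1) VALUES (2, 30);
SET require_sequence_in_insert = true;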

View File

@@ -635,6 +635,7 @@ public class SessionVariable implements Serializable, Writable {
             "enable_adaptive_pipeline_task_serial_read_on_limit";
     public static final String ADAPTIVE_PIPELINE_TASK_SERIAL_READ_ON_LIMIT =
             "adaptive_pipeline_task_serial_read_on_limit";
+    public static final String REQUIRE_SEQUENCE_IN_INSERT = "require_sequence_in_insert";

     /**
      * If set false, user couldn't submit analyze SQL and FE won't allocate any related resources.
@@ -2074,6 +2075,13 @@
     })
     public int adaptivePipelineTaskSerialReadOnLimit = 10000;

+    @VariableMgr.VarAttr(name = REQUIRE_SEQUENCE_IN_INSERT, needForward = true, description = {
+            "该变量用于控制,使用了sequence列的unique key表,insert into操作是否要求必须提供每一行的sequence列的值",
+            "This variable controls whether the INSERT INTO operation on unique key tables with a sequence"
+                    + " column requires a sequence column to be provided for each row"
+    })
+    public boolean requireSequenceInInsert = true;
+
     public void setEnableEsParallelScroll(boolean enableESParallelScroll) {
         this.enableESParallelScroll = enableESParallelScroll;
     }
@@ -3443,6 +3451,14 @@
         this.loadStreamPerNode = loadStreamPerNode;
     }

+    public void setRequireSequenceInInsert(boolean value) {
+        this.requireSequenceInInsert = value;
+    }
+
+    public boolean isRequireSequenceInInsert() {
+        return this.requireSequenceInInsert;
+    }
+
     /**
      * Serialize to thrift object.
      * Used for rest api.
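
Because the variable is registered with `needForward = true`, its value is presumably carried along when a statement is forwarded to the master FE, so the relaxed check applies there as well. From the client side it behaves like any other session variable and can be changed at session or global scope; a brief sketch:

-- Inspect the current value (defaults to true).
SHOW VARIABLES LIKE 'require_sequence_in_insert';

-- Relax the check for the current session only ...
SET require_sequence_in_insert = false;

-- ... or for new sessions as well.
SET GLOBAL require_sequence_in_insert = false;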

View File

@@ -41,10 +41,16 @@
 -- !all --
 1 10 15 16 17 0 4 15
-15 8 19 20 21 0 7 3
+15 8 19 20 21 0 9 3
 2 5 14 13 14 0 5 12
 3 6 11 14 15 0 6 13

+-- !all_clone_table --
+1 10 15 16 17 0 2 \N
+15 8 19 20 21 0 2 \N
+2 5 14 13 14 0 2 \N
+3 6 11 14 15 0 2 \N
+
 -- !1 --
 1 1 1 1 1 0 2 1
 2 2 2 2 2 0 2 2
@@ -102,10 +108,16 @@
 -- !all --
 1 10 15 16 17 0 4 15
-15 8 19 20 21 0 7 3
+15 8 19 20 21 0 9 3
 2 5 14 13 14 0 5 12
 3 6 11 14 15 0 6 13

+-- !all_clone_table --
+1 10 15 16 17 0 2 \N
+15 8 19 20 21 0 2 \N
+2 5 14 13 14 0 2 \N
+3 6 11 14 15 0 2 \N
+
 -- !1 --
 1 1 1 1 1 0 2 1
 2 2 2 2 2 0 2 2

View File

@@ -123,6 +123,15 @@ suite("test_unique_table_sequence") {
         exception "Table ${tableName} has sequence column, need to specify the sequence column"
     }

+    // with `require_sequence_in_insert=false`, the previous insert operation should succeed
+    sql "SET require_sequence_in_insert=false"
+    sql "INSERT INTO ${tableName} values(15, 8, 19, 20, 21)"
+    sql "INSERT INTO ${tableName} (k1, v1, v2, v3, v4) values(15, 8, 19, 20, 21)"
+    sql "SET require_sequence_in_insert=true"
+
     // correct way of insert into with seq col
     sql "INSERT INTO ${tableName} (k1, v1, v2, v3, v4, __DORIS_SEQUENCE_COL__) values(15, 8, 19, 20, 21, 3)"
@@ -138,7 +147,31 @@ suite("test_unique_table_sequence") {
     order_qt_all "SELECT * from ${tableName}"
     sql "SET show_hidden_columns=false"

+    def tableNameClone = tableName + "_clone"
+    sql "DROP TABLE IF EXISTS ${tableNameClone}"
+    sql "create table ${tableNameClone} like ${tableName}"
+
+    // test insert into select *
+    test {
+        sql "INSERT INTO ${tableNameClone} select * from ${tableName}"
+        exception "Table ${tableNameClone} has sequence column, need to specify the sequence column"
+    }
+
+    // with `require_sequence_in_insert=false`, the previous insert operation should succeed
+    sql "SET require_sequence_in_insert=false"
+    sql "INSERT INTO ${tableNameClone} select * from ${tableName}"
+    sql "SET require_sequence_in_insert=true"
+
+    sql "SET show_hidden_columns=true"
+    order_qt_all_clone_table "SELECT * from ${tableNameClone}"
+
+    sql "DROP TABLE ${tableName}"
+    sql "DROP TABLE ${tableNameClone}"
+
     sql "DROP TABLE IF EXISTS ${tableName}"
     sql """