From d83c11a0323fdd05dfe7af4d4a95558c6cdf65b4 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Fri, 19 Aug 2022 21:29:24 +0800 Subject: [PATCH] [regression](datev2) add schema change cases for datev2/datetimev2 (#11924) --- .../apache/doris/analysis/DateLiteral.java | 4 + .../data/index_p0/test_bitmap_index.out | 32 ++- .../test_schema_change_datev2_with_delete.out | 81 ++++++ .../test_agg_keys_schema_change_datev2.out | 82 ++++++ .../test_dup_keys_schema_change_datev2.out | 70 +++++ .../suites/index_p0/test_bitmap_index.groovy | 42 ++- ...st_schema_change_datev2_with_delete.groovy | 117 ++++++++ .../test_agg_keys_schema_change_datev2.groovy | 264 ++++++++++++++++++ .../test_dup_keys_schema_change_datev2.groovy | 203 ++++++++++++++ 9 files changed, 882 insertions(+), 13 deletions(-) create mode 100644 regression-test/data/schema_change/test_schema_change_datev2_with_delete.out create mode 100644 regression-test/data/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.out create mode 100644 regression-test/data/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.out create mode 100644 regression-test/suites/schema_change/test_schema_change_datev2_with_delete.groovy create mode 100644 regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy create mode 100644 regression-test/suites/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java index 96ab5edd06..91a17cd20c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java @@ -431,6 +431,10 @@ public class DateLiteral extends LiteralExpr { minute = getOrDefault(dateTime, ChronoField.MINUTE_OF_HOUR, 0); second = getOrDefault(dateTime, ChronoField.SECOND_OF_MINUTE, 0); microsecond = getOrDefault(dateTime, ChronoField.MICRO_OF_SECOND, 0); + if (microsecond != 0 && type.isDatetime()) { + LOG.warn("Microseconds is not supported by Datetime type and hence is ignored." 
+ + "Please change to Datetimev2 to use it."); + } this.type = type; if (checkRange() || checkDate()) { throw new AnalysisException("Datetime value is out of range"); diff --git a/regression-test/data/index_p0/test_bitmap_index.out b/regression-test/data/index_p0/test_bitmap_index.out index 2ab219dcd8..3b37588e29 100644 --- a/regression-test/data/index_p0/test_bitmap_index.out +++ b/regression-test/data/index_p0/test_bitmap_index.out @@ -11,6 +11,10 @@ k8 DATETIME Yes false \N NONE k9 LARGEINT Yes false \N NONE k10 DECIMAL(9,0) Yes false \N NONE k11 BOOLEAN Yes false \N NONE +k12 DATEV2 Yes false \N NONE +k13 DATETIMEV2(0) Yes false \N NONE +k14 DATETIMEV2(3) Yes false \N NONE +k15 DATETIMEV2(6) Yes false \N NONE -- !sql -- default_cluster:regression_test_index_p0.test_bitmap_index_dup index1 k1 BITMAP @@ -24,9 +28,13 @@ default_cluster:regression_test_index_p0.test_bitmap_index_dup index8 k8 default_cluster:regression_test_index_p0.test_bitmap_index_dup index9 k9 BITMAP default_cluster:regression_test_index_p0.test_bitmap_index_dup index10 k10 BITMAP default_cluster:regression_test_index_p0.test_bitmap_index_dup index11 k11 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_dup index12 k12 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_dup index13 k13 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_dup index14 k14 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_dup index15 k15 BITMAP -- !sql -- -1 1 1 1 1 1 2022-05-31 2022-05-31T10:00 1 1 true +1 1 1 1 1 1 2022-05-31 2022-05-31T10:00 1 1 true 2022-05-31 2022-05-31T10:00 2022-05-31T10:00:00.111 2022-05-31T10:00:00.111111 -- !sql -- k1 TINYINT Yes true \N @@ -40,6 +48,10 @@ k8 DATETIME Yes true \N k9 LARGEINT Yes true \N k10 DECIMAL(9,0) Yes true \N k11 BOOLEAN Yes true \N +k12 DATEV2 Yes true \N +k13 DATETIMEV2(0) Yes true \N +k14 DATETIMEV2(3) Yes true \N +k15 DATETIMEV2(6) Yes true \N v1 INT Yes false \N SUM -- !sql -- @@ -54,9 +66,13 @@ default_cluster:regression_test_index_p0.test_bitmap_index_agg index8 k8 default_cluster:regression_test_index_p0.test_bitmap_index_agg index9 k9 BITMAP default_cluster:regression_test_index_p0.test_bitmap_index_agg index10 k10 BITMAP default_cluster:regression_test_index_p0.test_bitmap_index_agg index11 k11 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_agg index12 k12 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_agg index13 k13 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_agg index14 k14 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_agg index15 k15 BITMAP -- !sql -- -1 1 1 1 1 1 2022-05-31 2022-05-31T10:00 1 1 true 1 +1 1 1 1 1 1 2022-05-31 2022-05-31T10:00 1 1 true 2022-05-31 2022-05-31T10:00 2022-05-31T10:00:00.111 2022-05-31T10:00:00.111111 1 -- !sql -- k1 TINYINT Yes true \N @@ -70,6 +86,10 @@ k8 DATETIME Yes true \N k9 LARGEINT Yes true \N k10 DECIMAL(9,0) Yes true \N k11 BOOLEAN Yes true \N +k12 DATEV2 Yes false \N REPLACE +k13 DATETIMEV2(0) Yes false \N REPLACE +k14 DATETIMEV2(3) Yes false \N REPLACE +k15 DATETIMEV2(6) Yes false \N REPLACE v1 INT Yes false \N REPLACE -- !sql -- @@ -84,8 +104,12 @@ default_cluster:regression_test_index_p0.test_bitmap_index_unique index8 k8 default_cluster:regression_test_index_p0.test_bitmap_index_unique index9 k9 BITMAP default_cluster:regression_test_index_p0.test_bitmap_index_unique index10 k10 BITMAP default_cluster:regression_test_index_p0.test_bitmap_index_unique index11 k11 BITMAP 
-default_cluster:regression_test_index_p0.test_bitmap_index_unique index12 v1 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_unique index12 k12 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_unique index13 k13 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_unique index14 k14 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_unique index15 k15 BITMAP +default_cluster:regression_test_index_p0.test_bitmap_index_unique index16 v1 BITMAP -- !sql -- -1 1 1 1 1 1 2022-05-31 2022-05-31T10:00 1 1 true 1 +1 1 1 1 1 1 2022-05-31 2022-05-31T10:00 1 1 true 2022-05-31 2022-05-31T10:00 2022-05-31T10:00:00.111 2022-05-31T10:00:00.111111 1 diff --git a/regression-test/data/schema_change/test_schema_change_datev2_with_delete.out b/regression-test/data/schema_change/test_schema_change_datev2_with_delete.out new file mode 100644 index 0000000000..2ef72df7c7 --- /dev/null +++ b/regression-test/data/schema_change/test_schema_change_datev2_with_delete.out @@ -0,0 +1,81 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 
2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + diff --git a/regression-test/data/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.out b/regression-test/data/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.out new file mode 100644 index 0000000000..c2400ee415 --- /dev/null +++ b/regression-test/data/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.out @@ -0,0 +1,82 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-01 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-01 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-01 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-01 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-01 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-01 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-01 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-01 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-01T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-01T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-01T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-01T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-01T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-01T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-01T11:11:11.111 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-01T11:11:11.111 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-01T11:11:11.111 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01T11:11:11.111 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-01T11:11:11.111 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-01T11:11:11.111 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-01T11:11:11.111 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-01T11:11:11.111 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01T11:11:11.111 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 
2022-01-01T11:11:11.111 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-01T11:11:11.111 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-01T11:11:11.111 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-01T11:11:11.111 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01T11:11:11.111 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-01T11:11:11.111 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-02T11:11:11.222 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02T11:11:11.222 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-02T11:11:11.222 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-02T11:11:11.222 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-02T11:11:11.222 2022-01-05 2022-01-05T11:11:11 + diff --git a/regression-test/data/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.out b/regression-test/data/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.out new file mode 100644 index 0000000000..de1946b723 --- /dev/null +++ b/regression-test/data/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.out @@ -0,0 +1,70 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 
+2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- +2022-01-01 2022-01-06T11:11:11 2022-01-06 2022-01-06T11:11:11 +2022-01-02 2022-01-02T11:11:11 2022-01-02 2022-01-02T11:11:11 +2022-01-03 2022-01-03T11:11:11 2022-01-03 2022-01-01T11:11:11 +2022-01-04 2022-01-04T11:11:11 2022-01-01 2022-01-04T11:11:11 +2022-01-05 2022-01-01T11:11:11 2022-01-05 2022-01-05T11:11:11 + +-- !sql -- + diff --git a/regression-test/suites/index_p0/test_bitmap_index.groovy b/regression-test/suites/index_p0/test_bitmap_index.groovy index 6ee8486e82..2267ef38dc 100644 --- a/regression-test/suites/index_p0/test_bitmap_index.groovy +++ b/regression-test/suites/index_p0/test_bitmap_index.groovy @@ -34,7 +34,11 @@ suite("test_bitmap_index") { k8 DATETIME, k9 LARGEINT, k10 DECIMAL, - k11 BOOLEAN + k11 BOOLEAN, + k12 DATEV2, + k13 DATETIMEV2, + k14 DATETIMEV2(3), + k15 DATETIMEV2(6) ) DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1"); """ @@ -51,7 +55,11 @@ suite("test_bitmap_index") { ADD INDEX index8 (k8) USING BITMAP, ADD INDEX index9 (k9) USING BITMAP, ADD INDEX index10 (k10) USING BITMAP, - ADD INDEX index11 (k11) USING BITMAP; + ADD INDEX index11 (k11) USING BITMAP, + ADD INDEX index12 (k12) USING BITMAP, + ADD INDEX index13 (k13) USING BITMAP, + ADD INDEX index14 (k14) USING BITMAP, + ADD INDEX index15 (k15) USING BITMAP; """ int max_try_secs = 60 while (max_try_secs--) { @@ -67,7 +75,7 @@ suite("test_bitmap_index") { } } - sql "insert into ${tbName1} values(1,1,1,1,'1','1','2022-05-31','2022-05-31 10:00:00',1,1.0,1);" + sql "insert into ${tbName1} values(1,1,1,1,'1','1','2022-05-31','2022-05-31 10:00:00',1,1.0,1,'2022-05-31','2022-05-31 10:00:00.111111','2022-05-31 10:00:00.111111','2022-05-31 10:00:00.111111');" qt_sql "desc ${tbName1};" qt_sql "SHOW INDEX FROM ${tbName1};" qt_sql "select * from ${tbName1};" @@ -104,9 +112,13 @@ suite("test_bitmap_index") { k9 LARGEINT, k10 DECIMAL, k11 BOOLEAN, + k12 DATEV2, + k13 DATETIMEV2, + k14 DATETIMEV2(3), + k15 DATETIMEV2(6), v1 INT SUM ) - AGGREGATE KEY(k1,k2,k3,k4,k5,k6,k7,k8,k9,k10,k11) + AGGREGATE KEY(k1,k2,k3,k4,k5,k6,k7,k8,k9,k10,k11,k12,k13,k14,k15) DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1"); """ @@ -122,7 +134,11 @@ suite("test_bitmap_index") { ADD INDEX index8 (k8) USING BITMAP, ADD INDEX index9 (k9) USING BITMAP, ADD INDEX index10 (k10) USING BITMAP, - ADD INDEX index11 (k11) USING BITMAP; + ADD INDEX index11 (k11) USING BITMAP, + ADD INDEX index12 (k12) USING BITMAP, + ADD INDEX index13 (k13) USING BITMAP, + ADD INDEX index14 (k14) USING BITMAP, + ADD INDEX index15 (k15) USING BITMAP; """ max_try_secs = 60 while (max_try_secs--) { @@ -138,11 +154,11 @@ suite("test_bitmap_index") { } } test{ - sql "ALTER TABLE ${tbName2} ADD INDEX index12 (v1) USING BITMAP;" + sql "ALTER TABLE ${tbName2} ADD INDEX index16 (v1) USING BITMAP;" exception "errCode = 2, detailMessage = BITMAP index only used in columns of DUP_KEYS/UNIQUE_KEYS table" } - sql "insert into ${tbName2} values(1,1,1,1,'1','1','2022-05-31','2022-05-31 10:00:00',1,1.0,1,1);" + sql "insert 
into ${tbName2} values(1,1,1,1,'1','1','2022-05-31','2022-05-31 10:00:00',1,1.0,1,'2022-05-31','2022-05-31 10:00:00.111111','2022-05-31 10:00:00.111111','2022-05-31 10:00:00.111111',1);" qt_sql "desc ${tbName2};" qt_sql "SHOW INDEX FROM ${tbName2};" qt_sql "select * from ${tbName2};" @@ -178,6 +194,10 @@ suite("test_bitmap_index") { k9 LARGEINT, k10 DECIMAL, k11 BOOLEAN, + k12 DATEV2, + k13 DATETIMEV2, + k14 DATETIMEV2(3), + k15 DATETIMEV2(6), v1 INT ) UNIQUE KEY(k1,k2,k3,k4,k5,k6,k7,k8,k9,k10,k11) @@ -197,7 +217,11 @@ suite("test_bitmap_index") { ADD INDEX index9 (k9) USING BITMAP, ADD INDEX index10 (k10) USING BITMAP, ADD INDEX index11 (k11) USING BITMAP, - ADD INDEX index12 (v1) USING BITMAP; + ADD INDEX index12 (k12) USING BITMAP, + ADD INDEX index13 (k13) USING BITMAP, + ADD INDEX index14 (k14) USING BITMAP, + ADD INDEX index15 (k15) USING BITMAP, + ADD INDEX index16 (v1) USING BITMAP; """ max_try_secs = 60 while (max_try_secs--) { @@ -213,7 +237,7 @@ suite("test_bitmap_index") { } } - sql "insert into ${tbName3} values(1,1,1,1,'1','1','2022-05-31','2022-05-31 10:00:00',1,1.0,1,1);" + sql "insert into ${tbName3} values(1,1,1,1,'1','1','2022-05-31','2022-05-31 10:00:00',1,1.0,1,'2022-05-31','2022-05-31 10:00:00.111111','2022-05-31 10:00:00.111111','2022-05-31 10:00:00.111111',1);" qt_sql "desc ${tbName3};" qt_sql "SHOW INDEX FROM ${tbName3};" qt_sql "select * from ${tbName3};" diff --git a/regression-test/suites/schema_change/test_schema_change_datev2_with_delete.groovy b/regression-test/suites/schema_change/test_schema_change_datev2_with_delete.groovy new file mode 100644 index 0000000000..27dee3ff0a --- /dev/null +++ b/regression-test/suites/schema_change/test_schema_change_datev2_with_delete.groovy @@ -0,0 +1,117 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_schema_change_datev2_with_delete") { + def tbName = "test_schema_change_datev2_with_delete" + def getJobState = { tableName -> + def jobStateResult = sql """ SHOW ALTER TABLE COLUMN WHERE IndexName='${tableName}' ORDER BY createtime DESC LIMIT 1 """ + return jobStateResult[0][9] + } + + sql """ DROP TABLE IF EXISTS ${tbName} FORCE""" + // Create table and disable light weight schema change + sql """ + CREATE TABLE IF NOT EXISTS ${tbName} + ( + `datek1` date DEFAULT '2022-01-01', + `datek2` datetime DEFAULT '2022-01-01 11:11:11', + `datev1` date DEFAULT '2022-01-01', + `datev2` datetime DEFAULT '2022-01-01 11:11:11' + ) + DUPLICATE KEY(`datek1`,`datek2`) + DISTRIBUTED BY HASH(`datek1`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "light_schema_change" = "false"); + """ + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ alter table ${tbName} modify column `datev1` datev2 DEFAULT '2022-01-01' """ + int max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ alter table ${tbName} modify column `datev2` datetimev2 DEFAULT '2022-01-01 11:11:11' """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ alter table ${tbName} modify column `datev2` datetimev2(3) DEFAULT '2022-01-01 11:11:11' """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ delete from ${tbName} where `datev1`='2022-01-02';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ delete from ${tbName} where `datev2`='2022-01-04 11:11:11.111';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ delete from ${tbName} where `datev2`='2022-01-04 11:11:11';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert 
into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ delete from ${tbName} where `datev1`='2022-01-01';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ delete from ${tbName} where `datev2`='2022-01-01 11:11:11.111';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ delete from ${tbName} where `datev2`='2022-01-01 11:11:11';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ DROP TABLE ${tbName} force""" +} diff --git a/regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy b/regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy new file mode 100644 index 0000000000..6784c533a0 --- /dev/null +++ b/regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy @@ -0,0 +1,264 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.codehaus.groovy.runtime.IOGroovyMethods + +suite("test_agg_keys_schema_change_datev2") { + def tbName = "test_agg_keys_schema_change_datev2" + def getJobState = { tableName -> + def jobStateResult = sql """ SHOW ALTER TABLE COLUMN WHERE IndexName='${tableName}' ORDER BY createtime DESC LIMIT 1 """ + return jobStateResult[0][9] + } + + String[][] backends = sql """ show backends; """ + assertTrue(backends.size() > 0) + String backend_id; + def backendId_to_backendIP = [:] + def backendId_to_backendHttpPort = [:] + for (String[] backend in backends) { + backendId_to_backendIP.put(backend[0], backend[2]) + backendId_to_backendHttpPort.put(backend[0], backend[5]) + } + + backend_id = backendId_to_backendIP.keySet()[0] + StringBuilder showConfigCommand = new StringBuilder(); + showConfigCommand.append("curl -X GET http://") + showConfigCommand.append(backendId_to_backendIP.get(backend_id)) + showConfigCommand.append(":") + showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id)) + showConfigCommand.append("/api/show_config") + logger.info(showConfigCommand.toString()) + def process = showConfigCommand.toString().execute() + int code = process.waitFor() + String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + String out = process.getText() + logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def configList = parseJson(out.trim()) + assert configList instanceof List + + def do_compact = { tableName -> + String[][] tablets = sql """ show tablets from ${tableName}; """ + for (String[] tablet in tablets) { + String tablet_id = tablet[0] + backend_id = tablet[2] + logger.info("run compaction:" + tablet_id) + StringBuilder sb = new StringBuilder(); + sb.append("curl -X POST http://") + sb.append(backendId_to_backendIP.get(backend_id)) + sb.append(":") + sb.append(backendId_to_backendHttpPort.get(backend_id)) + sb.append("/api/compaction/run?tablet_id=") + sb.append(tablet_id) + sb.append("&compact_type=cumulative") + + String command = sb.toString() + process = command.execute() + code = process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + out = process.getText() + logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err) + } + + // wait for all compactions done + for (String[] tablet in tablets) { + boolean running = true + do { + Thread.sleep(100) + String tablet_id = tablet[0] + backend_id = tablet[2] + StringBuilder sb = new StringBuilder(); + sb.append("curl -X GET http://") + sb.append(backendId_to_backendIP.get(backend_id)) + sb.append(":") + sb.append(backendId_to_backendHttpPort.get(backend_id)) + sb.append("/api/compaction/run_status?tablet_id=") + sb.append(tablet_id) + + String command = sb.toString() + process = command.execute() + code = process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + out = process.getText() + logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def compactionStatus = parseJson(out.trim()) + assertEquals("success", compactionStatus.status.toLowerCase()) + running = compactionStatus.run_status + } while (running) + } + } + + sql """ DROP TABLE IF EXISTS ${tbName} FORCE""" + // Create table and disable light weight schema change + sql """ + CREATE TABLE IF NOT EXISTS ${tbName} + ( + `datek1` date DEFAULT '2022-01-01', + 
`datek2` datetime DEFAULT '2022-01-01 11:11:11', + `datev1` date MAX DEFAULT '2022-01-01', + `datev2` datetime MAX DEFAULT '2022-01-01 11:11:11' + ) + AGGREGATE KEY(`datek1`,`datek2`) + DISTRIBUTED BY HASH(`datek1`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "light_schema_change" = "false"); + """ + // datev2 + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ alter table ${tbName} add column `datev3` datev2 DEFAULT '2022-01-01' """ + int max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + do_compact(tbName) + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """delete from ${tbName} where `datev3` = '2022-01-01';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ alter table ${tbName} drop column `datev3` """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + + // datetimev2(0) + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ 
insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ alter table ${tbName} add column `datev3` datetimev2 DEFAULT '2022-01-01 11:11:11' """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + do_compact(tbName) + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """delete from ${tbName} where `datev3` = '2022-01-01 11:11:11';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ alter table ${tbName} drop column `datev3` """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + + // datetimev2(3) + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ alter table ${tbName} add column `datev3` datetimev2(3) DEFAULT '2022-01-01 11:11:11.111' """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + do_compact(tbName) + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """delete from ${tbName} where `datev3` = '2022-01-01 11:11:11';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY 
`datek1`;""" + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02 11:11:11.222', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev3`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-02 11:11:11.222', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev3`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-02 11:11:11.222', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev3`, `datev1`, `datev2`) values('2022-01-05', '2022-01-02 11:11:11.222', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev3`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-02 11:11:11.222', '2022-01-06', '2022-01-06 11:11:11');""" + sql """delete from ${tbName} where `datev3` = '2022-01-01 11:11:11.111';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ alter table ${tbName} drop column `datev3` """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + + sql """ DROP TABLE ${tbName} force""" +} diff --git a/regression-test/suites/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.groovy b/regression-test/suites/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.groovy new file mode 100644 index 0000000000..7fd0f30231 --- /dev/null +++ b/regression-test/suites/schema_change_p0/datev2/test_dup_keys_schema_change_datev2.groovy @@ -0,0 +1,203 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.codehaus.groovy.runtime.IOGroovyMethods + +suite("test_dup_keys_schema_change_datev2") { + def tbName = "test_dup_keys_schema_change_datev2" + def getJobState = { tableName -> + def jobStateResult = sql """ SHOW ALTER TABLE COLUMN WHERE IndexName='${tableName}' ORDER BY createtime DESC LIMIT 1 """ + return jobStateResult[0][9] + } + + String[][] backends = sql """ show backends; """ + assertTrue(backends.size() > 0) + String backend_id; + def backendId_to_backendIP = [:] + def backendId_to_backendHttpPort = [:] + for (String[] backend in backends) { + backendId_to_backendIP.put(backend[0], backend[2]) + backendId_to_backendHttpPort.put(backend[0], backend[5]) + } + + backend_id = backendId_to_backendIP.keySet()[0] + StringBuilder showConfigCommand = new StringBuilder(); + showConfigCommand.append("curl -X GET http://") + showConfigCommand.append(backendId_to_backendIP.get(backend_id)) + showConfigCommand.append(":") + showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id)) + showConfigCommand.append("/api/show_config") + logger.info(showConfigCommand.toString()) + def process = showConfigCommand.toString().execute() + int code = process.waitFor() + String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + String out = process.getText() + logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def configList = parseJson(out.trim()) + assert configList instanceof List + + def do_compact = { tableName -> + String[][] tablets = sql """ show tablets from ${tableName}; """ + for (String[] tablet in tablets) { + String tablet_id = tablet[0] + backend_id = tablet[2] + logger.info("run compaction:" + tablet_id) + StringBuilder sb = new StringBuilder(); + sb.append("curl -X POST http://") + sb.append(backendId_to_backendIP.get(backend_id)) + sb.append(":") + sb.append(backendId_to_backendHttpPort.get(backend_id)) + sb.append("/api/compaction/run?tablet_id=") + sb.append(tablet_id) + sb.append("&compact_type=cumulative") + + String command = sb.toString() + process = command.execute() + code = process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + out = process.getText() + logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err) + } + + // wait for all compactions done + for (String[] tablet in tablets) { + boolean running = true + do { + Thread.sleep(100) + String tablet_id = tablet[0] + backend_id = tablet[2] + StringBuilder sb = new StringBuilder(); + sb.append("curl -X GET http://") + sb.append(backendId_to_backendIP.get(backend_id)) + sb.append(":") + sb.append(backendId_to_backendHttpPort.get(backend_id)) + sb.append("/api/compaction/run_status?tablet_id=") + sb.append(tablet_id) + + String command = sb.toString() + process = command.execute() + code = process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + out = process.getText() + logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def compactionStatus = parseJson(out.trim()) + assertEquals("success", compactionStatus.status.toLowerCase()) + running = compactionStatus.run_status + } while (running) + } + } + + sql """ DROP TABLE IF EXISTS ${tbName} FORCE""" + // Create table and disable light weight schema change + sql """ + CREATE TABLE IF NOT EXISTS ${tbName} + ( + `datek1` date DEFAULT '2022-01-01', + 
`datek2` datetime DEFAULT '2022-01-01 11:11:11', + `datev1` date DEFAULT '2022-01-01', + `datev2` datetime DEFAULT '2022-01-01 11:11:11' + ) + DUPLICATE KEY(`datek1`,`datek2`) + DISTRIBUTED BY HASH(`datek1`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "light_schema_change" = "false"); + """ + // datev2 + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ alter table ${tbName} modify column `datev1` datev2 DEFAULT '2022-01-01' """ + int max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + do_compact(tbName) + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """delete from ${tbName} where `datev1` <= '2022-01-06';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + // datetimev2(0) + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ alter table ${tbName} modify column `datev2` datetimev2 DEFAULT '2022-01-01 11:11:11' """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + do_compact(tbName) + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """delete from ${tbName} where `datev2` <= '2022-01-06 11:11:11';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + // datetimev2(3) + sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02', '2022-01-02 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-03');""" + sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) 
values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');""" + sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');""" + sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');""" + + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """ alter table ${tbName} modify column `datev2` datetimev2(3) DEFAULT '2022-01-01 11:11:11' """ + max_try_time = 1000 + while(max_try_time--){ + String result = getJobState(tbName) + if (result == "FINISHED") { + break + } else { + sleep(100) + if (max_try_time < 1){ + assertEquals(1,2) + } + } + } + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + do_compact(tbName) + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + sql """delete from ${tbName} where `datev2` < '2022-01-06 11:11:11.111';""" + qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;""" + + sql """ DROP TABLE ${tbName} force""" +}
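
Each of the new suites waits for asynchronous ALTER TABLE jobs with the same polling idiom: run SHOW ALTER TABLE COLUMN, read the job state from column 9, and retry until it reports FINISHED. Below is a minimal sketch of that idiom factored into a reusable closure. It assumes the `sql`, `sleep`, and `assertTrue` helpers that the regression framework places in scope inside a suite closure (the suites above already rely on `sql` and `assertEquals`); the closure name `waitForSchemaChange` is illustrative only and not part of the framework.

    // Minimal sketch of the schema-change polling idiom used throughout these suites.
    // `sql`, `sleep` and `assertTrue` come from the regression framework's suite scope;
    // the name `waitForSchemaChange` is illustrative, not a framework helper.
    def waitForSchemaChange = { String tableName, int maxTries = 1000, long intervalMs = 100 ->
        while (maxTries-- > 0) {
            // Column 9 of SHOW ALTER TABLE COLUMN holds the job state (e.g. PENDING, RUNNING, FINISHED).
            def jobState = sql """ SHOW ALTER TABLE COLUMN WHERE IndexName='${tableName}' ORDER BY createtime DESC LIMIT 1 """
            if (jobState[0][9] == "FINISHED") {
                return
            }
            sleep(intervalMs)
        }
        assertTrue(false)   // same effect as the suites' assertEquals(1,2): fail if the job never finishes
    }

    // Usage after submitting an asynchronous column change:
    // sql """ alter table ${tbName} modify column `datev1` datev2 DEFAULT '2022-01-01' """
    // waitForSchemaChange(tbName)

Factoring the loop out like this would reduce each ALTER/ADD/DROP COLUMN step in the suites to two statements; the inline loops in the patch behave identically, they simply repeat the same wait logic after every schema change.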