[enhancement](test) add some p0 cases (#12240)

Yongqiang YANG
2022-09-07 09:10:42 +08:00
committed by GitHub
parent 5f255af065
commit 772e5907f2
23 changed files with 1680 additions and 14 deletions
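A note on how the added suites and result files below pair up: each qt_<tag> action in a .groovy suite runs its query and diffs the returned rows against the "-- !<tag> --" block of the suite's auto-generated .out file. A minimal sketch of that convention (the suite name and tag below are illustrative, not part of this commit):

suite("example_p0_case") {
    // qt_<tag> runs the query and compares its rows with the
    // "-- !<tag> --" block in this suite's auto-generated .out file
    qt_my_tag "select 1;"
}
// matching .out block:
// -- !my_tag --
// 1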


@@ -0,0 +1,19 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !test_dayofweek_varchar --
7
-- !test_dayofweek_char --
7
-- !test_timediff_varchar --
-24:00:00
-- !test_timediff_char --
-24:00:00
-- !test_money_format_varchar --
123,456.00
-- !test_money_format_char --
123,456.00


@@ -1,11 +1,21 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select_default --
/wyyt-image/2021/11/13/595345040188712460.jpg
/wyyt-image/2022/04/13/1434607674511761493.jpg
/wyyt-image/2022/04/13/1434607674511761493.jpg /wyyt-image/2022/04/13/1434607674511761493.jpg
a aa /wyyt-image/2021/11/13/595345040188712460.jpg unknown
b aa /wyyt-image/2022/04/13/1434607674511761493.jpg unknown
c cc /wyyt-image/2022/04/13/1434607674511761493.jpg /wyyt-image/2022/04/13/1434607674511761493.jpg
-- !select_default2 --
/wyyt-image/2021/11/13/595345040188712460.jpg
/wyyt-image/2022/04/13/1434607674511761493.jpg /wyyt-image/2022/04/13/1434607674511761493.jpg
/wyyt-image/2022/04/13/1434607674511761493.jpg
a aa /wyyt-image/2021/11/13/595345040188712460.jpg
b aa /wyyt-image/2022/04/13/1434607674511761493.jpg /wyyt-image/2022/04/13/1434607674511761493.jpg
c cc /wyyt-image/2022/04/13/1434607674511761493.jpg
-- !select_default --
a aa /wyyt-image/2021/11/13/595345040188712460.jpg unknown
b aa /wyyt-image/2022/04/13/1434607674511761493.jpg unknown
c cc /wyyt-image/2022/04/13/1434607674511761493.jpg /wyyt-image/2022/04/13/1434607674511761493.jpg
-- !select_default2 --
a aa /wyyt-image/2021/11/13/595345040188712460.jpg
b aa /wyyt-image/2022/04/13/1434607674511761493.jpg /wyyt-image/2022/04/13/1434607674511761493.jpg
c cc /wyyt-image/2022/04/13/1434607674511761493.jpg


@@ -0,0 +1,4 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select --
abc xyz 1577946288488507 1492704224 421001 421001 2020-01-19T11:15:21 9999-12-30 00:00:00 9999-12-30T00:00 - - -


@@ -0,0 +1,6 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select1 --
100 test 2021-01-02
-- !select_geo1 --
POINT (123.123456789 89.123456789)

File diff suppressed because it is too large


@@ -0,0 +1,21 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select_default --
1
-- !select_default2 --
1
-- !select_default3 --
-- !select_default4 --
-- !select_default --
1
-- !select_default2 --
1
-- !select_default3 --
-- !select_default4 --


@@ -0,0 +1,15 @@
6 32767 3021 123456 604587.000 true 2014-11-11 2015-03-13 12:36:38 yanhuiacng01 0.1 80699
10 1991 5014 9223372036854775807 -258.369 false 2015-04-02 2013-04-02 15:16:52 wangyu14 -123456.54 0.235
12 32767 -2147483647 9223372036854775807 243.325 false 1991-08-11 2013-04-02 15:16:52 liuyuantuo -564.898 3.1415927
1 1989 1001 11011902 123.123 true 1989-03-21 1989-03-21 13:00:00 wangjing04 0.1 6.333
2 1986 1001 11011903 1243.500 false 1901-12-31 1989-03-21 13:00:00 wangyu14 20.268 789.25
4 1991 3021 -11011907 243243.325 false 3124-10-10 2015-03-13 10:30:00 yanhuicang01 2.06 -0.001
5 1985 5014 -11011903 243.325 true 2015-01-01 2015-03-13 12:36:38 duyunkai@123 -0 -365
15 1992 3021 11011920 0.000 true 9999-12-12 2015-04-02 00:00:00 3.141592653 20.456
3 1989 1002 11011905 24453.325 false 2012-03-14 2000-01-01 00:00:00 yuanyuan06 78945 3654
7 -32767 1002 7210457 3.141 false 1988-03-21 1901-01-01 00:00:00 jingyong 0 6058
8 255 2147483647 11011920 -0.123 true 1989-03-21 9999-11-11 12:12:00 wangjing05 987456.123 12.14
9 1991 -2147483647 11011902 -654.654 true 1991-08-11 1989-03-21 13:11:00 wangjing04 0 69.123
11 1989 25699 -9223372036854775807 0.666 true 2015-04-02 1989-03-21 13:11:00 yuanyuan06 -987.001 4.336
13 -32767 2147483647 -9223372036854775807 100.001 false 2015-04-02 2015-04-02 00:00:00 weike01 123.456 3.1415927
14 255 103 11011902 0.000 false 2015-04-02 2015-04-02 00:00:00 3.141592654 2.036
1 6 32767 3021 123456 604587.000 true 2014-11-11 2015-03-13 12:36:38 yanhuiacng01 0.1 80699
2 10 1991 5014 9223372036854775807 -258.369 false 2015-04-02 2013-04-02 15:16:52 wangyu14 -123456.54 0.235
3 12 32767 -2147483647 9223372036854775807 243.325 false 1991-08-11 2013-04-02 15:16:52 liuyuantuo -564.898 3.1415927
4 1 1989 1001 11011902 123.123 true 1989-03-21 1989-03-21 13:00:00 wangjing04 0.1 6.333
5 2 1986 1001 11011903 1243.500 false 1901-12-31 1989-03-21 13:00:00 wangyu14 20.268 789.25
6 4 1991 3021 -11011907 243243.325 false 3124-10-10 2015-03-13 10:30:00 yanhuicang01 2.06 -0.001
7 5 1985 5014 -11011903 243.325 true 2015-01-01 2015-03-13 12:36:38 duyunkai@123 -0 -365
8 15 1992 3021 11011920 0.000 true 9999-12-12 2015-04-02 00:00:00 3.141592653 20.456
9 3 1989 1002 11011905 24453.325 false 2012-03-14 2000-01-01 00:00:00 yuanyuan06 78945 3654
10 7 -32767 1002 7210457 3.141 false 1988-03-21 1901-01-01 00:00:00 jingyong 0 6058
11 8 255 2147483647 11011920 -0.123 true 1989-03-21 9999-11-11 12:12:00 wangjing05 987456.123 12.14
12 9 1991 -2147483647 11011902 -654.654 true 1991-08-11 1989-03-21 13:11:00 wangjing04 0 69.123
13 11 1989 25699 -9223372036854775807 0.666 true 2015-04-02 1989-03-21 13:11:00 yuanyuan06 -987.001 4.336
14 13 -32767 2147483647 -9223372036854775807 100.001 false 2015-04-02 2015-04-02 00:00:00 weike01 123.456 3.1415927
15 14 255 103 11011902 0.000 false 2015-04-02 2015-04-02 00:00:00 3.141592654 2.036


@@ -0,0 +1,4 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !castAccessible --
10


@@ -0,0 +1,4 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !dateTimeOperatorsAccessible --
2012-08-10T00:00 2012-08-09T06:00 2012-11-30T01:00 2012-08-06T00:00 2012-08-06T20:00 2012-09-30T01:00


@@ -0,0 +1,4 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !extractAccessible --
22


@@ -408,6 +408,7 @@ class Suite implements GroovyInterceptable {
throw new IllegalStateException("Check tag '${tag}' failed, sql:\n${sql}", t)
}
if (errorMsg != null) {
logger.warn("expect results: " + expectCsvResults + "\nrealResults: " + realResults)
throw new IllegalStateException("Check tag '${tag}' failed:\n${errorMsg}\n\nsql:\n${sql}")
}
}


@@ -0,0 +1,25 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_char_implicit_cast") {
qt_test_dayofweek_varchar 'select dayofweek("2012-12-01");'
qt_test_dayofweek_char 'select dayofweek(cast("2012-12-01" as char(16)));'
qt_test_timediff_varchar 'select timediff("2010-01-01 01:00:00", "2010-01-02 01:00:00");'
qt_test_timediff_char 'select timediff("2010-01-01 01:00:00", cast("2010-01-02 01:00:00" as char));'
qt_test_money_format_varchar 'select money_format("123456");'
qt_test_money_format_char 'select money_format(cast("123456" as char));'
}


@@ -31,13 +31,27 @@ suite("test_lag_lead_window") {
('b','aa','/wyyt-image/2022/04/13/1434607674511761493.jpg'),
('c','cc','/wyyt-image/2022/04/13/1434607674511761493.jpg') """
// not_vectorized
sql """ set enable_vectorized_engine = false """
qt_select_default """
select aa, bb, min(cc) over(PARTITION by cc order by aa) ,
lag(cc,1,'unknown') over (PARTITION by cc order by aa) as lag_cc
from ${tableName}
order by aa; """
qt_select_default """ select min(t.cc) over(PARTITION by t.cc order by t.aa) ,
lag(t.cc,1,'') over (PARTITION by t.cc order by t.aa) as l1 from ${tableName} t order by aa, bb, cc; """
qt_select_default2 """ select aa, bb, min(cc) over(PARTITION by cc order by aa) ,
lead(cc,1,'') over (PARTITION by cc order by aa) as lead_cc
from ${tableName}
order by aa; """
qt_select_default2 """ select min(t.cc) over(PARTITION by t.cc order by t.aa) ,
lead(t.cc,1,'') over (PARTITION by t.cc order by t.aa) as l1 from ${tableName} t order by aa, bb, cc; """
sql """ set enable_vectorized_engine = true """
qt_select_default """
select aa, bb, min(cc) over(PARTITION by cc order by aa) ,
lag(cc,1,'unknown') over (PARTITION by cc order by aa) as lag_cc
from ${tableName}
order by aa; """
}
qt_select_default2 """ select aa, bb, min(cc) over(PARTITION by cc order by aa) ,
lead(cc,1,'') over (PARTITION by cc order by aa) as lead_cc
from ${tableName}
order by aa; """
}


@@ -0,0 +1,289 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_outer_join_with_with_window_function") {
sql """
drop table if exists dwd_online_detail;
"""
sql """
CREATE TABLE `dwd_online_detail` (
`logout_time` datetime NOT NULL DEFAULT "9999-12-30 00:00:00",
`login_time` datetime NOT NULL DEFAULT "9999-12-30 00:00:00",
`game_code` varchar(50) NOT NULL DEFAULT "-",
`plat_code` varchar(50) NOT NULL DEFAULT "-",
`account` varchar(255) NOT NULL DEFAULT "-",
`playerid` varchar(255) NOT NULL DEFAULT "-",
`userid` varchar(255) NOT NULL DEFAULT "-",
`pid_code` varchar(50) NOT NULL DEFAULT "-",
`gid_code` varchar(50) NOT NULL DEFAULT "-",
`org_sid` int(11) NOT NULL DEFAULT "0",
`ct_sid` int(11) NOT NULL DEFAULT "0",
`next_login_time` datetime NOT NULL DEFAULT "9999-12-30 00:00:00"
) ENGINE=OLAP
DUPLICATE KEY(`logout_time`, `login_time`, `game_code`, `plat_code`, `account`, `playerid`, `userid`)
PARTITION BY RANGE(`logout_time`)
(PARTITION p99991230 VALUES [('9999-12-30 00:00:00'), ('9999-12-31 00:00:00')))
DISTRIBUTED BY HASH(`game_code`, `plat_code`) BUCKETS 4
PROPERTIES (
"replication_allocation" = "tag.location.default: 1",
"colocate_with" = "gp_group"
);
"""
sql """
drop table if exists ods_logout;
"""
sql """
CREATE TABLE `ods_logout` (
`day` date NULL COMMENT "",
`game` varchar(500) NULL COMMENT "",
`plat` varchar(500) NULL COMMENT "",
`dt` datetime NULL COMMENT "",
`time` bigint(20) NULL COMMENT "",
`sid` int(11) NULL COMMENT "",
`pid` varchar(500) NULL COMMENT "",
`gid` varchar(500) NULL COMMENT "",
`account` varchar(500) NULL COMMENT "",
`playerid` varchar(500) NULL COMMENT "",
`prop` varchar(500) NULL COMMENT "",
`p01` varchar(500) NULL COMMENT "",
`p02` varchar(500) NULL COMMENT "",
`p03` varchar(500) NULL COMMENT "",
`p04` varchar(500) NULL COMMENT "",
`p05` varchar(500) NULL COMMENT "",
`p06` varchar(500) NULL COMMENT "",
`p07` varchar(500) NULL COMMENT "",
`p08` varchar(500) NULL COMMENT "",
`p09` varchar(500) NULL COMMENT "",
`p10` varchar(500) NULL COMMENT "",
`p11` varchar(500) NULL COMMENT "",
`p12` varchar(500) NULL COMMENT "",
`p13` varchar(500) NULL COMMENT "",
`p14` varchar(500) NULL COMMENT "",
`p15` varchar(500) NULL COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`day`, `game`, `plat`)
PARTITION BY RANGE(`day`)
(PARTITION p201907 VALUES [('2019-07-01'), ('2019-08-01')))
DISTRIBUTED BY HASH(`game`, `plat`) BUCKETS 4
PROPERTIES (
"replication_allocation" = "tag.location.default: 1"
);
"""
sql """
drop table if exists dim_account_userid_mapping;
"""
sql """
CREATE TABLE `dim_account_userid_mapping` (
`end_time` datetime NOT NULL DEFAULT "9999-12-30 00:00:00",
`start_time` datetime NOT NULL DEFAULT "9999-12-30 00:00:00",
`game_code` varchar(50) NOT NULL,
`plat_code` varchar(50) NOT NULL,
`userkey` varchar(255) NOT NULL,
`userid` varchar(255) NOT NULL,
`account` varchar(255) NOT NULL,
`pid_code` varchar(50) NOT NULL DEFAULT "-",
`gid_code` varchar(50) NOT NULL DEFAULT "-",
`region` varchar(50) NOT NULL DEFAULT "-"
) ENGINE=OLAP
DUPLICATE KEY(`end_time`, `start_time`, `game_code`, `plat_code`, `userkey`)
PARTITION BY RANGE(`end_time`)
(PARTITION p20190705 VALUES [('2019-07-05 00:00:00'), ('2019-07-06 00:00:00')))
DISTRIBUTED BY HASH(`game_code`, `plat_code`) BUCKETS 4
PROPERTIES (
"replication_allocation" = "tag.location.default: 1",
"colocate_with" = "gp_group"
);
"""
sql """
drop table if exists ods_login;
"""
sql """
CREATE TABLE `ods_login` (
`day` date NULL COMMENT "",
`game` varchar(500) NULL COMMENT "",
`plat` varchar(500) NULL COMMENT "",
`dt` datetime NULL COMMENT "",
`time` bigint(20) NULL COMMENT "",
`sid` int(11) NULL COMMENT "",
`pid` varchar(500) NULL COMMENT "",
`gid` varchar(500) NULL COMMENT "",
`account` varchar(500) NULL COMMENT "",
`playerid` varchar(500) NULL COMMENT "",
`prop` varchar(500) NULL COMMENT "",
`p01` varchar(500) NULL COMMENT "",
`p02` varchar(500) NULL COMMENT "",
`p03` varchar(500) NULL COMMENT "",
`p04` varchar(500) NULL COMMENT "",
`p05` varchar(500) NULL COMMENT "",
`p06` varchar(500) NULL COMMENT "",
`p07` varchar(500) NULL COMMENT "",
`p08` varchar(500) NULL COMMENT "",
`p09` varchar(500) NULL COMMENT "",
`p10` varchar(500) NULL COMMENT "",
`p11` varchar(500) NULL COMMENT "",
`p12` varchar(500) NULL COMMENT "",
`p13` varchar(500) NULL COMMENT "",
`p14` varchar(500) NULL COMMENT "",
`p15` varchar(500) NULL COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`day`, `game`, `plat`)
COMMENT "登录ods"
PARTITION BY RANGE(`day`)
(PARTITION p201803 VALUES [('2018-03-01'), ('2018-04-01')),
PARTITION p201804 VALUES [('2018-04-01'), ('2018-05-01')),
PARTITION p201805 VALUES [('2018-05-01'), ('2018-06-01')),
PARTITION p201806 VALUES [('2018-06-01'), ('2018-07-01')),
PARTITION p201807 VALUES [('2018-07-01'), ('2018-08-01')),
PARTITION p201808 VALUES [('2018-08-01'), ('2018-09-01')),
PARTITION p201809 VALUES [('2018-09-01'), ('2018-10-01')),
PARTITION p201810 VALUES [('2018-10-01'), ('2018-11-01')),
PARTITION p201811 VALUES [('2018-11-01'), ('2018-12-01')),
PARTITION p201812 VALUES [('2018-12-01'), ('2019-01-01')),
PARTITION p201901 VALUES [('2019-01-01'), ('2019-02-01')),
PARTITION p201902 VALUES [('2019-02-01'), ('2019-03-01')),
PARTITION p201903 VALUES [('2019-03-01'), ('2019-04-01')),
PARTITION p201904 VALUES [('2019-04-01'), ('2019-05-01')),
PARTITION p201905 VALUES [('2019-05-01'), ('2019-06-01')),
PARTITION p201906 VALUES [('2019-06-01'), ('2019-07-01')),
PARTITION p201907 VALUES [('2019-07-01'), ('2019-08-01')),
PARTITION p201908 VALUES [('2019-08-01'), ('2019-09-01')),
PARTITION p201909 VALUES [('2019-09-01'), ('2019-10-01')),
PARTITION p201910 VALUES [('2019-10-01'), ('2019-11-01')),
PARTITION p201911 VALUES [('2019-11-01'), ('2019-12-01')),
PARTITION p201912 VALUES [('2019-12-01'), ('2020-01-01')),
PARTITION p202001 VALUES [('2020-01-01'), ('2020-02-01')),
PARTITION p202002 VALUES [('2020-02-01'), ('2020-03-01')),
PARTITION p202003 VALUES [('2020-03-01'), ('2020-04-01')),
PARTITION p202004 VALUES [('2020-04-01'), ('2020-05-01')),
PARTITION p202005 VALUES [('2020-05-01'), ('2020-06-01')),
PARTITION p202006 VALUES [('2020-06-01'), ('2020-07-01')),
PARTITION p202007 VALUES [('2020-07-01'), ('2020-08-01')),
PARTITION p202008 VALUES [('2020-08-01'), ('2020-09-01')),
PARTITION p202009 VALUES [('2020-09-01'), ('2020-10-01')),
PARTITION p202010 VALUES [('2020-10-01'), ('2020-11-01')),
PARTITION p202011 VALUES [('2020-11-01'), ('2020-12-01')),
PARTITION p202012 VALUES [('2020-12-01'), ('2021-01-01')),
PARTITION p202101 VALUES [('2021-01-01'), ('2021-02-01')),
PARTITION p202102 VALUES [('2021-02-01'), ('2021-03-01')),
PARTITION p202103 VALUES [('2021-03-01'), ('2021-04-01')),
PARTITION p202104 VALUES [('2021-04-01'), ('2021-05-01')),
PARTITION p202105 VALUES [('2021-05-01'), ('2021-06-01')),
PARTITION p202106 VALUES [('2021-06-01'), ('2021-07-01')),
PARTITION p202107 VALUES [('2021-07-01'), ('2021-08-01')),
PARTITION p202108 VALUES [('2021-08-01'), ('2021-09-01')),
PARTITION p202109 VALUES [('2021-09-01'), ('2021-10-01')),
PARTITION p202110 VALUES [('2021-10-01'), ('2021-11-01')),
PARTITION p202111 VALUES [('2021-11-01'), ('2021-12-01')),
PARTITION p202112 VALUES [('2021-12-01'), ('2022-01-01')),
PARTITION p202201 VALUES [('2022-01-01'), ('2022-02-01')),
PARTITION p202202 VALUES [('2022-02-01'), ('2022-03-01')),
PARTITION p202203 VALUES [('2022-03-01'), ('2022-04-01')),
PARTITION p202204 VALUES [('2022-04-01'), ('2022-05-01')),
PARTITION p202205 VALUES [('2022-05-01'), ('2022-06-01')),
PARTITION p202206 VALUES [('2022-06-01'), ('2022-07-01')),
PARTITION p202207 VALUES [('2022-07-01'), ('2022-08-01')),
PARTITION p202208 VALUES [('2022-08-01'), ('2022-09-01')),
PARTITION p202209 VALUES [('2022-09-01'), ('2022-10-01')))
DISTRIBUTED BY HASH(`game`, `plat`) BUCKETS 4
PROPERTIES (
"replication_allocation" = "tag.location.default: 1",
"dynamic_partition.enable" = "true",
"dynamic_partition.time_unit" = "MONTH",
"dynamic_partition.time_zone" = "Asia/Shanghai",
"dynamic_partition.start" = "-2147483648",
"dynamic_partition.end" = "3",
"dynamic_partition.prefix" = "p",
"dynamic_partition.replication_allocation" = "tag.location.default: 1",
"dynamic_partition.buckets" = "4",
"dynamic_partition.create_history_partition" = "true",
"dynamic_partition.history_partition_num" = "50",
"dynamic_partition.hot_partition_num" = "2",
"dynamic_partition.reserved_history_periods" = "NULL",
"dynamic_partition.start_day_of_month" = "1",
"in_memory" = "false",
"storage_format" = "V2");
"""
sql """
insert into ods_logout(day, game, plat, playerid, dt) values('2019-07-05', 'abc', 'xyz', '1136638398824557', '2019-07-05 00:00:00');
"""
sql """
insert into dwd_online_detail(game_code, plat_code, playerid, account, org_sid, ct_sid, login_time, logout_time, pid_code,gid_code)
values('abc', 'xyz', '1577946288488507', '1492704224', '421001', '421001', '2020-01-19 11:15:21', '9999-12-30 00:00:00', '-', '-');
"""
qt_select """
SELECT online_detail.game_code,online_detail.plat_code,online_detail.playerid,online_detail.account,online_detail.org_sid , online_detail.ct_sid ,
online_detail.login_time,if(online_detail.logout_time='9999-12-30 00:00:00',coalesce(logout.dt,online_detail.next_login_time),online_detail.logout_time) logout_time ,online_detail.next_login_time,online_detail.userid
,online_detail.pid_code,online_detail.gid_code
from
(select
tmp.game_code,tmp.plat_code,tmp.playerid,tmp.account,tmp.org_sid,tmp.ct_sid,tmp.login_time,tmp.logout_time,
LEAD(tmp.login_time,1, '9999-12-30 00:00:00') over (partition by tmp.game_code,tmp.plat_code,tmp.playerid order by tmp.login_time) next_login_time,
COALESCE (mp.userid,'-') userid,COALESCE (mp.pid_code,'-') pid_code,COALESCE (mp.gid_code,'-') gid_code
from
(select * from dim_account_userid_mapping
where start_time < convert_tz(date_add('2019-07-05 00:00:00',INTERVAL 1 day),'Asia/Shanghai','Asia/Shanghai')
and end_time >= convert_tz('2019-07-05 00:00:00','Asia/Shanghai','Asia/Shanghai')
and game_code ='abc' and plat_code='xyz'
) mp
right join
(
select *,concat_ws('_',pid_code,gid_code,account) userkey from
(select game_code,plat_code,playerid,account,org_sid,ct_sid,login_time,logout_time,pid_code,gid_code
from dwd_online_detail where logout_time='9999-12-30 00:00:00' and game_code='abc' and plat_code ='xyz'
union all
select game game_code,plat plat_code,playerid,account,sid org_sid,cast(p08 as int) ct_sid,dt login_time,'9999-12-30 00:00:00' logout_time,pid pid_code,gid gid_code
from ods_login
where game='abc' and `plat` = 'xyz'
AND dt BETWEEN convert_tz('2019-07-05 00:00:00','Asia/Shanghai','Asia/Shanghai')
and convert_tz('2019-07-05 23:59:59','Asia/Shanghai','Asia/Shanghai')
and day BETWEEN date_sub('2019-07-05',INTERVAL 1 DAY ) and date_add('2019-07-05',INTERVAL 1 DAY )
group by 1,2,3,4,5,6,7,8,9,10
) t
) tmp
on mp.game_code=tmp.game_code and mp.plat_code = tmp.plat_code and mp.userkey = tmp.userkey
and tmp.login_time >= mp.start_time and tmp.login_time < mp.end_time
) online_detail
left JOIN
(select day,game game_code,plat plat_code,playerid, dt
from ods_logout dlt
where game='abc' and `plat` = 'xyz'
and dt BETWEEN convert_tz('2019-07-05 00:00:00','Asia/Shanghai','Asia/Shanghai')
and convert_tz('2019-07-05 23:59:59','Asia/Shanghai','Asia/Shanghai')
and day BETWEEN date_sub('2019-07-05',INTERVAL 1 DAY ) and date_add('2019-07-05',INTERVAL 1 DAY )
group by 1,2,3,4,5
) logout
on online_detail.game_code=logout.game_code and online_detail.plat_code=logout.plat_code
and online_detail.playerid=logout.playerid
and logout.dt>online_detail.login_time and logout.dt < online_detail.next_login_time
union all
select game_code,plat_code,playerid,account,org_sid,ct_sid,login_time,logout_time,next_login_time,userid,pid_code,gid_code
from dwd_online_detail
where logout_time BETWEEN convert_tz('2019-07-05 00:00:00','Asia/Shanghai','Asia/Shanghai')
and convert_tz('2019-07-05 23:59:59','Asia/Shanghai','Asia/Shanghai')
and not (game_code='abc' and `plat_code` = 'xyz' );
"""
}


@@ -0,0 +1,21 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_select_constant") {
qt_select1 'select 100, "test", date("2021-01-02");'
qt_select_geo1 'SELECT ST_AsText(ST_Point(123.12345678901234567890,89.1234567890));'
}


@@ -0,0 +1,158 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_select_stddev_variance_window") {
def tableName = "stddev_variance_window"
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE `${tableName}` (
`k1` tinyint(4) NULL COMMENT "",
`k2` smallint(6) NULL COMMENT "",
`k3` int(11) NULL COMMENT "",
`k4` bigint(20) NULL COMMENT "",
`k5` decimal(9, 3) NULL COMMENT "",
`k6` char(5) NULL COMMENT "",
`k10` date NULL COMMENT "",
`k11` datetime NULL COMMENT "",
`k7` varchar(20) NULL COMMENT "",
`k8` double NULL COMMENT "",
`k9` float NULL COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`k1`, `k2`, `k3`, `k4`, `k5`, `k6`, `k10`, `k11`, `k7`)
COMMENT "OLAP"
DISTRIBUTED BY HASH(`k1`) BUCKETS 5
PROPERTIES (
"replication_allocation" = "tag.location.default: 1",
"in_memory" = "false",
"storage_format" = "V2"
);
"""
streamLoad {
table tableName
// default label is a random UUID; to set it explicitly:
// set 'label', UUID.randomUUID().toString()
// default column_separator is specified in doris fe config, usually '\t';
// this line sets it explicitly to '\t'
set 'column_separator', '\t'
// loads the csv that ships with this suite under ${DORIS_HOME}/regression-test/data/
// also, you can stream load an http stream, e.g. http://xxx/some.csv
file 'test_stddev_variance_window.csv'
time 10000 // limit inflight 10s
// the stream load action checks the result by default, including Success status
// and NumberTotalRows == NumberLoadedRows; if a check callback is declared,
// the default checks are skipped, so you must verify every condition yourself
check { result, exception, startTime, endTime ->
if (exception != null) {
throw exception
}
log.info("Stream load result: ${result}".toString())
def json = parseJson(result)
assertEquals("success", json.Status.toLowerCase())
assertEquals(json.NumberTotalRows, json.NumberLoadedRows)
assertTrue(json.NumberLoadedRows > 0 && json.LoadBytes > 0)
}
}
sql "sync"
// Not Vectorized
sql """ set enable_vectorized_engine = false """
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
// vectorized
sql """ set enable_vectorized_engine = true """
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_pop(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, stddev_samp(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_pop(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, variance_samp(k2) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, percentile(k2,0.8) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, percentile(k2,0.8) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, percentile(k2,0.8) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, percentile(k2,0.8) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, percentile(k2,0.8) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, percentile(k2,0.8) over (partition by k6 order by k1) from ${tableName} order by k1;"
qt_select_default "select k1, percentile_approx(k2,0.8,4096) over (partition by k6 order by k1 rows between 3 preceding and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, percentile_approx(k2,0.8,4096) over (partition by k6 order by k1 rows between 3 preceding and 1 preceding) from ${tableName} order by k1;"
qt_select_default "select k1, percentile_approx(k2,0.8,4096) over (partition by k6 order by k1 rows between 3 preceding and 1 following) from ${tableName} order by k1;"
qt_select_default "select k1, percentile_approx(k2,0.8,4096) over (partition by k6 order by k1 rows between current row and current row) from ${tableName} order by k1;"
qt_select_default "select k1, percentile_approx(k2,0.8,4096) over (partition by k6 order by k1 rows between current row and unbounded following) from ${tableName} order by k1;"
qt_select_default "select k1, percentile_approx(k2,0.8,4096) over (partition by k6 order by k1) from ${tableName} order by k1;"
}
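The window frames exercised above are easy to misread; for example, "rows between 3 preceding and 1 preceding" covers at most the three rows immediately before the current row within its k6 partition, and is empty for the first row. A small standalone Groovy sketch of that coverage, using made-up values and a plain sum purely for illustration (not part of the commit):

// standalone illustration: which rows the frame
// "rows between 3 preceding and 1 preceding" covers, over made-up values
def k2 = [10, 20, 30, 40]
k2.eachWithIndex { v, i ->
    def frame = k2.subList(Math.max(0, i - 3), i)   // up to the 3 rows just before row i
    println "row ${i}: frame=${frame} sum=${frame.sum() ?: 0}"
}
// row 0 has an empty frame; row 3 covers [10, 20, 30], sum 60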


@@ -0,0 +1,35 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_select_with_predicate_like") {
def tables=["test_basic_agg"]
for (String table in tables) {
sql """drop table if exists ${table};"""
sql new File("""regression-test/common/table/${table}.sql""").text
sql new File("""regression-test/common/load/${table}.sql""").text
}
qt_select_default "select 1 from test_basic_agg where 1998 like '1%';"
qt_select_default2 "select 1 from test_basic_agg where '1998' like '1%';"
qt_select_default3 "select 1 from test_basic_agg where 2998 like '1%';"
qt_select_default4 "select 1 from test_basic_agg where '2998' like '1%';"
qt_select_default "select 1 from test_basic_agg where 199.8 like '1%';"
qt_select_default2 "select 1 from test_basic_agg where '199.8' like '1%';"
qt_select_default3 "select 1 from test_basic_agg where 299.8 like '1%';"
qt_select_default4 "select 1 from test_basic_agg where '299.8' like '1%';"
}
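The four numeric predicates above rely on the integer or decimal literal being implicitly cast to a string before LIKE is evaluated, which is why 1998 and 199.8 match '1%' while 2998 and 299.8 do not. A hedged, roughly equivalent explicit form (the qt_ tag below is illustrative and not part of the suite):

// explicit-cast counterpart of the implicit numeric-to-string cast exercised above
qt_select_explicit_cast "select 1 from test_basic_agg where cast(1998 as varchar) like '1%';"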


@@ -0,0 +1,2 @@
-- database: presto; groups: qe, conversion_functions
SELECT CAST(10 as VARCHAR)


@@ -0,0 +1,5 @@
/*
-- database: presto; groups: qe, horology_functions
SELECT timezone_hour(TIMESTAMP '2001-08-22 03:04:05.321' at time zone 'Asia/Oral'),
timezone_minute(TIMESTAMP '2001-08-22 03:04:05.321' at time zone 'Asia/Oral')
*/


@@ -0,0 +1,7 @@
-- database: presto; groups: qe, horology_functions
SELECT date '2012-08-08' + interval '2' day,
timestamp '2012-08-08 01:00' + interval '29' hour,
timestamp '2012-10-31 01:00' + interval '1' month,
date '2012-08-08' - interval '2' day,
timestamp '2012-08-08 01:00' - interval '29' hour,
timestamp '2012-10-31 01:00' - interval '1' month


@@ -0,0 +1,2 @@
-- database: presto; groups: qe, horology_functions
SELECT extract(day from TIMESTAMP '2001-08-22 03:04:05.321')


@@ -105,7 +105,7 @@ suite("test_materialized_view") {
}
}
sql "SELECT store_id, count(sale_amt) FROM ${tbName1} GROUP BY store_id;"
qt_sql "DESC ${tbName1} ALL;"
order_qt_sql "DESC ${tbName1} ALL;"
qt_sql "SELECT store_id, count(sale_amt) FROM ${tbName1} GROUP BY store_id;"
@@ -116,5 +116,4 @@ suite("test_materialized_view") {
sql "DROP TABLE ${tbName1} FORCE;"
sql "DROP TABLE ${tbName2} FORCE;"
}
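In the hunk above, qt_sql is swapped for order_qt_sql on the DESC statement; the order_ prefix makes the result comparison order-insensitive, which suits output whose row order is not guaranteed. A hedged reading of that change (the comment states an assumption about the framework, not something shown in this diff):

order_qt_sql "DESC ${tbName1} ALL;"   // order_ prefix: rows are sorted before diffing against the .out block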