[case][fix](iceberg) move rest cases from p2 to p0 and fix iceberg version issue for 2.1 (#37898) (#38589)
bp: #37898
@ -26,6 +26,7 @@ services:
depends_on:
- rest
- minio
- mc
volumes:
- ./data/output/spark-warehouse:/home/iceberg/warehouse
- ./data/output/spark-notebooks:/home/iceberg/notebooks/notebooks

@ -1,4 +1,6 @@
create database if not exists demo.test_db;
use demo.test_db;

drop table if exists demo.test_db.location_s3a_table;
create table demo.test_db.location_s3a_table (
id int,
@ -25,5 +27,26 @@ tblproperties (
insert into demo.test_db.location_s3_table values (1,'a');
update demo.test_db.location_s3_table set val='b' where id=1;

drop table if exists demo.test_db.tb_ts_ntz_filter;
create table demo.test_db.tb_ts_ntz_filter (ts timestamp_ntz) using iceberg;
insert into demo.test_db.tb_ts_ntz_filter values (timestamp_ntz '2024-06-11 12:34:56.123456');
insert into demo.test_db.tb_ts_ntz_filter values (timestamp_ntz '2024-06-11 12:34:56.123456');

drop table if exists iceberg_upper_case_parquet;
CREATE TABLE iceberg_upper_case_parquet (
`ID` BIGINT,
`NAME` STRING)
USING iceberg
TBLPROPERTIES(
'write.format.default' = 'parquet');

insert into iceberg_upper_case_parquet values (1, 'name');

drop table if exists iceberg_upper_case_orc;
CREATE TABLE iceberg_upper_case_orc (
`ID` BIGINT,
`NAME` STRING)
USING iceberg
TBLPROPERTIES(
'write.format.default' = 'orc');

insert into iceberg_upper_case_orc values (1, 'name');

@ -74,7 +74,6 @@ import org.apache.iceberg.util.SnapshotUtil;
import org.apache.iceberg.util.TableScanUtil;

import java.io.IOException;
-import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@ -299,11 +298,11 @@ public class IcebergScanNode extends FileQueryScanNode {
        List<IcebergDeleteFileFilter> filters = new ArrayList<>();
        for (DeleteFile delete : spitTask.deletes()) {
            if (delete.content() == FileContent.POSITION_DELETES) {
-                ByteBuffer lowerBoundBytes = delete.lowerBounds().get(MetadataColumns.DELETE_FILE_POS.fieldId());
-                Optional<Long> positionLowerBound = Optional.ofNullable(lowerBoundBytes)
+                Optional<Long> positionLowerBound = Optional.ofNullable(delete.lowerBounds())
+                        .map(m -> m.get(MetadataColumns.DELETE_FILE_POS.fieldId()))
                        .map(bytes -> Conversions.fromByteBuffer(MetadataColumns.DELETE_FILE_POS.type(), bytes));
-                ByteBuffer upperBoundBytes = delete.upperBounds().get(MetadataColumns.DELETE_FILE_POS.fieldId());
-                Optional<Long> positionUpperBound = Optional.ofNullable(upperBoundBytes)
+                Optional<Long> positionUpperBound = Optional.ofNullable(delete.upperBounds())
+                        .map(m -> m.get(MetadataColumns.DELETE_FILE_POS.fieldId()))
                        .map(bytes -> Conversions.fromByteBuffer(MetadataColumns.DELETE_FILE_POS.type(), bytes));
                filters.add(IcebergDeleteFileFilter.createPositionDelete(delete.path().toString(),
                        positionLowerBound.orElse(-1L), positionUpperBound.orElse(-1L)));

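To make the intent of the reworked lookup concrete, here is a minimal, self-contained sketch. The class name PositionBoundLookup and the helper boundOrDefault are hypothetical, and a plain getLong stands in for Iceberg's Conversions.fromByteBuffer; the point is only that wrapping the possibly-null bounds map in Optional lets a missing map or a missing DELETE_FILE_POS entry fall back to -1L instead of throwing the NullPointerException the old delete.lowerBounds().get(...) call could hit.

    import java.nio.ByteBuffer;
    import java.util.Map;
    import java.util.Optional;

    public class PositionBoundLookup {
        // Mirrors the null-safe lookup above: tolerate a null bounds map and a missing
        // field id, falling back to -1L just like positionLowerBound.orElse(-1L).
        static long boundOrDefault(Map<Integer, ByteBuffer> bounds, int fieldId) {
            return Optional.ofNullable(bounds)              // the map itself may be null
                    .map(m -> m.get(fieldId))               // the field id may be absent
                    .map(buf -> buf.duplicate().getLong())  // stand-in for Conversions.fromByteBuffer
                    .orElse(-1L);
        }

        public static void main(String[] args) {
            ByteBuffer pos = ByteBuffer.allocate(Long.BYTES).putLong(42L);
            pos.flip();
            System.out.println(boundOrDefault(Map.of(1, pos), 1)); // 42
            System.out.println(boundOrDefault(null, 1));           // -1, no NullPointerException
        }
    }
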
@ -0,0 +1,10 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q01 --
1 1970-01-03T09:02:03.000001 a
1 1970-01-03T09:02:03.000001 b
2 1970-01-03T09:02:04.000001 c
2 1970-01-03T09:02:04.000001 d

-- !q02 --
463870

@ -0,0 +1,41 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !one_delete_column --
1 b
2 c

-- !one_delete_column_orc --
1 b
2 c

-- !count1 --
2

-- !count1_orc --
2

-- !max1 --
c

-- !max1_orc --
c

-- !one_delete_column --
1 id2 1 b
2 id2 1 c

-- !one_delete_column_orc --
1 id2 1 b
2 id2 1 c

-- !count3 --
2

-- !count3_orc --
2

-- !max3 --
c

-- !max3_orc --
c

@ -0,0 +1,46 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q1 --
5632

-- !q2 --
30

-- !q3 --
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx
2 select xxxxxxxxx

-- !q4 --
1 hello world
1 hello world
2 select xxxxxxxxx
2 select xxxxxxxxx
3 example xxxx
3 example xxxx
4 more dataxxx
4 more dataxxx
5 another examplexxx
5 another examplexxx

-- !q5 --
480

-- !q6 --
1 hello world
1 hello world
1 hello world
1 hello world
1 hello world
1 hello world
1 hello world
1 hello world
1 hello world
1 hello world

@ -0,0 +1,45 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !icebergParquet1 --
1 name

-- !icebergParquet2 --
1 name

-- !icebergParquet3 --

-- !icebergParquet4 --
1 name

-- !icebergParquet5 --

-- !icebergParquet6 --
1

-- !icebergParquet7 --
name

-- !icebergParquet8 --
1 name

-- !icebergOrc1 --
1 name

-- !icebergOrc2 --
1 name

-- !icebergOrc3 --

-- !icebergOrc4 --
1 name

-- !icebergOrc5 --

-- !icebergOrc6 --
1

-- !icebergOrc7 --
name

-- !icebergOrc8 --
1 name

@ -43,47 +43,3 @@ name
-- !hiveOrc8 --
1 name

-- !icebergParquet1 --
1 name

-- !icebergParquet2 --
1 name

-- !icebergParquet3 --

-- !icebergParquet4 --
1 name

-- !icebergParquet5 --

-- !icebergParquet6 --
1

-- !icebergParquet7 --
name

-- !icebergParquet8 --
1 name

-- !icebergOrc1 --
1 name

-- !icebergOrc2 --
1 name

-- !icebergOrc3 --

-- !icebergOrc4 --
1 name

-- !icebergOrc5 --

-- !icebergOrc6 --
1

-- !icebergOrc7 --
name

-- !icebergOrc8 --
1 name

@ -1,109 +0,0 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !one_delete_column --
1 Customer#000000001 IVhzIApeRb ot,c,E 151 update-phone-1 711.56 BUILDING update-comment-1
2 Customer#000000002 XSTf4,NCwDVaWNe6tEgvwfmRchLXak 13 23-768-687-3665 121.65 AUTOMOBILE l accounts. blithely ironic theodolites integrate boldly: caref
3 Customer#000000003 MG9kdTD2WBHm 1 11-719-748-3364 7498.12 AUTOMOBILE deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov
4 Customer#000000004 XxVSJsLAGtn 47 update-phone-4 15.39 MACHINERY update-comment-2
6 Customer#000000006 sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn 20 30-114-968-4951 7638.57 AUTOMOBILE tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious
7 Customer#000000007 TcGe5gaZNgVePxU5kRrvXBfkasDTea 18 28-190-982-9759 9561.95 AUTOMOBILE ainst the ironic, express theodolites. express, even pinto beans among the exp
8 Customer#000000008 I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5 17 27-147-574-9335 6819.74 BUILDING among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide
9 Customer#000000009 xKiAFTjUsCuxfeleNqefumTrjS 8 18-338-906-3675 8324.07 FURNITURE r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl
10 Customer#000000010 6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2 5 15-741-346-9870 2753.54 HOUSEHOLD es regular deposits haggle. fur
11 Customer#000000011 PkWS 3HlXqwTuzrKg633BEi 23 33-464-151-3439 -272.60 BUILDING ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans.
12 Customer#000000012 9PWKuhzT4Zr1Q 13 23-791-276-1263 3396.49 HOUSEHOLD to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along
13 Customer#000000013 nsXQu0oVjD7PM659uC3SRSp 3 13-761-547-5974 3857.34 BUILDING ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely
14 Customer#000000014 KXkletMlL2JQEA 1 11-845-129-3851 5266.30 FURNITURE , ironic packages across the unus
15 Customer#000000015 YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn 23 33-687-542-7601 2788.52 HOUSEHOLD platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf
16 Customer#000000016 cYiaeMLZSMAOQ2 d0W, 10 20-781-609-3107 4681.03 FURNITURE kly silent courts. thinly regular theodolites sleep fluffily after
17 Customer#000000017 izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7 2 12-970-682-3487 6.34 AUTOMOBILE packages wake! blithely even pint
18 Customer#000000018 3txGO AiuFux3zT0Z9NYaFRnZt 6 16-155-215-1315 5494.43 BUILDING s sleep. carefully even instructions nag furiously alongside of t
19 Customer#000000019 uc,3bHIx84H,wdrmLOjVsiqXCq2tr 18 28-396-526-5053 8914.71 HOUSEHOLD nag. furiously careful packages are slyly at the accounts. furiously regular in
20 Customer#000000020 JrPk8Pqplj4Ne 22 32-957-234-8742 7603.40 FURNITURE g alongside of the special excuses-- fluffily enticing packages wake

-- !one_delete_column_orc --
1 Customer#000000001 IVhzIApeRb ot,c,E 151 update-phone-1 711.56 BUILDING update-comment-1
2 Customer#000000002 XSTf4,NCwDVaWNe6tEgvwfmRchLXak 13 23-768-687-3665 121.65 AUTOMOBILE l accounts. blithely ironic theodolites integrate boldly: caref
3 Customer#000000003 MG9kdTD2WBHm 1 11-719-748-3364 7498.12 AUTOMOBILE deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov
4 Customer#000000004 XxVSJsLAGtn 47 update-phone-4 15.39 MACHINERY update-comment-2
6 Customer#000000006 sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn 20 30-114-968-4951 7638.57 AUTOMOBILE tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious
7 Customer#000000007 TcGe5gaZNgVePxU5kRrvXBfkasDTea 18 28-190-982-9759 9561.95 AUTOMOBILE ainst the ironic, express theodolites. express, even pinto beans among the exp
8 Customer#000000008 I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5 17 27-147-574-9335 6819.74 BUILDING among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide
9 Customer#000000009 xKiAFTjUsCuxfeleNqefumTrjS 8 18-338-906-3675 8324.07 FURNITURE r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl
10 Customer#000000010 6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2 5 15-741-346-9870 2753.54 HOUSEHOLD es regular deposits haggle. fur
11 Customer#000000011 PkWS 3HlXqwTuzrKg633BEi 23 33-464-151-3439 -272.60 BUILDING ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans.
12 Customer#000000012 9PWKuhzT4Zr1Q 13 23-791-276-1263 3396.49 HOUSEHOLD to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along
13 Customer#000000013 nsXQu0oVjD7PM659uC3SRSp 3 13-761-547-5974 3857.34 BUILDING ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely
14 Customer#000000014 KXkletMlL2JQEA 1 11-845-129-3851 5266.30 FURNITURE , ironic packages across the unus
15 Customer#000000015 YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn 23 33-687-542-7601 2788.52 HOUSEHOLD platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf
16 Customer#000000016 cYiaeMLZSMAOQ2 d0W, 10 20-781-609-3107 4681.03 FURNITURE kly silent courts. thinly regular theodolites sleep fluffily after
17 Customer#000000017 izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7 2 12-970-682-3487 6.34 AUTOMOBILE packages wake! blithely even pint
18 Customer#000000018 3txGO AiuFux3zT0Z9NYaFRnZt 6 16-155-215-1315 5494.43 BUILDING s sleep. carefully even instructions nag furiously alongside of t
19 Customer#000000019 uc,3bHIx84H,wdrmLOjVsiqXCq2tr 18 28-396-526-5053 8914.71 HOUSEHOLD nag. furiously careful packages are slyly at the accounts. furiously regular in
20 Customer#000000020 JrPk8Pqplj4Ne 22 32-957-234-8742 7603.40 FURNITURE g alongside of the special excuses-- fluffily enticing packages wake

-- !count1 --
19

-- !count1_orc --
19

-- !max1 --
update-comment-2

-- !max1_orc --
update-comment-2

-- !one_delete_column --
1 Customer#000000001 IVhzIApeRb ot,c,E 151 update-phone-1 711.56 BUILDING update-comment-1
2 Customer#000000002 XSTf4,NCwDVaWNe6tEgvwfmRchLXak 13 23-768-687-3665 121.65 AUTOMOBILE l accounts. blithely ironic theodolites integrate boldly: caref
3 Customer#000000003 MG9kdTD2WBHm 1 11-719-748-3364 7498.12 AUTOMOBILE deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov
4 Customer#000000004 XxVSJsLAGtn 47 update-phone-4 15.39 MACHINERY update-comment-2
6 Customer#000000006 sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn 20 30-114-968-4951 7638.57 AUTOMOBILE tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious
7 Customer#000000007 TcGe5gaZNgVePxU5kRrvXBfkasDTea 18 28-190-982-9759 9561.95 AUTOMOBILE ainst the ironic, express theodolites. express, even pinto beans among the exp
8 Customer#000000008 I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5 17 27-147-574-9335 6819.74 BUILDING among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide
9 Customer#000000009 xKiAFTjUsCuxfeleNqefumTrjS 8 18-338-906-3675 8324.07 FURNITURE r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl
10 Customer#000000010 6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2 5 15-741-346-9870 2753.54 HOUSEHOLD es regular deposits haggle. fur
11 Customer#000000011 PkWS 3HlXqwTuzrKg633BEi 23 33-464-151-3439 -272.60 BUILDING ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans.
12 Customer#000000012 9PWKuhzT4Zr1Q 13 23-791-276-1263 3396.49 HOUSEHOLD to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along
13 Customer#000000013 nsXQu0oVjD7PM659uC3SRSp 3 13-761-547-5974 3857.34 BUILDING ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely
14 Customer#000000014 KXkletMlL2JQEA 1 11-845-129-3851 5266.30 FURNITURE , ironic packages across the unus
15 Customer#000000015 YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn 23 33-687-542-7601 2788.52 HOUSEHOLD platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf
16 Customer#000000016 cYiaeMLZSMAOQ2 d0W, 10 20-781-609-3107 4681.03 FURNITURE kly silent courts. thinly regular theodolites sleep fluffily after
17 Customer#000000017 izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7 2 12-970-682-3487 6.34 AUTOMOBILE packages wake! blithely even pint
18 Customer#000000018 3txGO AiuFux3zT0Z9NYaFRnZt 6 16-155-215-1315 5494.43 BUILDING s sleep. carefully even instructions nag furiously alongside of t
19 Customer#000000019 uc,3bHIx84H,wdrmLOjVsiqXCq2tr 18 28-396-526-5053 8914.71 HOUSEHOLD nag. furiously careful packages are slyly at the accounts. furiously regular in
20 Customer#000000020 JrPk8Pqplj4Ne 22 32-957-234-8742 7603.40 FURNITURE g alongside of the special excuses-- fluffily enticing packages wake

-- !one_delete_column_orc --
1 Customer#000000001 IVhzIApeRb ot,c,E 151 update-phone-1 711.56 BUILDING update-comment-1
2 Customer#000000002 XSTf4,NCwDVaWNe6tEgvwfmRchLXak 13 23-768-687-3665 121.65 AUTOMOBILE l accounts. blithely ironic theodolites integrate boldly: caref
3 Customer#000000003 MG9kdTD2WBHm 1 11-719-748-3364 7498.12 AUTOMOBILE deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov
4 Customer#000000004 XxVSJsLAGtn 47 update-phone-4 15.39 MACHINERY update-comment-2
6 Customer#000000006 sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn 20 30-114-968-4951 7638.57 AUTOMOBILE tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious
7 Customer#000000007 TcGe5gaZNgVePxU5kRrvXBfkasDTea 18 28-190-982-9759 9561.95 AUTOMOBILE ainst the ironic, express theodolites. express, even pinto beans among the exp
8 Customer#000000008 I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5 17 27-147-574-9335 6819.74 BUILDING among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide
9 Customer#000000009 xKiAFTjUsCuxfeleNqefumTrjS 8 18-338-906-3675 8324.07 FURNITURE r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl
10 Customer#000000010 6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2 5 15-741-346-9870 2753.54 HOUSEHOLD es regular deposits haggle. fur
11 Customer#000000011 PkWS 3HlXqwTuzrKg633BEi 23 33-464-151-3439 -272.60 BUILDING ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans.
12 Customer#000000012 9PWKuhzT4Zr1Q 13 23-791-276-1263 3396.49 HOUSEHOLD to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along
13 Customer#000000013 nsXQu0oVjD7PM659uC3SRSp 3 13-761-547-5974 3857.34 BUILDING ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely
14 Customer#000000014 KXkletMlL2JQEA 1 11-845-129-3851 5266.30 FURNITURE , ironic packages across the unus
15 Customer#000000015 YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn 23 33-687-542-7601 2788.52 HOUSEHOLD platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf
16 Customer#000000016 cYiaeMLZSMAOQ2 d0W, 10 20-781-609-3107 4681.03 FURNITURE kly silent courts. thinly regular theodolites sleep fluffily after
17 Customer#000000017 izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7 2 12-970-682-3487 6.34 AUTOMOBILE packages wake! blithely even pint
18 Customer#000000018 3txGO AiuFux3zT0Z9NYaFRnZt 6 16-155-215-1315 5494.43 BUILDING s sleep. carefully even instructions nag furiously alongside of t
19 Customer#000000019 uc,3bHIx84H,wdrmLOjVsiqXCq2tr 18 28-396-526-5053 8914.71 HOUSEHOLD nag. furiously careful packages are slyly at the accounts. furiously regular in
20 Customer#000000020 JrPk8Pqplj4Ne 22 32-957-234-8742 7603.40 FURNITURE g alongside of the special excuses-- fluffily enticing packages wake

-- !count3 --
19

-- !count3_orc --
19

-- !max3 --
update-comment-2

-- !max3_orc --
update-comment-2

@ -1,26 +0,0 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q01 --
2879562

-- !q02 --
1
3
5
6
7
8
11

-- !q03 --
1 Customer#000000001 j5JsirBM9P MOROCCO 0 MOROCCO AFRICA 25-989-741-2988 BUILDING
3 Customer#000000003 fkRGN8n ARGENTINA7 ARGENTINA AMERICA 11-719-748-3364 AUTOMOBILE
5 Customer#000000005 hwBtxkoBF qSW4KrI CANADA 5 CANADA AMERICA 13-750-942-6364 HOUSEHOLD

-- !q04 --
1 1970-01-03 09:02:03.000001 a
1 1970-01-03 09:02:03.000001 b
2 1970-01-03 09:02:04.000001 c
2 1970-01-03 09:02:04.000001 d

-- !q05 --
463870
@ -1,74 +0,0 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q01 --
149988

-- !q02 --
1
3
4
7

-- !q03 --
8242263

-- !q04 --
0

-- !q05 --
1 Customer#000000001 IVhzIApeRb ot,c,E 15 25-989-741-2988 711.56 BUILDING to the even, regular platelets. regular, ironic epitaphs nag e
3 Customer#000000003 MG9kdTD2WBHm 1 11-719-748-3364 7498.12 AUTOMOBILE deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov
4 Customer#000000004 XxVSJsLAGtn 4 14-128-190-5944 2866.83 MACHINERY requests. final, regular ideas sleep final accou

-- !q06 --
604519555
604519557
604519558

-- !q07 --
12979.65
219204.52
5908.20

-- !q08 --
120001848

-- !q09 --
1
2
3

-- !q10 --
150000000
149999999
149999996

-- !q11 --
1
2
3

-- !q12 --
150000000
149999999
149999996

-- !q13 --
1
4
7

-- !q14 --
Customer#000000004
Customer#000000007

-- !q15 --
150000

-- !q16 --
150000

-- !q17 --
150000

-- !q18 --
150000
@ -32,22 +32,6 @@ suite("test_upper_case_column_name", "p0,external,hive,external_docker,external_
def hiveOrc6 = """select id from hive_upper_case_orc where id=1;"""
def hiveOrc7 = """select name from hive_upper_case_orc where id=1;"""
def hiveOrc8 = """select id, name from hive_upper_case_orc where id=1;"""
def icebergParquet1 = """select * from iceberg_upper_case_parquet;"""
def icebergParquet2 = """select * from iceberg_upper_case_parquet where id=1;"""
def icebergParquet3 = """select * from iceberg_upper_case_parquet where id>1;"""
def icebergParquet4 = """select * from iceberg_upper_case_parquet where name='name';"""
def icebergParquet5 = """select * from iceberg_upper_case_parquet where name!='name';"""
def icebergParquet6 = """select id from iceberg_upper_case_parquet where id=1;"""
def icebergParquet7 = """select name from iceberg_upper_case_parquet where id=1;"""
def icebergParquet8 = """select id, name from iceberg_upper_case_parquet where id=1;"""
def icebergOrc1 = """select * from iceberg_upper_case_orc;"""
def icebergOrc2 = """select * from iceberg_upper_case_orc where id=1;"""
def icebergOrc3 = """select * from iceberg_upper_case_orc where id>1;"""
def icebergOrc4 = """select * from iceberg_upper_case_orc where name='name';"""
def icebergOrc5 = """select * from iceberg_upper_case_orc where name!='name';"""
def icebergOrc6 = """select id from iceberg_upper_case_orc where id=1;"""
def icebergOrc7 = """select name from iceberg_upper_case_orc where id=1;"""
def icebergOrc8 = """select id, name from iceberg_upper_case_orc where id=1;"""

String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
@ -82,22 +66,6 @@ suite("test_upper_case_column_name", "p0,external,hive,external_docker,external_
qt_hiveOrc6 hiveOrc6
qt_hiveOrc7 hiveOrc7
qt_hiveOrc8 hiveOrc8
// qt_icebergParquet1 icebergParquet1
// qt_icebergParquet2 icebergParquet2
// qt_icebergParquet3 icebergParquet3
// qt_icebergParquet4 icebergParquet4
// qt_icebergParquet5 icebergParquet5
// qt_icebergParquet6 icebergParquet6
// qt_icebergParquet7 icebergParquet7
// qt_icebergParquet8 icebergParquet8
// qt_icebergOrc1 icebergOrc1
// qt_icebergOrc2 icebergOrc2
// qt_icebergOrc3 icebergOrc3
// qt_icebergOrc4 icebergOrc4
// qt_icebergOrc5 icebergOrc5
// qt_icebergOrc6 icebergOrc6
// qt_icebergOrc7 icebergOrc7
// qt_icebergOrc8 icebergOrc8
}
}

@ -0,0 +1,192 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_gen_iceberg_by_api", "p0,external,doris,external_docker,external_docker_doris") {
String enabled = context.config.otherConfigs.get("enableIcebergTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable iceberg test.")
return
}

String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
String catalog_name = "test_gen_iceberg_by_api"

sql """drop catalog if exists ${catalog_name}"""
sql """
CREATE CATALOG ${catalog_name} PROPERTIES (
'type'='iceberg',
'iceberg.catalog.type'='rest',
'uri' = 'http://${externalEnvIp}:${rest_port}',
"s3.access_key" = "admin",
"s3.secret_key" = "password",
"s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
"s3.region" = "us-east-1"
);"""

sql """switch ${catalog_name};"""
sql """ use `test_db`; """

def q01 = {
qt_q01 """ select * from multi_partition2 order by val """
qt_q02 """ select count(*) from table_with_append_file where MAN_ID is not null """
}

q01()
}

/*

// After packaging, run this inside the iceberg-spark docker container

package org.example;

import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DataFiles;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.types.Types;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class CreateTable {
public static void main(String[] args) throws Exception {
CreateTable createTable = new CreateTable();
createTable.create();
}

public void create() {
HashMap<String, String> prop = new HashMap<>();
prop.put("uri", "http://172.21.0.101:18181");
prop.put("io-impl", "org.apache.iceberg.aws.s3.S3FileIO");
prop.put("type", "rest");
prop.put("AWS_ACCESS_KEY_ID", "admin");
prop.put("AWS_SECRET_ACCESS_KEY", "password");
prop.put("AWS_REGION", "us-east-1");
prop.put("s3.endpoint", "http://172.21.0.101:19001");
// RESTCatalog catalog = new RESTCatalog();
Catalog catalog = CatalogUtil.buildIcebergCatalog("df", prop, null);
catalog.initialize("ddf", prop);
List<TableIdentifier> test_db = catalog.listTables(Namespace.of("test_db"));
System.out.println(test_db);

table1(catalog);
table2(catalog);

}

public void table1(Catalog catalog) {

TableIdentifier of = TableIdentifier.of("test_db", "multi_partition2");
boolean exists = catalog.tableExists(of);
Table table;
if (exists) {
catalog.dropTable(of);
}
Schema schema = new Schema(
Types.NestedField.required(1, "id", Types.IntegerType.get()),
Types.NestedField.required(2, "ts", Types.TimestampType.withZone()),
Types.NestedField.required(3, "val", Types.StringType.get()));
PartitionSpec spec = PartitionSpec.builderFor(schema)
.identity("id")
.hour("ts")
.build();
table = catalog.createTable(of, schema, spec);

DataFile build = DataFiles.builder(table.spec())
.withPartitionValues(new ArrayList<String>(){{add("1");add("1000");}})
.withInputFile(table.io().newInputFile("s3://warehouse/data/multi_partition2/00000-0-f309508c-953a-468f-8bcf-d910b2c7a1e5-00001.parquet"))
.withFileSizeInBytes(884)
.withRecordCount(1)//read the record count
.withFormat(FileFormat.PARQUET)
.build();
DataFile build2 = DataFiles.builder(table.spec())
.withPartitionValues(new ArrayList<String>(){{add("1");add("1000");}})
.withInputFile(table.io().newInputFile("s3://warehouse/data/multi_partition2/00000-1-4acbae74-1265-4c3b-a8d1-d773037e8b42-00001.parquet"))
.withFileSizeInBytes(884)
.withRecordCount(1)//read the record count
.withFormat(FileFormat.PARQUET)
.build();
DataFile build3 = DataFiles.builder(table.spec())
.withPartitionValues(new ArrayList<String>(){{add("2");add("2000");}})
.withInputFile(table.io().newInputFile("s3://warehouse/data/multi_partition2/00000-2-9c8199d3-9bc5-4e57-84d7-ad1cedcfbe94-00001.parquet"))
.withFileSizeInBytes(884)
.withRecordCount(1)//read the record count
.withFormat(FileFormat.PARQUET)
.build();
DataFile build4 = DataFiles.builder(table.spec())
.withPartitionValues(new ArrayList<String>(){{add("2");add("2000");}})
.withInputFile(table.io().newInputFile("s3://warehouse/data/multi_partition2/00000-3-c042039c-f716-41a1-a857-eedf23f7be92-00001.parquet"))
.withFileSizeInBytes(884)
.withRecordCount(1)//read the record count
.withFormat(FileFormat.PARQUET)
.build();

table.newAppend()
.appendFile(build)
.appendFile(build2)
.appendFile(build3)
.appendFile(build4)
.commit();
}

public void table2(Catalog catalog) {
TableIdentifier of = TableIdentifier.of("test_db", "table_with_append_file");
boolean exists = catalog.tableExists(of);
if (exists) {
catalog.dropTable(of);
}
Schema schema = new Schema(
Types.NestedField.required(1, "START_TIME", Types.LongType.get()),
Types.NestedField.required(2, "ENT_TIME", Types.LongType.get()),
Types.NestedField.required(3, "MAN_ID", Types.StringType.get()),
Types.NestedField.required(4, "END_TIME_MICROS", Types.TimestampType.withZone()));
PartitionSpec spec = PartitionSpec.builderFor(schema)
.hour("END_TIME_MICROS")
.build();
Table table = catalog.createTable(of, schema, spec);

InputFile inputFile = table.io()
.newInputFile("s3://warehouse/data/table_with_append_file/sample-data.snappy.parquet");

DataFile build = DataFiles.builder(table.spec())
.withPartitionValues(new ArrayList<String>(){{add("3");}})
.withInputFile(inputFile)
.withFileSizeInBytes(inputFile.getLength())
.withRecordCount(1)//read the record count
.withFormat(FileFormat.PARQUET)
.build();

table.newAppend()
.appendFile(build)
.commit();
}
}

*/

@ -0,0 +1,152 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_iceberg_equality_delete", "p0,external,doris,external_docker,external_docker_doris") {
String enabled = context.config.otherConfigs.get("enableIcebergTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable iceberg test.")
return
}

String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
String catalog_name = "test_iceberg_equality_delete"

sql """drop catalog if exists ${catalog_name}"""
sql """
CREATE CATALOG ${catalog_name} PROPERTIES (
'type'='iceberg',
'iceberg.catalog.type'='rest',
'uri' = 'http://${externalEnvIp}:${rest_port}',
"s3.access_key" = "admin",
"s3.secret_key" = "password",
"s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
"s3.region" = "us-east-1"
);"""

logger.info("catalog " + catalog_name + " created")
sql """switch ${catalog_name};"""
logger.info("switched to catalog " + catalog_name)
sql """ use test_db;"""

// one delete column
qt_one_delete_column """select * from customer_flink_one order by id"""
qt_one_delete_column_orc """select * from customer_flink_one_orc order by id"""
qt_count1 """select count(*) from customer_flink_one"""
qt_count1_orc """select count(*) from customer_flink_one_orc"""
qt_max1 """select max(val) from customer_flink_one"""
qt_max1_orc """select max(val) from customer_flink_one_orc"""

// three delete columns
qt_one_delete_column """select * from customer_flink_three order by id1"""
qt_one_delete_column_orc """select * from customer_flink_three_orc order by id1"""
qt_count3 """select count(*) from customer_flink_three"""
qt_count3_orc """select count(*) from customer_flink_three_orc"""
qt_max3 """select max(val) from customer_flink_three"""
qt_max3_orc """select max(val) from customer_flink_three_orc"""

sql """drop catalog ${catalog_name}"""
}

/*

---- flink sql:

export AWS_ACCESS_KEY_ID=admin
export AWS_SECRET_ACCESS_KEY=password
export AWS_REGION=us-east-1
export AWS_DEFAULT_REGION=us-east-1
export S3_PATH_STYLE_ACCESS=true
export S3_ENDPOINT=http://172.21.0.101:19001

CREATE CATALOG iceberg_rest WITH (
'type' = 'iceberg',
'catalog-type' = 'rest',
'uri'='http://172.21.0.101:18181',
'warehouse'='s3a://warehouse/wh',
's3.endpoint' = 'http://172.21.0.101:19001'
);

use iceberg_rest.test_db;

create table customer_flink_one (
id int,
val string,
PRIMARY KEY(`id`) NOT ENFORCED
) WITH (
'format-version'='2',
'write-format'='parquet',
'upsert-enabled'='true'
);

insert into customer_flink_one values (1,'a');
insert into customer_flink_one values (2,'b');
insert into customer_flink_one values (1,'b');
insert into customer_flink_one values (2, 'a'),(2, 'c');

create table customer_flink_one_orc (
id int,
val string,
PRIMARY KEY(`id`) NOT ENFORCED
) WITH (
'format-version'='2',
'write-format'='orc',
'upsert-enabled'='true'
);

insert into customer_flink_one_orc values (1,'a');
insert into customer_flink_one_orc values (2,'b');
insert into customer_flink_one_orc values (1,'b');
insert into customer_flink_one_orc values (2, 'a'),(2, 'c');

create table customer_flink_three (
id1 int,
id2 string,
id3 int,
val string,
PRIMARY KEY(`id1`,`id2`,`id3`) NOT ENFORCED
) WITH (
'format-version'='2',
'write-format'='parquet',
'upsert-enabled'='true'
);
insert into customer_flink_three values (1,'id2',1,'a');
insert into customer_flink_three values (1,'id2',1,'b');
insert into customer_flink_three values (2,'id2',1,'b');
insert into customer_flink_three values (2,'id2',1,'a'),(2,'id2',1,'c');

create table customer_flink_three_orc (
id1 int,
id2 string,
id3 int,
val string,
PRIMARY KEY(`id1`,`id2`,`id3`) NOT ENFORCED
) WITH (
'format-version'='2',
'write-format'='orc',
'upsert-enabled'='true'
);

insert into customer_flink_three_orc values (1,'id2',1,'a');
insert into customer_flink_three_orc values (1,'id2',1,'b');
insert into customer_flink_three_orc values (2,'id2',1,'b');
insert into customer_flink_three_orc values (2,'id2',1,'a'),(2,'id2',1,'c');

*/

@ -15,27 +15,34 @@
// specific language governing permissions and limitations
// under the License.

suite("iceberg_position_delete", "p2,external,iceberg,external_remote,external_remote_iceberg") {
suite("test_iceberg_position_delete", "p0,external,doris,external_docker,external_docker_doris") {
String enabled = context.config.otherConfigs.get("enableIcebergTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable iceberg test.")
return
}

String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
String catalog_name = "test_iceberg_position_delete"

String catalog_name = "test_external_iceberg_position_delete"
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHdfsPort = context.config.otherConfigs.get("extHdfsPort")
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (
'type'='iceberg',
'iceberg.catalog.type'='hadoop',
'warehouse' = 'hdfs://${extHiveHmsHost}:${extHdfsPort}/usr/hive/warehouse/hadoop_catalog'
);
"""
sql """drop catalog if exists ${catalog_name}"""
sql """
CREATE CATALOG ${catalog_name} PROPERTIES (
'type'='iceberg',
'iceberg.catalog.type'='rest',
'uri' = 'http://${externalEnvIp}:${rest_port}',
"s3.access_key" = "admin",
"s3.secret_key" = "password",
"s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
"s3.region" = "us-east-1"
);"""

logger.info("catalog " + catalog_name + " created")
sql """switch ${catalog_name};"""
logger.info("switched to catalog " + catalog_name)
sql """ use multi_catalog;"""
sql """ use test_db;"""

qt_gen_data_1 """ select * from iceberg_position_gen_data where name = 'xyzxxxxxx' and id != 9;"""
qt_gen_data_2 """ select * from iceberg_position_gen_data where id = 1; """
@ -158,7 +165,6 @@ suite("iceberg_position_delete", "p2,external,iceberg,external_remote,external_r
assertTrue(iceberg_position_gen_7.size() == 5632)

sql """drop catalog ${catalog_name}"""
}
}
/*

@ -0,0 +1,55 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_iceberg_time_travel", "p0,external,doris,external_docker,external_docker_doris") {
String enabled = context.config.otherConfigs.get("enableIcebergTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable iceberg test.")
return
}

String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
String catalog_name = "test_iceberg_time_travel"

sql """drop catalog if exists ${catalog_name}"""
sql """
CREATE CATALOG ${catalog_name} PROPERTIES (
'type'='iceberg',
'iceberg.catalog.type'='rest',
'uri' = 'http://${externalEnvIp}:${rest_port}',
"s3.access_key" = "admin",
"s3.secret_key" = "password",
"s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
"s3.region" = "us-east-1"
);"""

sql """ use ${catalog_name}.test_db """

// test for 'FOR TIME AS OF' and 'FOR VERSION AS OF'
def q01 = {
qt_q1 """ select count(*) from iceberg_position_gen_data """ // 5632
qt_q2 """ select count(*) from iceberg_position_gen_data FOR TIME AS OF '2024-07-14 14:17:01' """// 120
qt_q3 """ select * from iceberg_position_gen_data order by id limit 10"""
qt_q4 """ select * from iceberg_position_gen_data FOR TIME AS OF '2024-07-14 14:17:01' order by id limit 10"""
qt_q5 """ select count(*) from iceberg_position_gen_data FOR VERSION AS OF 3106988132043095748 """ // 240
qt_q6 """ select * from iceberg_position_gen_data FOR VERSION AS OF 3106988132043095748 order by id limit 10"""
}

q01()
}
@ -0,0 +1,78 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_iceberg_upper_case_column_name", "p0,external,doris,external_docker,external_docker_doris") {
String enabled = context.config.otherConfigs.get("enableIcebergTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable iceberg test.")
return
}

def icebergParquet1 = """select * from iceberg_upper_case_parquet;"""
def icebergParquet2 = """select * from iceberg_upper_case_parquet where id=1;"""
def icebergParquet3 = """select * from iceberg_upper_case_parquet where id>1;"""
def icebergParquet4 = """select * from iceberg_upper_case_parquet where name='name';"""
def icebergParquet5 = """select * from iceberg_upper_case_parquet where name!='name';"""
def icebergParquet6 = """select id from iceberg_upper_case_parquet where id=1;"""
def icebergParquet7 = """select name from iceberg_upper_case_parquet where id=1;"""
def icebergParquet8 = """select id, name from iceberg_upper_case_parquet where id=1;"""
def icebergOrc1 = """select * from iceberg_upper_case_orc;"""
def icebergOrc2 = """select * from iceberg_upper_case_orc where id=1;"""
def icebergOrc3 = """select * from iceberg_upper_case_orc where id>1;"""
def icebergOrc4 = """select * from iceberg_upper_case_orc where name='name';"""
def icebergOrc5 = """select * from iceberg_upper_case_orc where name!='name';"""
def icebergOrc6 = """select id from iceberg_upper_case_orc where id=1;"""
def icebergOrc7 = """select name from iceberg_upper_case_orc where id=1;"""
def icebergOrc8 = """select id, name from iceberg_upper_case_orc where id=1;"""

String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
String catalog_name = "test_iceberg_upper_case_column_name"

sql """drop catalog if exists ${catalog_name}"""
sql """
CREATE CATALOG ${catalog_name} PROPERTIES (
'type'='iceberg',
'iceberg.catalog.type'='rest',
'uri' = 'http://${externalEnvIp}:${rest_port}',
"s3.access_key" = "admin",
"s3.secret_key" = "password",
"s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
"s3.region" = "us-east-1"
);"""

sql """use `${catalog_name}`.`test_db`"""

qt_icebergParquet1 icebergParquet1
qt_icebergParquet2 icebergParquet2
qt_icebergParquet3 icebergParquet3
qt_icebergParquet4 icebergParquet4
qt_icebergParquet5 icebergParquet5
qt_icebergParquet6 icebergParquet6
qt_icebergParquet7 icebergParquet7
qt_icebergParquet8 icebergParquet8
qt_icebergOrc1 icebergOrc1
qt_icebergOrc2 icebergOrc2
qt_icebergOrc3 icebergOrc3
qt_icebergOrc4 icebergOrc4
qt_icebergOrc5 icebergOrc5
qt_icebergOrc6 icebergOrc6
qt_icebergOrc7 icebergOrc7
qt_icebergOrc8 icebergOrc8
}

@ -32,22 +32,6 @@ suite("upper_case_column_name", "p2,external,hive,external_remote,external_remot
def hiveOrc6 = """select id from hive_upper_case_orc where id=1;"""
def hiveOrc7 = """select name from hive_upper_case_orc where id=1;"""
def hiveOrc8 = """select id, name from hive_upper_case_orc where id=1;"""
def icebergParquet1 = """select * from iceberg_upper_case_parquet;"""
def icebergParquet2 = """select * from iceberg_upper_case_parquet where id=1;"""
def icebergParquet3 = """select * from iceberg_upper_case_parquet where id>1;"""
def icebergParquet4 = """select * from iceberg_upper_case_parquet where name='name';"""
def icebergParquet5 = """select * from iceberg_upper_case_parquet where name!='name';"""
def icebergParquet6 = """select id from iceberg_upper_case_parquet where id=1;"""
def icebergParquet7 = """select name from iceberg_upper_case_parquet where id=1;"""
def icebergParquet8 = """select id, name from iceberg_upper_case_parquet where id=1;"""
def icebergOrc1 = """select * from iceberg_upper_case_orc;"""
def icebergOrc2 = """select * from iceberg_upper_case_orc where id=1;"""
def icebergOrc3 = """select * from iceberg_upper_case_orc where id>1;"""
def icebergOrc4 = """select * from iceberg_upper_case_orc where name='name';"""
def icebergOrc5 = """select * from iceberg_upper_case_orc where name!='name';"""
def icebergOrc6 = """select id from iceberg_upper_case_orc where id=1;"""
def icebergOrc7 = """select name from iceberg_upper_case_orc where id=1;"""
def icebergOrc8 = """select id, name from iceberg_upper_case_orc where id=1;"""

String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
@ -82,22 +66,6 @@ suite("upper_case_column_name", "p2,external,hive,external_remote,external_remot
qt_hiveOrc6 hiveOrc6
qt_hiveOrc7 hiveOrc7
qt_hiveOrc8 hiveOrc8
qt_icebergParquet1 icebergParquet1
qt_icebergParquet2 icebergParquet2
qt_icebergParquet3 icebergParquet3
qt_icebergParquet4 icebergParquet4
qt_icebergParquet5 icebergParquet5
qt_icebergParquet6 icebergParquet6
qt_icebergParquet7 icebergParquet7
qt_icebergParquet8 icebergParquet8
qt_icebergOrc1 icebergOrc1
qt_icebergOrc2 icebergOrc2
qt_icebergOrc3 icebergOrc3
qt_icebergOrc4 icebergOrc4
qt_icebergOrc5 icebergOrc5
qt_icebergOrc6 icebergOrc6
qt_icebergOrc7 icebergOrc7
qt_icebergOrc8 icebergOrc8
}
}

@ -1,57 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("iceberg_equality_delete", "p2,external,iceberg,external_remote,external_remote_iceberg") {

String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {

String catalog_name = "test_external_iceberg_equality_delete"
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHdfsPort = context.config.otherConfigs.get("extHdfsPort")
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (
'type'='iceberg',
'iceberg.catalog.type'='hadoop',
'warehouse' = 'hdfs://${extHiveHmsHost}:${extHdfsPort}/usr/hive/warehouse/hadoop_catalog'
);
"""

logger.info("catalog " + catalog_name + " created")
sql """switch ${catalog_name};"""
logger.info("switched to catalog " + catalog_name)
sql """ use multi_catalog;"""

// one delete column
qt_one_delete_column """select * from customer_flink_one order by c_custkey"""
qt_one_delete_column_orc """select * from customer_flink_one_orc order by c_custkey"""
qt_count1 """select count(*) from customer_flink_one"""
qt_count1_orc """select count(*) from customer_flink_one_orc"""
qt_max1 """select max(c_comment) from customer_flink_one"""
qt_max1_orc """select max(c_comment) from customer_flink_one_orc"""
// three delete columns
qt_one_delete_column """select * from customer_flink_three order by c_custkey"""
qt_one_delete_column_orc """select * from customer_flink_three_orc order by c_custkey"""
qt_count3 """select count(*) from customer_flink_three"""
qt_count3_orc """select count(*) from customer_flink_three_orc"""
qt_max3 """select max(c_comment) from customer_flink_three"""
qt_max3_orc """select max(c_comment) from customer_flink_three_orc"""

sql """drop catalog ${catalog_name}"""
}
}

@ -1,49 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_external_catalog_iceberg_hadoop_catalog", "p2,external,iceberg,external_remote,external_remote_iceberg") {
String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String iceberg_catalog_name = "test_external_iceberg_catalog_hadoop"
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHdfsPort = context.config.otherConfigs.get("extHdfsPort")
sql """drop catalog if exists ${iceberg_catalog_name};"""
sql """
create catalog if not exists ${iceberg_catalog_name} properties (
'type'='iceberg',
'iceberg.catalog.type'='hadoop',
'warehouse' = 'hdfs://${extHiveHmsHost}:${extHdfsPort}/usr/hive/warehouse/hadoop_catalog'
);
"""

sql """switch ${iceberg_catalog_name};"""
def q01 = {
qt_q01 """ select count(*) from iceberg_hadoop_catalog """
qt_q02 """ select c_custkey from iceberg_hadoop_catalog group by c_custkey order by c_custkey limit 7 """
qt_q03 """ select * from iceberg_hadoop_catalog order by c_custkey limit 3 """
}

def q02 = {
qt_q04 """ select * from multi_partition2 order by val """
qt_q05 """ select count(*) from table_with_append_file where MAN_ID is not null """
}

sql """ use `multi_catalog`; """
q01()
q02()
}
}

@ -1,84 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_external_catalog_icebergv2_nereids", "p2,external,iceberg,external_remote,external_remote_iceberg") {
String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
String hms_catalog_name = "test_external_hms_catalog_iceberg_nereids"
String iceberg_catalog_name = "test_external_iceberg_catalog_nereids"

sql """drop catalog if exists ${hms_catalog_name};"""
sql """
create catalog if not exists ${hms_catalog_name} properties (
'type'='hms',
'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
);
"""

sql """drop catalog if exists ${iceberg_catalog_name};"""
sql """
create catalog if not exists ${iceberg_catalog_name} properties (
'type'='iceberg',
'iceberg.catalog.type'='hms',
'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
);
"""
sql """set enable_nereids_planner=true;"""
sql """set enable_fallback_to_original_planner=false;"""

sql """switch ${hms_catalog_name};"""
// test parquet format format
def q01 = {
qt_q01 """ select count(1) as c from customer_small """
qt_q02 """ select c_custkey from customer_small group by c_custkey order by c_custkey limit 4 """
qt_q03 """ select count(1) from orders_small """
qt_q04 """ select count(1) from customer_small where c_name = 'Customer#000000005' or c_name = 'Customer#000000006' """
qt_q05 """ select * from customer_small order by c_custkey limit 3 """
qt_q06 """ select o_orderkey from orders_small where o_orderkey > 652566 order by o_orderkey limit 3 """
qt_q07 """ select o_totalprice from orders_small where o_custkey < 3357 order by o_custkey limit 3 """
qt_q08 """ select count(1) as c from customer """
}
// test time travel stmt
def q02 = {
qt_q09 """ select c_custkey from customer for time as of '2022-12-27 10:21:36' order by c_custkey limit 3 """
qt_q10 """ select c_custkey from customer for time as of '2022-12-28 10:21:36' order by c_custkey desc limit 3 """
qt_q11 """ select c_custkey from customer for version as of 906874575350293177 order by c_custkey limit 3 """
qt_q12 """ select c_custkey from customer for version as of 6352416983354893547 order by c_custkey desc limit 3 """
}
// in predicate
def q03 = {
qt_q13 """ select c_custkey from customer_small where c_custkey in (1, 2, 4, 7) order by c_custkey """
qt_q14 """ select c_name from customer_small where c_name in ('Customer#000000004', 'Customer#000000007') order by c_custkey """
}

// test for 'FOR TIME AS OF' and 'FOR VERSION AS OF'
def q04 = {
qt_q15 """ select count(*) from ${hms_catalog_name}.tpch_1000_icebergv2.customer_small FOR TIME AS OF '2022-12-22 02:29:30' """
qt_q16 """ select count(*) from ${hms_catalog_name}.tpch_1000_icebergv2.customer_small FOR VERSION AS OF 6113938156088124425 """
qt_q17 """ select count(*) from ${iceberg_catalog_name}.tpch_1000_icebergv2.customer_small FOR TIME AS OF '2022-12-22 02:29:30' """
qt_q18 """ select count(*) from ${iceberg_catalog_name}.tpch_1000_icebergv2.customer_small FOR VERSION AS OF 6113938156088124425 """
}

sql """ use `tpch_1000_icebergv2`; """
q01()
q02()
q03()
q04()
}
}